tinkit 0.0.0
Sign up to get free protection for your applications and to get access to all the features.
- data/LICENSE +176 -0
- data/README +11 -0
- data/Rakefile +75 -0
- data/lib/glue_envs/couchrest/couchrest_attachment_handler.rb +260 -0
- data/lib/glue_envs/couchrest/couchrest_files_mgr.rb +198 -0
- data/lib/glue_envs/couchrest_glue_env.rb +536 -0
- data/lib/glue_envs/files_mgr_base.rb +51 -0
- data/lib/glue_envs/filesystem/filesystem_files_mgr.rb +187 -0
- data/lib/glue_envs/filesystem_glue_env.rb +395 -0
- data/lib/glue_envs/mysql/mysql_files_mgr.rb +175 -0
- data/lib/glue_envs/mysql_glue_env.rb +428 -0
- data/lib/glue_envs/sdb_s3/sdb_s3_files_mgr.rb +314 -0
- data/lib/glue_envs/sdb_s3_glue_env.rb +248 -0
- data/lib/helpers/camel.rb +21 -0
- data/lib/helpers/filesystem_helpers.rb +27 -0
- data/lib/helpers/hash_helpers.rb +74 -0
- data/lib/helpers/log_helper.rb +34 -0
- data/lib/helpers/mime_types_new.rb +126 -0
- data/lib/helpers/old_more_open_struct.rb +28 -0
- data/lib/helpers/require_helper.rb +45 -0
- data/lib/helpers/tk_escape.rb +17 -0
- data/lib/midas/bufs_data_structure.rb +84 -0
- data/lib/midas/node_element_operations.rb +264 -0
- data/lib/tinkit.rb +38 -0
- data/lib/tinkit_base_node.rb +733 -0
- data/lib/tinkit_node_factory.rb +47 -0
- data/spec/couchrest_files_mgr_spec.rb +551 -0
- data/spec/couchrest_glue_spec.rb +246 -0
- data/spec/filesystem_files_mgr_spec.rb +236 -0
- data/spec/filesystem_glue_spec.rb +243 -0
- data/spec/filesystem_helpers_spec.rb +42 -0
- data/spec/helpers/bufs_node_builder.rb +17 -0
- data/spec/helpers/bufs_sample_dataset.rb +160 -0
- data/spec/helpers/bufs_test_environments.rb +81 -0
- data/spec/helpers/tmp_view_cleaner.rb +15 -0
- data/spec/lib_helpers/tk_escape_spec.rb +45 -0
- data/spec/mysql_files_mgr_spec.rb +250 -0
- data/spec/mysql_glue_spec.rb +214 -0
- data/spec/node_element_operations_spec.rb +392 -0
- data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec1.rb +82 -0
- data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec2.rb +68 -0
- data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec3.rb +80 -0
- data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec4.rb +110 -0
- data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec5.rb +84 -0
- data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec6.rb +83 -0
- data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec7.rb +101 -0
- data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec8.rb +92 -0
- data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec_all.rb +266 -0
- data/spec/sdb_s3_glue_spec.rb +230 -0
- data/spec/tinkit_node_factory_spec.rb +1108 -0
- metadata +114 -0
@@ -0,0 +1,51 @@
|
|
1
|
+
#This is more of a guideline than an abstract class
#that must be inherited from.

#module <PersistentLayer>Interface
#persistent layer helper classes/modules can go here

# Abstract interface for a persistence-layer files manager.
# Concrete backends (filesystem, couchrest, mysql, sdb/s3) implement
# this same set of methods; every method here raises NotImplementedError
# so a missing override fails loudly rather than silently misbehaving.
class FilesMgrBase
  #persistent layer class initialization code
  #persistent layer accessors

  # glue_env: the backend's GlueEnv; node_key_value: the node's key value.
  def initialize(glue_env, node_key_value)
    #persistent layer object creation
    raise NotImplementedError
  end

  # Attach the given file datas to the node.
  def add(node, file_datas)
    raise NotImplementedError
  end

  # Attach raw bytes under attach_name with the given content type.
  def add_raw_data(node, attach_name, content_type, raw_data, file_modified_at = nil)
    raise NotImplementedError
  end

  # Metadata (modified time, content type, ...) for the node's attachments.
  def get_attachments_metadata(node)
    raise NotImplementedError
  end

  # Raw bytes of a single attachment.
  def get_raw_data(node, file_basename)
    raise NotImplementedError
  end

  # Names of the node's attachments.
  def list(node)
    raise NotImplementedError
  end

  # Remove attachments by basename.
  def subtract(node, file_basenames)
    raise NotImplementedError
  end

  def subtract_all(node) #can be called from subtract using :all instead of a list of file_basenames
    raise NotImplementedError
  end

  def subtract_some(node, file_basenames)
    raise NotImplementedError
  end

  #other persistent layer methods needed
end
#end of module
|
@@ -0,0 +1,187 @@
|
|
1
|
+
require 'time'
|
2
|
+
require 'json'
|
3
|
+
|
4
|
+
#Tinkit directory organization defined in lib/helpers/require_helper
|
5
|
+
require Tinkit.helpers 'mime_types_new'
|
6
|
+
require Tinkit.helpers 'log_helper'
|
7
|
+
require Tinkit.helpers 'tk_escape' #TODO: move to helpers
|
8
|
+
|
9
|
+
#TODO: Move this into a MonkeyPatch named module (called by file glue)
# Tinkit.monkey_patch punching_dir or something
class Dir #monkey patch (duck punching?)
  # Directory entries Tinkit treats as real content: drops '.', '..',
  # hidden (dot-prefixed) entries and a small ignore list.
  # Returns nil when +dir+ does not exist.
  def self.working_entries(dir = Dir.pwd)
    ignore_list = ['thumbs.db', 'all_child_files']
    # File.exists? was removed in Ruby 3.2; File.exist? is the supported form
    return nil unless File.exist?(dir)
    wkg_entries = Dir.entries(dir)
    wkg_entries.delete_if { |x| x[0] == '.' }
    wkg_entries.delete_if { |x| ignore_list.include?(x.downcase) }
    wkg_entries
  end

  #TODO: this duplicates working_entries is it needed?
  # Like working_entries, but also drops Tinkit's own metadata files.
  # Returns nil when +dir+ does not exist (previously raised NoMethodError).
  def self.file_data_entries(dir = Dir.pwd)
    ignore_list = ['parent_categories.txt', 'description.txt']
    wkg_entries = Dir.working_entries(dir)
    return nil unless wkg_entries
    wkg_entries.delete_if { |x| ignore_list.include?(x.downcase) }
  end
end
|
33
|
+
|
34
|
+
# Filesystem-backed implementation of the files-manager interface
# (see FilesMgrBase). Attachments for a node are stored as plain files
# inside a directory derived from the node's key value.
module FilesystemInterface
  class FilesMgr
    #Set Logger
    @@log = TinkitLog.set(self.name)

    attr_accessor :attachment_location, :attachment_packages

    # Full (escaped) paths of all attachment files for +node+.
    # NOTE(review): relies on node._user_data / node.my_GlueEnv — confirm
    # against callers; the implicit return is the mapped filename array.
    def self.get_att_doc(node)
      root_path = node.my_GlueEnv.user_datastore_location
      #my_cat dependency
      node_loc = node._user_data[node.my_GlueEnv.node_key]
      node_path = File.join(root_path, node_loc)
      model_basenames = Dir.working_entries(node_path)
      model_basenames.map { |b| File.join(node_path, TkEscape.escape(b)) }
    end

    # node_env must respond to #user_datastore_location.
    # For bufs nodes, node_key is the value of :my_category.
    def initialize(node_env, node_key)
      @node_key = node_key
      @attachment_location = File.join(node_env.user_datastore_location, node_key)
    end

    #TODO: Is passing node in methods duplicative now that the moab FileMgr is bound to an env at initialization?

    # Copies the source files named in +file_datas+ (hashes with
    # :src_filename) into the node's attachment directory.
    # Returns the escaped basenames of the files added.
    def add(node, file_datas)
      #TODO: Validate file data before saving
      filenames = file_datas.map { |file_data| file_data[:src_filename] }
      filenames.each do |filename|
        my_dest_basename = TkEscape.escape(File.basename(filename))
        node_dir = @attachment_location
        my_dest = File.join(node_dir, my_dest_basename)
        #skip the copy when source and destination are the same file
        same_file = filename if filename == my_dest
        @@log.debug {"File model attachments:"} if @@log.debug?
        # was "Copy #(unknown) to ..." — garbled interpolation, restored to the source filename
        @@log.debug { "Copy #{filename} to #{my_dest} if #{same_file.nil?}" } if @@log.debug?
        #was breaking if the dest path didn't exist
        FileUtils.mkdir_p(File.dirname(my_dest)) unless File.exist?(File.dirname(my_dest))
        FileUtils.cp(filename, my_dest, :preserve => true, :verbose => false) unless same_file
      end
      filenames.map { |f| TkEscape.escape(File.basename(f)) } #return basenames
    end

    # Writes +raw_data+ as an attachment named +attach_name+.
    # content_type is currently not persisted anywhere by this backend;
    # the file's mtime is set from file_modified_at when given.
    # Returns the escaped attachment name in a one-element array.
    def add_raw_data(node, attach_name, content_type, raw_data, file_modified_at = nil)
      raise "No Data provided for file" unless raw_data
      esc_attach_name = TkEscape.escape(attach_name)
      node_path = @attachment_location
      FileUtils.mkdir_p(node_path) unless File.exist?(node_path)
      raw_data_filename = File.join(node_path, esc_attach_name)
      File.open(raw_data_filename, 'wb') { |f| f.write(raw_data) }
      if file_modified_at
        #back-date the file so metadata reflects the caller-supplied time
        File.utime(Time.parse(file_modified_at), Time.parse(file_modified_at), raw_data_filename)
      end
      [esc_attach_name]
    end

    #TODO Document the :all shortcut somewhere
    # Remove attachments; pass :all to remove everything.
    def subtract(node, model_basenames)
      if model_basenames == :all
        subtract_all(node)
      else
        subtract_some(node, model_basenames)
      end
    end

    #TODO: make private
    # Removes the named attachments (missing files are ignored via rm_f).
    def subtract_some(node, file_basenames)
      file_basenames = [file_basenames].flatten
      node_path = @attachment_location
      filenames = file_basenames.map { |b| File.join(node_path, TkEscape.escape(b)) }
      FileUtils.rm_f(filenames)
    end

    #TODO: make private
    # Removes every attachment in the node's directory.
    def subtract_all(node)
      node_path = @attachment_location
      attached_entries = Dir.working_entries(node_path)
      attached_filenames = attached_entries.map { |e| File.join(node_path, e) }
      FileUtils.rm(attached_filenames)
    end

    # Raw bytes of a single attachment, or nil when it doesn't exist.
    def get_raw_data(node, model_basename)
      node_dir = @attachment_location
      filename = File.join(node_dir, model_basename)
      return nil unless File.exist?(filename)
      File.open(filename, "r") { |f| f.read }
    end

    # { attachment_name(Symbol) => {:file_modified =>, :content_type =>} }
    # for every attachment in the node's directory.
    def get_attachments_metadata(node)
      att_md = {}
      node_dir = @attachment_location
      att_basenames = Dir.working_entries(node_dir)
      att_basenames.each do |att|
        file_md = {}
        filename = File.join(node_dir, att)
        file_md[:file_modified] = File.mtime(filename).to_s
        file_md[:content_type] = MimeNew.for_ofc_x(filename)
        att_md[att.to_sym] = file_md
      end
      att_md
    end

    #Not used and I don't think it will work anyway
    def list(node)
      Dir.working_entries(@attachment_location)
    end
  end
end
|
@@ -0,0 +1,395 @@
|
|
1
|
+
#Tinkit directory organization defined in lib/helpers/require_helper.rb
|
2
|
+
require Tinkit.midas 'bufs_data_structure'
|
3
|
+
require Tinkit.glue 'filesystem/filesystem_files_mgr'
|
4
|
+
require Tinkit.helpers 'hash_helpers'
|
5
|
+
|
6
|
+
#class ViewsMgr
# View ("query") helpers for the filesystem persistence model.
# Used by FilesystemEnv::GlueEnv as its @views object.
module TinkitFileSystemViews
  #Set Logger
  @@log = TinkitLog.set(self.name, :warn)

  #Dependency on TinkitInfoDocEnvMethods
  attr_accessor :model_actor

  #def initialize(model_actor=nil)
  #  @model_actor = model_actor #provides the model actor that can provide views
  #  @data_file = model_actor[:data_file]
  #end

  #TODO create an index to speed queries? sync issues?
  # Loads node data for each category in +match_keys+ whose directory
  # exists under +user_datastore_location+.
  # Returns an array of parsed node-data hashes (string keys), or nil
  # when no category directory matched — array form kept for
  # compatibility with other search and node types.
  def self.by_my_category(moab_data, user_datastore_location, match_keys)
    data_file = moab_data[:moab_datastore_name]
    match_keys = [match_keys].flatten
    my_dir = user_datastore_location
    bfss = nil
    match_keys.each do |match_key|
      my_cat_dir = match_key
      wkg_dir = File.join(my_dir, my_cat_dir)
      # File.exists? was removed in Ruby 3.2
      next unless File.exist?(wkg_dir)
      bfss ||= []
      data_file_path = File.join(wkg_dir, data_file)
      node_data = JSON.parse(File.open(data_file_path) { |f| f.read })
      bfss << node_data
    end
    return bfss
  end

  # Scans every node directory and returns the node-data hashes
  # (symbolized keys) whose :parent_categories includes any of
  # +match_keys+.
  def self.by_parent_categories(moab_data, user_datastore_location, match_keys)
    data_file = moab_data[:moab_datastore_name]
    match_keys = [match_keys].flatten
    #all_nodes = all collection method when all is moved into here
    matching_node_data = []
    all_wkg_entries = Dir.working_entries(user_datastore_location)
    all_wkg_entries.each do |entry|
      wkg_dir = File.join(user_datastore_location, entry)
      next unless File.exist?(wkg_dir)
      data_file_path = File.join(wkg_dir, data_file)
      json_data = JSON.parse(File.open(data_file_path) { |f| f.read })
      node_data = HashKeys.str_to_sym(json_data)
      match_keys.each do |k|
        pc = node_data[:parent_categories]
        if pc && pc.include?(k)
          matching_node_data << node_data
          break #we don't need to loop through each parent cat, if one already matches
        end
      end
    end
    #we now have all matching data
    return matching_node_data
  end
end
|
73
|
+
|
74
|
+
# Mixin view helpers for GlueEnv. call_view is a generic field-equality
# query over all node records; by_my_category mirrors
# TinkitFileSystemViews.by_my_category.
module FilesystemViews
  # Returns every record (symbolized keys) whose +field_name+ value
  # equals +match_key+. namespace_key and view_name are accepted for
  # interface compatibility but unused here.
  def call_view(field_name, moab_data, namespace_key, user_datastore_location, match_key, view_name = nil)
    data_file = moab_data[:moab_datastore_name]
    matching_records = []
    all_file_records = Dir.working_entries(user_datastore_location)
    all_file_records.each do |file_record|
      record_path = File.join(user_datastore_location, file_record)
      # File.exists? was removed in Ruby 3.2
      next unless File.exist?(record_path)
      data_file_path = File.join(record_path, data_file)
      json_data = JSON.parse(File.open(data_file_path) { |f| f.read })
      record = HashKeys.str_to_sym(json_data)
      field_data = record[field_name]
      matching_records << record if field_data == match_key
    end
    matching_records
  end

  # Loads node data for each category in +match_keys+ whose directory
  # exists. Returns an array of parsed node-data hashes, or nil when
  # nothing matched — array form kept for compatibility with other
  # search and node types.
  def self.by_my_category(moab_data, user_datastore_location, match_keys)
    data_file = moab_data[:moab_datastore_name]
    match_keys = [match_keys].flatten
    my_dir = user_datastore_location
    bfss = nil
    match_keys.each do |match_key|
      my_cat_dir = match_key
      wkg_dir = File.join(my_dir, my_cat_dir)
      next unless File.exist?(wkg_dir)
      bfss ||= []
      data_file_path = File.join(wkg_dir, data_file)
      node_data = JSON.parse(File.open(data_file_path) { |f| f.read })
      bfss << node_data
    end
    return bfss
  end
end
|
121
|
+
|
122
|
+
|
123
|
+
module FilesystemEnv
  #EnvName = :filesystem_env
  BADCHARS = /:/ #there's a lot more

  # Generic persistence-layer interface mapping Tinkit operations onto a
  # plain filesystem: each node is a directory under the user's datastore
  # location, with its data serialized as JSON in MoabDatastoreName.
  class GlueEnv
    #Set Logger
    @@log = TinkitLog.set(self.name, :warn)

    include FilesystemViews

    PersistLayerKey = :node_path # is the full path including node_key transform
    #see mysql_glue_env to decouple persistent layer key from node key
    VersionKey = :_rev #to have timestamp
    NamespaceKey = :files_namespace

    MoabDataStoreDir = ".model"
    MoabDatastoreName = ".node_data.json"

    #TODO: Rather than using File class directly, should a special class be used?
    attr_accessor :user_id,
                  :user_datastore_location,
                  :metadata_keys,
                  :required_instance_keys,
                  :required_save_keys,
                  :node_key,
                  :model_key,
                  :version_key,
                  :namespace_key,
                  :_files_mgr_class,
                  :views,
                  :model_save_params,
                  :moab_data,
                  :persist_layer_key,
                  #accessors specific to this persitence model
                  :moab_datastore_name

    # persist_env: {:env => {:path =>, :user_id =>}, :name => cluster name}
    # data_model_bindings: {:key_fields =>, :views =>} from NodeElementOperations
    def initialize(persist_env, data_model_bindings)
      #TODO: determine if class_name is needed to segment cluster data within user data
      filesystem_env = persist_env[:env]
      fs_path = filesystem_env[:path]
      @user_id = filesystem_env[:user_id]
      @cluster_name = persist_env[:name]

      #data_model_bindings from NodeElementOperations
      key_fields = data_model_bindings[:key_fields]
      initial_views_data = data_model_bindings[:views]

      @required_instance_keys = key_fields[:required_keys]
      @required_save_keys = key_fields[:required_keys]
      @node_key = key_fields[:primary_key]

      @moab_datastore_name = MoabDatastoreName
      @version_key = VersionKey
      @model_key = @node_key
      #TODO: See about not making the filesystem dependent upon the node key
      @persist_layer_key = PersistLayerKey
      @namespace_key = NamespaceKey
      @metadata_keys = [@persist_layer_key, @version_key, @namespace_key]
      @user_datastore_location = File.join(fs_path, @user_id, MoabDataStoreDir)

      @model_save_params = {:nodes_save_path => @user_datastore_location, :data_file => @moab_datastore_name, :node_key => @node_key}
      @_files_mgr_class = FilesystemInterface::FilesMgr
      @views = TinkitFileSystemViews
      @moab_data = {:moab_datastore_name => @moab_datastore_name}

      FileUtils.mkdir_p(fs_path) unless File.exist?(fs_path)
    end

    # All node records in the datastore (symbolized keys).
    # Entries whose data file has gone missing contribute an empty Hash.
    def query_all #TODO move to ViewsMgr
      unless File.exist?(@user_datastore_location)
        @@log.debug {"Warning: Can't query records. The File System Directory to work from does not exist: #{@user_datastore_location}"} if @@log.debug?
      end
      all_records = []
      my_dir = @user_datastore_location + '/' #TODO: Can this be removed?
      all_entries = Dir.working_entries(my_dir) || []
      @@log.debug "querying directory: #{my_dir.inspect} and got entries: #{all_entries.inspect}"
      all_entries.each do |entry|
        # parens required: `a << get(e) || {}` binds as `(a << get(e)) || {}`
        # and would push nil for a missing record
        all_records << (get(entry) || {})
      end
      @@log.debug {"query_all returning: #{all_records.inspect}"} if @@log.debug?
      return all_records
    end

    #TODO: reconcile raw_all with query_all, this is the only glue env using raw_all
    def raw_all
      query_all
    end

    #current relations supported:
    # - :equals (data in the key field matches this_value)
    # - :contains (this_value is contained in the key field data (same as equals for non-enumerable types)
    def find_nodes_where(key, relation, this_value)
      res = case relation
            when :equals
              find_equals(key, this_value)
            when :contains
              find_contains(key, this_value)
            end
      return res
    end

    # Records whose +key+ field equals +this_value+.
    def find_equals(key, this_value)
      results = []
      query_all.each do |record|
        test_val = record[key]
        results << record if test_val == this_value
      end
      results
    end

    # Records whose +key+ field contains (or equals, for scalars) +this_value+.
    def find_contains(key, this_value)
      results = []
      query_all.each do |record|
        test_val = record[key]
        results << record if find_contains_type_helper(test_val, this_value)
      end
      results
    end

    # include? when the stored value supports it, == otherwise.
    def find_contains_type_helper(stored_data, this_value)
      if stored_data.respond_to?(:"include?")
        stored_data.include?(this_value)
      else
        stored_data == this_value
      end
    end

    # Fetch a node's data by id; +id+ may be the node key value or the
    # full node path. Returns a symbolized Hash, nil for a nil id, or an
    # empty Array when the data file is missing.
    # NOTE(review): the [] miss value (not nil/Hash) is historical —
    # callers may depend on it, so it is preserved.
    def get(id)
      return nil unless id
      #maybe put in some validations to ensure its from the proper collection namespace?
      id_path = if id.include?(@user_datastore_location)
                  id
                else
                  convert_node_value_to_file_value(id)
                end
      data_file_path = File.join(id_path, @moab_datastore_name)
      if File.exist?(data_file_path)
        json_data = File.open(data_file_path, 'r') { |f| f.read }
        node_data = JSON.parse(json_data)
        HashKeys.str_to_sym(node_data)
      else
        puts "Warning: File path doesn't exist: #{data_file_path.inspect}"
        []
      end
    end

    # Persists +new_data+ (keyed by the persist-layer key or the node
    # key) as JSON; stamps a new version. Returns the saved string-keyed
    # data with both @version_key and 'rev' set.
    def save(new_data)
      save_data = nil
      if new_data[@persist_layer_key]
        save_data = new_data
      elsif new_data[@node_key]
        persist_key_value = convert_node_value_to_file_value(new_data[@node_key])
        save_data = new_data.merge({@persist_layer_key => persist_key_value})
      else
        raise "Save Data did not include any keys for saving, data: #{new_data.inspect}"
      end
      save_location = save_data[@persist_layer_key]
      @@log.debug {"Save Directory: #{save_location.inspect}"} if @@log.debug?
      file_name = File.basename(@moab_datastore_name)
      file_location = File.join(save_location, file_name)
      @@log.info {"File Location: #{file_location.inspect}"} if @@log.info?
      model_data = HashKeys.sym_to_str(save_data)

      FileUtils.mkdir_p(save_location) unless File.exist?(save_location)
      rev = Time.now.hash #<- I would use File.mtime, but how to get the mod time before saving?
      model_data[@version_key] = rev
      #block form closes the file even if the write raises
      File.open(file_location, 'w') { |f| f.write(model_data.to_json) }
      model_data['rev'] = model_data[@version_key] #TODO <-Investigate to see if it could be consistent
      return model_data
    end

    # Deletes the node's directory (attachments included).
    def destroy_node(model_metadata)
      node_id = model_metadata[@persist_layer_key] || generate_model_key(nil, model_metadata[@node_key])
      node_dir = node_id
      FileUtils.rm_rf(node_dir)
    end

    #namespace is used to distinguish between unique
    #data sets (i.e., users) within the model
    def generate_model_key(namespace, node_key_value)
      #TODO: Make sure namespace is portable across model migrations
      #FIXME namespace is redundant so removed it
      convert_node_value_to_file_value(node_key_value)
    end

    def model_path(model_key_value)
      #model_key_value.gsub("::","/")
      # NOTE(review): intentionally a no-op (returns nil) in this backend
    end

    # Deletes the directories for each record in the list; nil-safe.
    def destroy_bulk(list_of_native_records)
      @@log.info {"Bulk Destroy: #{list_of_native_records.inspect}"} if @@log.info?
      return [] unless list_of_native_records
      list_of_native_records.each do |recs|
        next unless (recs && recs.size > 0)
        rec_id = recs[@persist_layer_key] || generate_model_key(nil, recs[@node_key])
        FileUtils.rm_rf(rec_id)
      end
    end

    # Maps a node key value to its on-disk path: "::" is not
    # filesystem-safe, so it becomes "_", then the result is rooted
    # under the user datastore location.
    def convert_node_value_to_file_value(node_key_value)
      file_base_value = node_key_value.gsub("::", "_")
      File.join(@user_datastore_location, file_base_value)
    end
  end
end
|