tinkit 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/LICENSE +176 -0
- data/README +11 -0
- data/Rakefile +75 -0
- data/lib/glue_envs/couchrest/couchrest_attachment_handler.rb +260 -0
- data/lib/glue_envs/couchrest/couchrest_files_mgr.rb +198 -0
- data/lib/glue_envs/couchrest_glue_env.rb +536 -0
- data/lib/glue_envs/files_mgr_base.rb +51 -0
- data/lib/glue_envs/filesystem/filesystem_files_mgr.rb +187 -0
- data/lib/glue_envs/filesystem_glue_env.rb +395 -0
- data/lib/glue_envs/mysql/mysql_files_mgr.rb +175 -0
- data/lib/glue_envs/mysql_glue_env.rb +428 -0
- data/lib/glue_envs/sdb_s3/sdb_s3_files_mgr.rb +314 -0
- data/lib/glue_envs/sdb_s3_glue_env.rb +248 -0
- data/lib/helpers/camel.rb +21 -0
- data/lib/helpers/filesystem_helpers.rb +27 -0
- data/lib/helpers/hash_helpers.rb +74 -0
- data/lib/helpers/log_helper.rb +34 -0
- data/lib/helpers/mime_types_new.rb +126 -0
- data/lib/helpers/old_more_open_struct.rb +28 -0
- data/lib/helpers/require_helper.rb +45 -0
- data/lib/helpers/tk_escape.rb +17 -0
- data/lib/midas/bufs_data_structure.rb +84 -0
- data/lib/midas/node_element_operations.rb +264 -0
- data/lib/tinkit.rb +38 -0
- data/lib/tinkit_base_node.rb +733 -0
- data/lib/tinkit_node_factory.rb +47 -0
- data/spec/couchrest_files_mgr_spec.rb +551 -0
- data/spec/couchrest_glue_spec.rb +246 -0
- data/spec/filesystem_files_mgr_spec.rb +236 -0
- data/spec/filesystem_glue_spec.rb +243 -0
- data/spec/filesystem_helpers_spec.rb +42 -0
- data/spec/helpers/bufs_node_builder.rb +17 -0
- data/spec/helpers/bufs_sample_dataset.rb +160 -0
- data/spec/helpers/bufs_test_environments.rb +81 -0
- data/spec/helpers/tmp_view_cleaner.rb +15 -0
- data/spec/lib_helpers/tk_escape_spec.rb +45 -0
- data/spec/mysql_files_mgr_spec.rb +250 -0
- data/spec/mysql_glue_spec.rb +214 -0
- data/spec/node_element_operations_spec.rb +392 -0
- data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec1.rb +82 -0
- data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec2.rb +68 -0
- data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec3.rb +80 -0
- data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec4.rb +110 -0
- data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec5.rb +84 -0
- data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec6.rb +83 -0
- data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec7.rb +101 -0
- data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec8.rb +92 -0
- data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec_all.rb +266 -0
- data/spec/sdb_s3_glue_spec.rb +230 -0
- data/spec/tinkit_node_factory_spec.rb +1108 -0
- metadata +114 -0
data/lib/midas/node_element_operations.rb
ADDED
@@ -0,0 +1,264 @@
+#require helper for cleaner require statements
+require File.join(File.dirname(__FILE__), '../helpers/require_helper')
+
+require Tinkit.helpers 'log_helper'
+
+
+#TODO This should be a class and instance assigned to a node class
+# otherwise different node classes will clobber each other
+
+#rename module to DefaultOpSets
+module DefaultOpSets
+
+  class << self; attr_accessor :op_sets_to_def_table end
+  #definitions WIP
+  #op_name => type of operations (add, subtract, get, etc)
+  #op_def => anonymous function that defines a particular operation's behavior in a particular context
+  #  addition in the context of lists for example
+  #op_set => the set of all operations that belong with a certain context
+  #  the set of operations that can work with lists for example
+  #op_behav => types of context, for example static (unchanging), replacing, appending, merging, etc
+  #fields => the key portion of a key-value persisted record
+  #field value => the value portion of a key-value persisted record
+  #key field => the primary identifier for a key-value record (akin to a primary key)
+  #field_op_set => the assignment of context to a field, and associated op_behav's belonging
+  #  to that context
+  #record_op_set => the collection of field operation assignments for an entire record
+
+  #Building Field Operation Definitions
+  #General:
+  #A Proc that accepts two input parameters, the first (this) is the current value assigned to the field,
+  #the second (other) is the value to be used by the operation.
+  #A Hash is returned with the following parameters
+  # :update_this => <Update the current field to this value> (mandatory)
+  # :return_value => <return this value from the operation> (optional, if not included the value of "update_this" is returned)
+
+  #Static Operations are for fixed values (i.e., any attempts at changes are ignored)
+  StaticAddOpDef = lambda{|this, other| Hash[:update_this => this] }
+  StaticSubtractOpDef = lambda{|this, other| Hash[:update_this => this]}
+
+  StaticOpSet = {:add => StaticAddOpDef, :subtract => StaticSubtractOpDef}
+
+
+  #We define a field where adding will replace the existing value for that field, and subtracting a matching value will set the value to nil
+  ReplaceAddOpDef = lambda { |this, other| Hash[:update_this => other] }
+  ReplaceSubtractOpDef = lambda do |this, other|
+    if (this == other)
+      Hash[:update_this => nil]
+    else
+      Hash[:update_this => this]
+    end
+  end
+
+  ReplaceOpSet = {:add => ReplaceAddOpDef, :subtract => ReplaceSubtractOpDef}
+
+  #We define a field where adding will add the value to the existing list, and subtracting will remove matching values from the list
+  ListAddOpDef = lambda do |this, other|
+    this = [this].flatten
+    other = [other].flatten
+    this = this + other
+    this.uniq!; this.compact!
+    Hash[:update_this => this]
+  end
+
+  ListSubtractOpDef = lambda do |this, other|
+    this = [this].flatten
+    other = [other].flatten
+    this -= other
+    this.uniq!
+    this.compact!
+    Hash[:update_this => this]
+  end
+
+  ListOpSet = {:add => ListAddOpDef, :subtract => ListSubtractOpDef}
+
+  #A bit more complicated is if we have a field that holds key-value pairs, but we want our operations
+  #to operate on the underlying values of the key-value pair, and not on the actual key value sets.
+  #Here the values are a list type. What happens is if an existing key is passed, the value is added to the
+  #set of values for the existing key. If a new key is passed, the new key and its value are added to the list
+  KListAddOpDef = lambda do |this, other|
+    this = this || {}
+    other = other || {}
+    all_keys = this.keys + other.keys
+    combined = {}
+    all_keys.each do |k|
+      this_list = [this[k]].flatten
+      other_list = [other[k]].flatten
+      combined[k] = (this_list + other_list).flatten
+      #if this[k]
+      #  this[k] = [this[k] ].flatten + [ other[k] ].flatten
+      #else
+      #  this[k] = [ other[k] ].flatten
+      #end
+      combined[k].uniq!
+      combined[k].compact!
+    end
+    Hash[:update_this => combined]
+  end
+
+  KListSubtractOpDef = lambda do |this, other|
+    this = this || {}
+    other = other || {}
+    subtracted_list = {}
+    this.keys.each do |k|
+      this_list = [this[k]].flatten
+      other_list = [other[k]].flatten
+      #other[s].each {|olnk| this[k].delete(olnk) if this[k]}
+      #this[k].delete(other[k]) if this[k]
+      subtracted_list[k] = (this_list - other_list).flatten
+      subtracted_list[k].compact!
+      subtracted_list[k].uniq!
+      #this.delete(k) if (this[k].nil? || this[k].empty?)
+    end
+    Hash[:update_this => subtracted_list]
+  end
+
+  # With the KVP, we might want the keys that contain a given value
+  #note that in this case, the return value is not the same as the value stored in the field, hence the explicit return_value parameter
+  #Something to think about is whether this should be some type of recursive operation since the record is key-value, and the field is key-value
+  KListGetKeyforValueOpDef = lambda do |this, values|
+    values = [values].flatten
+    this = this || {}
+    keys = []
+    this.each do |k, v|
+      values.each do |value|
+        keys << k if v.include? value
+      end
+    end
+    rtn_val = if keys.size > 0
+      {:return_value => keys, :update_this => this}
+    else
+      {:return_value => nil, :update_this => this}
+    end
+    rtn_val
+  end
+
+  KListOpSet = {:add => KListAddOpDef,
+                :subtract => KListSubtractOpDef,
+                :getkeys => KListGetKeyforValueOpDef}
+
+
+  self.op_sets_to_def_table = { :static_ops => StaticOpSet,
+                                :replace_ops => ReplaceOpSet,
+                                :list_ops => ListOpSet,
+                                :key_list_ops => KListOpSet
+                              }
+
+  #default_config = {:id => StaticFieldOps, :label => ReplaceFieldOps, :tags => ListFieldOps, :kvps=> KVListOps}
+
+  #default_config = {:id => StaticFieldOps}
+  #self.configuration = default_config
+
+  #the keys represent the data type, the values represent the operations to perform on those datatypes
+  #Ops = {:id => StaticFieldOps, :label => ReplaceFieldOps, :tags => ListFieldOps, :kvps=> KVListOps}
+  #Ops = NodeElementOperations.configuration
+
+  #attr_accessor :ops
+  #def self.ops
+  #  NodeElementOperations.configuration
+  #end
+
+end
+
+module DataModelViews
+
+  OpIdToViewType = {
+    :static_ops => :value_match,
+    :replace_ops => :value_match,
+    :list_ops => :included_match,
+    :key_list_ops => :key_of_included_match
+  }
+  #note: currently "get" is defined as part of the node, and returns the unique record for a given key
+  #keep there or move here?
+
+  #views return a list of matches (which may be empty)
+
+  def default_views(field_op_set)
+    views = {}
+    field_op_set.each do |field, op_id|
+      view_name = "by_#{field.to_s}"
+      type_of_view = OpIdToViewType[op_id] || :value_match
+      views[view_name] = {:field => field.to_sym, :type_of_view => type_of_view}
+    end
+    views
+  end
+end
+
+class NodeElementOperations
+  include DataModelViews
+  #Set Logger
+  @@log = TinkitLog.set(self.name, :warn)
+
+  DefaultFieldOpSet = {:id => :static_ops,
+                       :data => :replace_ops,
+                       :name => :replace_ops, #convenience field for a node name
+                       :tags => :list_ops} #convenience field for a list of tags
+                       #:kvlist => :key_list_ops} #convenience field for a list of lists
+
+  #Default works for node element operations, but not glue operations
+  DefaultKeyFields = { :required_keys => [:id], :primary_key => :id}
+
+  attr_accessor :field_op_defs,
+                :field_op_set_sym, #used in model for views
+                :required_instance_keys,
+                :required_save_keys,
+                :node_key,
+                :key_fields,
+                :views
+
+  #With no parameters - Defaults are used
+  #:op_sets_mod => The module with the data operations that apply to the data fields
+  #:field_op_set => The assignment of data fields to the data operations
+  def initialize(op_data = {})
+    @@log.debug {"Node Element Initialized with: #{op_data.inspect}"} if @@log.debug?
+
+    #set the module with the operation definitions and include them
+    @ops_set_module = op_data[:op_sets_mod] || DefaultOpSets
+    self.class.__send__(:include, @ops_set_module) #why is this private? am I doing something wrong?
+
+    #set the mapping between fields and the type of operations supported by those fields
+    @field_op_set_sym = DefaultFieldOpSet.merge(op_data[:field_op_set] || {})
+    @@log.info {"Field Operations Set: #{@field_op_set_sym.inspect}"} if @@log.info?
+    @field_op_defs = get_field_op_procs(@field_op_set_sym)
+
+    #set the key fields that will work as node/record identifiers or other key fields
+    @key_fields = op_data[:key_fields] || DefaultKeyFields
+    raise "key_fields are required" unless @key_fields
+
+    #we are no longer differentiating between keys required for instantiation and persistence
+    #this can be added in the future easily though.
+    @required_instance_keys = @key_fields[:required_keys]
+    @required_save_keys = @key_fields[:required_keys]
+    @node_key = @key_fields[:primary_key]
+    @views = default_views(@field_op_set_sym) #TODO: Allow custom views in the future
+  end
+
+  def set_op(ops)
+    ops.each do |field, ops_sym|
+      op_proc = self.lookup_op_proc(ops_sym)
+      ops[field] = op_proc
+    end
+    @field_op_defs = @field_op_defs.merge(ops)
+  end
+
+  def lookup_op_proc(ops_sym)
+    proc = @ops_set_module.op_sets_to_def_table[ops_sym]
+  end
+
+  def get_field_op_procs(field_op_set_sym)
+    field_op_defs = {}
+    #convert from symbol to actual Proc. Using symbols allows the type of op to be passed around
+    #needed because the Proc is anonymous so self-referential data is hard to get
+    field_op_set_sym.each do |field, ops_sym|
+      if ops_sym.class == Symbol
+        ops_proc = lookup_op_proc(ops_sym)
+        field_op_defs[field] = ops_proc
+      else
+        raise "Unrecognized operation definition label #{ops_sym.inspect}"
+      end
+    end
+    field_op_defs
+  end
+
+end
+
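A quick sketch of how the operation contract above can be exercised. This is illustrative only and not part of the packaged files; it assumes the gem's lib/ directory is on $LOAD_PATH so that node_element_operations.rb (and its log_helper dependency) can be loaded directly.

# Illustrative only -- not shipped with the gem. Assumes lib/ is on $LOAD_PATH.
require 'midas/node_element_operations'

# Every op definition follows the documented contract:
#   op.call(current_value, other) #=> {:update_this => new_value, :return_value => optional}
list_add = DefaultOpSets.op_sets_to_def_table[:list_ops][:add]
list_add.call(["red"], ["blue", "red"])   #=> {:update_this => ["red", "blue"]}

# Binding a custom field map on top of the defaults (:id, :data, :name, :tags):
ops = NodeElementOperations.new(:field_op_set => {:links => :key_list_ops})
ops.field_op_set_sym[:links]   #=> :key_list_ops
ops.views["by_links"]          #=> {:field => :links, :type_of_view => :key_of_included_match}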
data/lib/tinkit.rb
ADDED
@@ -0,0 +1,38 @@
+# Note on Structure
+# I prefer that my dependencies be explicitly called by the "dependee".
+# In other words, if I have a class Foo that depends on Bar, I would have
+# the file foo.rb look like this:
+#
+#   require 'bar'
+#   class Foo
+#   end
+#
+# However, I also don't want to put everything in a top level path just so I
+# have cleaner require statements. But I really dislike the
+# require File.join(File.expand_path(File.dirname(__FILE__)), 'bar') syntax,
+# though functionally it provides the expansion that I like.
+#
+# My compromise is to have a helper called from the top level file that maps
+# the directories of my code structure to an easy to remember method.
+# For example, if my project was Foobar with class Foo requiring file bar, that
+# provided Baz functionality (found in the lib/baz/ directory of my project), I'd have:
+#
+#   require Foobar.baz 'bar'
+#   class Foo
+#   end
+#
+# Not as clean but pretty close, and with the following benefits:
+# - Each file can reference its dependencies explicitly. Great for testing, and invaluable for debugging
+# - Changes to the directory structure are easy to implement and don't require any changes to
+#   existing codebase (except for the require helper)
+# - Fairly easy to read
+#
+
+#require helper for cleaner require statements
+$LOAD_PATH << File.expand_path( File.dirname(__FILE__) )
+require 'helpers/require_helper'
+
+#require File.join(File.expand_path(File.dirname(__FILE__)), 'helpers/require_helper')
+
+require Tinkit.lib 'tinkit_node_factory'
+
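The require helper referenced above (data/lib/helpers/require_helper.rb) is not shown in this excerpt. The sketch below is only a guess at the directory-mapping pattern the comment describes; the BASE_DIR constant and method bodies are assumptions, not the gem's actual implementation.

# Hypothetical sketch of the pattern described above -- not the gem's code.
module Tinkit
  BASE_DIR = File.expand_path(File.dirname(__FILE__))  # assumed to point at lib/

  # Each method maps a logical area to a path, so callers can write
  #   require Tinkit.helpers 'log_helper'
  def self.helpers(file_name) File.join(BASE_DIR, 'helpers', file_name) end
  def self.midas(file_name)   File.join(BASE_DIR, 'midas', file_name)   end
  def self.glue(file_name)    File.join(BASE_DIR, 'glue_envs', file_name) end
  def self.lib(file_name)     File.join(BASE_DIR, file_name)            end
end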
data/lib/tinkit_base_node.rb
ADDED
@@ -0,0 +1,733 @@
+#require helper for cleaner require statements
+require File.join(File.dirname(__FILE__), '/helpers/require_helper')
+
+#bufs libraries
+require Tinkit.midas 'node_element_operations'
+require Tinkit.helpers 'hash_helpers'
+require Tinkit.helpers 'camel'
+require Tinkit.helpers 'tk_escape'
+require Tinkit.helpers 'log_helper'
+
+
+
+#This is the base abstract class used. Each user would get a unique
+#class derived from this one. In other words, a class context
+#is specific to a user.
+#[User being used loosely to indicate a client-like relationship]
+
+#The generic model environment would be defined in this class, and the specific
+#bindings would be implemented when the class was instantiated.
+#since this is the abstract base class, we only open up the class here to
+#provide a bit more helpful error if we can't find a particular method
+#When created it should support the following methods and/or accessors
+# Methods
+#   initialize(env) - Uses env parameters to set up the model environment
+#   query_all - Queries for all records.
+#   get(id) - Get a specific record based on its id
+#   save(model_data) - save the record to the persistence model
+#   destroy_node(node) - removes the record from the persistence model
+#   generate_model_key(namespace, node_key) - generates a unique id for that model
+#   raw_all - retrieve all records in native persistence model format
+#   destroy_bulk - destroy records in native persistence model format
+# Important Accessors
+#   :_files_mgr - points to the FilesMgr object that handles
+#     files
+
+#TODO: Have Persistent Layer GlueEnv inherit from this GlueEnv
+#  Or better see if it can follow the FilesMgr interface
+#move into its own file
+#Also, put common code across glue envs here (can still be overwritten)
class GlueEnv
+
+  attr_accessor :glue_interface,
+                #common accessors
+                :user_id,
+                :user_datastore_location,
+                :metadata_keys,
+                :required_instance_keys,
+                :required_save_keys,
+                :node_key,
+                :model_key,
+                :version_key,
+                :namespace_key,
+                :_files_mgr_class,
+                :views,
+                :model_save_params,
+                :moab_data
+
+
+  def method_missing(name)
+    #resp = @glue_interface.__send__(name)
+    raise NameError, "Method #{name} was not found in #{self.class.name}. Has it been"\
+                     " built into the persistent model interface yet?"
+    #resp
+  end
+
+  def initialize(glue_interface)
+    #glue interface is the GlueEnv object for the persistence layer
+    @glue_interface = glue_interface
+    common_accessors = [ :user_id, :user_datastore_location, :metadata_keys, :required_instance_keys, :required_save_keys,
+                         :node_key, :model_key, :version_key, :namespace_key, :_files_mgr_class,
+                         :views, :model_save_params, :moab_data ]
+    common_accessors.each do |acc_sym|
+      accessor = "@#{acc_sym.to_s}".to_sym
+      accessor_value = @glue_interface.__send__(acc_sym)
+      self.instance_variable_set(accessor, accessor_value)
+    end
+  end
+
+  #common methods
+  def destroy_bulk(list_of_native_records)
+    @glue_interface.destroy_bulk(list_of_native_records)
+  end
+
+  def destroy_node(model_metadata)
+    @glue_interface.destroy_node(model_metadata)
+  end
+
+  def generate_model_key(namespace, node_key)
+    #so far the model key is "#{namespace}::#{node_key}" in all persistence models
+    @glue_interface.generate_model_key(namespace, node_key)
+  end
+
+  def get(id)
+    @glue_interface.get(id)
+  end
+
+  def query_all
+    @glue_interface.query_all
+  end
+
+  def raw_all
+    @glue_interface.raw_all
+  end
+
+  def save(new_data)
+    @glue_interface.save(new_data)
+  end
+end
+
+class FilesMgr
+
+  #def method_missing(name)
+  #  raise NameError,"#{name} not found in #{self.class}. Has it been"\
+  #    " overwritten to support file/attachment management yet?"
+
+  #Allow dynamically adding of user data
+  #TODO Add name checking to make sure it's not misspelled or other clues that it's not data
+  #end
+
+
+  attr_accessor :moab_interface
+
+  def initialize(moab_interface)
+    @moab_interface = moab_interface
+  end
+
+  #TODO: Move common file management functions from base node to here
+  #TODO: fix naming inconsistencies
+  def add_files(node, file_datas)
+    @moab_interface.add(node, file_datas)
+  end
+
+  def add_raw_data(node, attach_name, content_type, raw_data, file_modified_at = nil)
+    @moab_interface.add_raw_data(node, attach_name, content_type, raw_data, file_modified_at)
+  end
+
+  def subtract_files(node, params)
+    @moab_interface.subtract(node, params)
+  end
+
+  def get_raw_data(node, basename)
+    @moab_interface.get_raw_data(node, basename)
+  end
+
+  #todo change name to get_files_metadata
+  def get_attachments_metadata(node)
+    @moab_interface.get_attachments_metadata(node)
+  end
+end
+
+
+class TinkitBaseNode
+  #this_file = File.basename(__FILE__)
+  #Set Logger
+  @@log = TinkitLog.set("TinkitBaseNode", :warn)
+
+  #TODO Figure out a way to distinguish method calls from dynamically set data
+  # that were assigned as instance variables
+  #TODO Dynamic Class definition should include the data store, structure and
+  #environmental models
+
+  #Class Accessors
+  class << self; attr_accessor :myGlueEnv, #uppercased to highlight it's supporting the class
+                               :data_struc,
+                               :metadata_keys
+  end
+
+  ##Instance Accessors
+  attr_accessor :_user_data, :_model_metadata, :attached_files,
+                :my_GlueEnv, #note the "_" to differentiate from class accessor
+                :_files_mgr
+
+  #def method_missing(name, *otherstuff)
+  #  raise NameError,"#{name} not found in #{self.class}. Has it been"\
+  #    " overwritten to support file/attachment management yet?"
+
+  #Allow dynamically adding of user data
+  #TODO Add name checking to make sure it's not misspelled or other clues that it's not data
+  #  self.__set_userdata_key(name.to_sym, nil)
+  #end
+
+
+
+
+
+  #Class Methods
+  #Setting up the Class Environment - The class environment holds all
+  # model-specific implementation details (not used when created by factory?)
+  def self.set_environment(persist_env, data_model_bindings)
+    class_name = persist_env[:name]
+    model_name = class_name
+    model_env = persist_env[:env]
+    #key_fields = data_model_bindings[:key_fields]
+    #initial_views_data = data_model_bindings[:data_ops_set]
+
+    #dynamically determine what's needed
+    glue_file_name = "#{model_name}_glue_env"
+    #moab_file_name = "moab_#{model_name}_env"
+
+    #dynamic require (maybe just keep this static?)
+    require Tinkit.glue glue_file_name
+    #require Tinkit.moabs moab_file_name
+
+    glue_lc_name = "#{model_name}_env"
+    glue_const_name = Camel.ize(glue_lc_name)
+    glueModule = Object.const_get(glue_const_name)
+    glueClass = glueModule::GlueEnv
+
+    #orig
+    #@myGlueEnv = glueClass.new(persist_env, data_model_bindings)
+    #/orig
+    #new
+    persistent_model_glue_obj = glueClass.new(persist_env, data_model_bindings)
+    @myGlueEnv = persistent_model_glue_obj #GlueEnv.new(persistent_model_glue_obj)
+
+    @metadata_keys = @myGlueEnv.metadata_keys
+  end
+
+  #Collection Methods
+  #This returns all records, but does not create
+  #an instance of this class for each record. Each record is provided
+  #in its native form.
+  def self.all_native_records
+    @myGlueEnv.query_all
+  end
+
+  #TODO: Add the very cool feature to spec (creating new fields on the fly)
+  #TODO: Document the feature too!!
+  def self.all(data_structure_changes = {})
+    #add_keys = data_structure_changes[:add]
+    #remove_keys = data_structure_changes[:remove]
+    #TODO: test for proper format
+
+    raw_nodes = @myGlueEnv.raw_all
+
+
+    raw_nodes.map! do |base_data|
+      combined_data = self.modify_data_structures(base_data, data_structure_changes)
+      self.new(combined_data)
+    end
+    raw_nodes
+  end
+
+  def self.modify_data_structures(base_data, changes)
+    add_keys_values = changes[:add] || {}
+    remove_keys = changes[:remove] || [] #note it's an array
+    removed_data = base_data.delete_if {|k,v| remove_keys.include?(k)}
+    added_data = add_keys_values.merge(removed_data) #so that add doesn't overwrite existing keys
+  end
+
+  #Not implemented on all persistence layers yet (just couchrest and filesystem)
+  def self.call_new_view(view_name, match_key)
+    results = if @myGlueEnv.respond_to? :call_view
+      @myGlueEnv.call_view(view_name,
+                           @myGlueEnv.moab_data,
+                           @myGlueEnv.namespace_key,
+                           @myGlueEnv.user_datastore_location,
+                           match_key)
+    end
+    results
+  end
+
+  def self.find_nodes_where(key, relation, this_value)
+    records = @myGlueEnv.find_nodes_where(key, relation, this_value)
+    nodes = []
+    records.map do |base_data|
+      if base_data
+        #combined_data = self.modify_data_structures(base_data, data_structure_changes)
+        #nodes << self.new(combined_data)
+        nodes << self.new(base_data)
+      end
+    end
+    return nodes
+  end
+
+  #Not implemented on all persistence layers yet (just couchrest and filesystem)
+  #may be deprecated
+  def self.call_view(param, match_keys, data_structure_changes = {})
+    view_method_name = "by_#{param}".to_sym #using CouchDB style for now
+    records = if @myGlueEnv.views.respond_to? view_method_name
+      @myGlueEnv.views.__send__(view_method_name,
+                                @myGlueEnv.moab_data,
+                                @myGlueEnv.user_datastore_location,
+                                match_keys)
+    else
+      #TODO: Think of a more elegant way to handle an unknown view
+      raise "Unknown design view #{view_method_name} called for: #{param}"
+    end
+
+    nodes = []
+    records.map do |base_data|
+      if base_data
+        combined_data = self.modify_data_structures(base_data, data_structure_changes)
+        nodes << self.new(combined_data)
+      end
+    end
+    return nodes
+  end
+
+  def self.get(id)
+    data = @myGlueEnv.get(id)
+    rtn = if data
+      self.new(data)
+    else
+      nil
+    end
+  end
+
+  #This destroys all nodes in the model
+  #this is more efficient than calling
+  #destroy on instances of this class
+  #as it avoids instantiating only to destroy it
+  def self.destroy_all
+    all_records = self.all_native_records
+    @myGlueEnv.destroy_bulk(all_records)
+  end
+
+  #Create the document in the BUFS node format from an existing node.
+  def self.__create_from_other_node(other_node)
+    #TODO: Figure out data structure imports
+    #Idea, for duplicates, this node takes precedence
+    #for new data structures, other node operations (if they exist) are used
+    #Not implemented yet, though
+    #TODO: add to spec
+    #TODO: what about node id collisions? currently ignoring it
+    #and letting the persistence model work it out
+    this_node = self.new(other_node._user_data)
+    this_node.__save
+    this_node.__import_attachments(other_node.__export_attachments) if other_node.attached_files
+  end
+
+  #Returns the id that will be appended to the document ID to uniquely
+  #identify attachment documents associated with the main document
+  #TODO: NOT COMPLETELY ABSTRACTED YET
+  def self.attachment_base_id
+    @myGlueEnv.attachment_base_id
+  end
+
+
+  #Normal instantiation can take two forms that differ only in the source
+  #for the initial parameters. The constructor could be called by the user
+  #and passed only user data, or the constructor could be called by a class
+  #collection method and the initial parameters would come from a datastore.
+  #In the latter case, some of the parameters will include information about
+  #the datastore (model metadata).
+  def initialize(init_params = {})
+    #setting the class accessor to also be an instance accessor
+    #for convenience and hopefully doesn't create confusion
+    @my_GlueEnv = self.class.myGlueEnv
+    @@log.debug {"initializing with: #{init_params.inspect}"} if @@log.debug?
+    raise "init_params cannot be nil" unless init_params
+    @saved_to_model = nil #TODO rename to synchronized_to_model
+    #make sure keys are symbols
+    init_params = HashKeys.str_to_sym(init_params)
+    @_user_data, @_model_metadata = filter_user_from_model_data(init_params)
+
+    @@log.debug {"data filtered into user data: #{@_user_data}"} if @@log.debug?
+    @@log.debug {"data filtered into model metadata: #{@_model_metadata}"} if @@log.debug?
+
+    instance_data_validations(@_user_data)
+    node_key = get__user_data_id(@_user_data)
+
+    moab_file_mgr = @my_GlueEnv._files_mgr_class.new(@my_GlueEnv, node_key)
+    @_files_mgr = FilesMgr.new(moab_file_mgr)
+    @_model_metadata = update__model_metadata(@_model_metadata, node_key)
+
+    @@log.debug {"Updated model metadata: #{@_model_metadata.inspect}"} if @@log.debug?
+
+    init_params.each do |attr_name, attr_value|
+      __set_userdata_key(attr_name.to_sym, attr_value)
+    end
+  end
+
+  #This will take a key-value pair and create an instance variable (actually
+  # it's a method) using key as the method name, and sets the return value to
+  # the value associated with that key. Changes to the key's value are reflected
+  # in subsequent method calls, and the value can be updated by using
+  # method_name = some value. Additionally, any custom operations that have
+  # been defined for that key name will be loaded in and assigned methods in
+  # the form methodname_operation
+  def __set_userdata_key(attr_var, attr_value)
+    ops = self.class.data_struc.field_op_defs #data_ops #|| NodeElementOperations.ops
+    #@@log.debug {"Ops Def: #{ops.inspect}"} if @@log.debug?
+    #ops = NodeElementOperations::Ops
+    #incorporates predefined methods
+    #@@log.debug {"Setting method #{attr_var.inspect}, #{ops[attr_var].inspect}"} if @@log.debug?
+    add_op_method(attr_var, ops[attr_var]) if (ops && ops[attr_var])
+    unless self.class.metadata_keys.include? attr_var.to_sym
+      @_user_data[attr_var] = attr_value
+    else
+      raise "Metadata Keys: #{self.class.metadata_keys.inspect}
+        Key match: #{attr_var.to_sym.inspect} UserData: #{@_user_data.inspect}"
+    end
+    #manually setting instance variable (rather than using instance_variable_set),
+    # so @node_data_hash can be updated
+    #dynamic method acting like an instance variable getter
+    self.class.__send__(:define_method, "#{attr_var}".to_sym,
+                        lambda {@_user_data[attr_var]} )
+    #dynamic method acting like an instance variable setter
+    self.class.__send__(:define_method, "#{attr_var}=".to_sym,
+                        lambda {|new_val| @_user_data[attr_var] = new_val} )
+  end
+
+  #TODO: Method Wrapper is not sufficiently tested
+  #The method operations are completely decoupled from the object that they are bound to.
+  #This creates a problem when operations act on themselves (for example adding x to
+  #the current value requires the adder to determine the current value of x). To get
+  #around this self-referential problem while maintaining the decoupling this wrapper is used.
+  #Essentially it takes the unbound two-parameter operation (this, other) and binds the current value
+  #to (this). This allows a more natural form of calling these operations. In other words
+  # description_add(new_string) can be used, rather than description_add(current_string, new_string).
+  def __method_wrapper(param, unbound_op)
+    @@log.debug {"__method_wrapper with #{param.inspect}, #{unbound_op.inspect}"} if @@log.debug?
+    #What I want is to call obj.param_op(other) example: obj.links_add(new_link)
+    #which would then add new_link to obj.links
+    #however, the predefined operation (add in the example) has no way of knowing
+    #about links, so the predefined operation takes two parameters (this, other)
+    #and this method wraps the obj.links so that the links_add method doesn't have to
+    #include itself as a parameter to the predefined operation
+    #lambda {|other| @node_data_hash[param] = unbound_op.call(@node_data_hash[param], other)}
+    lambda {|other| old_this = self.__send__("#{param}".to_sym) #original value
+      #we're going to compare the new value to the old later
+      if old_this
+        this = old_this.dup
+      else
+        this = old_this
+      end
+      rtn_data = unbound_op.call(this, other)
+      new_this = rtn_data[:update_this]
+      self.__send__("#{param}=".to_sym, new_this)
+      it_changed = true
+      it_changed = false if (old_this == new_this) || !(rtn_data.has_key?(:update_this))
+      not_in_model = !@saved_to_model
+      self.__save if (not_in_model || it_changed) #unless (@saved_to_model && save) #don't save if the value hasn't changed
+      rtn = rtn_data[:return_value] || rtn_data[:update_this]
+      rtn
+    }
+  end
+
+  def __unset_userdata_key(param)
+    self.class.__send__(:remove_method, param.to_sym)
+    @_user_data.delete(param)
+  end
+
+  #NOTE: For ruby objects that are automatically added that collide with user data names
+  #that ruby functionality (currently) will be lost
+
+  #Save the object to the persistence model
+  def __save
+    save_data_validations(self._user_data)
+    node_key = @my_GlueEnv.node_key
+    node_id = self._model_metadata[node_key]
+    @@log.debug {"User Data to save: #{self._user_data}"} if @@log.debug?
+    model_data = inject_node_metadata
+    #raise model_data.inspect
+    @@log.debug { "saving (including injected model data): #{model_data.inspect}"} if @@log.debug?
+    res = @my_GlueEnv.save(model_data)
+    version_key = @my_GlueEnv.version_key
+    #TODO: Make consistent with rev keys
+    rev_data = {version_key => res['rev']}
+    update_self(rev_data)
+    return self
+  end
+
+
+  def __export_attachment(attachment_name)
+    md = __get_attachment_metadata(attachment_name)
+    data = get_raw_data(attachment_name)
+    export = {:metadata => md, :raw_data => data}
+  end
+
+  def __import_attachment(attach_name, att_xfer_format)
+    #transfer format is the format of the export method
+    content_type = att_xfer_format[:metadata][:content_type]
+    file_modified_at = att_xfer_format[:metadata][:file_modified]
+    raw_data = att_xfer_format[:raw_data]
+    #raise "Attachment provided no data to import" unless raw_data
+    add_raw_data(attach_name, content_type, raw_data, file_modified_at)
+  end
+
+  #Deletes the object
+  def __destroy_node
+    @my_GlueEnv.destroy_node(self._model_metadata)
+  end
+
+  def self.__create_from_other_node(other_node)
+    #TODO: How to deal with differently defined data structures?
+    #currently assume transfers are between models of identical data structures
+    #either enforce that, or figure out generic solution
+
+    #create new node
+    new_basic_node = self.new(other_node._user_data)
+
+    #transfer attachments
+    if other_node.attached_files
+      other_node.attached_files.each do |att_file|
+        exported_data = other_node.__export_attachment(att_file)
+        #raise "Attachment had no data to export: #{att_file}" unless exported_data
+        new_basic_node.__import_attachment(att_file, exported_data) if att_file
+      end
+    end
+    new_basic_node
+  end
+
+  def __get_attachments_metadata
+    md = @_files_mgr.get_attachments_metadata(self)
+    md = HashKeys.str_to_sym(md)
+    md.each do |fbn, fmd|
+      md[fbn] = HashKeys.str_to_sym(fmd)
+    end
+    md
+  end
+
+  def __get_attachment_metadata(attachment_name)
+    all_md = __get_attachments_metadata
+    index_name = TkEscape.escape(attachment_name)
+    all_md[index_name.to_sym]
+  end
+
+
+
+  #Deprecated Methods------------------------
+  #Adds parent categories, it can accept a single category or an array of categories
+  #aliased for backwards compatibility, this method is dynamically defined and generated
+  def add_parent_categories(new_cats)
+    raise "Warning:: add_parent_categories is being deprecated, use <param_name>_add instead ex: parent_categories_add(cats_to_add) "
+    parent_categories_add(new_cats)
+  end
+
+  #Can accept a single category or an array of categories
+  #aliased for backwards compatibility, the method is dynamically defined and generated
+  def remove_parent_categories(cats_to_remove)
+    raise "Warning:: remove_parent_categories is being deprecated, use <param_name>_subtract instead ex: parent_categories_subtract(cats_to_remove)"
+    parent_categories_subtract(cats_to_remove)
+  end
+  #-------------------------------------------
+
+  #Attachment File Operation Methods-------------------------------
+
+  #Get attachment content. Note that the data is read in as a complete block, this may be something that needs to be optimized.
+  #TODO: add_raw_data parameters to a hash?
+  def add_raw_data(attach_name, content_type, raw_data, file_modified_at = nil)
+    attached_basenames = @_files_mgr.add_raw_data(self, attach_name, content_type, raw_data, file_modified_at)
+    if self.attached_files
+      self.attached_files += attached_basenames
+      self.attached_files.uniq! #removing duplicates is ok because these names are keys to the underlying attached file data (dupes would point to the same data)
+    else
+      self.__set_userdata_key(:attached_files, attached_basenames)
+    end
+
+    self.__save
+  end
+
+  def files_add(file_datas)
+    file_datas = [file_datas].flatten
+    #TODO keep original names, and have model abstract character issues
+    #TODO escaping is spread all over, do it in one place
+    attached_basenames = @_files_mgr.add_files(self, file_datas)
+    if self.attached_files
+      self.attached_files += attached_basenames
+      self.attached_files.uniq! #removing duplicates is ok because these names are keys to the underlying attached file data (dupes would point to the same data)
+    else
+      self.__set_userdata_key(:attached_files, attached_basenames)
+    end
+    self.__save
+  end
+
+  def files_subtract(file_basenames)
+    file_basenames = [file_basenames].flatten
+    @_files_mgr.subtract_files(self, file_basenames)
+    self.attached_files -= file_basenames
+    self.__save
+  end
+
+  def files_remove_all
+    @_files_mgr.subtract_files(self, :all)
+    self.attached_files = nil
+    self.__save
+  end
+
+  def get_raw_data(attachment_name)
+    @_files_mgr.get_raw_data(self, attachment_name)
+  end
+
+
+  #TODO: Add to spec (currently not used) I think used by web server, need to genericize (use FilesMgr?)
+  #def attachment_url(attachment_name)
+  #  current_node_doc = self.class.get(self['_id'])
+  #  att_doc_id = current_node_doc['attachment_doc_id']
+  #  current_node_attachment_doc = self.class.user_attachClass.get(att_doc_id)
+  #  current_node_attachment_doc.attachment_url(attachment_name)
+  #end
+
+  def get_file_data(attachment_name)
+    @_files_mgr.get_file_data(self, attachment_name)
+    #current_node_doc = self.class.get(self['_id'])
+    #att_doc_id = current_node_doc['attachment_doc_id']
+    #current_node_attachment_doc = self.class.user_attachClass.get(att_doc_id)
+    #current_node_attachment_doc.read_attachment(attachment_name)
+  end
+
+  def raise_method_missing(meth_sym, *args)
+    raise NoMethodError, <<-ERRORINFO
+      base class: TinkitBaseNode
+      actual class: #{self.class}
+      method: #{meth_sym.inspect}
+      args: #{args.inspect}
+    ERRORINFO
+  end
+
+  def method_missing(meth_sym, *args, &block)
+    meth_str = meth_sym.to_s
+    raise_method_missing(meth_sym, *args) unless @_user_data
+    @@log.debug { "User Data (methods generated from these keys): #{@_user_data.inspect}"} if @@log.debug?
+    return_value = "method_not_found_here"
+    @_user_data.keys.each do |existing_methods_base|
+      meth_regex_str = "^#{existing_methods_base}_"
+      meth_regex = Regexp.new(meth_regex_str)
+      if meth_str.match(meth_regex)
+        return_value = @_user_data[existing_methods_base]
+        break
+      end
+    end
+
+    if return_value == "method_not_found_here"
+      raise_method_missing(meth_sym, *args)
+    else
+      puts "Warning: Method #{meth_sym.inspect} not defined for all fields\
+        returning value of the field in those cases"
+      return return_value
+    end
+  end
+  #-----------------------------------------------------------
+  #------------------------------------------------------------
+  private
+
+  def add_op_method(param, ops)
+    @@log.debug {"Adding Op method #{param.inspect}, #{ops.inspect}"} if @@log.debug?
+    ops.each do |op_name, op_proc|
+      method_name = "#{param.to_s}_#{op_name.to_s}".to_sym
+      wrapped_op = __method_wrapper(param, op_proc)
+      self.class.__send__(:define_method, method_name, wrapped_op)
+    end
+  end
+
+
+  def filter_user_from_model_data(init_params)
+    raise ArgumentError, "Filter parameters nil" unless init_params
+    _model_metadata_keys = @my_GlueEnv.metadata_keys
+    raise "Metadata Keys missing" unless _model_metadata_keys
+    _model_metadata = {}
+    #_model_metadata = {@my_GlueEnv.node_key => init_params[@my_GlueEnv.node_key] }
+    _model_metadata_keys.each do |k|
+      #next if k == @my_GlueEnv.node_key #node key does not get deleted from init_params
+      _model_metadata[k] = init_params.delete(k) if init_params[k] #delete returns deleted value
+    end
+    [init_params, _model_metadata]
+  end
+
+  def instance_data_validations(_user_data)
+    #Check for Required Keys
+    required_keys = @my_GlueEnv.required_instance_keys
+
+    #TODO:
+    #required_keys.delete(@my_GlueEnv.node_key)
+
+    required_keys.each do |rk|
+      err_str = "The key #{rk.inspect} must be associated with a"\
+                " value for instantiation for the data: #{_user_data.inspect}"
+      raise ArgumentError, err_str unless _user_data[rk]
+    end
+  end
+
+  def save_data_validations(_user_data)
+    required_keys = @my_GlueEnv.required_save_keys
+    required_keys.each do |rk|
+      err_str = "The key #{rk.inspect} must be associated with a"\
+                " value before saving"
+      raise ArgumentError, err_str unless _user_data[rk]
+    end
+  end
+
+  #TODO Rename to remove extra line space
+  def get__user_data_id(_user_data)
+    user_node_key = @my_GlueEnv.node_key
+    _user_data[user_node_key]
+  end
+
+  def update__model_metadata(metadata, node_key)
+    @@log.debug {"Updating model metadata: #{metadata.inspect} using node key: #{node_key}"} if @@log.debug?
+    #updates @saved_to_model (make a method instead)?
+    #TODO, Metadata keys should be assigned not hard coded
+    persist_layer_key = @my_GlueEnv.persist_layer_key
+    version_key = @my_GlueEnv.version_key
+    namespace_key = @my_GlueEnv.namespace_key
+    @@log.debug {"Metadata Keys: #{[persist_layer_key, version_key, namespace_key].inspect}"} if @@log.debug?
+    id = metadata[persist_layer_key]
+    namespace = metadata[namespace_key]
+    rev = metadata[version_key]
+    #if metadata key fields don't exist we have to create them
+    namespace = @my_GlueEnv.user_datastore_location unless namespace
+    id = @my_GlueEnv.generate_model_key(namespace, node_key) unless id
+    updated_key_metadata = {persist_layer_key => id, namespace_key => namespace}
+    updated_key_metadata.delete(version_key) unless rev
+    metadata.merge!(updated_key_metadata)
+    if rev
+      @saved_to_model = rev
+      metadata.merge!({version_key => rev})
+    else
+      metadata.delete(version_key) #TODO Is this too model specific?
+    end
+    @@log.debug {"Updated Metadata: #{metadata.inspect}"} if @@log.debug?
+    metadata
+  end
+
+  #TODO: Can't this be simplified (inject -> inject -> merge?)
+  def inject_node_metadata
+    inject_metadata(@_user_data)
+  end
+
+  def inject_metadata(node_data)
+    @@log.debug {"Node Data: #{node_data.inspect}"} if @@log.debug?
+    @@log.debug { "Model Metadata: #{@_model_metadata}"} if @@log.debug?
+    node_data.merge(@_model_metadata)
+  end
+
+  def update_self(rev_data)
+    self._model_metadata.merge!(rev_data)
+    version_key = @my_GlueEnv.version_key
+    @saved_to_model = rev_data[version_key]
+  end
+
+end
+
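For reference, a hedged sketch of how the dynamically generated field methods behave once a node class has been wired up. MyNode is a hypothetical TinkitBaseNode subclass assumed to be bound to one of the glue environments (couchrest, filesystem, mysql, or sdb_s3) and to use the default field operations; in practice that setup goes through tinkit_node_factory.rb, which is listed in this package but not shown in this excerpt.

# Illustrative only; MyNode is a hypothetical, fully configured node class.
node = MyNode.new(:id => 'doc-1', :tags => ['red'])

node.tags               #=> ['red']   (getter defined by __set_userdata_key)
node.tags_add 'blue'    # ListAddOpDef bound via __method_wrapper; also triggers __save
node.tags               #=> ['red', 'blue']
node.tags_subtract 'red'
node.tags               #=> ['blue']

node.id_add 'other-id'  # :id maps to :static_ops, so the value is left unchanged
node.id                 #=> 'doc-1'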