rubysync 0.1.1 → 0.2.1

@@ -26,7 +26,7 @@ require 'ruby_sync/event'
 
 
  module RubySync
- VERSION = '0.1.1'
+ VERSION = '0.2.1'
  module Connectors
  end
  module Pipelines
@@ -13,378 +13,262 @@
  # You should have received a copy of the GNU General Public License along with RubySync; if not, write to the
  # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
 
- require 'ruby_sync/connectors/connector_event_processing'
- require 'yaml'
- require 'yaml/dbm'
- require 'digest/md5'
 
- module RubySync::Connectors
- class BaseConnector
-
- include RubySync::Utilities
- include ConnectorEventProcessing
-
- attr_accessor :once_only, :name, :is_vault, :pipeline
- option :dbm_path
-
- # set a default dbm path
- def dbm_path()
- p = "#{base_path}/db"
- ::FileUtils.mkdir_p p
- ::File.join(p,name)
- end
 
- # Stores association keys indexed by path:association_context
- def path_to_association_dbm_filename
- dbm_path + "_path_to_assoc"
- end
-
- # Stores paths indexed by association_context:association_key
- def association_to_path_dbm_filename
- dbm_path + "_assoc_to_path"
- end
-
- # Stores a hash for each entry so we can tell when
- # entries are added, deleted or modified
- def mirror_dbm_filename
- dbm_path + "_mirror"
- end
-
- def initialize options={}
- base_path # call this once to get the working directory before anything else
- # in the connector changes the cwd
- options = self.class.default_options.merge(options)
- once_only = false
- self.name = options[:name]
- self.is_vault = options[:is_vault]
- if is_vault && !can_act_as_vault?
- raise "#{self.class.name} can't act as an identity vault."
- end
- options.each do |key, value|
- if self.respond_to? "#{key}="
- self.send("#{key}=", value)
- else
- log.debug "#{name}: doesn't respond to #{key}="
- end
- end
- end
-
-
- # Subclasses must override this. Called by perform_add to actually
- # store the new record in the datastore. Returned value will be used
- # as the association id if this connector is acting as the client.
- def add id, operations
- raise "add method not implemented"
- end
-
-
- # Override this to return a string that will be included within the class definition of
- # of configurations based on your connector.
- def self.sample_config
- end
+ module RubySync::Connectors
+ class BaseConnector
+
+ include RubySync::Utilities
+ meta_eval {include RubySync::Utilities}
+ include ConnectorEventProcessing
+
+ attr_accessor :once_only, :name, :is_vault, :pipeline
+
+ def initialize options={}
+ base_path # call this once to get the working directory before anything else
+ # in the connector changes the cwd
+ options = self.class.default_options.merge(options)
+ once_only = false
+ self.name = options[:name]
+ self.is_vault = options[:is_vault]
+ if is_vault && !can_act_as_vault?
+ raise "#{self.class.name} can't act as an identity vault."
+ end
+ options.each do |key, value|
+ if self.respond_to? "#{key}="
+ self.send("#{key}=", value)
+ else
+ log.debug "#{name}: doesn't respond to #{key}="
+ end
+ end
+ end
+
+
+ # Subclasses must override this. Called by perform_add to actually
+ # store the new record in the datastore. Returned value will be used
+ # as the association id if this connector is acting as the client.
+ def add id, operations
+ raise "add method not implemented"
+ end
 
 
- # Subclasses must override this to
- # interface with the external system and generate entries for every
- # entry in the scope passing the entry path (id) and its data (as a hash of arrays).
- # This method will be called repeatedly until the connector is
- # stopped.
- def each_entry
- raise "Not implemented"
- end
+ # Override this to return a string that will be included within the class definition of
+ # of configurations based on your connector.
+ def self.sample_config
+ end
 
- # Subclasses MAY override this to interface with the external system
- # and generate an event for every change that affects items within
- # the scope of this connector.
- #
- # The default behaviour is to compare a hash of each entry in the
- # database with a stored hash of its previous value and generate
- # add, modify and delete events appropriately. This is normally a very
- # inefficient way to operate so overriding this method is highly
- # recommended if you can detect changes in a more efficient manner.
- #
- # This method will be called repeatedly until the connector is
- # stopped.
- def each_change
- DBM.open(self.mirror_dbm_filename) do |dbm|
- # scan existing entries to see if any new or modified
- each_entry do |path, entry|
- digest = digest(entry)
- unless stored_digest = dbm[path.to_s] and digest == stored_digest
- operations = create_operations_for(entry)
- yield RubySync::Event.add(self, path, nil, operations)
- dbm[path.to_s] = digest
- end
- end
-
- # scan dbm to find deleted
- dbm.each do |key, stored_hash|
- unless self[key]
- yield RubySync::Event.delete(self, key)
- dbm.delete key
- if is_vault? and @pipeline
- association = association_for @pipeline.association_context, key
- remove_association association
- end
- end
- end
- end
+ def self.event_method name,&blk
+ define_method name do |event|
+ event.instance_eval(&blk)
  end
-
- def digest(o)
- Digest::MD5.hexdigest(o.to_yaml)
- end
-
+ end
 
+
+ def self.target_transform(&blk) event_method :target_transform,&blk; end
+
+ # Subclasses must override this to
+ # interface with the external system and generate entries for every
+ # entry in the scope passing the entry path (id) and its data (as a hash of arrays).
+ # This method will be called repeatedly until the connector is
+ # stopped.
+ def each_entry
+ raise "Not implemented"
+ end
 
- # Call each_change repeatedly (or once if in once_only mode)
- # to generate events.
- # Should generally only be called by the pipeline to which it is attached.
- def start &blk
- log.debug "#{name}: Started"
- @running = true
- sync_started()
- while @running
- each_change do |event|
- if event.type == :force_resync
- each_entry(&blk)
- next
- end
- if is_delete_echo?(event) || is_echo?(event)
- log.debug "Ignoring echoed event"
- else
- call_if_exists :source_transform, event
- yield(event)
- end
- end
 
- if once_only
- log.debug "#{name}: Stopped"
- @running = false
- else
- log.debug "#{name}: sleeping"
- sleep 1
- end
- end
- sync_stopped
- end
+ # Call each_change repeatedly (or once if in once_only mode)
+ # to generate events.
+ # Should generally only be called by the pipeline to which it is attached.
+ def start &blk
+ log.debug "#{name}: Started"
+ @running = true
+ sync_started()
+ while @running
+ each_change do |event|
+ if event.type == :force_resync
+ each_entry(&blk)
+ next
+ end
+ if is_delete_echo?(event) || is_echo?(event)
+ log.debug "Ignoring echoed event"
+ else
+ call_if_exists :source_transform, event
+ yield(event)
+ end
+ end
+
+ if once_only
+ log.debug "#{name}: Stopped"
+ @running = false
+ else
+ log.debug "#{name}: sleeping"
+ sleep 1
+ end
+ end
+ sync_stopped
+ end
 
 
- # Called by start() after last call to each_change or each_entry
- def sync_stopped; end
+ # Called by start() after last call to each_change or each_entry
+ def sync_stopped; end
 
- # Called by start() before first call to each_change or each_entry
- def sync_started; end
+ # Called by start() before first call to each_change or each_entry
+ def sync_started; end
 
- # Override this to perform actions that must be performed the
- # when the connector starts running. (Eg, opening network connections)
- def started
- end
+ # Override this to perform actions that must be performed the
+ # when the connector starts running. (Eg, opening network connections)
+ def started
+ end
 
- # Override this to perform actions that must be performed when
- # the connector exits (eg closing network conections).
- def stopped; end
+ # Override this to perform actions that must be performed when
+ # the connector exits (eg closing network conections).
+ def stopped; end
 
 
- # Politely stop the connector.
- def stop
- log.info "#{name}: Attempting to stop"
- @running = false
- end
+ # Politely stop the connector.
+ def stop
+ log.info "#{name}: Attempting to stop"
+ @running = false
+ end
 
 
- def is_vault?
- @is_vault
- end
+ def is_vault?
+ @is_vault
+ end
 
 
- # Returns the association key for the given path. Called if this connector is
- # the client.
- # The default implementation returns the path itself. If there is a more
- # efficient key for looking up an entry in the client, override to return
- # that instead.
- def own_association_key_for(path)
- path
- end
+ # Returns the association key for the given path. Called if this connector is
+ # the client.
+ # The default implementation returns the path itself. If there is a more
+ # efficient key for looking up an entry in the client, override to return
+ # that instead.
+ def own_association_key_for(path)
+ path
+ end
 
 
- # Returns the appropriate entry for the association key. This key will have been provided
- # by a previous call to the association_key method.
- # This will only be called on the client connector. It is not expected that the client will
- # have to store this key.
- def path_for_own_association_key(key)
- key
- end
+ # Returns the appropriate entry for the association key. This key will have been provided
+ # by a previous call to the association_key method.
+ # This will only be called on the client connector. It is not expected that the client will
+ # have to store this key.
+ def path_for_own_association_key(key)
+ key
+ end
 
- # Returns the entry matching the association key. This is only called on the client.
- def entry_for_own_association_key(key)
- self[path_for_own_association_key(key)]
- end
+ # Returns the entry matching the association key. This is only called on the client.
+ def entry_for_own_association_key(key)
+ self[path_for_own_association_key(key)]
+ end
 
- # True if there is an entry matching the association key. Only called on the client.
- # Override if you have a quicker way of determining whether an entry exists for
- # given key than retrieving the entry.
- def has_entry_for_key?(key)
- entry_for_own_association_key(key)
- end
+ # True if there is an entry matching the association key. Only called on the client.
+ # Override if you have a quicker way of determining whether an entry exists for
+ # given key than retrieving the entry.
+ def has_entry_for_key?(key)
+ entry_for_own_association_key(key)
+ end
 
- # Whether this connector is capable of acting as a vault.
- # The vault is responsible for storing the association key of the client application
- # and must be able to retrieve records for that association key.
- def can_act_as_vault?
- defined? associate and
+ # Whether this connector is capable of acting as a vault.
+ # The vault is responsible for storing the association key of the client application
+ # and must be able to retrieve records for that association key.
+ def can_act_as_vault?
+ defined? associate and
  defined? path_for_association and
  defined? association_key_for and
  defined? remove_association and
  defined? associations_for
- end
-
-
- # Store association for the given path
- def associate association, path
- YAML::DBM.open(path_to_association_dbm_filename) do |dbm|
- assocs = dbm[path.to_s] || {}
- assocs[association.context.to_s] = association.key.to_s
- dbm[path.to_s] = assocs
- end
- DBM.open(association_to_path_dbm_filename) do |dbm|
- dbm[association.to_s] = path
- end
- end
-
- def path_for_association association
- is_vault? or return path_for_own_association_key(association.key)
- DBM.open(association_to_path_dbm_filename) do |dbm|
- dbm[association.to_s]
- end
- end
-
- def associations_for path
- YAML::DBM.open(path_to_association_dbm_filename) do |dbm|
- assocs = dbm[path.to_s]
- assocs.values
- end
- end
-
-
- def remove_association association
- path = nil
- DBM.open(association_to_path_dbm_filename) do |dbm|
- return unless path =dbm.delete(association.to_s)
- end
- YAML::DBM.open(path_to_association_dbm_filename) do |dbm|
- assocs = dbm[path.to_s]
- assocs.delete(association.context) and dbm[path.to_s] = assocs
- end
- end
+ end
 
- def association_key_for context, path
- YAML::DBM.open(path_to_association_dbm_filename) do |dbm|
- assocs = dbm[path.to_s] || {}
- assocs[context.to_s]
- end
- end
 
-
- # Return the association object given the association context and path.
- # This should only be called on the vault.
- def association_for(context, path)
- raise "#{name} is not a vault." unless is_vault?
- key = association_key_for context, path
- key and RubySync::Association.new(context, key)
- end
+
+ # Return the association object given the association context and path.
+ # This should only be called on the vault.
+ def association_for(context, path)
+ raise "#{name} is not a vault." unless is_vault?
+ key = association_key_for context, path
+ key and RubySync::Association.new(context, key)
+ end
 
- # Should only be called on the vault. Returns the entry associated with
- # the association passed. Some connectors may wish to override this if
- # they have a more efficient way of retrieving the record for a given
- # association.
- def find_associated association
- path = path_for_association association
- path and self[path]
- end
-
- # The context to be used to for all associations created where this
- # connector is the client.
- def association_context
- self.name
- end
+ # Should only be called on the vault. Returns the entry associated with
+ # the association passed. Some connectors may wish to override this if
+ # they have a more efficient way of retrieving the record for a given
+ # association.
+ def find_associated association
+ path = path_for_association association
+ path and self[path]
+ end
 
- def remove_mirror
- File.delete_if_exists(["#{mirror_dbm_filename}.db"])
- end
+ # The context to be used to for all associations created where this
+ # connector is the client.
+ def association_context
+ self.name
+ end
 
- def remove_associations
- File.delete_if_exists(["#{association_to_path_dbm_filename}.db","#{path_to_association_dbm_filename}.db"])
- end
-
- def clean
- remove_associations
- remove_mirror
- end
+ def clean
+ end
 
- # Attempts to delete non-existent items may occur due to echoing. Many systems won't be able to record
- # the fact that an entry has been deleted by rubysync because after the delete, there is no entry left to
- # record the information in. Therefore, they may issue a notification that the item has been deleted. This
- # becomes an event and the connector won't know that it caused the delete. The story usually has a reasonably happy
- # ending though.
- # The inappropriate delete event is processed by the pipeline and a delete attempt is made on the
- # datastore that actually triggered the original delete event in the first place. Most of the time, there will
- # be no entry there for it to delete and it will fail harmlessly.
- # Problems may arise, however, if the original delete event was the result of manipulation in the pipeline and
- # the original entry is in fact supposed to stay there. For example, say a student in an enrolment system was marked
- # as not enrolled anymore. This modify event is translated by the pipeline that connects to the identity vault to become
- # a delete because only the enrolment system is interested in non-enrolled students. As the student is removed
- # from the identity vault, a new delete event is generated targeted back and the enrolment system.
- # If the pipeline has been configured to honour delete requests from the vault to the enrolment system then the
- # students entry in the enrolment system would be deleted.
- def is_delete_echo? event
- false #TODO implement delete event caching
- end
+ # Attempts to delete non-existent items may occur due to echoing. Many systems won't be able to record
+ # the fact that an entry has been deleted by rubysync because after the delete, there is no entry left to
+ # record the information in. Therefore, they may issue a notification that the item has been deleted. This
+ # becomes an event and the connector won't know that it caused the delete. The story usually has a reasonably happy
+ # ending though.
+ # The inappropriate delete event is processed by the pipeline and a delete attempt is made on the
+ # datastore that actually triggered the original delete event in the first place. Most of the time, there will
+ # be no entry there for it to delete and it will fail harmlessly.
+ # Problems may arise, however, if the original delete event was the result of manipulation in the pipeline and
+ # the original entry is in fact supposed to stay there. For example, say a student in an enrolment system was marked
+ # as not enrolled anymore. This modify event is translated by the pipeline that connects to the identity vault to become
+ # a delete because only the enrolment system is interested in non-enrolled students. As the student is removed
+ # from the identity vault, a new delete event is generated targeted back and the enrolment system.
+ # If the pipeline has been configured to honour delete requests from the vault to the enrolment system then the
+ # students entry in the enrolment system would be deleted.
+ def is_delete_echo? event
+ false #TODO implement delete event caching
+ end
 
- def is_echo? event; false end
+ def is_echo? event; false end
 
- # Called by unit tests to inject data
- def test_add id, details
- add id, details
- end
+ # Called by unit tests to inject data
+ def test_add id, details
+ add id, details
+ end
 
- # Called by unit tests to modify data
- def test_modify id, details
- modify id, details
- end
+ # Called by unit tests to modify data
+ def test_modify id, details
+ modify id, details
+ end
 
- # Called by unit tests to delete a record
- def test_delete id
- delete id
- end
+ # Called by unit tests to delete a record
+ def test_delete id
+ delete id
+ end
 
- # Return an array of operations that would create the given record
- # if applied to an empty hash.
- def create_operations_for record
- record.keys.map {|key| RubySync::Operation.new(:add, key, record[key])}
- end
+ # Return an array of operations that would create the given record
+ # if applied to an empty hash.
+ def create_operations_for record
+ record.keys.map {|key| RubySync::Operation.new(:add, key, record[key])}
+ end
 
 
- # Return an array of possible fields for this connector.
- # Implementations should override this to query the datasource
- # for possible fields.
- def self.fields
- nil
- end
+ # Return an array of possible fields for this connector.
+ # Implementations should override this to query the datasource
+ # for possible fields.
+ def self.fields
+ nil
+ end
 
- # Ensures that the named connector is loaded and returns its class object
- def self.class_for connector_name
- name = class_name_for connector_name
- (name)? eval(name) : nil
- end
+ # Ensures that the named connector is loaded and returns its class object
+ # def self.class_for connector_name
+ # name = class_name_for connector_name
+ # (name)? eval(name) : nil
+ # end
 
- # Return the class name for a path style connector name
- def self.class_name_for connector_name
- '::' + "#{connector_name}_connector".camelize
- end
+ # # Return the class name for a path style connector name
+ # def self.class_name_for connector_name
+ # '::' + "#{connector_name}_connector".camelize
+ # end
 
- def self.sample_config
- return <<END
+ def self.sample_config
+ return <<-END
  # The comments in this file should help you to create a custom connector.
  # We're going to assume that you know how to program in Ruby. If you don't then
  # quickly pop-off and learn it: http://ruby-lang.org.
@@ -426,70 +310,94 @@ module RubySync::Connectors
  # Kind of sparse. Isn't it?
  #
  #
- END
- end
+ end
+ end
 
- ####### Reading methods
+ ####### Reading methods
 
- # If your datasource supports random access (as would, for example, a database) then
- # implement the following:
- #
- #def [](path)
- # #return the entry at location indicated by 'path'
- # #An 'entry' is a hash where the key is the attribute name and the value is an
- # #array containing the value or values for the the attribute
- #end
+ # If your datasource supports random access (as would, for example, a database) then
+ # implement the following:
+ #
+ #def [](path)
+ # #return the entry at location indicated by 'path'
+ # #An 'entry' is a hash where the key is the attribute name and the value is an
+ # #array containing the value or values for the the attribute
+ #end
 
- # Subclasses must override this to
- # interface with the external system and generate entries for every
- # entry in the scope passing the entry path (id) and its data (as a hash of arrays).
- def each_entry
- raise "Not implemented"
- end
+ # Subclasses must override this to
+ # interface with the external system and generate entries for every
+ # entry in the scope passing the entry path (id) and its data (as a hash of arrays).
+ def each_entry
+ raise "Not implemented"
+ end
 
- # Subclasses MAY override this to interface with the external system
- # and generate an event for every change that affects items within
- # the scope of this connector.
- #
- # The default behaviour is to compare a hash of each entry in the
- # database with a stored hash of its previous value and generate
- # add, modify and delete events appropriately. This is normally a very
- # inefficient way to operate so overriding this method is highly
- # recommended if you can detect changes in a more efficient manner.
- #
- # This method will be called repeatedly until the connector is
- # stopped.
- #def each_change
- #end
+ # Subclasses MAY override this to interface with the external system
+ # and generate an event for every change that affects items within
+ # the scope of this connector.
+ #
+ # The default behaviour is to compare a hash of each entry in the
+ # database with a stored hash of its previous value and generate
+ # add, modify and delete events appropriately. This is normally a very
+ # inefficient way to operate so overriding this method is highly
+ # recommended if you can detect changes in a more efficient manner.
+ #
+ # This method will be called repeatedly until the connector is
+ # stopped.
+ #def each_change
+ #end
 
- ######## Writing methods
-
+ ######## Writing methods
 
- # Apply operations to create database a entry at path
- def add(path, operations)
- end
+ # Apply operations to create database a entry at path
+ def add(path, operations)
+ end
 
- # Apply operations to alter database entry at path
- def modify(path, operations)
- end
+ # Apply operations to alter database entry at path
+ def modify(path, operations)
+ end
+
+ # Remove database entry at path
+ def delete(path)
+ end
+ END
+ end
 
+ def self.track_changes_with method
+ include_something_called method, "change_tracking"
+ end
+
+ def self.track_associations_with method
+ include_something_called method, "association_tracking"
+ end
 
- # Remove database entry at path
- def delete(path)
- end
- END
- end
 
- private
+ private
+
+ # set a default dbm path in case one of the dbm tracking
+ # modules is used.
+ def dbm_path()
+ p = "#{base_path}/db"
+ ::FileUtils.mkdir_p p
+ ::File.join(p,name)
+ end
 
- def self.options options
- @options = options
+ def self.include_something_called name, extension, message=nil
+ module_name = class_name_for(name, extension)
+ m = eval(module_name)
+ unless include(m)
+ message ||= "Couldn't find a module called #{module_name}"
+ log.error message
  end
+ end
+
+ def self.options options
+ @options = options
+ end
 
- def self.default_options
- @options ||= {}
- end
-
+ def self.default_options
+ @options ||= {}
  end
+
+ end
  end
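
For connector authors upgrading from 0.1.1: the DBM-backed change detection and association storage that used to live directly in BaseConnector is gone, and 0.2.1 instead exposes class-level helpers (track_changes_with, track_associations_with, and hooks such as target_transform built via event_method). The sketch below is not part of the gem; it only illustrates how a subclass might use those helpers. The :dbm argument and the module names it is assumed to resolve to through class_name_for(name, extension) (something like DbmChangeTracking) are assumptions based on include_something_called above, so check the 0.2.1 sources for the tracking modules actually shipped.

# Illustrative sketch only -- not from the rubysync sources.
require 'ruby_sync'

class MyCustomConnector < RubySync::Connectors::BaseConnector

  # Mix in pluggable change/association tracking instead of the DBM code
  # that BaseConnector carried in 0.1.1. The resolved module names are
  # assumed (hypothetical), as noted above.
  track_changes_with      :dbm
  track_associations_with :dbm

  # target_transform is defined through self.event_method, so this block is
  # instance_eval'ed against the event it is given.
  target_transform do
    # event-level adjustments would go here
  end

  # Subclasses still supply the datastore primitives described in sample_config.
  def each_entry
    # yield path, entry_hash for every record in scope
  end

  def add(path, operations); end
  def modify(path, operations); end
  def delete(path); end
end

The private dbm_path default shown in the diff only matters if one of the dbm tracking modules is actually mixed in; connectors that track changes another way can ignore it.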