tinkit 0.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. data/LICENSE +176 -0
  2. data/README +11 -0
  3. data/Rakefile +75 -0
  4. data/lib/glue_envs/couchrest/couchrest_attachment_handler.rb +260 -0
  5. data/lib/glue_envs/couchrest/couchrest_files_mgr.rb +198 -0
  6. data/lib/glue_envs/couchrest_glue_env.rb +536 -0
  7. data/lib/glue_envs/files_mgr_base.rb +51 -0
  8. data/lib/glue_envs/filesystem/filesystem_files_mgr.rb +187 -0
  9. data/lib/glue_envs/filesystem_glue_env.rb +395 -0
  10. data/lib/glue_envs/mysql/mysql_files_mgr.rb +175 -0
  11. data/lib/glue_envs/mysql_glue_env.rb +428 -0
  12. data/lib/glue_envs/sdb_s3/sdb_s3_files_mgr.rb +314 -0
  13. data/lib/glue_envs/sdb_s3_glue_env.rb +248 -0
  14. data/lib/helpers/camel.rb +21 -0
  15. data/lib/helpers/filesystem_helpers.rb +27 -0
  16. data/lib/helpers/hash_helpers.rb +74 -0
  17. data/lib/helpers/log_helper.rb +34 -0
  18. data/lib/helpers/mime_types_new.rb +126 -0
  19. data/lib/helpers/old_more_open_struct.rb +28 -0
  20. data/lib/helpers/require_helper.rb +45 -0
  21. data/lib/helpers/tk_escape.rb +17 -0
  22. data/lib/midas/bufs_data_structure.rb +84 -0
  23. data/lib/midas/node_element_operations.rb +264 -0
  24. data/lib/tinkit.rb +38 -0
  25. data/lib/tinkit_base_node.rb +733 -0
  26. data/lib/tinkit_node_factory.rb +47 -0
  27. data/spec/couchrest_files_mgr_spec.rb +551 -0
  28. data/spec/couchrest_glue_spec.rb +246 -0
  29. data/spec/filesystem_files_mgr_spec.rb +236 -0
  30. data/spec/filesystem_glue_spec.rb +243 -0
  31. data/spec/filesystem_helpers_spec.rb +42 -0
  32. data/spec/helpers/bufs_node_builder.rb +17 -0
  33. data/spec/helpers/bufs_sample_dataset.rb +160 -0
  34. data/spec/helpers/bufs_test_environments.rb +81 -0
  35. data/spec/helpers/tmp_view_cleaner.rb +15 -0
  36. data/spec/lib_helpers/tk_escape_spec.rb +45 -0
  37. data/spec/mysql_files_mgr_spec.rb +250 -0
  38. data/spec/mysql_glue_spec.rb +214 -0
  39. data/spec/node_element_operations_spec.rb +392 -0
  40. data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec1.rb +82 -0
  41. data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec2.rb +68 -0
  42. data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec3.rb +80 -0
  43. data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec4.rb +110 -0
  44. data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec5.rb +84 -0
  45. data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec6.rb +83 -0
  46. data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec7.rb +101 -0
  47. data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec8.rb +92 -0
  48. data/spec/sdb_s3_files_mgr_spec/sdb_s3_files_mgr_spec_all.rb +266 -0
  49. data/spec/sdb_s3_glue_spec.rb +230 -0
  50. data/spec/tinkit_node_factory_spec.rb +1108 -0
  51. metadata +114 -0
data/lib/glue_envs/sdb_s3/sdb_s3_files_mgr.rb
@@ -0,0 +1,314 @@
+ #require helper for cleaner require statements
+ require File.join(File.expand_path(File.dirname(__FILE__)), '../../../lib/helpers/require_helper')
+ require Tinkit.helpers 'tk_escape'
+
+ require 'aws/s3'
+
+ module AWS::S3
+   class NoSuchBucket < ResponseError
+   end
+
+   class BucketNotEmpty < ResponseError
+   end
+ end
+
+ module SdbS3Interface
+
+   class NilBucketError < StandardError
+   end
+
+   class FilesMgr
+     include AWS
+     AccessKey = ENV["AMAZON_ACCESS_KEY_ID"]
+     SecretKey = ENV["AMAZON_SECRET_ACCESS_KEY"]
+
+     BucketNamespacePrefix = 'forforf'
+
+     @@s3_connection = S3::Base.establish_connection!(:access_key_id => AccessKey,
+                                                      :secret_access_key => SecretKey,
+                                                      :persistent => false)
+
+     attr_accessor :bucket_name
+
+     def initialize(glue_env, node_key_value)
+       #@s3_connection = S3::Base.establish_connection!(:access_key_id => AccessKey,
+       #                                                :secret_access_key => SecretKey)
+       @bucket_name = "#{BucketNamespacePrefix}_#{glue_env.user_datastore_location}"
+       #@attachment_bucket = use_bucket(@bucket_name) #This can be stale!!!
+       #verify bucket is ready
+       #puts "Previous Response: #{S3::Service.response}"
+       #puts "#{__LINE__} - #{ S3::Service.buckets(true).map{|b| b.name} }"
+       #puts "This Bucket: #{@attachment_bucket.name}"
+       #puts "Last Response: #{S3::Service.response}"
+       #size = @attachment_bucket.size
+     end
+
+     #TODO: Move common file management functions from base node to here
+     def add(node, file_datas)
+       filenames = []
+       file_datas.each do |file_data|
+         filenames << file_data[:src_filename]
+       end
+
+       filenames.each do |filename|
+         basename = File.basename(filename)
+         esc_basename = TkEscape.escape(basename)
+         begin
+           S3::S3Object.store(esc_basename, open(filename), @bucket_name)
+         rescue AWS::S3::NoSuchBucket
+           puts "Rescued while adding files, retrying"
+           retry_request { S3::S3Object.store(esc_basename, open(filename), @bucket_name) }
+         end
+       end
+       #verify files are there
+       files = self.list_attachments
+
+       filenames.each do |f|
+         bname = File.basename(f)
+         esc_bname = TkEscape.escape(bname)
+         retry_request { S3::S3Object.store(esc_bname, open(f), @bucket_name) } unless files.include?(esc_bname)
+       end
+
+       #update the metadata (have to wait until they're uploaded *sigh*)
+       filenames.each do |f|
+         basename = File.basename(f)
+         attach_name = TkEscape.escape(basename)
+         begin
+           s3_obj = S3::S3Object.find(attach_name, @bucket_name)
+         rescue AWS::S3::NoSuchBucket
+           puts "Rescued while finding bucket, retrying"
+           s3_obj = retry_request { S3::S3Object.find(attach_name, @bucket_name) }
+         end
+         modified_at = File.mtime(f).to_s
+         s3_obj.metadata[:modified_at] = modified_at
+         s3_obj.store
+       end
+
+       filenames.map {|f| TkEscape.escape(File.basename(f))} #return basenames
+     end
+
+     def add_raw_data(node, file_name, content_type, raw_data, file_modified_at = nil)
+
+       attach_name = TkEscape.escape(file_name)
+
+       options = {:content_type => content_type}
+       #=begin
+       begin
+         resp = S3::S3Object.store(attach_name, raw_data, @bucket_name, options)
+       rescue AWS::S3::NoSuchBucket
+         puts "Rescued while adding raw data, retrying"
+         retry_request { S3::S3Object.store(attach_name, raw_data, @bucket_name, options) }
+       end
+
+       obj = S3::S3Object.find(attach_name, @bucket_name)
+       obj.metadata[:modified_at] = file_modified_at.to_s if file_modified_at
+       obj.store
+
+       #verify files are there
+       max_wait_time = 20
+       now = Time.now
+       until S3::S3Object.exists?(attach_name, @bucket_name) || (Time.now > (now + max_wait_time))
+         puts "Retrying store for add raw data"
+         sleep 2
+         S3::S3Object.store(attach_name, raw_data, @bucket_name, options)
+       end
+
+       [attach_name]
+     end
+
+     def list(node)
+       #conforming to base file mgr
+       list_attachments
+     end
+
+     def subtract(node, file_basenames)
+       #conforming to base file mgr
+       subtract_files(node, file_basenames)
+     end
+
+     def subtract_files(node, file_basenames)
+       if file_basenames == :all
+         subtract_all
+       else
+         subtract_some(file_basenames)
+       end
+     end
+
+     def get_raw_data(node, basename)
+       rtn = nil
+
+       attach_name = TkEscape.escape(basename)
+
+       begin
+         rtn = S3::S3Object.value(attach_name, @bucket_name)
+       rescue AWS::S3::NoSuchBucket
+         puts "Rescued while getting raw data, bucket name: #{@bucket_name}"
+         begin
+           rtn = retry_request(attach_name, @bucket_name){|obj, buck| puts "sdbs3: #{obj.inspect} - #{buck.inspect}"; S3::S3Object.value(obj, buck)}
+         rescue AWS::S3::NoSuchKey
+           rtn = nil
+         end
+       rescue AWS::S3::NoSuchKey
+         rtn = nil
+       end
+       rtn
+     end
+
+     #todo change name to get_files_metadata
+     def get_attachments_metadata(node)
+       files_md = {}
+       begin
+         this_bucket = use_bucket(@bucket_name)
+         objects = this_bucket.objects
+       rescue AWS::S3::NoSuchBucket
+         puts "Rescued while getting objects from bucket to check metadata"
+         objects = retry_request{ this_bucket.objects }
+       end
+       objects.each do |object|
+         begin
+           obj_md = object.about.merge(object.metadata)
+         rescue AWS::S3::NoSuchBucket
+           puts "Rescued while getting metadata from object"
+           obj_md = retry_request{object.about}
+         end
+         time_str = obj_md["x-amz-meta-modified-at"]||Time.parse(obj_md["last_modified"]).to_s
+         obj_md_file_modified = time_str
+         obj_md_content_type = obj_md["content-type"]
+         new_md = {:content_type => obj_md_content_type, :file_modified => obj_md_file_modified}
+
+         new_md = new_md.merge(obj_md) #keep the original object metadata alongside the derived keys
+         files_md[object.key] = new_md
+       end
+       files_md
+     end#def
+
+     def list_objects
+       list = nil
+       this_bucket = use_bucket(@bucket_name)
+       begin
+         list = this_bucket.objects
+       rescue AWS::S3::NoSuchBucket
+         puts "Rescued while listing attachments"
+         list = retry_request{this_bucket.objects}
+       end
+       list
+     end
+
+     def list_attachments
+       objs = list_objects
+       atts = objs.map{|o| o.key} if objs
+       atts || []
+     end
+
+     def destroy_file_container
+       this_bucket = use_bucket(@bucket_name)
+       begin
+         this_bucket.delete(:force => true)
+       rescue AWS::S3::NoSuchBucket
+         puts "Running sanity check"
+         buckets = S3::Service.buckets(true).map{|b| b.name}
+         if buckets.include?(@bucket_name)
+           puts "AWS temporarily lost bucket before finding it so it can be deleted"
+           retry_request { this_bucket.delete(:force => true) }
+         end
+       end
+     end
+
+     def subtract_some(file_basenames)
+       file_basenames.each do |basename|
+         attach_name = TkEscape.escape(basename)
+         S3::S3Object.delete(attach_name, @bucket_name)
+       end
+     end
+
+     def subtract_all
+       #Changed behavior to leave bucket (this is different than other FileMgrs)
+       this_bucket = use_bucket(@bucket_name)
+       begin
+         this_bucket.delete_all
+       rescue AWS::S3::NoSuchBucket
+         puts "Bucket not found while deleting all. Maybe it's already been deleted?"
+         return nil
+         #aws_names = retry_request{@attachment_bucket.objects}
+       end
+
+       max_wait_time = 20
+       now = Time.now
+
+       while Time.now < (now + max_wait_time)
+         begin
+           this_bucket.delete
+         rescue AWS::S3::BucketNotEmpty
+           sleep 1
+           puts "Bucket not empty yet, trying again"
+           this_bucket.delete_all
+           next
+         end
+         break
+       end
+
+       use_bucket(@bucket_name)
+       #file_basenames = aws_names.map{|o| o.key} if aws_names
+       #self.subtract_some(file_basenames) if file_basenames
+     end
+
+     def retry_request(*args, &block)
+       puts "RETRYING Request with block: #{block.inspect}"
+       wait_time = 2
+       backoff_delay = 0.5
+       max_retries = 10
+
+       resp = nil
+
+       1.upto(max_retries) do |i|
+         puts "Waiting #{wait_time} secs to try again"
+         sleep wait_time
+         begin
+           resp = yield(*args)
+           raise TypeError, "Response was Nil, retrying" unless resp
+           break
+         rescue AWS::S3::NoSuchKey => e
+           raise e #we want to raise this one
+         rescue AWS::S3::ResponseError, TypeError => e
+           puts "rescued #{e.inspect}"
+           backoff_delay += backoff_delay# * i
+           wait_time += backoff_delay
+           if (wait_time > 3) && (e.class == AWS::S3::NoSuchBucket)
+             puts "Attempting to reset bucket"
+             @attachment_bucket = use_bucket(@bucket_name)
+           end
+           next
+         end#begin-rescue
+       end#upto
+
+       resp
+     end#def
+
+     def use_bucket(bucket_name)
+       begin
+         bucket = S3::Bucket.find(bucket_name)
+       rescue AWS::S3::NoSuchBucket, NilBucketError => e
+         begin
+           puts "Rescued error in use_bucket: #{e.inspect}"
+           S3::Bucket.create(bucket_name)
+           bucket = S3::Bucket.find(bucket_name)
+         rescue AWS::S3::NoSuchBucket #we just made it!!
+           bucket = retry_request(bucket_name){|buck_name| S3::Bucket.find(buck_name)}
+         end#inner begin-rescue
+       end#outer begin-rescue
+
+       #verify bucket exists
+       found_buckets = S3::Service.buckets(true).map{|b| b.name}
+       unless found_buckets.include?(bucket_name)
+         #bucket = retry(:retry_block, bucket_name){|buck_name| S3::Bucket.find(buck_name)}
+       end#unless
+       unless bucket
+         puts "NIL Bucket cannot be returned"
+         bucket = retry_request(bucket_name){|buck_name| S3::Bucket.find(buck_name)}
+       end
+       raise(NilBucketError, "NIL Bucket cannot be returned", nil) unless bucket
+       return bucket
+     end#def
+
+   end#class
+ end#module
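
For orientation, a minimal usage sketch of the FilesMgr above (this snippet is not part of the gem). It assumes the tinkit gem is installed, that AMAZON_ACCESS_KEY_ID and AMAZON_SECRET_ACCESS_KEY are set (the class opens its S3 connection at load time), that `require 'tinkit'` makes the Tinkit.glue require helper available (as the glue env file below does), and it uses a hypothetical stand-in for the glue environment, which only needs to respond to user_datastore_location. The node argument is ignored by these methods, so nil is passed.

  require 'tinkit'
  require Tinkit.glue '/sdb_s3/sdb_s3_files_mgr'

  # Hypothetical stand-in; the real glue env supplies user_datastore_location
  StubGlueEnv = Struct.new(:user_datastore_location)
  glue_env = StubGlueEnv.new('tinkit_docs__demo_user')

  files_mgr = SdbS3Interface::FilesMgr.new(glue_env, 'demo_node')

  # file_datas is a list of hashes keyed by :src_filename
  stored_names = files_mgr.add(nil, [{:src_filename => '/tmp/example.txt'}])
  puts files_mgr.list(nil).inspect                      # escaped basenames in the bucket
  puts files_mgr.get_raw_data(nil, stored_names.first)  # raw file contents
  files_mgr.subtract(nil, :all)                         # remove every attachment
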
data/lib/glue_envs/sdb_s3_glue_env.rb
@@ -0,0 +1,248 @@
+ #Tinkit directory structure defined in lib/helpers/require_helper
+ require Tinkit.midas 'bufs_data_structure'
+ require Tinkit.glue '/sdb_s3/sdb_s3_files_mgr'
+ require Tinkit.helpers 'hash_helpers'
+ require Tinkit.helpers 'log_helper'
+
+ #require 'right_aws'
+ require 'aws_sdb' #published as forforf-aws-sdb
+ #require 'aws/s3'
+ require 'json'
+
+ module SdbS3Env
+   class GlueEnv
+
+
+     @@log = TinkitLog.set(self.name, :warn)
+     #used to identify metadata for models (should be consistent across models)
+     #PersistLayerKey not needed, node key can be used as persistent layer key
+     #see mysql_glue_env to decouple persistent layer key from node key
+     VersionKey = :_rev #to have timestamp
+     NamespaceKey = :sdbs3_namespace
+
+     #MoabDataStoreDir = ".model"
+     #MoabDatastoreName = ".node_data.json"
+
+     #TODO: Rather than using File class directly, should a special class be used? <- still applicable?
+     attr_accessor :user_id,
+                   :user_datastore_location,
+                   :metadata_keys,
+                   :required_instance_keys,
+                   :required_save_keys,
+                   :node_key,
+                   :model_key,
+                   :version_key,
+                   :namespace_key,
+                   :_files_mgr_class,
+                   :views,
+                   :model_save_params,
+                   :moab_data,
+                   :persist_layer_key
+     #accessors specific to this persistence model
+
+
+
+     def initialize(persist_env, data_model_bindings)
+       #TODO: determine if class_name is needed to segment cluster data within user data
+       #host = "https://sdb.amazonaws.com/" (not provided by user)
+
+       #user_id = env[:user_id]
+       sdb_s3_env = persist_env[:env]
+       #TODO: validations on format
+       domain_base_name = sdb_s3_env[:path]
+       @user_id = sdb_s3_env[:user_id]
+       @cluster_name = persist_env[:name]
+
+       #data_model_bindings from NodeElementOperations
+       key_fields = data_model_bindings[:key_fields]
+       initial_views_data = data_model_bindings[:views]
+
+       @required_instance_keys = key_fields[:required_keys] #DataStructureModels::Tinkit::RequiredInstanceKeys
+       @required_save_keys = key_fields[:required_keys] #DataStructureModels::Tinkit::RequiredSaveKeys
+       @node_key = key_fields[:primary_key] #DataStructureModels::Tinkit::NodeKey
+       @persist_layer_key = @node_key
+       #@moab_datastore_name = MoabDatastoreName
+       @version_key = VersionKey
+       @model_key = @node_key #ModelKey
+       @namespace_key = NamespaceKey
+       @metadata_keys = [@version_key, @namespace_key]
+       aak = ENV["AMAZON_ACCESS_KEY_ID"]
+       asak = ENV["AMAZON_SECRET_ACCESS_KEY"]
+       #rightaws_log = TinkitLog.set("RightAWS::SDBInterface", :warn)
+       #sdb = RightAws::SdbInterface.new(aak, asak, :logger => rightaws_log, :multi_thread => true)
+       sdb = AwsSdb::Service.new #aws-sdb
+       @user_datastore_location = use_domain!(sdb, "#{domain_base_name}__#{@user_id}")
+       @model_save_params = {:sdb => sdb, :domain => user_datastore_location, :node_key => @node_key}
+       @_files_mgr_class = SdbS3Interface::FilesMgr
+       @views = "temp"
+       @moab_data = {}
+       #@views_mgr = ViewsMgr.new({:data_file => @data_file_name})
+       #@record_locker = {} #tracks records that are in the process of being saved
+     end
+
+     def query_all #TODO move to ViewsMgr
+       sdb = @model_save_params[:sdb]
+       domain = @model_save_params[:domain]
+       query = "select * from `#{domain}`"
+       raw_data = sdb.select(query).first
+       data = {}
+       #puts "QA Raw: #{raw_data.inspect}"
+       raw_data.each do |k,v|
+         data[k] = from_sdb(v)
+       end
+       @@log.info{"Query All data: #{data.values}"} if @@log.info?
+       data.values
+     end
+
+     #current relations supported:
+     # - :equals (data in the key field matches this_value)
+     # - :contains (this_value is contained in the key field data; same as :equals for non-enumerable types)
+     def find_nodes_where(key, relation, this_value)
+       res = case relation
+             when :equals
+               find_equals(key, this_value)
+             when :contains
+               find_contains(key, this_value)
+             end #case
+       return res
+     end
+
+     def find_equals(key, this_value)
+       results = []
+       query_all.each do |record|
+         test_val = record[key]
+         results << record if test_val == this_value
+       end
+       results
+     end
+
+     def find_contains(key, this_value)
+       #there is probably an optimized way to do this natively
+       #in sdb's pseudo SQL, but I can't figure it out
+       results = []
+       query_all.each do |record|
+         test_val = record[key]
+         results << record if find_contains_type_helper(test_val, this_value)
+       end
+       results
+     end
+
+     def find_contains_type_helper(stored_data, this_value)
+       #p stored_dataj
+       resp = nil
+       #stored_data = jparse(stored_dataj)
+       if stored_data.respond_to?(:"include?")
+         resp = (stored_data.include?(this_value))
+       else
+         resp = (stored_data == this_value)
+       end
+       return resp
+     end
+
+     def get(id)
+       sdb = @model_save_params[:sdb]
+       domain = @model_save_params[:domain]
+       raw_data = sdb.get_attributes(domain, id)
+       #puts "Raw Data: #{raw_data.inspect}"
+       data = from_sdb(raw_data)
+       data = nil if data.empty?
+       return data
+     end
+
+     def save(new_data)
+       sdb = @model_save_params[:sdb]
+       domain = @model_save_params[:domain]
+       #although we could pull @node_key directly, I do it this way to make it clear
+       #that it's a parameter used in saving to the persistence model
+       #I should try to be consistent on this
+       node_key = @model_save_params[:node_key]
+       rev_data = new_data.dup
+       rev_data[@version_key] = Time.now.hash
+       raw_model_data = HashKeys.sym_to_str(rev_data)
+       model_data = to_sdb(raw_model_data)
+       sdb.put_attributes(domain, new_data[node_key], model_data)
+       raw_model_data['rev'] = raw_model_data[@version_key.to_s] #keys are strings after sym_to_str
+       return raw_model_data
+     end
+
+     def destroy_node(model_metadata)
+       sdb = @model_save_params[:sdb]
+       domain = @model_save_params[:domain]
+       #node_key = @model_save_params[:node_key]
+       item_name = model_metadata[@model_key]
+       @@log.info {"Deleting node: #{model_metadata.inspect} with key #{item_name} from domain: #{domain}"} if @@log.info?
+       sdb.delete_attributes(domain, item_name)
+     end
+
+     #namespace is used to distinguish between unique
+     #data sets (i.e., users) within the model, for sdb, each user
+     #has their own domain, so the key only has to be unique within the domain
+     def generate_model_key(namespace, node_key)
+       "#{node_key}"
+       #"#{namespace}::#{node_key}"
+     end
+
+     def raw_all
+       query_all
+     end
+
+     def destroy_bulk(list_of_native_records)
+       sdb = @model_save_params[:sdb]
+       domain = @model_save_params[:domain]
+       node_key = @model_save_params[:node_key]
+       list_of_native_records.each do |rcd|
+         item_name = rcd[node_key]
+         #TODO: use the batch delete request
+         sdb.delete_attributes(domain, item_name)
+       end
+     end
+
+     private
+
+     def use_domain!(sdb, domain_name)
+       all_domains = parse_sdb_domains(sdb.list_domains)
+       if all_domains.include?(domain_name)
+         return domain_name
+       else #no domain by that name exists yet
+         sdb.create_domain(domain_name)
+         return domain_name
+       end
+     end
+
+     def parse_sdb_domains(raw_list_results)
+       if raw_list_results.last == ""
+         #if raw_list_results[:next_token].nil? #right-aws
+         #return raw_list_results[:domains] #right-aws
+         return raw_list_results.first #aws-sdb
+       else
+         raise "Have not implemented large list handling yet"
+       end
+     end
+
+     def from_sdb(sdb_data)
+       rtn_data = {}
+       sdb_data.each do |k_s, v_json|
+         k = k_s.to_sym
+         rtn_data[k] = jparse(v_json.first)
+       end
+       rtn_data
+     end
+
+     def to_sdb(data)
+       formatted_data = {}
+       data.each do |k,v|
+         k_f = k.to_s
+         v_f = v.to_json
+         formatted_data[k_f] = v_f
+       end
+       formatted_data
+     end
+
+     def jparse(str)
+       return JSON.parse(str) if str =~ /\A\s*[{\[]/
+       JSON.parse("[#{str}]")[0]
+       #JSON.parse(str)
+     end
+
+   end#class
+ end#module
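
A note on the serialization used by to_sdb, from_sdb, and jparse above: SimpleDB stores every attribute as a string (aws_sdb returns each attribute's values as an array, hence v_json.first), so each value is JSON-encoded on the way in, and jparse wraps bare scalars in brackets on the way out so they parse cleanly. The following standalone sketch of that round-trip is not gem code and assumes only the json library:

  require 'json'

  record = {:title => "note", :tags => ["a", "b"], :count => 3}

  # to_sdb equivalent: stringify keys, JSON-encode values
  stored = record.inject({}) { |h, (k, v)| h[k.to_s] = v.to_json; h }
  # => {"title"=>"\"note\"", "tags"=>"[\"a\",\"b\"]", "count"=>"3"}

  # from_sdb/jparse equivalent: parse objects/arrays directly, wrap bare scalars
  restored = stored.inject({}) do |h, (k, v)|
    h[k.to_sym] = (v =~ /\A\s*[{\[]/) ? JSON.parse(v) : JSON.parse("[#{v}]")[0]
    h
  end
  puts restored.inspect  # => {:title=>"note", :tags=>["a", "b"], :count=>3}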