mosql 0.2.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/lib/mosql/streamer.rb ADDED
@@ -0,0 +1,227 @@
+ module MoSQL
+   class Streamer
+     include MoSQL::Logging
+
+     BATCH = 1000
+
+     attr_reader :options, :tailer
+
+     NEW_KEYS = [:options, :tailer, :mongo, :sql, :schema]
+
+     def initialize(opts)
+       NEW_KEYS.each do |parm|
+         unless opts.key?(parm)
+           raise ArgumentError.new("Required argument `#{parm}' not provided to #{self.class.name}#new.")
+         end
+         instance_variable_set(:"@#{parm.to_s}", opts[parm])
+       end
+
+       @done = false
+     end
+
+     def stop
+       @done = true
+     end
+
+     def import
+       if options[:reimport] || tailer.read_timestamp.seconds == 0
+         initial_import
+       end
+     end
+
+     def collection_for_ns(ns)
+       dbname, collection = ns.split(".", 2)
+       @mongo.db(dbname).collection(collection)
+     end
+
+     def unsafe_handle_exceptions(ns, obj)
+       begin
+         yield
+       rescue Sequel::DatabaseError => e
+         wrapped = e.wrapped_exception
+         if wrapped.result && options[:unsafe]
+           log.warn("Ignoring row (#{obj.inspect}): #{e}")
+         else
+           log.error("Error processing #{obj.inspect} for #{ns}.")
+           raise e
+         end
+       end
+     end
+
+     def bulk_upsert(table, ns, items)
+       begin
+         @schema.copy_data(table.db, ns, items)
+       rescue Sequel::DatabaseError => e
+         log.debug("Bulk insert error (#{e}), attempting individual upserts...")
+         cols = @schema.all_columns(@schema.find_ns(ns))
+         items.each do |it|
+           h = {}
+           cols.zip(it).each { |k,v| h[k] = v }
+           unsafe_handle_exceptions(ns, h) do
+             @sql.upsert!(table, @schema.primary_sql_key_for_ns(ns), h)
+           end
+         end
+       end
+     end
+
+     def with_retries(tries=10)
+       tries.times do |try|
+         begin
+           yield
+         rescue Mongo::ConnectionError, Mongo::ConnectionFailure, Mongo::OperationFailure => e
+           # Duplicate key error
+           raise if e.kind_of?(Mongo::OperationFailure) && [11000, 11001].include?(e.error_code)
+           # Cursor timeout
+           raise if e.kind_of?(Mongo::OperationFailure) && e.message =~ /^Query response returned CURSOR_NOT_FOUND/
+           delay = 0.5 * (1.5 ** try)
+           log.warn("Mongo exception: #{e}, sleeping #{delay}s...")
+           sleep(delay)
+         end
+       end
+     end
+
+     def track_time
+       start = Time.now
+       yield
+       Time.now - start
+     end
+
+     def initial_import
+       @schema.create_schema(@sql.db, !options[:no_drop_tables])
+
+       unless options[:skip_tail]
+         start_ts = @mongo['local']['oplog.rs'].find_one({}, {:sort => [['$natural', -1]]})['ts']
+       end
+
+       @mongo.database_names.each do |dbname|
+         next unless spec = @schema.find_db(dbname)
+         log.info("Importing for Mongo DB #{dbname}...")
+         db = @mongo.db(dbname)
+         db.collections.select { |c| spec.key?(c.name) }.each do |collection|
+           ns = "#{dbname}.#{collection.name}"
+           import_collection(ns, collection)
+           exit(0) if @done
+         end
+       end
+
+       tailer.write_timestamp(start_ts) unless options[:skip_tail]
+     end
+
+     def did_truncate; @did_truncate ||= {}; end
+
+     def import_collection(ns, collection)
+       log.info("Importing for #{ns}...")
+       count = 0
+       batch = []
+       table = @sql.table_for_ns(ns)
+       unless options[:no_drop_tables] || did_truncate[table.first_source]
+         table.truncate
+         did_truncate[table.first_source] = true
+       end
+
+       start = Time.now
+       sql_time = 0
+       collection.find(nil, :batch_size => BATCH) do |cursor|
+         with_retries do
+           cursor.each do |obj|
+             batch << @schema.transform(ns, obj)
+             count += 1
+
+             if batch.length >= BATCH
+               sql_time += track_time do
+                 bulk_upsert(table, ns, batch)
+               end
+               elapsed = Time.now - start
+               log.info("Imported #{count} rows (#{elapsed}s, #{sql_time}s SQL)...")
+               batch.clear
+               exit(0) if @done
+             end
+           end
+         end
+       end
+
+       unless batch.empty?
+         bulk_upsert(table, ns, batch)
+       end
+     end
+
+     def optail
+       tailer.tail_from(options[:tail_from] ?
+                        BSON::Timestamp.new(options[:tail_from].to_i, 0) :
+                        nil)
+       until @done
+         tailer.stream(1000) do |op|
+           handle_op(op)
+         end
+       end
+     end
+
+     def sync_object(ns, _id)
+       primary_sql_key = @schema.primary_sql_key_for_ns(ns)
+       sqlid = @sql.transform_one_ns(ns, { '_id' => _id })[primary_sql_key]
+       obj = collection_for_ns(ns).find_one({:_id => _id})
+       if obj
+         unsafe_handle_exceptions(ns, obj) do
+           @sql.upsert_ns(ns, obj)
+         end
+       else
+         @sql.table_for_ns(ns).where(primary_sql_key.to_sym => sqlid).delete()
+       end
+     end
+
+     def handle_op(op)
+       log.debug("processing op: #{op.inspect}")
+       unless op['ns'] && op['op']
+         log.warn("Weird op: #{op.inspect}")
+         return
+       end
+
+       unless @schema.find_ns(op['ns'])
+         log.debug("Skipping op for unknown ns #{op['ns']}...")
+         return
+       end
+
+       ns = op['ns']
+       dbname, collection_name = ns.split(".", 2)
+
+       case op['op']
+       when 'n'
+         log.debug("Skipping no-op #{op.inspect}")
+       when 'i'
+         if collection_name == 'system.indexes'
+           log.info("Skipping index update: #{op.inspect}")
+         else
+           unsafe_handle_exceptions(ns, op['o']) do
+             @sql.upsert_ns(ns, op['o'])
+           end
+         end
+       when 'u'
+         selector = op['o2']
+         update = op['o']
+         if update.keys.any? { |k| k.start_with? '$' }
+           log.debug("resync #{ns}: #{selector['_id']} (update was: #{update.inspect})")
+           sync_object(ns, selector['_id'])
+         else
+           log.debug("upsert #{ns}: _id=#{selector['_id']}")
+
+           # The update operation replaces the existing object, but
+           # preserves its _id field, so grab the _id off of the
+           # 'query' field -- it's not guaranteed to be present on the
+           # update.
+           update = { '_id' => selector['_id'] }.merge(update)
+           unsafe_handle_exceptions(ns, update) do
+             @sql.upsert_ns(ns, update)
+           end
+         end
+       when 'd'
+         if options[:ignore_delete]
+           log.debug("Ignoring delete op on #{ns} as instructed.")
+         else
+           @sql.delete_ns(ns, op['o'])
+         end
+       else
+         log.info("Skipping unknown op #{op.inspect}")
+       end
+     end
+   end
+ end
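
Note: the Streamer class added above is the core of 0.3.0; it owns both the initial bulk import and the oplog-tailing loop. A minimal usage sketch, mirroring the build_streamer helper in the functional tests further down (the Postgres/Mongo URIs and the collections.yml path are assumptions for illustration, not part of this diff):

require 'mosql'
require 'yaml'

schema = MoSQL::Schema.new(YAML.load_file('collections.yml'))      # assumed map file
sql    = MoSQL::SQLAdapter.new(schema, 'postgres://localhost/mosql_test')
mongo  = Mongo::MongoClient.from_uri('mongodb://localhost:27017')

streamer = MoSQL::Streamer.new(:options => { :skip_tail => true },
                               :tailer  => nil,   # no oplog tailing in this sketch
                               :mongo   => mongo,
                               :sql     => sql,
                               :schema  => schema)
streamer.initial_import   # bulk-copies each mapped collection via bulk_upsert

With a real Tailer supplied, a caller would instead use streamer.import followed by streamer.optail, and streamer.stop to end the tail loop.
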
data/lib/mosql/tailer.rb CHANGED
@@ -25,12 +25,15 @@ module MoSQL
     end
 
     def write_timestamp(ts)
-      begin
-        @table.insert({:service => @service, :timestamp => ts.seconds})
-      rescue Sequel::DatabaseError => e
-        raise unless e.message =~ /duplicate key value violates unique constraint/
-        @table.where(:service => @service).update(:timestamp => ts.seconds)
+      unless @did_insert
+        begin
+          @table.insert({:service => @service, :timestamp => ts.seconds})
+        rescue Sequel::DatabaseError => e
+          raise unless MoSQL::SQLAdapter.duplicate_key_error?(e)
+        end
+        @did_insert = true
       end
+      @table.where(:service => @service).update(:timestamp => ts.seconds)
     end
   end
 end
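
The rewritten write_timestamp attempts the INSERT of the checkpoint row at most once per process and then records every subsequent position with a plain UPDATE, and it delegates duplicate-key detection to MoSQL::SQLAdapter.duplicate_key_error? rather than matching PostgreSQL's error string inline. That predicate lives in lib/mosql/sql.rb, which this diff does not show; a hypothetical sketch of its shape, assuming it generalizes the same message check:

module MoSQL
  class SQLAdapter
    # Hypothetical reconstruction -- the real method ships in mosql/sql.rb
    # and is not part of the hunks shown here.
    def self.duplicate_key_error?(e)
      # PostgreSQL reports unique-constraint violations with this message.
      e.message =~ /duplicate key value violates unique constraint/
    end
  end
end
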
data/lib/mosql/version.rb CHANGED
@@ -1,3 +1,3 @@
 module MoSQL
-  VERSION = "0.2.0"
+  VERSION = "0.3.0"
 end
data/lib/mosql.rb CHANGED
@@ -8,4 +8,5 @@ require 'mosql/version'
 require 'mosql/log'
 require 'mosql/sql'
 require 'mosql/schema'
+require 'mosql/streamer'
 require 'mosql/tailer'
@@ -101,4 +101,46 @@ EOF
     table.insert(row)
     assert_equal(o['_id'].to_s, table.select.first[:_id])
   end
+
+  describe 'special fields' do
+    SPECIAL_MAP = <<EOF
+---
+db:
+  collection:
+    :meta:
+      :table: special
+    :columns:
+      - _id: TEXT
+      - mosql_updated:
+        :source: $timestamp
+        :type: timestamp
+EOF
+
+    before do
+      @specialmap = MoSQL::Schema.new(YAML.load(SPECIAL_MAP))
+
+      @sequel.drop_table?(:special)
+      @specialmap.create_schema(@sequel)
+    end
+
+    it 'sets a default on the column' do
+      @sequel[:special].insert({_id: 'a'})
+      row = @sequel[:special].select.first
+      assert_instance_of(Time, row[:mosql_updated])
+    end
+
+    it 'Can populate $timestamp on COPY' do
+      objects = [
+        {'_id' => "a"},
+        {'_id' => "b"}
+      ]
+      before = @sequel.select(Sequel.function(:NOW)).first[:now]
+      @specialmap.copy_data(@sequel, 'db.collection',
+                            objects.map { |o| @specialmap.transform('db.collection', o) } )
+      after = @sequel.select(Sequel.function(:NOW)).first[:now]
+      rows = @sequel[:special].select.sort_by { |r| r[:_id] }
+      assert_instance_of(Time, rows[0][:mosql_updated])
+      assert(rows[0][:mosql_updated] > before && rows[0][:mosql_updated] < after)
+    end
+  end
 end
@@ -0,0 +1,213 @@
+ require File.join(File.dirname(__FILE__), '_lib.rb')
+ require 'mosql/cli'
+
+ class MoSQL::Test::Functional::StreamerTest < MoSQL::Test::Functional
+   def build_streamer
+     MoSQL::Streamer.new(:mongo => mongo,
+                         :tailer => nil,
+                         :options => {},
+                         :sql => @adapter,
+                         :schema => @map)
+   end
+
+   describe 'with a basic schema' do
+     TEST_MAP = <<EOF
+ ---
+ mosql_test:
+   collection:
+     :meta:
+       :table: sqltable
+     :columns:
+       - _id: TEXT
+       - var: INTEGER
+   renameid:
+     :meta:
+       :table: sqltable2
+     :columns:
+       - id:
+         :source: _id
+         :type: TEXT
+       - goats: INTEGER
+ EOF
+
+     before do
+       @map = MoSQL::Schema.new(YAML.load(TEST_MAP))
+       @adapter = MoSQL::SQLAdapter.new(@map, sql_test_uri)
+
+       @sequel.drop_table?(:sqltable)
+       @sequel.drop_table?(:sqltable2)
+       @map.create_schema(@sequel)
+
+       @streamer = build_streamer
+     end
+
+     it 'handles "u" ops without _id' do
+       o = { '_id' => BSON::ObjectId.new, 'var' => 17 }
+       @adapter.upsert_ns('mosql_test.collection', o)
+
+       @streamer.handle_op({ 'ns' => 'mosql_test.collection',
+                             'op' => 'u',
+                             'o2' => { '_id' => o['_id'] },
+                             'o' => { 'var' => 27 }
+                           })
+       assert_equal(27, sequel[:sqltable].where(:_id => o['_id'].to_s).select.first[:var])
+     end
+
+     it 'handles "d" ops with BSON::ObjectIds' do
+       o = { '_id' => BSON::ObjectId.new, 'var' => 17 }
+       @adapter.upsert_ns('mosql_test.collection', o)
+
+       @streamer.handle_op({ 'ns' => 'mosql_test.collection',
+                             'op' => 'd',
+                             'o' => { '_id' => o['_id'] },
+                           })
+       assert_equal(0, sequel[:sqltable].where(:_id => o['_id'].to_s).count)
+     end
+
+     it 'handles "u" ops with $set and BSON::ObjectIDs' do
+       o = { '_id' => BSON::ObjectId.new, 'var' => 17 }
+       @adapter.upsert_ns('mosql_test.collection', o)
+
+       # $set's are currently a bit of a hack where we read the object
+       # from the db, so make sure the new object exists in mongo
+       connect_mongo['mosql_test']['collection'].insert(o.merge('var' => 100),
+                                                        :w => 1)
+
+       @streamer.handle_op({ 'ns' => 'mosql_test.collection',
+                             'op' => 'u',
+                             'o2' => { '_id' => o['_id'] },
+                             'o' => { '$set' => { 'var' => 100 } },
+                           })
+       assert_equal(100, sequel[:sqltable].where(:_id => o['_id'].to_s).select.first[:var])
+     end
+
+     it 'handles "u" ops with $set and a renamed _id' do
+       o = { '_id' => BSON::ObjectId.new, 'goats' => 96 }
+       @adapter.upsert_ns('mosql_test.renameid', o)
+
+       # $set's are currently a bit of a hack where we read the object
+       # from the db, so make sure the new object exists in mongo
+       connect_mongo['mosql_test']['renameid'].insert(o.merge('goats' => 0),
+                                                      :w => 1)
+
+       @streamer.handle_op({ 'ns' => 'mosql_test.renameid',
+                             'op' => 'u',
+                             'o2' => { '_id' => o['_id'] },
+                             'o' => { '$set' => { 'goats' => 0 } },
+                           })
+       assert_equal(0, sequel[:sqltable2].where(:id => o['_id'].to_s).select.first[:goats])
+     end
+
+     it 'handles "d" ops with a renamed id' do
+       o = { '_id' => BSON::ObjectId.new, 'goats' => 1 }
+       @adapter.upsert_ns('mosql_test.renameid', o)
+
+       @streamer.handle_op({ 'ns' => 'mosql_test.renameid',
+                             'op' => 'd',
+                             'o' => { '_id' => o['_id'] },
+                           })
+       assert_equal(0, sequel[:sqltable2].where(:id => o['_id'].to_s).count)
+     end
+
+     describe '.bulk_upsert' do
+       it 'inserts multiple rows' do
+         objs = [
+           { '_id' => BSON::ObjectId.new, 'var' => 0 },
+           { '_id' => BSON::ObjectId.new, 'var' => 1 },
+           { '_id' => BSON::ObjectId.new, 'var' => 3 },
+         ].map { |o| @map.transform('mosql_test.collection', o) }
+
+         @streamer.bulk_upsert(sequel[:sqltable], 'mosql_test.collection',
+                               objs)
+
+         assert(sequel[:sqltable].where(:_id => objs[0].first, :var => 0).count)
+         assert(sequel[:sqltable].where(:_id => objs[1].first, :var => 1).count)
+         assert(sequel[:sqltable].where(:_id => objs[2].first, :var => 3).count)
+       end
+
+       it 'upserts' do
+         _id = BSON::ObjectId.new
+         objs = [
+           { '_id' => _id, 'var' => 0 },
+           { '_id' => BSON::ObjectId.new, 'var' => 1 },
+           { '_id' => BSON::ObjectId.new, 'var' => 3 },
+         ].map { |o| @map.transform('mosql_test.collection', o) }
+
+         @streamer.bulk_upsert(sequel[:sqltable], 'mosql_test.collection',
+                               objs)
+
+         newobjs = [
+           { '_id' => _id, 'var' => 117 },
+           { '_id' => BSON::ObjectId.new, 'var' => 32 },
+         ].map { |o| @map.transform('mosql_test.collection', o) }
+         @streamer.bulk_upsert(sequel[:sqltable], 'mosql_test.collection',
+                               newobjs)
+
+
+         assert(sequel[:sqltable].where(:_id => newobjs[0].first, :var => 117).count)
+         assert(sequel[:sqltable].where(:_id => newobjs[1].first, :var => 32).count)
+       end
+
+       describe 'when working with --unsafe' do
+         it 'raises on error by default' do
+           assert_raises(Sequel::DatabaseError) do
+             @streamer.handle_op({ 'ns' => 'mosql_test.collection',
+                                   'op' => 'u',
+                                   'o2' => { '_id' => 'a' },
+                                   'o' => { 'var' => 1 << 70 },
+                                 })
+           end
+         end
+
+         it 'does not raise on error with :unsafe' do
+           @streamer.options[:unsafe] = true
+           @streamer.handle_op({ 'ns' => 'mosql_test.collection',
+                                 'op' => 'u',
+                                 'o2' => { '_id' => 'a' },
+                                 'o' => { 'var' => 1 << 70 },
+                               })
+           assert_equal(0, sequel[:sqltable].where(:_id => 'a').count)
+         end
+       end
+     end
+   end
+
+   describe 'when dealing with aliased dbs' do
+     ALIAS_MAP = <<EOF
+ ---
+ test:
+   :meta:
+     :alias: test_[0-9]+
+   collection:
+     :meta:
+       :table: sqltable
+     :columns:
+       - _id: TEXT
+       - var: INTEGER
+ EOF
+     before do
+       @map = MoSQL::Schema.new(YAML.load(ALIAS_MAP))
+       @adapter = MoSQL::SQLAdapter.new(@map, sql_test_uri)
+
+       @sequel.drop_table?(:sqltable)
+       @map.create_schema(@sequel)
+
+       @streamer = build_streamer
+     end
+
+     it 'imports from all dbs' do
+       ids = (1.upto(4)).map { BSON::ObjectId.new }
+       ids.each_with_index do |_id, i|
+         collection = mongo["test_#{i}"]['collection']
+         collection.drop
+         collection.insert({:_id => _id, :var => i}, :w => 1)
+       end
+
+       @streamer.options[:skip_tail] = true
+       @streamer.initial_import
+
+       sqlobjs = @sequel[:sqltable].select.to_a
+       assert_equal(ids.map(&:to_s).sort, sqlobjs.map { |o| o[:_id] }.sort)
+     end
+   end
+ end
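
These tests drive Streamer#handle_op with hashes shaped like MongoDB's raw oplog entries: 'ns' names the namespace, 'op' is the operation type ('i' insert, 'u' update, 'd' delete, 'n' no-op), 'o' carries the document or update body, and update ops carry their selector in 'o2'. For reference, a few hand-written examples in that shape (values are illustrative only):

insert_op  = { 'ns' => 'mosql_test.collection', 'op' => 'i',
               'o'  => { '_id' => 'x', 'var' => 1 } }

replace_op = { 'ns' => 'mosql_test.collection', 'op' => 'u',
               'o2' => { '_id' => 'x' },        # selector
               'o'  => { 'var' => 2 } }         # full replacement; handle_op merges _id back in

set_op     = { 'ns' => 'mosql_test.collection', 'op' => 'u',
               'o2' => { '_id' => 'x' },
               'o'  => { '$set' => { 'var' => 3 } } }  # $-operator => sync_object re-reads from Mongo

delete_op  = { 'ns' => 'mosql_test.collection', 'op' => 'd',
               'o'  => { '_id' => 'x' } }
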
@@ -12,6 +12,7 @@ db:
         :source: _id
         :type: TEXT
       - var: INTEGER
+      - str: TEXT
   with_extra_props:
     :meta:
       :table: sqltable2
@@ -88,16 +89,17 @@ EOF
   it 'creates a SQL schema with the right fields' do
     db = {}
     stub_1 = stub()
-    stub_1.expects(:column).with('id', 'TEXT')
-    stub_1.expects(:column).with('var', 'INTEGER')
+    stub_1.expects(:column).with('id', 'TEXT', {})
+    stub_1.expects(:column).with('var', 'INTEGER', {})
+    stub_1.expects(:column).with('str', 'TEXT', {})
     stub_1.expects(:column).with('_extra_props').never
     stub_1.expects(:primary_key).with([:id])
     stub_2 = stub()
-    stub_2.expects(:column).with('id', 'TEXT')
+    stub_2.expects(:column).with('id', 'TEXT', {})
     stub_2.expects(:column).with('_extra_props', 'TEXT')
     stub_2.expects(:primary_key).with([:id])
     stub_3 = stub()
-    stub_3.expects(:column).with('_id', 'TEXT')
+    stub_3.expects(:column).with('_id', 'TEXT', {})
     stub_3.expects(:column).with('_extra_props').never
     stub_3.expects(:primary_key).with([:_id])
     (class << db; self; end).send(:define_method, :create_table?) do |tbl, &blk|
@@ -118,8 +120,8 @@ EOF
 
   describe 'when transforming' do
     it 'transforms rows' do
-      out = @map.transform('db.collection', {'_id' => "row 1", 'var' => 6})
-      assert_equal(["row 1", 6], out)
+      out = @map.transform('db.collection', {'_id' => "row 1", 'var' => 6, 'str' => 'a string'})
+      assert_equal(["row 1", 6, 'a string'], out)
     end
 
     it 'Includes extra props' do
@@ -130,9 +132,21 @@ EOF
     end
 
     it 'gets all_columns right' do
-      assert_equal(['id', 'var'], @map.all_columns(@map.find_ns('db.collection')))
+      assert_equal(['id', 'var', 'str'], @map.all_columns(@map.find_ns('db.collection')))
       assert_equal(['id', '_extra_props'], @map.all_columns(@map.find_ns('db.with_extra_props')))
     end
+
+    it 'stringifies symbols' do
+      out = @map.transform('db.collection', {'_id' => "row 1", 'str' => :stringy})
+      assert_equal(["row 1", nil, 'stringy'], out)
+    end
+
+    it 'changes NaN to null in extra_props' do
+      out = @map.transform('db.with_extra_props', {'_id' => 7, 'nancy' => 0.0/0.0})
+      extra = JSON.parse(out[1])
+      assert(extra.key?('nancy'))
+      assert_equal(nil, extra['nancy'])
+    end
   end
 
   describe 'when copying data' do
@@ -195,4 +209,76 @@ EOF
                    {'a' => { 'c' => 4 }})
     end
   end
+
+  describe 'when handling a map with aliases' do
+    ALIAS_MAP = <<EOF
+---
+db:
+  :meta:
+    :alias: db_[0-9]+
+  collection:
+    :meta:
+      :table: sqltable
+    :columns:
+      - _id: TEXT
+      - var: INTEGER
+EOF
+    before do
+      @map = MoSQL::Schema.new(YAML.load(ALIAS_MAP))
+    end
+
+    it 'can look up collections by aliases' do
+      ns = @map.find_ns("db.collection")
+      assert_equal(ns, @map.find_ns("db_00.collection"))
+      assert_equal(ns, @map.find_ns("db_01.collection"))
+    end
+
+    it 'caches negative lookups' do
+      assert_equal(nil, @map.find_ns("nosuchdb.foo"))
+      assert(@map.instance_variable_get(:@map).key?("nosuchdb"))
+    end
+
+    it 'can do lookups after a negative cache' do
+      @map.find_ns("nosuchdb.foo")
+      assert_nil(@map.find_ns("otherdb.collection"))
+    end
+  end
+
+  describe 'parsing magic source values' do
+    OTHER_MAP = <<EOF
+---
+db:
+  collection:
+    :meta:
+      :table: a_table
+    :columns:
+      - _id: TEXT
+      - mosql_created:
+        :source: $timestamp
+        :type: timestamp
+  invalid:
+    :meta:
+      :table: invalid
+    :columns:
+      - _id: TEXT
+      - magic:
+        :source: $magic
+        :type: timestamp
+EOF
+
+    before do
+      @othermap = MoSQL::Schema.new(YAML.load(OTHER_MAP))
+    end
+
+    it 'translates $timestamp' do
+      r = @othermap.transform('db.collection', { '_id' => 'a' })
+      assert_equal(['a', Sequel.function(:now)], r)
+    end
+
+    it 'rejects unknown specials' do
+      assert_raises(MoSQL::SchemaError) do
+        r = @othermap.transform('db.invalid', { '_id' => 'a' })
+      end
+    end
+  end
 end
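
The last two describe blocks document the other user-visible 0.3.0 feature: "magic" source values in the collection map. A column whose :source is $timestamp is filled with the database's now() rather than a field from the Mongo document, and any other $-prefixed source raises MoSQL::SchemaError. A short sketch of using it, assuming a map like the one in the tests (table and column names here are illustrative):

require 'mosql'
require 'yaml'

map = MoSQL::Schema.new(YAML.load(<<EOF))
---
db:
  collection:
    :meta:
      :table: events
    :columns:
      - _id: TEXT
      - mosql_updated:
        :source: $timestamp
        :type: timestamp
EOF

# Per the 'translates $timestamp' test, the transform emits the SQL
# now() function rather than a literal Ruby Time:
map.transform('db.collection', { '_id' => 'a' })
# => ['a', Sequel.function(:now)]
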