mosql 0.3.2 → 0.4.0

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+ metadata.gz: 43cb64bb52ef41955332728b063f24470d73c134
+ data.tar.gz: 9ddb444a959cdce77da58255fa64f4704cc4f201
+ SHA512:
+ metadata.gz: 7260354422d7fad24eb8a018f7bf26108e7d3a86e502c0910df72cea011ecb58ee0c26065f8562940b4cca9bff2db06d07016aa4a83bd1dd4c1c0ee04c132f7d
+ data.tar.gz: 7bc1b499a41c0285770f26559c117e864e072490466f9aaabb7bb33093d5202767bc42d98525b2cf5a4405f79d88c5e4704df5aad75df918ad1b97ad1d5f15b8
@@ -1,6 +1,7 @@
  language: ruby
  rvm:
  - 1.9.3
+ - 2.1.2
  services:
  - mongodb
  - postgresql
@@ -1,12 +1,12 @@
  PATH
  remote: .
  specs:
- mosql (0.3.2)
+ mosql (0.4.0)
  bson_ext
  json
  log4r
  mongo
- mongoriver
+ mongoriver (= 0.4)
  pg
  rake
  sequel
@@ -14,24 +14,24 @@ PATH
  GEM
  remote: https://rubygems.org/
  specs:
- bson (1.9.1)
- bson_ext (1.9.1)
- bson (~> 1.9.1)
+ bson (1.10.2)
+ bson_ext (1.10.2)
+ bson (~> 1.10.2)
  json (1.8.1)
  log4r (1.1.10)
  metaclass (0.0.4)
  minitest (3.0.0)
  mocha (1.0.0)
  metaclass (~> 0.0.1)
- mongo (1.9.1)
- bson (~> 1.9.1)
- mongoriver (0.1.0)
+ mongo (1.10.2)
+ bson (= 1.10.2)
+ mongoriver (0.4.0)
  bson_ext
  log4r
  mongo (>= 1.7)
- pg (0.14.1)
- rake (10.3.1)
- sequel (3.44.0)
+ pg (0.17.1)
+ rake (10.3.2)
+ sequel (4.14.0)

  PLATFORMS
  ruby
data/README.md CHANGED
@@ -229,8 +229,16 @@ Patches and contributions are welcome! Please fork the project and
  open a pull request on [github][github], or just report issues.

  MoSQL includes a small but hopefully-growing test suite. It assumes a
- running PostgreSQL and MongoDB instance on the local host; You can
- point it at a different target via environment variables; See
- `test/functional/_lib.rb` for more information.
+ running PostgreSQL and MongoDB instance on the local host. To run the
+ test suite, first install all of MoSQL's dependencies:
+ ```shell
+ bundle install
+ ```
+ Then, run the tests:
+ ```shell
+ rake test
+ ```
+ You can also point the suite at a different target via environment
+ variables; See `test/functional/_lib.rb` for more information.

  [github]: https://github.com/stripe/mosql
@@ -70,6 +70,9 @@ module MoSQL
  end
  end
  end
+
+ # Lurky way to force Sequel force all timestamps to use UTC.
+ Sequel.default_timezone = :utc
  end

  def create_schema(db, clobber=false)
@@ -77,6 +80,8 @@ module MoSQL
  dbspec.each do |n, collection|
  next unless n.is_a?(String)
  meta = collection[:meta]
+ composite_key = meta[:composite_key]
+ keys = []
  log.info("Creating table '#{meta[:table]}'...")
  db.send(clobber ? :create_table! : :create_table?, meta[:table]) do
  collection[:columns].each do |col|
@@ -86,10 +91,14 @@ module MoSQL
  end
  column col[:name], col[:type], opts

- if col[:source].to_sym == :_id
- primary_key [col[:name].to_sym]
+ if composite_key and composite_key.include?(col[:name])
+ keys << col[:name].to_sym
+ elsif not composite_key and col[:source].to_sym == :_id
+ keys << col[:name].to_sym
  end
  end
+
+ primary_key keys
  if meta[:extra_props]
  type =
  if meta[:extra_props] == "JSON"
@@ -159,6 +168,23 @@ module MoSQL
  end
  end

+ def transform_primitive(v, type=nil)
+ case v
+ when BSON::ObjectId, Symbol
+ v.to_s
+ when BSON::Binary
+ if type.downcase == 'uuid'
+ v.to_s.unpack("H*").first
+ else
+ Sequel::SQL::Blob.new(v.to_s)
+ end
+ when BSON::DBRef
+ v.object_id.to_s
+ else
+ v
+ end
+ end
+
  def transform(ns, obj, schema=nil)
  schema ||= find_ns!(ns)

@@ -174,18 +200,17 @@ module MoSQL
  else
  v = fetch_and_delete_dotted(obj, source)
  case v
- when BSON::Binary, BSON::ObjectId, Symbol
- v = v.to_s
- when BSON::DBRef
- v = v.object_id.to_s
  when Hash
- v = JSON.dump(v)
+ v = JSON.dump(Hash[v.map { |k,v| [k, transform_primitive(v)] }])
  when Array
+ v = v.map { |it| transform_primitive(it) }
  if col[:array_type]
  v = Sequel.pg_array(v, col[:array_type])
  else
  v = JSON.dump(v)
  end
+ else
+ v = transform_primitive(v, type)
  end
  end
  row << v
@@ -268,6 +293,10 @@ module MoSQL
  'f'
  when Sequel::SQL::Function
  nil
+ when DateTime, Time
+ val.strftime("%FT%T.%6N %z")
+ when Sequel::SQL::Blob
+ "\\\\x" + [val].pack("h*")
  else
  val.to_s.gsub(/([\\\t\n\r])/, '\\\\\\1')
  end
@@ -290,7 +319,15 @@ module MoSQL
  end

  def primary_sql_key_for_ns(ns)
- find_ns!(ns)[:columns].find {|c| c[:source] == '_id'}[:name]
+ ns = find_ns!(ns)
+ keys = []
+ if ns[:meta][:composite_key]
+ keys = ns[:meta][:composite_key]
+ else
+ keys << ns[:columns].find {|c| c[:source] == '_id'}[:name]
+ end
+
+ return keys
  end
  end
  end
@@ -39,16 +39,24 @@ module MoSQL
  upsert!(table_for_ns(ns), @schema.primary_sql_key_for_ns(ns), h)
  end

- # obj must contain an _id field. All other fields will be ignored.
  def delete_ns(ns, obj)
- primary_sql_key = @schema.primary_sql_key_for_ns(ns)
+ primary_sql_keys = @schema.primary_sql_key_for_ns(ns)
  h = transform_one_ns(ns, obj)
- raise "No #{primary_sql_key} found in transform of #{obj.inspect}" if h[primary_sql_key].nil?
- table_for_ns(ns).where(primary_sql_key.to_sym => h[primary_sql_key]).delete
+ query = {}
+ primary_sql_keys.each do |key|
+ raise "No #{primary_sql_keys} found in transform of #{obj.inspect}" if h[key].nil?
+ query[key.to_sym] = h[key]
+ end
+
+ table_for_ns(ns).where(query).delete
  end

- def upsert!(table, table_primary_key, item)
- rows = table.where(table_primary_key.to_sym => item[table_primary_key]).update(item)
+ def upsert!(table, table_primary_keys, item)
+ query = {}
+ table_primary_keys.each do |key|
+ query[key.to_sym] = item[key]
+ end
+ rows = table.where(query).update(item)
  if rows == 0
  begin
  table.insert(item)
@@ -69,6 +77,10 @@ module MoSQL
  # how to get at this error code....
  e.wrapped_exception.result.error_field(PG::Result::PG_DIAG_SQLSTATE) == "23505"
  end
+
+ def self.duplicate_column_error?(e)
+ e.wrapped_exception.result.error_field(PG::Result::PG_DIAG_SQLSTATE) == "42701"
+ end
  end
  end

@@ -24,7 +24,7 @@ module MoSQL
  end

  def import
- if options[:reimport] || tailer.read_timestamp.seconds == 0
+ if options[:reimport] || tailer.read_position.nil?
  initial_import
  end
  end
@@ -90,14 +90,17 @@ module MoSQL
  @schema.create_schema(@sql.db, !options[:no_drop_tables])

  unless options[:skip_tail]
- start_ts = @mongo['local']['oplog.rs'].find_one({}, {:sort => [['$natural', -1]]})['ts']
+ start_state = {
+ 'time' => nil,
+ 'position' => @tailer.most_recent_position
+ }
  end

  dbnames = []

- if dbname = options[:dbname]
- log.info "Skipping DB scan and using db: #{dbname}"
- dbnames = [ dbname ]
+ if options[:dbname]
+ log.info "Skipping DB scan and using db: #{options[:dbname]}"
+ dbnames = [ options[:dbname] ]
  else
  dbnames = @mongo.database_names
  end
@@ -121,8 +124,7 @@ module MoSQL
  end
  end

-
- tailer.write_timestamp(start_ts) unless options[:skip_tail]
+ tailer.save_state(start_state) unless options[:skip_tail]
  end

  def did_truncate; @did_truncate ||= {}; end
@@ -164,9 +166,11 @@ module MoSQL
  end

  def optail
- tailer.tail_from(options[:tail_from] ?
- BSON::Timestamp.new(options[:tail_from].to_i, 0) :
- nil)
+ tail_from = options[:tail_from]
+ if tail_from.is_a? Time
+ tail_from = tailer.most_recent_position(tail_from)
+ end
+ tailer.tail(:from => tail_from)
  until @done
  tailer.stream(1000) do |op|
  handle_op(op)
@@ -174,16 +178,21 @@ module MoSQL
  end
  end

- def sync_object(ns, _id)
- primary_sql_key = @schema.primary_sql_key_for_ns(ns)
- sqlid = @sql.transform_one_ns(ns, { '_id' => _id })[primary_sql_key]
- obj = collection_for_ns(ns).find_one({:_id => _id})
+ def sync_object(ns, selector)
+ obj = collection_for_ns(ns).find_one(selector)
  if obj
  unsafe_handle_exceptions(ns, obj) do
  @sql.upsert_ns(ns, obj)
  end
  else
- @sql.table_for_ns(ns).where(primary_sql_key.to_sym => sqlid).delete()
+ primary_sql_keys = @schema.primary_sql_key_for_ns(ns)
+ schema = @schema.find_ns!(ns)
+ query = {}
+ primary_sql_keys.each do |key|
+ source = schema[:columns].find {|c| c[:name] == key }[:source]
+ query[key] = selector[source]
+ end
+ @sql.table_for_ns(ns).where(query).delete()
  end
  end

@@ -194,6 +203,15 @@ module MoSQL
  return
  end

+ # First, check if this was an operation performed via applyOps. If so, call handle_op with
+ # for each op that was applied.
+ # The oplog format of applyOps commands can be viewed here:
+ # https://groups.google.com/forum/#!topic/mongodb-user/dTf5VEJJWvY
+ if op['op'] == 'c' && (ops = op['o']['applyOps'])
+ ops.each { |op| handle_op(op) }
+ return
+ end
+
  unless @schema.find_ns(op['ns'])
  log.debug("Skipping op for unknown ns #{op['ns']}...")
  return
@@ -218,15 +236,24 @@ module MoSQL
  update = op['o']
  if update.keys.any? { |k| k.start_with? '$' }
  log.debug("resync #{ns}: #{selector['_id']} (update was: #{update.inspect})")
- sync_object(ns, selector['_id'])
+ sync_object(ns, selector)
  else
- log.debug("upsert #{ns}: _id=#{selector['_id']}")

  # The update operation replaces the existing object, but
  # preserves its _id field, so grab the _id off of the
  # 'query' field -- it's not guaranteed to be present on the
  # update.
- update = { '_id' => selector['_id'] }.merge(update)
+ primary_sql_keys = @schema.primary_sql_key_for_ns(ns)
+ schema = @schema.find_ns!(ns)
+ keys = {}
+ primary_sql_keys.each do |key|
+ source = schema[:columns].find {|c| c[:name] == key }[:source]
+ keys[key] = selector[source]
+ end
+
+ log.debug("upsert #{ns}: #{keys}")
+
+ update = keys.merge(update)
  unsafe_handle_exceptions(ns, update) do
  @sql.upsert_ns(ns, update)
  end
@@ -1,11 +1,25 @@
  module MoSQL
  class Tailer < Mongoriver::AbstractPersistentTailer
  def self.create_table(db, tablename)
- db.create_table?(tablename) do
- column :service, 'TEXT'
- column :timestamp, 'INTEGER'
- primary_key [:service]
+ if !db.table_exists?(tablename)
+ db.create_table(tablename) do
+ column :service, 'TEXT'
+ column :timestamp, 'INTEGER'
+ column :position, 'BYTEA'
+ primary_key [:service]
+ end
+ else
+ # Try to do seamless upgrades from before-tokumx times
+ # It will raise an exception in this in most cases,
+ # but there isn't a nice way I found to check if column
+ # exists.
+ begin
+ db.add_column(tablename, :position, 'BYTEA')
+ rescue Sequel::DatabaseError => e
+ raise unless MoSQL::SQLAdapter.duplicate_column_error?(e)
+ end
  end
+
  db[tablename.to_sym]
  end

@@ -15,25 +29,60 @@ module MoSQL
  @service = opts[:service] || "mosql"
  end

- def read_timestamp
- row = @table.where(:service => @service).select([:timestamp]).first
- if row
- BSON::Timestamp.new(row[:timestamp], 0)
+ def read_state
+ row = @table.where(:service => @service).first
+ return nil unless row
+ # Again, try to do seamless upgrades -
+ # If latest operation before or at timestamp if no position
+ # exists, use timestamp in database to guess what it could be.
+ result = {}
+ result['time'] = Time.at(row.fetch(:timestamp))
+ if row[:position]
+ result['position'] = from_blob(row[:position])
  else
- BSON::Timestamp.new(0, 0)
+ log.warn("Trying to seamlessly update from old version!")
+ result['position'] = most_recent_position(result['time'])
+ save_state(result)
  end
+ result
  end

- def write_timestamp(ts)
+ def write_state(state)
+ data = {
+ :service => @service,
+ :timestamp => state['time'].to_i,
+ :position => to_blob(state['position'])
+ }
+
  unless @did_insert
  begin
- @table.insert({:service => @service, :timestamp => ts.seconds})
+ @table.insert(data)
  rescue Sequel::DatabaseError => e
  raise unless MoSQL::SQLAdapter.duplicate_key_error?(e)
  end
  @did_insert = true
  end
- @table.where(:service => @service).update(:timestamp => ts.seconds)
+
+ @table.where(:service => @service).update(data)
+ end
+
+ private
+ def to_blob(position)
+ case database_type
+ when :mongo
+ return Sequel::SQL::Blob.new(position.seconds.to_s)
+ when :toku
+ return Sequel::SQL::Blob.new(position.to_s)
+ end
+ end
+
+ def from_blob(blob)
+ case database_type
+ when :mongo
+ return BSON::Timestamp.new(blob.to_i, 0)
+ when :toku
+ return BSON::Binary.new(blob)
+ end
  end
  end
  end
@@ -1,3 +1,3 @@
  module MoSQL
- VERSION = "0.3.2"
+ VERSION = "0.4.0"
  end
@@ -17,7 +17,8 @@ Gem::Specification.new do |gem|
  gem.version = MoSQL::VERSION

  %w[sequel pg mongo bson_ext rake log4r json
- mongoriver].each { |dep| gem.add_runtime_dependency(dep) }
+ ].each { |dep| gem.add_runtime_dependency(dep) }
+ gem.add_runtime_dependency "mongoriver", "0.4"

  gem.add_development_dependency "minitest"
  gem.add_development_dependency "mocha"
@@ -1,3 +1,6 @@
+ require 'rubygems'
+ require 'bundler/setup'
+
  require 'minitest/autorun'
  require 'minitest/spec'
  require 'mocha'
@@ -58,6 +58,7 @@ EOF
  end

  def setup
+ Sequel.default_timezone = :utc
  @sequel = connect_sql
  @mongo = connect_mongo
  super
@@ -141,8 +141,10 @@ EOF
  objects.map { |o| @specialmap.transform('db.collection', o) } )
  after = @sequel.select(Sequel.function(:NOW)).first[:now]
  rows = @sequel[:special].select.sort_by { |r| r[:_id] }
+
  assert_instance_of(Time, rows[0][:mosql_updated])
- assert(rows[0][:mosql_updated] > before && rows[0][:mosql_updated] < after)
+ assert_operator(rows[0][:mosql_updated], :>, before)
+ assert_operator(rows[0][:mosql_updated], :<, after)
  end
  end
  end
@@ -17,8 +17,8 @@ class MoSQL::Test::Functional::SQLTest < MoSQL::Test::Functional

  describe 'upsert' do
  it 'inserts new items' do
- @adapter.upsert!(@table, '_id', {'_id' => 0, 'color' => 'red', 'quantity' => 10, 'numbers' => Sequel.pg_array([1, 2, 3], :integer)})
- @adapter.upsert!(@table, '_id', {'_id' => 1, 'color' => 'blue', 'quantity' => 5, 'numbers' => Sequel.pg_array([], :integer)})
+ @adapter.upsert!(@table, ['_id'], {'_id' => 0, 'color' => 'red', 'quantity' => 10, 'numbers' => Sequel.pg_array([1, 2, 3], :integer)})
+ @adapter.upsert!(@table, ['_id'], {'_id' => 1, 'color' => 'blue', 'quantity' => 5, 'numbers' => Sequel.pg_array([], :integer)})
  assert_equal(2, @table.count)
  assert_equal('red', @table[:_id => 0][:color])
  assert_equal(10, @table[:_id => 0][:quantity])
@@ -29,11 +29,11 @@ class MoSQL::Test::Functional::SQLTest < MoSQL::Test::Functional
  end

  it 'updates items' do
- @adapter.upsert!(@table, '_id', {'_id' => 0, 'color' => 'red', 'quantity' => 10})
+ @adapter.upsert!(@table, ['_id'], {'_id' => 0, 'color' => 'red', 'quantity' => 10})
  assert_equal(1, @table.count)
  assert_equal('red', @table[:_id => 0][:color])

- @adapter.upsert!(@table, '_id', {'_id' => 0, 'color' => 'blue', 'quantity' => 5})
+ @adapter.upsert!(@table, ['_id'], {'_id' => 0, 'color' => 'blue', 'quantity' => 5})
  assert_equal(1, @table.count)
  assert_equal('blue', @table[:_id => 0][:color])
  end
@@ -53,6 +53,22 @@ filter_test:
  :columns:
  - _id: TEXT
  - var: INTEGER
+
+ composite_key_test:
+ collection:
+ :meta:
+ :table: composite_table
+ :composite_key:
+ - store
+ - time
+ :columns:
+ - store:
+ :source: _id.s
+ :type: TEXT
+ - time:
+ :source: _id.t
+ :type: TIMESTAMP
+ - var: TEXT
  EOF

  before do
@@ -61,6 +77,7 @@ EOF

  @sequel.drop_table?(:sqltable)
  @sequel.drop_table?(:sqltable2)
+ @sequel.drop_table?(:composite_table)
  @map.create_schema(@sequel)

  @streamer = build_streamer
@@ -78,6 +95,22 @@ EOF
  assert_equal(27, sequel[:sqltable].where(:_id => o['_id'].to_s).select.first[:var])
  end

+ it 'applies ops performed via applyOps' do
+ o = { '_id' => BSON::ObjectId.new, 'var' => 17 }
+ @adapter.upsert_ns('mosql_test.collection', o)
+
+ op = { 'ns' => 'mosql_test.collection',
+ 'op' => 'u',
+ 'o2' => { '_id' => o['_id'] },
+ 'o' => { 'var' => 27 }
+ }
+ @streamer.handle_op({ 'op' => 'c',
+ 'ns' => 'mosql_test.$cmd',
+ 'o' => { 'applyOps' => [op] }
+ })
+ assert_equal(27, sequel[:sqltable].where(:_id => o['_id'].to_s).select.first[:var])
+ end
+
  it 'handle "d" ops with BSON::ObjectIds' do
  o = { '_id' => BSON::ObjectId.new, 'var' => 17 }
  @adapter.upsert_ns('mosql_test.collection', o)
@@ -151,6 +184,39 @@ EOF
  assert_equal(data[1], record)
  end

+ it 'handles "u" ops with a compsite key' do
+ date = Time.utc(2014, 7, 1)
+ o = {'_id' => {'s' => 'asdf', 't' => date}, 'var' => 'data'}
+ collection = mongo["composite_key_test"]["collection"]
+ collection.drop
+ collection.insert(o)
+
+ @streamer.options[:skip_tail] = true
+ @streamer.initial_import
+
+ collection.update({ '_id' => { 's' => 'asdf', 't' => date}}, { '$set' => { 'var' => 'new_data'}})
+ @streamer.handle_op({'ns' => 'composite_key_test.collection',
+ 'op' => 'u',
+ 'o2' => { '_id' => { 's' => 'asdf', 't' => date}},
+ 'o' => { '$set' => { 'var' => 'new_data'}}
+ })
+
+ assert_equal(0, @sequel[:composite_table].where(:var => "data").count)
+ assert_equal(1, @sequel[:composite_table].where(:var => "new_data").count)
+ end
+
+ it 'handles composite keys' do
+ o = {'_id' => {'s' => 'asdf', 't' => Time.new}, 'var' => 'data'}
+ collection = mongo["composite_key_test"]["collection"]
+ collection.drop
+ collection.insert(o)
+
+ @streamer.options[:skip_tail] = true
+ @streamer.initial_import
+
+ assert_equal(1, @sequel[:composite_table].count)
+ end
+
  describe '.bulk_upsert' do
  it 'inserts multiple rows' do
  objs = [
@@ -252,4 +318,56 @@ EOF
  assert_equal(ids.map(&:to_s).sort, sqlobjs.map { |o| o[:_id] }.sort)
  end
  end
+ describe 'timestamps' do
+ TIMESTAMP_MAP = <<EOF
+ ---
+ db:
+ has_timestamp:
+ :meta:
+ :table: has_timestamp
+ :columns:
+ - _id: TEXT
+ - ts: timestamp
+ EOF
+
+ before do
+ @map = MoSQL::Schema.new(YAML.load(TIMESTAMP_MAP))
+ @adapter = MoSQL::SQLAdapter.new(@map, sql_test_uri)
+
+ mongo['db']['has_timestamp'].drop
+ @sequel.drop_table?(:has_timestamp)
+ @map.create_schema(@sequel)
+
+ @streamer = build_streamer
+ end
+
+ it 'preserves milliseconds on import' do
+ ts = Time.utc(2014, 8, 7, 6, 54, 32, 123000)
+ mongo['db']['has_timestamp'].insert({ts: ts})
+ @streamer.options[:skip_tail] = true
+ @streamer.initial_import
+
+ row = @sequel[:has_timestamp].select.to_a
+ assert_equal(1, row.length)
+ assert_equal(ts.to_i, row.first[:ts].to_i)
+ assert_equal(ts.tv_usec, row.first[:ts].tv_usec)
+ end
+
+ it 'preserves milliseconds on tailing' do
+ ts = Time.utc(2006,01,02, 15,04,05,678000)
+ id = mongo['db']['has_timestamp'].insert({ts: ts})
+ @streamer.handle_op(
+ {
+ "ts" => {"t" => 1408647630, "i" => 4},
+ "h" => -965650193548512059,
+ "v" => 2,
+ "op" => "i",
+ "ns" => "db.has_timestamp",
+ "o" => mongo['db']['has_timestamp'].find_one({_id: id})
+ })
+ got = @sequel[:has_timestamp].where(:_id => id.to_s).select.first[:ts]
+ assert_equal(ts.to_i, got.to_i)
+ assert_equal(ts.tv_usec, got.tv_usec)
+ end
+ end
  end
@@ -0,0 +1,122 @@
+ require File.join(File.dirname(__FILE__), '_lib.rb')
+ require 'mosql/cli'
+
+ class MoSQL::Test::Functional::TransformTest < MoSQL::Test::Functional
+ TESTCASES = [
+ [
+ BSON::ObjectId.from_string('5405fae77c584947fc000001'),
+ 'TEXT',
+ '5405fae77c584947fc000001'
+ ],
+ [
+ Time.utc(2006,01,02, 15,04,05,678000),
+ 'TIMESTAMP',
+ Time.utc(2006,01,02, 15,04,05,678000)
+ ],
+ [
+ :stringy,
+ 'TEXT',
+ 'stringy'
+ ],
+ [
+ BSON::DBRef.new('db.otherns', BSON::ObjectId.from_string('5405fae77c584947fc000001')),
+ 'TEXT',
+ '5405fae77c584947fc000001'
+ ],
+ [
+ [
+ BSON::ObjectId.from_string('5405fae77c584947fc000001'),
+ BSON::ObjectId.from_string('5405fae77c584947fc000002')
+ ],
+ 'TEXT ARRAY',
+ ['5405fae77c584947fc000001', '5405fae77c584947fc000002']
+ ],
+ [
+ [
+ BSON::ObjectId.from_string('5405fae77c584947fc000001'),
+ BSON::ObjectId.from_string('5405fae77c584947fc000002')
+ ],
+ 'TEXT',
+ ['5405fae77c584947fc000001', '5405fae77c584947fc000002'].to_json,
+ ],
+ [
+ [
+ BSON::DBRef.new('db.otherns', BSON::ObjectId.from_string('5405fae77c584947fc000001')),
+ BSON::DBRef.new('db.otherns', BSON::ObjectId.from_string('5405fae77c584947fc000002'))
+ ],
+ 'TEXT ARRAY',
+ ['5405fae77c584947fc000001', '5405fae77c584947fc000002']
+ ],
+ [
+ [
+ BSON::DBRef.new('db.otherns', BSON::ObjectId.from_string('5405fae77c584947fc000001')),
+ BSON::DBRef.new('db.otherns', BSON::ObjectId.from_string('5405fae77c584947fc000002'))
+ ],
+ 'TEXT',
+ ['5405fae77c584947fc000001', '5405fae77c584947fc000002'].to_json
+ ],
+ [
+ BSON::Binary.new(["2d931510d99f494a8c6787feb05e1594"].pack("H*"),
+ BSON::Binary::SUBTYPE_UUID),
+ 'UUID',
+ "2d931510-d99f-494a-8c67-87feb05e1594"
+ ],
+ [
+ BSON::Binary.new(["deadbeefcafebabe"].pack("H*"),
+ BSON::Binary::SUBTYPE_SIMPLE),
+ 'BYTEA',
+ ["deadbeefcafebabe"].pack("H*")
+ ]
+ ]
+
+ TESTCASES.each do |mongo, typ, sql|
+ it "Can transform a #{mongo.class} into a #{typ}" do
+ map = {'test' => {'test_transform' =>
+ {
+ meta: {
+ table: 'test_transform'
+ },
+ columns: [
+ {'_id' => 'TEXT'},
+ {'value' => typ},
+ ]
+ }}}
+ schema = MoSQL::Schema.new(map)
+ adapter = MoSQL::SQLAdapter.new(schema, sql_test_uri)
+ @sequel.drop_table?(:test_transform)
+ collection = @mongo['test']['test_transform']
+ collection.drop
+
+ schema.create_schema(@sequel)
+ streamer = MoSQL::Streamer.new(:mongo => self.mongo,
+ :tailer => nil,
+ :options => {skip_tail: true},
+ :sql => adapter,
+ :schema => schema)
+
+ # Test initial import
+ id = 'imported'
+ collection.insert({_id: id, value: mongo})
+ streamer.initial_import
+
+ got = @sequel[:test_transform].where(_id: id).to_a
+ assert_equal(sql, got.first[:value], "was able to transform a #{typ} field on initial import")
+
+ # Test streaming an insert
+ id = 'inserted'
+ collection.insert({_id: id, value: mongo})
+ streamer.handle_op(
+ {
+ "ts" => {"t" => 1408647630, "i" => 4},
+ "h" => -965650193548512059,
+ "v" => 2,
+ "op" => "i",
+ "ns" => "test.test_transform",
+ "o" => collection.find_one(_id: id)
+ })
+
+ got = @sequel[:test_transform].where(_id: id).to_a
+ assert_equal(sql, got.first[:value], "was able to transform a #{typ} field while streaming")
+ end
+ end
+ end
@@ -39,6 +39,23 @@ db:
  - arry: TEXT
  :meta:
  :table: sqltable5
+ with_composite_key:
+ :meta:
+ :table: sqltable6
+ :composite_key:
+ - store
+ - time
+ :columns:
+ - store:
+ :source: _id.s
+ :type: TEXT
+ - time:
+ :source: id.t
+ :type: TEXT
+ - var:
+ :source: var
+ :type: TEXT
+
  EOF

  before do
@@ -87,8 +104,8 @@ EOF
  end

  it 'Can find the primary key of the SQL table' do
- assert_equal('id', @map.primary_sql_key_for_ns('db.collection'))
- assert_equal('_id', @map.primary_sql_key_for_ns('db.old_conf_syntax'))
+ assert_equal(['id'], @map.primary_sql_key_for_ns('db.collection'))
+ assert_equal(['_id'], @map.primary_sql_key_for_ns('db.old_conf_syntax'))
  end

  it 'can create a SQL schema' do
@@ -98,6 +115,7 @@ EOF
  db.expects(:create_table?).with('sqltable3')
  db.expects(:create_table?).with('sqltable4')
  db.expects(:create_table?).with('sqltable5')
+ db.expects(:create_table?).with('sqltable6')

  @map.create_schema(db)
  end
@@ -127,6 +145,11 @@ EOF
  stub_5.expects(:column).with('_id', 'TEXT', {})
  stub_5.expects(:column).with('arry', 'TEXT', {})
  stub_5.expects(:primary_key).with([:_id])
+ stub_6 = stub('table 6')
+ stub_6.expects(:column).with('store', 'TEXT', {})
+ stub_6.expects(:column).with('time', 'TEXT', {})
+ stub_6.expects(:column).with('var', 'TEXT', {})
+ stub_6.expects(:primary_key).with([:store, :time])
  (class << db; self; end).send(:define_method, :create_table?) do |tbl, &blk|
  case tbl
  when "sqltable"
@@ -139,6 +162,8 @@ EOF
  o = stub_4
  when "sqltable5"
  o = stub_5
+ when "sqltable6"
+ o = stub_6
  else
  assert(false, "Tried to create an unexpected table: #{tbl}")
  end
@@ -177,6 +202,13 @@ EOF
  assert_equal(["row 1", nil, oid.to_s, nil], out)
  end

+ it 'converts DBRef to object id in arrays' do
+ oid = [ BSON::ObjectId.new, BSON::ObjectId.new]
+ o = {'_id' => "row 1", "str" => [ BSON::DBRef.new('db.otherns', oid[0]), BSON::DBRef.new('db.otherns', oid[1]) ] }
+ out = @map.transform('db.collection', o)
+ assert_equal(["row 1", nil, JSON.dump(oid.map! {|o| o.to_s}), nil ], out)
+ end
+
  it 'changes NaN to null in extra_props' do
  out = @map.transform('db.with_extra_props', {'_id' => 7, 'nancy' => 0.0/0.0})
  extra = JSON.parse(out[1])
metadata CHANGED
@@ -1,174 +1,153 @@
  --- !ruby/object:Gem::Specification
  name: mosql
  version: !ruby/object:Gem::Version
- version: 0.3.2
- prerelease:
+ version: 0.4.0
  platform: ruby
  authors:
  - Nelson Elhage
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2014-08-11 00:00:00.000000000 Z
+ date: 2014-10-01 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: sequel
  requirement: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
  name: pg
  requirement: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
  name: mongo
  requirement: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
  name: bson_ext
  requirement: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
  name: rake
  requirement: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
  name: log4r
  requirement: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
  name: json
  requirement: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
  name: mongoriver
  requirement: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - '='
  - !ruby/object:Gem::Version
- version: '0'
+ version: '0.4'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - '='
  - !ruby/object:Gem::Version
- version: '0'
+ version: '0.4'
  - !ruby/object:Gem::Dependency
  name: minitest
  requirement: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
  name: mocha
  requirement: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  description: A library for streaming MongoDB to SQL
@@ -179,8 +158,8 @@ executables:
  extensions: []
  extra_rdoc_files: []
  files:
- - .gitignore
- - .travis.yml
+ - ".gitignore"
+ - ".travis.yml"
  - Gemfile
  - Gemfile.lock
  - LICENSE
@@ -198,41 +177,40 @@ files:
  - mosql.gemspec
  - test/_lib.rb
  - test/functional/_lib.rb
- - test/functional/functional.rb
  - test/functional/schema.rb
  - test/functional/sql.rb
  - test/functional/streamer.rb
+ - test/functional/transform.rb
  - test/unit/lib/mosql/schema.rb
  homepage: https://github.com/stripe/mosql
  licenses: []
+ metadata: {}
  post_install_message:
  rdoc_options: []
  require_paths:
  - lib
  required_ruby_version: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  required_rubygems_version: !ruby/object:Gem::Requirement
- none: false
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 1.8.23.2
+ rubygems_version: 2.2.2
  signing_key:
- specification_version: 3
+ specification_version: 4
  summary: MongoDB -> SQL streaming bridge
  test_files:
  - test/_lib.rb
  - test/functional/_lib.rb
- - test/functional/functional.rb
  - test/functional/schema.rb
  - test/functional/sql.rb
  - test/functional/streamer.rb
+ - test/functional/transform.rb
  - test/unit/lib/mosql/schema.rb
  has_rdoc:
@@ -1,7 +0,0 @@
- require File.join(File.dirname(__FILE__), '_lib.rb')
-
- class MoSQL::Test::Functional::SchemaTest < MoSQL::Test::Functional
- it 'runs tests' do
- assert(true)
- end
- end