mosql 0.3.1 → 0.3.2

Sign up to get free protection for your applications and to get access to all the features.
@@ -1,7 +1,7 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- mosql (0.3.1)
4
+ mosql (0.3.2)
5
5
  bson_ext
6
6
  json
7
7
  log4r
@@ -14,23 +14,23 @@ PATH
14
14
  GEM
15
15
  remote: https://rubygems.org/
16
16
  specs:
17
- bson (1.8.5)
18
- bson_ext (1.8.5)
19
- bson (~> 1.8.5)
20
- json (1.8.0)
17
+ bson (1.9.1)
18
+ bson_ext (1.9.1)
19
+ bson (~> 1.9.1)
20
+ json (1.8.1)
21
21
  log4r (1.1.10)
22
- metaclass (0.0.1)
22
+ metaclass (0.0.4)
23
23
  minitest (3.0.0)
24
- mocha (0.10.5)
24
+ mocha (1.0.0)
25
25
  metaclass (~> 0.0.1)
26
- mongo (1.8.5)
27
- bson (~> 1.8.5)
26
+ mongo (1.9.1)
27
+ bson (~> 1.9.1)
28
28
  mongoriver (0.1.0)
29
29
  bson_ext
30
30
  log4r
31
31
  mongo (>= 1.7)
32
32
  pg (0.14.1)
33
- rake (10.1.0)
33
+ rake (10.3.1)
34
34
  sequel (3.44.0)
35
35
 
36
36
  PLATFORMS
data/README.md CHANGED
@@ -154,11 +154,18 @@ For advanced scenarios, you can pass options to control mosql's
154
154
  behavior. If you pass `--skip-tail`, mosql will do the initial import,
155
155
  but not tail the oplog. This could be used, for example, to do an
156
156
  import off of a backup snapshot, and then start the tailer on the live
157
- cluster.
157
+ cluster. This can also be useful for hosted services where you do not
158
+ have access to the oplog.
158
159
 
159
160
  If you need to force a fresh reimport, run `--reimport`, which will
160
161
  cause `mosql` to drop tables, create them anew, and do another import.
161
162
 
163
+ Normally, MoSQL will scan through a list of the databases on the mongo
164
+ server you connect to. You can avoid this behavior by specifying a specific
165
+ mongo db to connect to with the `--only-db [dbname]` option. This is
166
+ useful for hosted services which do not let you list all databases (via
167
+ the `listDatabases` command).
168
+
162
169
  ## Schema mismatches and _extra_props
163
170
 
164
171
  If MoSQL encounters values in the MongoDB database that don't fit
@@ -169,16 +176,24 @@ If it encounters a MongoDB object with fields not listed in the
169
176
  collection map, it will discard the extra fields, unless
170
177
  `:extra_props` is set in the `:meta` hash. If it is, it will collect
171
178
  any missing fields, JSON-encode them in a hash, and store the
172
- resulting text in `_extra_props` in SQL. It's up to you to do
173
- something useful with the JSON. One option is to use [plv8][plv8] to
174
- parse them inside PostgreSQL, or you can just pull the JSON out whole
175
- and parse it in application code.
176
-
177
- This is also currently the only way to handle array or object values
178
- inside records -- specify `:extra_props`, and they'll get JSON-encoded
179
- into `_extra_props`. There's no reason we couldn't support
180
- JSON-encoded values for individual columns/fields, but we haven't
181
- written that code yet.
179
+ resulting text in `_extra_props` in SQL.
180
+
181
+ As of PostgreSQL 9.3, you can declare columns as type "JSON" and use
182
+ the [native JSON support][pg-json] to inspect inside of JSON-encoded
183
+ types. In earlier versions, you can write code in an extension
184
+ language, such as [plv8][plv8].
185
+
186
+ [pg-json]: http://www.postgresql.org/docs/9.3/static/functions-json.html
187
+
188
+ ## Non-scalar types
189
+
190
+ MoSQL supports array types, using the `INTEGER ARRAY` array type
191
+ syntax. This will cause MoSQL to create the column as an array type in
192
+ PostgreSQL, and insert appropriately-formatted rows.
193
+
194
+ Fields with hash values, or array values that are not in an
195
+ ARRAY-typed column, will be transformed into JSON TEXT strings before
196
+ being inserted into PostgreSQL.
182
197
 
183
198
  [plv8]: http://code.google.com/p/plv8js/
184
199
 
@@ -71,6 +71,10 @@ module MoSQL
71
71
  @options[:ignore_delete] = true
72
72
  end
73
73
 
74
+ opts.on("--only-db [dbname]", "Don't scan for mongo dbs, just use the one specified") do |dbname|
75
+ @options[:dbname] = dbname
76
+ end
77
+
74
78
  opts.on("--tail-from [timestamp]", "Start tailing from the specified UNIX timestamp") do |ts|
75
79
  @options[:tail_from] = ts
76
80
  end
@@ -5,17 +5,17 @@ module MoSQL
5
5
  include MoSQL::Logging
6
6
 
7
7
  def to_array(lst)
8
- array = []
9
- lst.each do |ent|
8
+ lst.map do |ent|
9
+ col = nil
10
10
  if ent.is_a?(Hash) && ent[:source].is_a?(String) && ent[:type].is_a?(String)
11
11
  # new configuration format
12
- array << {
12
+ col = {
13
13
  :source => ent.fetch(:source),
14
14
  :type => ent.fetch(:type),
15
15
  :name => (ent.keys - [:source, :type]).first,
16
16
  }
17
17
  elsif ent.is_a?(Hash) && ent.keys.length == 1 && ent.values.first.is_a?(String)
18
- array << {
18
+ col = {
19
19
  :source => ent.first.first,
20
20
  :name => ent.first.first,
21
21
  :type => ent.first.last
@@ -24,8 +24,12 @@ module MoSQL
24
24
  raise SchemaError.new("Invalid ordered hash entry #{ent.inspect}")
25
25
  end
26
26
 
27
+ if !col.key?(:array_type) && /\A(.+)\s+array\z/i.match(col[:type])
28
+ col[:array_type] = $1
29
+ end
30
+
31
+ col
27
32
  end
28
- array
29
33
  end
30
34
 
31
35
  def check_columns!(ns, spec)
@@ -87,7 +91,13 @@ module MoSQL
87
91
  end
88
92
  end
89
93
  if meta[:extra_props]
90
- column '_extra_props', 'TEXT'
94
+ type =
95
+ if meta[:extra_props] == "JSON"
96
+ "JSON"
97
+ else
98
+ "TEXT"
99
+ end
100
+ column '_extra_props', type
91
101
  end
92
102
  end
93
103
  end
@@ -104,7 +114,7 @@ module MoSQL
104
114
  end
105
115
 
106
116
  def find_ns(ns)
107
- db, collection = ns.split(".")
117
+ db, collection = ns.split(".", 2)
108
118
  unless spec = find_db(db)
109
119
  return nil
110
120
  end
@@ -166,27 +176,23 @@ module MoSQL
166
176
  case v
167
177
  when BSON::Binary, BSON::ObjectId, Symbol
168
178
  v = v.to_s
169
- when Hash, Array
179
+ when BSON::DBRef
180
+ v = v.object_id.to_s
181
+ when Hash
170
182
  v = JSON.dump(v)
183
+ when Array
184
+ if col[:array_type]
185
+ v = Sequel.pg_array(v, col[:array_type])
186
+ else
187
+ v = JSON.dump(v)
188
+ end
171
189
  end
172
190
  end
173
191
  row << v
174
192
  end
175
193
 
176
194
  if schema[:meta][:extra_props]
177
- # Kludgily delete binary blobs from _extra_props -- they may
178
- # contain invalid UTF-8, which to_json will not properly encode.
179
- extra = {}
180
- obj.each do |k,v|
181
- case v
182
- when BSON::Binary
183
- next
184
- when Float
185
- # NaN is illegal in JSON. Translate into null.
186
- v = nil if v.nan?
187
- end
188
- extra[k] = v
189
- end
195
+ extra = sanitize(obj)
190
196
  row << JSON.dump(extra)
191
197
  end
192
198
 
@@ -195,6 +201,26 @@ module MoSQL
195
201
  row
196
202
  end
197
203
 
204
+ def sanitize(value)
205
+ # Base64-encode binary blobs from _extra_props -- they may
206
+ # contain invalid UTF-8, which to_json will not properly encode.
207
+ case value
208
+ when Hash
209
+ ret = {}
210
+ value.each {|k, v| ret[k] = sanitize(v)}
211
+ ret
212
+ when Array
213
+ value.map {|v| sanitize(v)}
214
+ when BSON::Binary
215
+ Base64.encode64(value.to_s)
216
+ when Float
217
+ # NaN is illegal in JSON. Translate into null.
218
+ value.nan? ? nil : value
219
+ else
220
+ value
221
+ end
222
+ end
223
+
198
224
  def copy_column?(col)
199
225
  col[:source] != '$timestamp'
200
226
  end
@@ -7,6 +7,7 @@ module MoSQL
7
7
  def initialize(schema, uri, pgschema=nil)
8
8
  @schema = schema
9
9
  connect_db(uri, pgschema)
10
+ @db.extension :pg_array
10
11
  end
11
12
 
12
13
  def connect_db(uri, pgschema)
@@ -93,23 +93,41 @@ module MoSQL
93
93
  start_ts = @mongo['local']['oplog.rs'].find_one({}, {:sort => [['$natural', -1]]})['ts']
94
94
  end
95
95
 
96
- @mongo.database_names.each do |dbname|
97
- next unless spec = @schema.find_db(dbname)
96
+ dbnames = []
97
+
98
+ if dbname = options[:dbname]
99
+ log.info "Skipping DB scan and using db: #{dbname}"
100
+ dbnames = [ dbname ]
101
+ else
102
+ dbnames = @mongo.database_names
103
+ end
104
+
105
+ dbnames.each do |dbname|
106
+ spec = @schema.find_db(dbname)
107
+
108
+ if(spec.nil?)
109
+ log.info("Mongd DB '#{dbname}' not found in config file. Skipping.")
110
+ next
111
+ end
112
+
98
113
  log.info("Importing for Mongo DB #{dbname}...")
99
114
  db = @mongo.db(dbname)
100
- db.collections.select { |c| spec.key?(c.name) }.each do |collection|
115
+ collections = db.collections.select { |c| spec.key?(c.name) }
116
+
117
+ collections.each do |collection|
101
118
  ns = "#{dbname}.#{collection.name}"
102
- import_collection(ns, collection)
119
+ import_collection(ns, collection, spec[collection.name][:meta][:filter])
103
120
  exit(0) if @done
104
121
  end
105
122
  end
106
123
 
124
+
107
125
  tailer.write_timestamp(start_ts) unless options[:skip_tail]
108
126
  end
109
127
 
110
128
  def did_truncate; @did_truncate ||= {}; end
111
129
 
112
- def import_collection(ns, collection)
130
+ def import_collection(ns, collection, filter)
113
131
  log.info("Importing for #{ns}...")
114
132
  count = 0
115
133
  batch = []
@@ -121,7 +139,7 @@ module MoSQL
121
139
 
122
140
  start = Time.now
123
141
  sql_time = 0
124
- collection.find(nil, :batch_size => BATCH) do |cursor|
142
+ collection.find(filter, :batch_size => BATCH) do |cursor|
125
143
  with_retries do
126
144
  cursor.each do |obj|
127
145
  batch << @schema.transform(ns, obj)
@@ -1,3 +1,3 @@
1
1
  module MoSQL
2
- VERSION = "0.3.1"
2
+ VERSION = "0.3.2"
3
3
  end
@@ -5,6 +5,7 @@ require 'mocha'
5
5
  $:.unshift(File.expand_path(File.join(File.dirname(__FILE__), '../lib')))
6
6
 
7
7
  require 'mosql'
8
+ require 'mocha/mini_test'
8
9
 
9
10
  module MoSQL
10
11
  class Test < ::MiniTest::Spec
@@ -17,6 +17,7 @@ module MoSQL
17
17
  def connect_sql
18
18
  begin
19
19
  conn = Sequel.connect(sql_test_uri)
20
+ conn.extension :pg_array
20
21
  conn.test_connection
21
22
  conn
22
23
  rescue Sequel::DatabaseConnectionError
@@ -10,6 +10,7 @@ db:
10
10
  :columns:
11
11
  - _id: TEXT
12
12
  - var: INTEGER
13
+ - arry: INTEGER ARRAY
13
14
  with_extra_props:
14
15
  :meta:
15
16
  :table: sqltable2
@@ -44,7 +45,7 @@ EOF
44
45
  def table3; @sequel[:sqltable3]; end
45
46
 
46
47
  it 'Creates the tables with the right columns' do
47
- assert_equal(Set.new([:_id, :var]),
48
+ assert_equal(Set.new([:_id, :var, :arry]),
48
49
  Set.new(table.columns))
49
50
  assert_equal(Set.new([:_id, :_extra_props]),
50
51
  Set.new(table2.columns))
@@ -53,7 +54,7 @@ EOF
53
54
  it 'Can COPY data' do
54
55
  objects = [
55
56
  {'_id' => "a", 'var' => 0},
56
- {'_id' => "b", 'var' => 1},
57
+ {'_id' => "b", 'var' => 1, 'arry' => "{1, 2, 3}"},
57
58
  {'_id' => "c"},
58
59
  {'_id' => "d", 'other_var' => "hello"}
59
60
  ]
@@ -63,6 +64,7 @@ EOF
63
64
  assert_equal(%w[a b c d], rows.map { |r| r[:_id] })
64
65
  assert_equal(nil, rows[2][:var])
65
66
  assert_equal(nil, rows[3][:var])
67
+ assert_equal([1 ,2, 3], rows[1][:arry])
66
68
  end
67
69
 
68
70
  it 'Can COPY dotted data' do
@@ -7,6 +7,7 @@ class MoSQL::Test::Functional::SQLTest < MoSQL::Test::Functional
7
7
  column :_id, 'INTEGER'
8
8
  column :color, 'TEXT'
9
9
  column :quantity, 'INTEGER'
10
+ column :numbers, 'INTEGER ARRAY'
10
11
  primary_key [:_id]
11
12
  end
12
13
 
@@ -16,13 +17,15 @@ class MoSQL::Test::Functional::SQLTest < MoSQL::Test::Functional
16
17
 
17
18
  describe 'upsert' do
18
19
  it 'inserts new items' do
19
- @adapter.upsert!(@table, '_id', {'_id' => 0, 'color' => 'red', 'quantity' => 10})
20
- @adapter.upsert!(@table, '_id', {'_id' => 1, 'color' => 'blue', 'quantity' => 5})
20
+ @adapter.upsert!(@table, '_id', {'_id' => 0, 'color' => 'red', 'quantity' => 10, 'numbers' => Sequel.pg_array([1, 2, 3], :integer)})
21
+ @adapter.upsert!(@table, '_id', {'_id' => 1, 'color' => 'blue', 'quantity' => 5, 'numbers' => Sequel.pg_array([], :integer)})
21
22
  assert_equal(2, @table.count)
22
23
  assert_equal('red', @table[:_id => 0][:color])
23
24
  assert_equal(10, @table[:_id => 0][:quantity])
24
25
  assert_equal('blue', @table[:_id => 1][:color])
25
26
  assert_equal(5, @table[:_id => 1][:quantity])
27
+ assert_equal([1, 2, 3], @table[:_id => 0][:numbers])
28
+ assert_equal([], @table[:_id => 1][:numbers])
26
29
  end
27
30
 
28
31
  it 'updates items' do
@@ -20,6 +20,7 @@ mosql_test:
20
20
  :columns:
21
21
  - _id: TEXT
22
22
  - var: INTEGER
23
+ - arry: INTEGER ARRAY
23
24
  renameid:
24
25
  :meta:
25
26
  :table: sqltable2
@@ -28,6 +29,30 @@ mosql_test:
28
29
  :source: _id
29
30
  :type: TEXT
30
31
  - goats: INTEGER
32
+
33
+ filter_test:
34
+ collection:
35
+ :meta:
36
+ :table: filter_sqltable
37
+ :filter:
38
+ :_id:
39
+ '$gte': !ruby/object:BSON::ObjectId
40
+ data:
41
+ - 83
42
+ - 179
43
+ - 75
44
+ - 128
45
+ - 0
46
+ - 0
47
+ - 0
48
+ - 0
49
+ - 0
50
+ - 0
51
+ - 0
52
+ - 0
53
+ :columns:
54
+ - _id: TEXT
55
+ - var: INTEGER
31
56
  EOF
32
57
 
33
58
  before do
@@ -109,11 +134,28 @@ EOF
109
134
  assert_equal(0, sequel[:sqltable2].where(:id => o['_id'].to_s).count)
110
135
  end
111
136
 
137
+ it 'filters unwanted records' do
138
+ data = [{:_id => BSON::ObjectId.from_time(Time.utc(2014, 7, 1)), :var => 2},
139
+ {:_id => BSON::ObjectId.from_time(Time.utc(2014, 7, 2)), :var => 3}]
140
+ collection = mongo["filter_test"]["collection"]
141
+ collection.drop
142
+ data.map { |rec| collection.insert(rec)}
143
+
144
+ @streamer.options[:skip_tail] = true
145
+ @streamer.initial_import
146
+
147
+ inserted_records = @sequel[:filter_sqltable].select
148
+ assert_equal(1, inserted_records.count)
149
+ record = inserted_records.first
150
+ data[1][:_id] = data[1][:_id].to_s
151
+ assert_equal(data[1], record)
152
+ end
153
+
112
154
  describe '.bulk_upsert' do
113
155
  it 'inserts multiple rows' do
114
156
  objs = [
115
157
  { '_id' => BSON::ObjectId.new, 'var' => 0 },
116
- { '_id' => BSON::ObjectId.new, 'var' => 1 },
158
+ { '_id' => BSON::ObjectId.new, 'var' => 1, 'arry' => [1, 2] },
117
159
  { '_id' => BSON::ObjectId.new, 'var' => 3 },
118
160
  ].map { |o| @map.transform('mosql_test.collection', o) }
119
161
 
@@ -13,6 +13,7 @@ db:
13
13
  :type: TEXT
14
14
  - var: INTEGER
15
15
  - str: TEXT
16
+ - arry: INTEGER ARRAY
16
17
  with_extra_props:
17
18
  :meta:
18
19
  :table: sqltable2
@@ -26,9 +27,22 @@ db:
26
27
  - _id: TEXT
27
28
  :meta:
28
29
  :table: sqltable3
30
+ with_extra_props_type:
31
+ :meta:
32
+ :table: sqltable4
33
+ :extra_props: JSON
34
+ :columns:
35
+ - _id: TEXT
36
+ treat_array_as_string:
37
+ :columns:
38
+ - _id: TEXT
39
+ - arry: TEXT
40
+ :meta:
41
+ :table: sqltable5
29
42
  EOF
30
43
 
31
44
  before do
45
+ Sequel.extension(:pg_array)
32
46
  @map = MoSQL::Schema.new(YAML.load(TEST_MAP))
33
47
  end
34
48
 
@@ -82,26 +96,37 @@ EOF
82
96
  db.expects(:create_table?).with('sqltable')
83
97
  db.expects(:create_table?).with('sqltable2')
84
98
  db.expects(:create_table?).with('sqltable3')
99
+ db.expects(:create_table?).with('sqltable4')
100
+ db.expects(:create_table?).with('sqltable5')
85
101
 
86
102
  @map.create_schema(db)
87
103
  end
88
104
 
89
105
  it 'creates a SQL schema with the right fields' do
90
106
  db = {}
91
- stub_1 = stub()
107
+ stub_1 = stub('table 1')
92
108
  stub_1.expects(:column).with('id', 'TEXT', {})
93
109
  stub_1.expects(:column).with('var', 'INTEGER', {})
94
110
  stub_1.expects(:column).with('str', 'TEXT', {})
111
+ stub_1.expects(:column).with('arry', 'INTEGER ARRAY', {})
95
112
  stub_1.expects(:column).with('_extra_props').never
96
113
  stub_1.expects(:primary_key).with([:id])
97
- stub_2 = stub()
114
+ stub_2 = stub('table 2')
98
115
  stub_2.expects(:column).with('id', 'TEXT', {})
99
116
  stub_2.expects(:column).with('_extra_props', 'TEXT')
100
117
  stub_2.expects(:primary_key).with([:id])
101
- stub_3 = stub()
118
+ stub_3 = stub('table 3')
102
119
  stub_3.expects(:column).with('_id', 'TEXT', {})
103
120
  stub_3.expects(:column).with('_extra_props').never
104
121
  stub_3.expects(:primary_key).with([:_id])
122
+ stub_4 = stub('table 4')
123
+ stub_4.expects(:column).with('_id', 'TEXT', {})
124
+ stub_4.expects(:column).with('_extra_props', 'JSON')
125
+ stub_4.expects(:primary_key).with([:_id])
126
+ stub_5 = stub('table 5')
127
+ stub_5.expects(:column).with('_id', 'TEXT', {})
128
+ stub_5.expects(:column).with('arry', 'TEXT', {})
129
+ stub_5.expects(:primary_key).with([:_id])
105
130
  (class << db; self; end).send(:define_method, :create_table?) do |tbl, &blk|
106
131
  case tbl
107
132
  when "sqltable"
@@ -110,6 +135,10 @@ EOF
110
135
  o = stub_2
111
136
  when "sqltable3"
112
137
  o = stub_3
138
+ when "sqltable4"
139
+ o = stub_4
140
+ when "sqltable5"
141
+ o = stub_5
113
142
  else
114
143
  assert(false, "Tried to create an unexpected table: #{tbl}")
115
144
  end
@@ -120,8 +149,8 @@ EOF
120
149
 
121
150
  describe 'when transforming' do
122
151
  it 'transforms rows' do
123
- out = @map.transform('db.collection', {'_id' => "row 1", 'var' => 6, 'str' => 'a string'})
124
- assert_equal(["row 1", 6, 'a string'], out)
152
+ out = @map.transform('db.collection', {'_id' => "row 1", 'var' => 6, 'str' => 'a string', 'arry' => [1,2,3]})
153
+ assert_equal(["row 1", 6, 'a string', [1,2,3]], out)
125
154
  end
126
155
 
127
156
  it 'Includes extra props' do
@@ -132,13 +161,20 @@ EOF
132
161
  end
133
162
 
134
163
  it 'gets all_columns right' do
135
- assert_equal(['id', 'var', 'str'], @map.all_columns(@map.find_ns('db.collection')))
164
+ assert_equal(['id', 'var', 'str', 'arry'], @map.all_columns(@map.find_ns('db.collection')))
136
165
  assert_equal(['id', '_extra_props'], @map.all_columns(@map.find_ns('db.with_extra_props')))
137
166
  end
138
167
 
139
168
  it 'stringifies symbols' do
140
- out = @map.transform('db.collection', {'_id' => "row 1", 'str' => :stringy})
141
- assert_equal(["row 1", nil, 'stringy'], out)
169
+ out = @map.transform('db.collection', {'_id' => "row 1", 'str' => :stringy, 'arry' => [1,2,3]})
170
+ assert_equal(["row 1", nil, 'stringy', [1,2,3]], out)
171
+ end
172
+
173
+ it 'extracts object ids from a DBRef' do
174
+ oid = BSON::ObjectId.new
175
+ out = @map.transform('db.collection', {'_id' => "row 1",
176
+ 'str' => BSON::DBRef.new('db.otherns', oid)})
177
+ assert_equal(["row 1", nil, oid.to_s, nil], out)
142
178
  end
143
179
 
144
180
  it 'changes NaN to null in extra_props' do
@@ -147,6 +183,25 @@ EOF
147
183
  assert(extra.key?('nancy'))
148
184
  assert_equal(nil, extra['nancy'])
149
185
  end
186
+
187
+ it 'base64-encodes BSON::Binary blobs in extra_props' do
188
+ out = @map.transform('db.with_extra_props',
189
+ {'_id' => 7,
190
+ 'blob' => BSON::Binary.new("\x00\x00\x00"),
191
+ 'embedded' => {'thing' => BSON::Binary.new("\x00\x00\x00")}})
192
+ extra = JSON.parse(out[1])
193
+ assert(extra.key?('blob'))
194
+ assert_equal('AAAA', extra['blob'].strip)
195
+ refute_nil(extra['embedded'])
196
+ refute_nil(extra['embedded']['thing'])
197
+ assert_equal('AAAA', extra['embedded']['thing'].strip)
198
+ end
199
+
200
+ it 'will treat arrays as strings when schame says to' do
201
+ out = @map.transform('db.treat_array_as_string', {'_id' => 1, 'arry' => [1, 2, 3]})
202
+ assert_equal(out[0], 1)
203
+ assert_equal(out[1], '[1,2,3]')
204
+ end
150
205
  end
151
206
 
152
207
  describe 'when copying data' do
@@ -281,4 +336,23 @@ EOF
281
336
  end
282
337
  end
283
338
  end
339
+
340
+ describe 'dotted names' do
341
+ MAP = <<EOF
342
+ db:
343
+ my.collection:
344
+ :meta:
345
+ :table: table
346
+ :columns:
347
+ - _id: TEXT
348
+ EOF
349
+
350
+ it 'handles dotted names' do
351
+ @map = MoSQL::Schema.new(YAML.load(MAP))
352
+ collections = @map.collections_for_mongo_db('db')
353
+ assert(collections.include?('my.collection'),
354
+ "#{collections} doesn't include `my.collection`")
355
+ assert(@map.find_ns('db.my.collection'))
356
+ end
357
+ end
284
358
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: mosql
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.3.1
4
+ version: 0.3.2
5
5
  prerelease:
6
6
  platform: ruby
7
7
  authors:
@@ -9,7 +9,7 @@ authors:
9
9
  autorequire:
10
10
  bindir: bin
11
11
  cert_chain: []
12
- date: 2013-10-19 00:00:00.000000000 Z
12
+ date: 2014-08-11 00:00:00.000000000 Z
13
13
  dependencies:
14
14
  - !ruby/object:Gem::Dependency
15
15
  name: sequel
@@ -223,7 +223,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
223
223
  version: '0'
224
224
  requirements: []
225
225
  rubyforge_project:
226
- rubygems_version: 1.8.23
226
+ rubygems_version: 1.8.23.2
227
227
  signing_key:
228
228
  specification_version: 3
229
229
  summary: MongoDB -> SQL streaming bridge