pg_data_encoder 0.1.8 → 0.1.9

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 6934d6ca62f34e3ca38a53bdef9eb4300d616687
-  data.tar.gz: 04e0ec2af98f46d67072c6f2e662db470a851953
+  metadata.gz: 431bdd87361eddc821f92a2f930ea9164d6d5520
+  data.tar.gz: e4ed69cc46abb4ac4ce20096e66cd5f9356361f0
 SHA512:
-  metadata.gz: 310e919ea938f3897d16ec8e7f46fc04844a31ad00b4fa0078a102eae60517be8e948cd99e213bd5f86d9ed62720ba653117b569215cafd92d06d877f3fb8067
-  data.tar.gz: 4cc2f6d4cf47bc15e60dddbddcd90af5bc08528d33947d236e95f8b2121d8089f7206341df923141f49ef8b99d8a6e51fac78c75f366ef9b89949ef20cbdfef6
+  metadata.gz: f30bc9742ae41a779ba406eb29451cdbf0abbdef4aa678044c2b2fab923de2014097f8ab14e3ed5314255b73e62f1677643ffe9884af26335cb6645b57bace09
+  data.tar.gz: 1f435434aa8517f0114d66d47d92cb1229946a954fc21319dbff0b820c6183387c4d100a04a5337d7b9fea0eec79dd6fcf12a23be114b1234d23a858a836c527
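
The digests above cover the two archives packed inside the released .gem file. A hedged sketch of recomputing them by hand (the local filename is an assumption; `gem fetch pg_data_encoder -v 0.1.9` would produce it):

    # Recompute the SHA512 digests recorded in checksums.yaml.
    require 'rubygems/package'
    require 'digest'

    Gem::Package::TarReader.new(File.open("pg_data_encoder-0.1.9.gem", "rb")) do |tar|
      tar.each do |entry|
        next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
        puts "#{entry.full_name}: #{Digest::SHA512.hexdigest(entry.read)}"
      end
    end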
@@ -161,7 +161,7 @@ module PgDataEncoder
           value = nil
         else
           value = io.read(value_size)
-          value = value.force_encoding("UTF-8") if value.present?
+          value = value.force_encoding("UTF-8") if !value.nil?
         end
         h[key] = value
         #p h
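
This guard change matters outside Rails: `present?` comes from ActiveSupport, not core Ruby, so the old check raised NoMethodError in a plain Ruby process, and it also skipped empty strings, leaving them tagged ASCII-8BIT. A minimal illustration in plain Ruby (no ActiveSupport loaded):

    value = ""
    value.respond_to?(:present?)                          # => false without ActiveSupport
    value = value.force_encoding("UTF-8") if !value.nil?  # new guard runs for "" as well
    value.encoding                                        # => #<Encoding:UTF-8>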
@@ -106,78 +106,90 @@ module PgDataEncoder
           io.write(buf)
         end
       when Array
-        array_io = TempBuffer.new
-        field.compact!
-        completed = false
-        case field[0]
-        when String
-          if @options[:column_types][index] == :uuid
-            array_io.write([1].pack("N")) # unknown
-            array_io.write([0].pack("N")) # unknown
+        if @options[:column_types] && @options[:column_types][index] == :json
+          buf = field.to_json.encode("UTF-8")
+          io.write([buf.bytesize].pack("N"))
+          io.write(buf)
+        else
+          array_io = TempBuffer.new
+          field.compact!
+          completed = false
+          case field[0]
+          when String
+            if @options[:column_types][index] == :uuid
+              array_io.write([1].pack("N")) # unknown
+              array_io.write([0].pack("N")) # unknown

-            array_io.write([2950].pack("N")) # I think is used to determine string data type
-            array_io.write([field.size].pack("N"))
-            array_io.write([1].pack("N")) # forcing single dimension array for now
-
-            field.each_with_index {|val, index|
-              array_io.write([16].pack("N"))
-              c = [val.gsub(/-/, "")].pack('H*')
-              array_io.write(c)
+              array_io.write([2950].pack("N")) # I think is used to determine string data type
+              array_io.write([field.size].pack("N"))
+              array_io.write([1].pack("N")) # forcing single dimension array for now

-            }
-          else
+              field.each_with_index {|val, index|
+                array_io.write([16].pack("N"))
+                c = [val.gsub(/-/, "")].pack('H*')
+                array_io.write(c)
+
+              }
+            else
+              array_io.write([1].pack("N")) # unknown
+              array_io.write([0].pack("N")) # unknown
+
+              array_io.write([1043].pack("N")) # I think is used to determine string data type
+              array_io.write([field.size].pack("N"))
+              array_io.write([1].pack("N")) # forcing single dimension array for now
+
+              field.each_with_index {|val, index|
+                buf = val.to_s.encode("UTF-8")
+                array_io.write([buf.bytesize].pack("N"))
+                array_io.write(buf)
+
+              }
+            end
+          when Integer
             array_io.write([1].pack("N")) # unknown
             array_io.write([0].pack("N")) # unknown

-            array_io.write([1043].pack("N")) # I think is used to determine string data type
+            array_io.write([23].pack("N")) # I think is used to determine int data type
             array_io.write([field.size].pack("N"))
             array_io.write([1].pack("N")) # forcing single dimension array for now

             field.each_with_index {|val, index|
-              buf = val.to_s.encode("UTF-8")
+              buf = [val.to_i].pack("N")
               array_io.write([buf.bytesize].pack("N"))
               array_io.write(buf)

             }
+          when nil
+            io.write([-1].pack("N"))
+            completed = true
+          else
+            raise Exception.new("Arrays support int or string only")
           end
-        when Integer
-          array_io.write([1].pack("N")) # unknown
-          array_io.write([0].pack("N")) # unknown

-          array_io.write([23].pack("N")) # I think is used to determine int data type
-          array_io.write([field.size].pack("N"))
-          array_io.write([1].pack("N")) # forcing single dimension array for now
-
-          field.each_with_index {|val, index|
-            buf = [val.to_i].pack("N")
-            array_io.write([buf.bytesize].pack("N"))
-            array_io.write(buf)
-
-          }
-        when nil
-          io.write([-1].pack("N"))
-          completed = true
-        else
-          raise Exception.new("Arrays support int or string only")
-        end
-
-        if !completed
-          io.write([array_io.pos].pack("N"))
-          io.write(array_io.string)
+          if !completed
+            io.write([array_io.pos].pack("N"))
+            io.write(array_io.string)
+          end
         end
       when Hash
         raise Exception.new("Hash's can't contain hashes") if depth > 0
-        hash_io = TempBuffer.new
-
-        hash_io.write([field.size].pack("N"))
-        field.each_pair {|key,val|
-          buf = key.to_s.encode("UTF-8")
-          hash_io.write([buf.bytesize].pack("N"))
-          hash_io.write(buf.to_s)
-          encode_field(hash_io, val.nil? ? val : val.to_s, index, depth + 1)
-        }
-        io.write([hash_io.pos].pack("N")) # size of hstore data
-        io.write(hash_io.string)
+        if @options[:column_types] && @options[:column_types][index] == :json
+          buf = field.to_json.encode("UTF-8")
+          io.write([buf.bytesize].pack("N"))
+          io.write(buf)
+        else
+          hash_io = TempBuffer.new
+
+          hash_io.write([field.size].pack("N"))
+          field.each_pair {|key,val|
+            buf = key.to_s.encode("UTF-8")
+            hash_io.write([buf.bytesize].pack("N"))
+            hash_io.write(buf.to_s)
+            encode_field(hash_io, val.nil? ? val : val.to_s, index, depth + 1)
+          }
+          io.write([hash_io.pos].pack("N")) # size of hstore data
+          io.write(hash_io.string)
+        end
       when Time
         buf = [(field.to_f * 1_000_000 - POSTGRES_EPOCH_TIME).to_i].pack("L!>")
         io.write([buf.bytesize].pack("N"))
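
Two notes on this hunk. First, the new `:json` branches work because PostgreSQL's binary COPY representation of a `json` value is simply its UTF-8 text, length-prefixed like any other field, so `to_json` plus a four-byte size header is all the encoder needs. Second, the header writes labelled `# unknown` follow PostgreSQL's binary array layout: ndim (int32), a has-null flag (int32), the element type OID (2950 = uuid, 1043 = varchar, 23 = int4), then a size/lower-bound pair per dimension. A sketch of the 20-byte header the varchar branch emits (not gem code; `values` is a stand-in for `field`):

    values = %w[a b c]
    header = [
      1,           # ndim: one dimension (the first "unknown")
      0,           # has-null flag: 0, since field.compact! removed nils
      1043,        # element type OID: 1043 = varchar
      values.size, # length of dimension 1
      1            # lower bound of dimension 1 (Postgres arrays are 1-based)
    ].pack("N5")

Each element then follows as a four-byte length plus its bytes; the fixed `[16]` in the uuid branch is that length, since a uuid is 16 bytes.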
@@ -1,3 +1,3 @@
 module PgDataEncoder
-  VERSION = "0.1.8"
+  VERSION = "0.1.9"
 end
@@ -3,6 +3,7 @@ require "pg_data_encoder/version"
 require 'pg_data_encoder/temp_buffer'
 require 'pg_data_encoder/encode_for_copy'
 require 'pg_data_encoder/decoder'
+require 'json'

 module PgDataEncoder
   # Your code goes here...
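
The new require backs the `field.to_json` calls added above: core Hash and Array only gain `to_json` once the json stdlib is loaded, so without it the encoder would raise NoMethodError in a non-Rails process:

    {}.respond_to?(:to_json)   # => false before the stdlib is loaded
    require 'json'
    {"a" => [1, 2]}.to_json    # => "{\"a\":[1,2]}"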
spec/fixtures/json.dat ADDED (binary file)
spec/fixtures/json_array.dat ADDED (binary file)
@@ -252,6 +252,32 @@ describe "generating data" do
     str.should == existing_data
   end

+  it 'should encode json hash correctly' do
+    encoder = PgDataEncoder::EncodeForCopy.new(column_types: {0 => :json})
+    encoder.add [{}]
+    encoder.close
+    io = encoder.get_io
+    existing_data = filedata("json.dat")
+    str = io.read
+    io.class.name.should == "StringIO"
+    str.force_encoding("ASCII-8BIT")
+    #File.open("spec/fixtures/output.dat", "w:ASCII-8BIT") {|out| out.write(str) }
+    str.should == existing_data
+  end
+
+  it 'should encode json array correctly' do
+    encoder = PgDataEncoder::EncodeForCopy.new(column_types: {0 => :json})
+    encoder.add [[]]
+    encoder.close
+    io = encoder.get_io
+    existing_data = filedata("json_array.dat")
+    str = io.read
+    io.class.name.should == "StringIO"
+    str.force_encoding("ASCII-8BIT")
+    #File.open("spec/fixtures/output.dat", "w:ASCII-8BIT") {|out| out.write(str) }
+    str.should == existing_data
+  end
+
   it 'should encode float correctly from tempfile' do
     encoder = PgDataEncoder::EncodeForCopy.new(:use_tempfile => true)
     encoder.add [Time.parse("2013-06-11 15:03:54.62605 UTC")]
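
The two new specs pin the `:json` column type to the json.dat and json_array.dat fixtures. For context, a hedged end-to-end sketch of what the option is for (the `events` table, its `payload` json column, and the connection settings are assumptions, not part of the gem):

    require 'pg'
    require 'pg_data_encoder'

    # Encode one row whose first column should be written as json.
    encoder = PgDataEncoder::EncodeForCopy.new(column_types: {0 => :json})
    encoder.add [{"kind" => "signup", "ok" => true}]
    encoder.close

    # Stream the binary payload straight into Postgres.
    conn = PG.connect(dbname: "test")
    conn.copy_data("COPY events (payload) FROM STDIN (FORMAT binary)") do
      conn.put_copy_data(encoder.get_io.read)
    end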
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: pg_data_encoder
 version: !ruby/object:Gem::Version
-  version: 0.1.8
+  version: 0.1.9
 platform: ruby
 authors:
 - Pete Brumm
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2015-03-19 00:00:00.000000000 Z
+date: 2016-01-29 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rspec
@@ -80,6 +80,8 @@ files:
 - spec/fixtures/float.dat
 - spec/fixtures/hstore_utf8.dat
 - spec/fixtures/intarray.dat
+- spec/fixtures/json.dat
+- spec/fixtures/json_array.dat
 - spec/fixtures/just_an_array.dat
 - spec/fixtures/just_an_array2.dat
 - spec/fixtures/multiline_hstore.dat
@@ -115,7 +117,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.2.2
+rubygems_version: 2.2.3
 signing_key:
 specification_version: 4
 summary: for faster input of data into postgres you can use this to generate the binary
@@ -143,6 +145,8 @@ test_files:
 - spec/fixtures/float.dat
 - spec/fixtures/hstore_utf8.dat
 - spec/fixtures/intarray.dat
+- spec/fixtures/json.dat
+- spec/fixtures/json_array.dat
 - spec/fixtures/just_an_array.dat
 - spec/fixtures/just_an_array2.dat
 - spec/fixtures/multiline_hstore.dat