pg_csv 0.1 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/Gemfile.lock +1 -1
- data/README.md +2 -2
- data/lib/pg_csv.rb +8 -5
- data/lib/pg_csv_version.rb +1 -1
- data/spec/pg_csv_spec.rb +22 -8
- metadata +6 -7
data/Gemfile.lock
CHANGED
data/README.md
CHANGED
@@ -22,9 +22,9 @@ Options:
   :delimiter => ["\t", ",", ]
   :header => boolean, use pg header for fields?
   :logger => logger
-  :columns =>
+  :columns => array of column names, ignore :header option

-  :temp_file => boolean, generate throught temp file
+  :temp_file => boolean, generate throught temp file? final file appears by mv
   :temp_dir => for :temp_file, ex: '/tmp'

   :type => :plain - return full string
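The README hunk above adds the :columns option and clarifies :temp_file. As a rough sketch only (the connection setup, table and output path below are placeholders, not from the gem's docs; the option names come from this diff and the new specs), a call combining them could look like:

    require 'pg_csv'

    # Placeholder connection and SQL; only the options mirror the diff above.
    exporter = PgCsv.new(
      :connection => ActiveRecord::Base.connection,
      :sql        => "select a, b, c from tests order by a desc",
      :type       => :gzip
    )

    # :columns writes an explicit header row (and overrides :header);
    # :temp_file builds the file in :temp_dir first, then mv's it into place.
    exporter.export("/tmp/report.csv.gz",
      :delimiter => "|",
      :columns   => %w{a b c},
      :temp_file => true,
      :temp_dir  => "/tmp"
    )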
data/lib/pg_csv.rb
CHANGED
@@ -25,6 +25,9 @@ class PgCsv
   # do export :to - filename or stream
   def export(to, opts = {})
     @local_options = opts
+
+    raise ":connection should be" unless connection
+    raise ":sql should be" unless o(:sql)

     with_temp_file(to, o(:temp_file), o(:temp_dir)) do |_to|
       export_to(_to)
@@ -64,7 +67,7 @@ protected
       File.open(to, 'w', &exporter)

     when :gzip
-      check_to_str(to)
+      check_to_str(to)
       Zlib::GzipWriter.open(to, &exporter)

     when :stream
@@ -84,7 +87,7 @@ protected
   end

   def check_to_str(to)
-    raise "to should be an string" unless to.is_a?(String)
+    raise "'to' should be an string" unless to.is_a?(String)
   end

   def export_to_stream(stream)
@@ -106,16 +109,16 @@ protected

   def load_data
     info "#{query}"
-
+    raw = connection.raw_connection

     info "=> query"
-    q =
+    q = raw.exec(query)
     info "<= query"

     info "=> write data"
     yield(columns_str) if columns_str

-    while row =
+    while row = raw.get_copy_data()
       yield row
     end
     info "<= write data"
data/lib/pg_csv_version.rb
CHANGED
data/spec/pg_csv_spec.rb
CHANGED
@@ -8,10 +8,7 @@ describe PgCsv do
     Test.create :a => 4, :b => 5, :c => 6

     @name = tmp_dir + "1.csv"
-    @gzname = tmp_dir + "1.gz"
-
     FileUtils.rm(@name) rescue nil
-    FileUtils.rm(@gzname) rescue nil

     @sql0 = "select a,b,c from tests order by a asc"
     @sql = "select a,b,c from tests order by a desc"
@@ -19,10 +16,8 @@ describe PgCsv do

   after :each do
     FileUtils.rm(@name) rescue nil
-    FileUtils.rm(@gzname) rescue nil
   end

-
   describe "simple export" do

     it "1" do
@@ -96,9 +91,9 @@ describe PgCsv do

   describe "different types of export" do
     it "gzip export" do
-      File.exists?(@
-      PgCsv.new(:sql => @sql, :type => :gzip).export(@
-      with_gzfile(@
+      File.exists?(@name).should be_false
+      PgCsv.new(:sql => @sql, :type => :gzip).export(@name)
+      with_gzfile(@name){|d| d.should == "4,5,6\n1,2,3\n" }
     end

     it "plain export" do
@@ -120,5 +115,24 @@ describe PgCsv do
       with_file(@name){|d| d.should == "4,5,6\n1,2,3\n" }
     end
   end
+
+  describe "integration specs" do
+    it "1" do
+      File.exists?(@name).should be_false
+      PgCsv.new(:sql => @sql, :type => :gzip).export(@name, :delimiter => "|", :columns => %w{q w e}, :temp_file => true, :temp_dir => tmp_dir)
+      with_gzfile(@name){|d| d.should == "q|w|e\n4|5|6\n1|2|3\n" }
+    end
+
+    it "2" do
+      Zlib::GzipWriter.open(@name) do |gz|
+        e = PgCsv.new(:sql => @sql, :type => :stream)
+
+        e.export(gz, :delimiter => "|", :columns => %w{q w e} )
+        e.export(gz, :delimiter => "*", :sql => @sql0)
+      end
+
+      with_gzfile(@name){|d| d.should == "q|w|e\n4|5|6\n1|2|3\n1*2*3\n4*5*6\n" }
+    end
+  end

 end
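The new "integration specs" above also show the :stream type being pointed at an open GzipWriter, with options passed per export call. Outside the test suite that pattern would look roughly like this (paths, SQL and connection setup are placeholders; the calls mirror integration spec "2" above):

    require 'pg_csv'
    require 'zlib'

    # Placeholder setup; per-call options apply only to that export call.
    exporter = PgCsv.new(:sql => "select a, b, c from tests", :type => :stream)

    Zlib::GzipWriter.open("/tmp/combined.csv.gz") do |gz|
      exporter.export(gz, :delimiter => "|", :columns => %w{a b c})
      exporter.export(gz, :delimiter => "*", :sql => "select a, b, c from tests order by a")
    end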
metadata
CHANGED
@@ -1,12 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: pg_csv
 version: !ruby/object:Gem::Version
-  hash:
-  prerelease:
+  hash: 25
+  prerelease:
   segments:
   - 0
   - 1
-
+  - 1
+  version: 0.1.1
 platform: ruby
 authors:
 - Makarchev Konstantin
@@ -14,8 +15,7 @@ autorequire:
 bindir: bin
 cert_chain: []

-date: 2012-05-31 00:00:00
-default_executable:
+date: 2012-05-31 00:00:00 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: pg
@@ -96,7 +96,6 @@ files:
 - spec/spec_helper.rb
 - spec/spec_support.rb
 - spec/tmp/.gitkeep
-has_rdoc: true
 homepage: http://github.com/kostya/pg_csv
 licenses: []

@@ -126,7 +125,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 requirements: []

 rubyforge_project:
-rubygems_version: 1.
+rubygems_version: 1.8.24
 signing_key:
 specification_version: 3
 summary: Fast AR/PostgreSQL csv export. Used pg function 'copy to csv'. Effective on millions rows.