autocompl 0.2.1 → 0.2.2

Files changed (131)
  1. checksums.yaml +4 -4
  2. data/lib/autocompl/repository.rb +11 -1
  3. data/lib/autocompl/version.rb +1 -1
  4. data/test/dummy/log/development.log +467 -0
  5. data/test/dummy/log/test.log +3 -0
  6. data/test/dummy/vendor/bundle/ruby/2.3.0/bin/console +23 -0
  7. data/test/dummy/vendor/bundle/ruby/2.3.0/cache/pg-0.19.0.gem +0 -0
  8. data/test/dummy/vendor/bundle/ruby/2.3.0/cache/pq-0.0.1.gem +0 -0
  9. data/test/dummy/vendor/bundle/ruby/2.3.0/extensions/x86_64-darwin-15/2.3.0-static/pg-0.19.0/gem.build_complete +0 -0
  10. data/test/dummy/vendor/bundle/ruby/2.3.0/extensions/x86_64-darwin-15/2.3.0-static/pg-0.19.0/gem_make.out +78 -0
  11. data/test/dummy/vendor/bundle/ruby/2.3.0/extensions/x86_64-darwin-15/2.3.0-static/pg-0.19.0/mkmf.log +1346 -0
  12. data/test/dummy/vendor/bundle/ruby/2.3.0/extensions/x86_64-darwin-15/2.3.0-static/pg-0.19.0/pg_ext.bundle +0 -0
  13. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/BSDL +22 -0
  14. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ChangeLog +6378 -0
  15. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/Contributors.rdoc +46 -0
  16. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/History.rdoc +363 -0
  17. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/LICENSE +56 -0
  18. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/Manifest.txt +85 -0
  19. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/POSTGRES +23 -0
  20. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/README-OS_X.rdoc +68 -0
  21. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/README-Windows.rdoc +56 -0
  22. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/README.ja.rdoc +14 -0
  23. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/README.rdoc +168 -0
  24. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/Rakefile +216 -0
  25. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/Rakefile.cross +301 -0
  26. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/Makefile +261 -0
  27. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/errorcodes.def +947 -0
  28. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/errorcodes.rb +45 -0
  29. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/errorcodes.txt +467 -0
  30. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/extconf.h +38 -0
  31. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/extconf.rb +112 -0
  32. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/gvl_wrappers.c +13 -0
  33. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/gvl_wrappers.h +257 -0
  34. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/gvl_wrappers.o +0 -0
  35. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg.c +667 -0
  36. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg.h +395 -0
  37. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg.o +0 -0
  38. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_binary_decoder.c +162 -0
  39. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_binary_decoder.o +0 -0
  40. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_binary_encoder.c +162 -0
  41. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_binary_encoder.o +0 -0
  42. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_coder.c +500 -0
  43. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_coder.o +0 -0
  44. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_connection.c +4102 -0
  45. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_connection.o +0 -0
  46. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_copy_coder.c +591 -0
  47. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_copy_coder.o +0 -0
  48. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_errors.c +95 -0
  49. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_errors.o +0 -0
  50. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_ext.bundle +0 -0
  51. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_result.c +1271 -0
  52. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_result.o +0 -0
  53. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_text_decoder.c +421 -0
  54. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_text_decoder.o +0 -0
  55. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_text_encoder.c +683 -0
  56. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_text_encoder.o +0 -0
  57. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_type_map.c +159 -0
  58. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_type_map.o +0 -0
  59. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_type_map_all_strings.c +116 -0
  60. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_type_map_all_strings.o +0 -0
  61. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_type_map_by_class.c +239 -0
  62. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_type_map_by_class.o +0 -0
  63. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_type_map_by_column.c +312 -0
  64. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_type_map_by_column.o +0 -0
  65. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_type_map_by_mri_type.c +284 -0
  66. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_type_map_by_mri_type.o +0 -0
  67. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_type_map_by_oid.c +355 -0
  68. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_type_map_by_oid.o +0 -0
  69. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_type_map_in_ruby.c +299 -0
  70. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/pg_type_map_in_ruby.o +0 -0
  71. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/util.c +149 -0
  72. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/util.h +65 -0
  73. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/util.o +0 -0
  74. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/vc/pg.sln +26 -0
  75. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/vc/pg_18/pg.vcproj +216 -0
  76. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/ext/vc/pg_19/pg_19.vcproj +209 -0
  77. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/lib/pg.rb +64 -0
  78. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/lib/pg/basic_type_mapping.rb +426 -0
  79. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/lib/pg/coder.rb +83 -0
  80. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/lib/pg/connection.rb +271 -0
  81. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/lib/pg/constants.rb +11 -0
  82. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/lib/pg/exceptions.rb +11 -0
  83. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/lib/pg/result.rb +30 -0
  84. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/lib/pg/text_decoder.rb +51 -0
  85. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/lib/pg/text_encoder.rb +35 -0
  86. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/lib/pg/type_map_by_column.rb +15 -0
  87. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/lib/pg_ext.bundle +0 -0
  88. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/sample/array_insert.rb +20 -0
  89. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/sample/async_api.rb +106 -0
  90. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/sample/async_copyto.rb +39 -0
  91. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/sample/async_mixed.rb +56 -0
  92. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/sample/check_conn.rb +21 -0
  93. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/sample/copyfrom.rb +81 -0
  94. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/sample/copyto.rb +19 -0
  95. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/sample/cursor.rb +21 -0
  96. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/sample/disk_usage_report.rb +186 -0
  97. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/sample/issue-119.rb +94 -0
  98. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/sample/losample.rb +69 -0
  99. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/sample/minimal-testcase.rb +17 -0
  100. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/sample/notify_wait.rb +72 -0
  101. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/sample/pg_statistics.rb +294 -0
  102. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/sample/replication_monitor.rb +231 -0
  103. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/sample/test_binary_values.rb +33 -0
  104. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/sample/wal_shipper.rb +434 -0
  105. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/sample/warehouse_partitions.rb +320 -0
  106. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/spec/data/expected_trace.out +26 -0
  107. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/spec/data/random_binary_data +0 -0
  108. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/spec/helpers.rb +352 -0
  109. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/spec/pg/basic_type_mapping_spec.rb +305 -0
  110. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/spec/pg/connection_spec.rb +1676 -0
  111. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/spec/pg/result_spec.rb +449 -0
  112. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/spec/pg/type_map_by_class_spec.rb +138 -0
  113. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/spec/pg/type_map_by_column_spec.rb +222 -0
  114. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/spec/pg/type_map_by_mri_type_spec.rb +136 -0
  115. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/spec/pg/type_map_by_oid_spec.rb +149 -0
  116. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/spec/pg/type_map_in_ruby_spec.rb +164 -0
  117. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/spec/pg/type_map_spec.rb +22 -0
  118. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/spec/pg/type_spec.rb +777 -0
  119. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/spec/pg_spec.rb +50 -0
  120. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pq-0.0.1/Gemfile +4 -0
  121. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pq-0.0.1/LICENSE.txt +22 -0
  122. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pq-0.0.1/README.md +76 -0
  123. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pq-0.0.1/Rakefile +1 -0
  124. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pq-0.0.1/bin/console +7 -0
  125. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pq-0.0.1/lib/pq.rb +99 -0
  126. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pq-0.0.1/pq.gemspec +29 -0
  127. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pq-0.0.1/spec/helpers.rb +10 -0
  128. data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pq-0.0.1/spec/queue_spec.rb +84 -0
  129. data/test/dummy/vendor/bundle/ruby/2.3.0/specifications/pg-0.19.0.gemspec +63 -0
  130. data/test/dummy/vendor/bundle/ruby/2.3.0/specifications/pq-0.0.1.gemspec +49 -0
  131. metadata +253 -1
data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/spec/pg/result_spec.rb
@@ -0,0 +1,449 @@
+ #!/usr/bin/env rspec
+ # encoding: utf-8
+
+ require_relative '../helpers'
+
+ require 'pg'
+
+
+ describe PG::Result do
+
+   it "acts as an array of hashes" do
+     res = @conn.exec("SELECT 1 AS a, 2 AS b")
+     expect( res[0]['a'] ).to eq( '1' )
+     expect( res[0]['b'] ).to eq( '2' )
+   end
+
+   it "yields a row as an array" do
+     res = @conn.exec("SELECT 1 AS a, 2 AS b")
+     list = []
+     res.each_row { |r| list << r }
+     expect( list ).to eq [['1', '2']]
+   end
+
+   it "yields a row as an Enumerator" do
+     res = @conn.exec("SELECT 1 AS a, 2 AS b")
+     e = res.each_row
+     expect( e ).to be_a_kind_of(Enumerator)
+     pending "Rubinius doesn't define RETURN_SIZED_ENUMERATOR()" if RUBY_ENGINE=='rbx'
+     expect( e.size ).to eq( 1 )
+     expect( e.to_a ).to eq [['1', '2']]
+   end
+
+   it "yields a row as an Enumerator of hashes" do
+     res = @conn.exec("SELECT 1 AS a, 2 AS b")
+     e = res.each
+     expect( e ).to be_a_kind_of(Enumerator)
+     pending "Rubinius doesn't define RETURN_SIZED_ENUMERATOR()" if RUBY_ENGINE=='rbx'
+     expect( e.size ).to eq( 1 )
+     expect( e.to_a ).to eq [{'a'=>'1', 'b'=>'2'}]
+   end
+
+   context "result streaming", :postgresql_92 do
+     it "can iterate over all tuples in single row mode" do
+       @conn.send_query( "SELECT generate_series(2,4) AS a; SELECT 1 AS b, generate_series(5,6) AS c" )
+       @conn.set_single_row_mode
+       expect(
+         @conn.get_result.stream_each.to_a
+       ).to eq(
+         [{'a'=>"2"}, {'a'=>"3"}, {'a'=>"4"}]
+       )
+       expect(
+         @conn.get_result.enum_for(:stream_each).to_a
+       ).to eq(
+         [{'b'=>"1", 'c'=>"5"}, {'b'=>"1", 'c'=>"6"}]
+       )
+       expect( @conn.get_result ).to be_nil
+     end
+
+     it "can iterate over all rows in single row mode" do
+       @conn.send_query( "SELECT generate_series(2,4) AS a; SELECT 1 AS b, generate_series(5,6) AS c" )
+       @conn.set_single_row_mode
+       expect(
+         @conn.get_result.enum_for(:stream_each_row).to_a
+       ).to eq(
+         [["2"], ["3"], ["4"]]
+       )
+       expect(
+         @conn.get_result.stream_each_row.to_a
+       ).to eq(
+         [["1", "5"], ["1", "6"]]
+       )
+       expect( @conn.get_result ).to be_nil
+     end
+
+     it "complains when not in single row mode" do
+       @conn.send_query( "SELECT generate_series(2,4)" )
+       expect{
+         @conn.get_result.stream_each_row.to_a
+       }.to raise_error(PG::InvalidResultStatus, /not in single row mode/)
+     end
+
+     it "complains when intersected with get_result" do
+       @conn.send_query( "SELECT 1" )
+       @conn.set_single_row_mode
+       expect{
+         @conn.get_result.stream_each_row.each{ @conn.get_result }
+       }.to raise_error(PG::NoResultError, /no result received/)
+     end
+
+     it "raises server errors" do
+       @conn.send_query( "SELECT 0/0" )
+       expect{
+         @conn.get_result.stream_each_row.to_a
+       }.to raise_error(PG::DivisionByZero)
+     end
+   end
+
+   it "inserts nil AS NULL and returns NULL as nil" do
+     res = @conn.exec("SELECT $1::int AS n", [nil])
+     expect( res[0]['n'] ).to be_nil()
+   end
+
+   it "encapsulates errors in a PGError object" do
+     exception = nil
+     begin
+       @conn.exec( "SELECT * FROM nonexistant_table" )
+     rescue PGError => err
+       exception = err
+     end
+
+     result = exception.result
+
+     expect( result ).to be_a( described_class() )
+     expect( result.error_field(PG::PG_DIAG_SEVERITY) ).to eq( 'ERROR' )
+     expect( result.error_field(PG::PG_DIAG_SQLSTATE) ).to eq( '42P01' )
+     expect(
+       result.error_field(PG::PG_DIAG_MESSAGE_PRIMARY)
+     ).to eq( 'relation "nonexistant_table" does not exist' )
+     expect( result.error_field(PG::PG_DIAG_MESSAGE_DETAIL) ).to be_nil()
+     expect( result.error_field(PG::PG_DIAG_MESSAGE_HINT) ).to be_nil()
+     expect( result.error_field(PG::PG_DIAG_STATEMENT_POSITION) ).to eq( '15' )
+     expect( result.error_field(PG::PG_DIAG_INTERNAL_POSITION) ).to be_nil()
+     expect( result.error_field(PG::PG_DIAG_INTERNAL_QUERY) ).to be_nil()
+     expect( result.error_field(PG::PG_DIAG_CONTEXT) ).to be_nil()
+     expect(
+       result.error_field(PG::PG_DIAG_SOURCE_FILE)
+     ).to match( /parse_relation\.c$|namespace\.c$/ )
+     expect( result.error_field(PG::PG_DIAG_SOURCE_LINE) ).to match( /^\d+$/ )
+     expect(
+       result.error_field(PG::PG_DIAG_SOURCE_FUNCTION)
+     ).to match( /^parserOpenTable$|^RangeVarGetRelid$/ )
+   end
+
+   it "encapsulates database object names for integrity constraint violations", :postgresql_93 do
+     @conn.exec( "CREATE TABLE integrity (id SERIAL PRIMARY KEY)" )
+     exception = nil
+     begin
+       @conn.exec( "INSERT INTO integrity VALUES (NULL)" )
+     rescue PGError => err
+       exception = err
+     end
+     result = exception.result
+
+     expect( result.error_field(PG::PG_DIAG_SCHEMA_NAME) ).to eq( 'public' )
+     expect( result.error_field(PG::PG_DIAG_TABLE_NAME) ).to eq( 'integrity' )
+     expect( result.error_field(PG::PG_DIAG_COLUMN_NAME) ).to eq( 'id' )
+     expect( result.error_field(PG::PG_DIAG_DATATYPE_NAME) ).to be_nil
+     expect( result.error_field(PG::PG_DIAG_CONSTRAINT_NAME) ).to be_nil
+   end
+
+   it "detects division by zero as SQLSTATE 22012" do
+     sqlstate = nil
+     begin
+       res = @conn.exec("SELECT 1/0")
+     rescue PGError => e
+       sqlstate = e.result.result_error_field( PG::PG_DIAG_SQLSTATE ).to_i
+     end
+     expect( sqlstate ).to eq( 22012 )
+   end
+
+   it "returns the same bytes in binary format that are sent in binary format" do
+     binary_file = File.join(Dir.pwd, 'spec/data', 'random_binary_data')
+     bytes = File.open(binary_file, 'rb').read
+     res = @conn.exec('VALUES ($1::bytea)',
+       [ { :value => bytes, :format => 1 } ], 1)
+     expect( res[0]['column1'] ).to eq( bytes )
+     expect( res.getvalue(0,0) ).to eq( bytes )
+     expect( res.values[0][0] ).to eq( bytes )
+     expect( res.column_values(0)[0] ).to eq( bytes )
+   end
+
+   it "returns the same bytes in binary format that are sent as inline text" do
+     binary_file = File.join(Dir.pwd, 'spec/data', 'random_binary_data')
+     bytes = File.open(binary_file, 'rb').read
+     @conn.exec("SET standard_conforming_strings=on")
+     res = @conn.exec("VALUES ('#{PG::Connection.escape_bytea(bytes)}'::bytea)", [], 1)
+     expect( res[0]['column1'] ).to eq( bytes )
+     expect( res.getvalue(0,0) ).to eq( bytes )
+     expect( res.values[0][0] ).to eq( bytes )
+     expect( res.column_values(0)[0] ).to eq( bytes )
+   end
+
+   it "returns the same bytes in text format that are sent in binary format" do
+     binary_file = File.join(Dir.pwd, 'spec/data', 'random_binary_data')
+     bytes = File.open(binary_file, 'rb').read
+     res = @conn.exec('VALUES ($1::bytea)',
+       [ { :value => bytes, :format => 1 } ])
+     expect( PG::Connection.unescape_bytea(res[0]['column1']) ).to eq( bytes )
+   end
+
+   it "returns the same bytes in text format that are sent as inline text" do
+     binary_file = File.join(Dir.pwd, 'spec/data', 'random_binary_data')
+     in_bytes = File.open(binary_file, 'rb').read
+
+     out_bytes = nil
+     @conn.exec("SET standard_conforming_strings=on")
+     res = @conn.exec("VALUES ('#{PG::Connection.escape_bytea(in_bytes)}'::bytea)", [], 0)
+     out_bytes = PG::Connection.unescape_bytea(res[0]['column1'])
+     expect( out_bytes ).to eq( in_bytes )
+   end
+
+   it "returns the parameter type of the specified prepared statement parameter", :postgresql_92 do
+     query = 'SELECT * FROM pg_stat_activity WHERE user = $1::name AND query = $2::text'
+     @conn.prepare( 'queryfinder', query )
+     res = @conn.describe_prepared( 'queryfinder' )
+
+     expect(
+       @conn.exec( 'SELECT format_type($1, -1)', [res.paramtype(0)] ).getvalue( 0, 0 )
+     ).to eq( 'name' )
+     expect(
+       @conn.exec( 'SELECT format_type($1, -1)', [res.paramtype(1)] ).getvalue( 0, 0 )
+     ).to eq( 'text' )
+   end
+
+   it "raises an exception when a negative index is given to #fformat" do
+     res = @conn.exec('SELECT * FROM pg_stat_activity')
+     expect {
+       res.fformat( -1 )
+     }.to raise_error( ArgumentError, /column number/i )
+   end
+
+   it "raises an exception when a negative index is given to #fmod" do
+     res = @conn.exec('SELECT * FROM pg_stat_activity')
+     expect {
+       res.fmod( -1 )
+     }.to raise_error( ArgumentError, /column number/i )
+   end
+
+   it "raises an exception when a negative index is given to #[]" do
+     res = @conn.exec('SELECT * FROM pg_stat_activity')
+     expect {
+       res[ -1 ]
+     }.to raise_error( IndexError, /-1 is out of range/i )
+   end
+
+   it "allows for conversion to an array of arrays" do
+     @conn.exec( 'CREATE TABLE valuestest ( foo varchar(33) )' )
+     @conn.exec( 'INSERT INTO valuestest ("foo") values (\'bar\')' )
+     @conn.exec( 'INSERT INTO valuestest ("foo") values (\'bar2\')' )
+
+     res = @conn.exec( 'SELECT * FROM valuestest' )
+     expect( res.values ).to eq( [ ["bar"], ["bar2"] ] )
+   end
+
+   # PQfmod
+   it "can return the type modifier for a result column" do
+     @conn.exec( 'CREATE TABLE fmodtest ( foo varchar(33) )' )
+     res = @conn.exec( 'SELECT * FROM fmodtest' )
+     expect( res.fmod(0) ).to eq( 33 + 4 ) # Column length + varlena size (4)
+   end
+
+   it "raises an exception when an invalid index is passed to PG::Result#fmod" do
+     @conn.exec( 'CREATE TABLE fmodtest ( foo varchar(33) )' )
+     res = @conn.exec( 'SELECT * FROM fmodtest' )
+     expect { res.fmod(1) }.to raise_error( ArgumentError )
+   end
+
+   it "raises an exception when an invalid (negative) index is passed to PG::Result#fmod" do
+     @conn.exec( 'CREATE TABLE fmodtest ( foo varchar(33) )' )
+     res = @conn.exec( 'SELECT * FROM fmodtest' )
+     expect { res.fmod(-11) }.to raise_error( ArgumentError )
+   end
+
+   it "doesn't raise an exception when a valid index is passed to PG::Result#fmod for a" +
+     " column with no typemod" do
+     @conn.exec( 'CREATE TABLE fmodtest ( foo text )' )
+     res = @conn.exec( 'SELECT * FROM fmodtest' )
+     expect( res.fmod(0) ).to eq( -1 )
+   end
+
+   # PQftable
+   it "can return the oid of the table from which a result column was fetched" do
+     @conn.exec( 'CREATE TABLE ftabletest ( foo text )' )
+     res = @conn.exec( 'SELECT * FROM ftabletest' )
+
+     expect( res.ftable(0) ).to be_nonzero()
+   end
+
+   it "raises an exception when an invalid index is passed to PG::Result#ftable" do
+     @conn.exec( 'CREATE TABLE ftabletest ( foo text )' )
+     res = @conn.exec( 'SELECT * FROM ftabletest' )
+
+     expect { res.ftable(18) }.to raise_error( ArgumentError )
+   end
+
+   it "raises an exception when an invalid (negative) index is passed to PG::Result#ftable" do
+     @conn.exec( 'CREATE TABLE ftabletest ( foo text )' )
+     res = @conn.exec( 'SELECT * FROM ftabletest' )
+
+     expect { res.ftable(-2) }.to raise_error( ArgumentError )
+   end
+
+   it "doesn't raise an exception when a valid index is passed to PG::Result#ftable for a " +
+     "column with no corresponding table" do
+     @conn.exec( 'CREATE TABLE ftabletest ( foo text )' )
+     res = @conn.exec( 'SELECT foo, LENGTH(foo) as length FROM ftabletest' )
+     expect( res.ftable(1) ).to eq( PG::INVALID_OID )
+   end
+
+   # PQftablecol
+   it "can return the column number (within its table) of a column in a result" do
+     @conn.exec( 'CREATE TABLE ftablecoltest ( foo text, bar numeric )' )
+     res = @conn.exec( 'SELECT * FROM ftablecoltest' )
+
+     expect( res.ftablecol(0) ).to eq( 1 )
+     expect( res.ftablecol(1) ).to eq( 2 )
+   end
+
+   it "raises an exception when an invalid index is passed to PG::Result#ftablecol" do
+     @conn.exec( 'CREATE TABLE ftablecoltest ( foo text, bar numeric )' )
+     res = @conn.exec( 'SELECT * FROM ftablecoltest' )
+
+     expect { res.ftablecol(32) }.to raise_error( ArgumentError )
+   end
+
+   it "raises an exception when an invalid (negative) index is passed to PG::Result#ftablecol" do
+     @conn.exec( 'CREATE TABLE ftablecoltest ( foo text, bar numeric )' )
+     res = @conn.exec( 'SELECT * FROM ftablecoltest' )
+
+     expect { res.ftablecol(-1) }.to raise_error( ArgumentError )
+   end
+
+   it "doesn't raise an exception when a valid index is passed to PG::Result#ftablecol for a " +
+     "column with no corresponding table" do
+     @conn.exec( 'CREATE TABLE ftablecoltest ( foo text )' )
+     res = @conn.exec( 'SELECT foo, LENGTH(foo) as length FROM ftablecoltest' )
+     expect( res.ftablecol(1) ).to eq( 0 )
+   end
+
+   it "can be manually checked for failed result status (async API)" do
+     @conn.send_query( "SELECT * FROM nonexistant_table" )
+     res = @conn.get_result
+     expect {
+       res.check
+     }.to raise_error( PG::Error, /relation "nonexistant_table" does not exist/ )
+   end
+
+   it "can return the values of a single field" do
+     res = @conn.exec( "SELECT 1 AS x, 'a' AS y UNION ALL SELECT 2, 'b'" )
+     expect( res.field_values('x') ).to eq( ['1', '2'] )
+     expect( res.field_values('y') ).to eq( ['a', 'b'] )
+     expect{ res.field_values('') }.to raise_error(IndexError)
+     expect{ res.field_values(:x) }.to raise_error(TypeError)
+   end
+
+   it "raises a proper exception for a nonexistant table" do
+     expect {
+       @conn.exec( "SELECT * FROM nonexistant_table" )
+     }.to raise_error( PG::UndefinedTable, /relation "nonexistant_table" does not exist/ )
+   end
+
+   it "raises a more generic exception for an unknown SQLSTATE" do
+     old_error = PG::ERROR_CLASSES.delete('42P01')
+     begin
+       expect {
+         @conn.exec( "SELECT * FROM nonexistant_table" )
+       }.to raise_error{|error|
+         expect( error ).to be_an_instance_of(PG::SyntaxErrorOrAccessRuleViolation)
+         expect( error.to_s ).to match(/relation "nonexistant_table" does not exist/)
+       }
+     ensure
+       PG::ERROR_CLASSES['42P01'] = old_error
+     end
+   end
+
+   it "raises a ServerError for an unknown SQLSTATE class" do
+     old_error1 = PG::ERROR_CLASSES.delete('42P01')
+     old_error2 = PG::ERROR_CLASSES.delete('42')
+     begin
+       expect {
+         @conn.exec( "SELECT * FROM nonexistant_table" )
+       }.to raise_error{|error|
+         expect( error ).to be_an_instance_of(PG::ServerError)
+         expect( error.to_s ).to match(/relation "nonexistant_table" does not exist/)
+       }
+     ensure
+       PG::ERROR_CLASSES['42P01'] = old_error1
+       PG::ERROR_CLASSES['42'] = old_error2
+     end
+   end
+
+   it "raises a proper exception for a nonexistant schema" do
+     expect {
+       @conn.exec( "DROP SCHEMA nonexistant_schema" )
+     }.to raise_error( PG::InvalidSchemaName, /schema "nonexistant_schema" does not exist/ )
+   end
+
+   it "the raised result is nil in case of a connection error" do
+     c = PGconn.connect_start( '127.0.0.1', 54320, "", "", "me", "xxxx", "somedb" )
+     expect {
+       c.exec "select 1"
+     }.to raise_error {|error|
+       expect( error ).to be_an_instance_of(PG::UnableToSend)
+       expect( error.result ).to eq( nil )
+     }
+   end
+
+   it "does not clear the result itself" do
+     r = @conn.exec "select 1"
+     expect( r.autoclear? ).to eq(false)
+     expect( r.cleared? ).to eq(false)
+     r.clear
+     expect( r.cleared? ).to eq(true)
+   end
+
+   context 'result value conversions with TypeMapByColumn' do
+     let!(:textdec_int){ PG::TextDecoder::Integer.new name: 'INT4', oid: 23 }
+     let!(:textdec_float){ PG::TextDecoder::Float.new name: 'FLOAT4', oid: 700 }
+
+     it "should allow reading, assigning and disabling type conversions" do
+       res = @conn.exec( "SELECT 123" )
+       expect( res.type_map ).to be_kind_of(PG::TypeMapAllStrings)
+       res.type_map = PG::TypeMapByColumn.new [textdec_int]
+       expect( res.type_map ).to be_an_instance_of(PG::TypeMapByColumn)
+       expect( res.type_map.coders ).to eq( [textdec_int] )
+       res.type_map = PG::TypeMapByColumn.new [textdec_float]
+       expect( res.type_map.coders ).to eq( [textdec_float] )
+       res.type_map = PG::TypeMapAllStrings.new
+       expect( res.type_map ).to be_kind_of(PG::TypeMapAllStrings)
+     end
+
+     it "should be applied to all value retrieving methods" do
+       res = @conn.exec( "SELECT 123 as f" )
+       res.type_map = PG::TypeMapByColumn.new [textdec_int]
+       expect( res.values ).to eq( [[123]] )
+       expect( res.getvalue(0,0) ).to eq( 123 )
+       expect( res[0] ).to eq( {'f' => 123 } )
+       expect( res.enum_for(:each_row).to_a ).to eq( [[123]] )
+       expect( res.enum_for(:each).to_a ).to eq( [{'f' => 123}] )
+       expect( res.column_values(0) ).to eq( [123] )
+       expect( res.field_values('f') ).to eq( [123] )
+     end
+
+     it "should be usable for several queries" do
+       colmap = PG::TypeMapByColumn.new [textdec_int]
+       res = @conn.exec( "SELECT 123" )
+       res.type_map = colmap
+       expect( res.values ).to eq( [[123]] )
+       res = @conn.exec( "SELECT 456" )
+       res.type_map = colmap
+       expect( res.values ).to eq( [[456]] )
+     end
+
+     it "shouldn't allow invalid type maps" do
+       res = @conn.exec( "SELECT 1" )
+       expect{ res.type_map = 1 }.to raise_error(TypeError)
+     end
+   end
+ end
data/test/dummy/vendor/bundle/ruby/2.3.0/gems/pg-0.19.0/spec/pg/type_map_by_class_spec.rb
@@ -0,0 +1,138 @@
+ #!/usr/bin/env rspec
+ # encoding: utf-8
+
+ require_relative '../helpers'
+
+ require 'pg'
+
+
+ describe PG::TypeMapByClass do
+
+   let!(:textenc_int){ PG::TextEncoder::Integer.new name: 'INT4', oid: 23 }
+   let!(:textenc_float){ PG::TextEncoder::Float.new name: 'FLOAT8', oid: 701 }
+   let!(:textenc_string){ PG::TextEncoder::String.new name: 'TEXT', oid: 25 }
+   let!(:binaryenc_int){ PG::BinaryEncoder::Int8.new name: 'INT8', oid: 20, format: 1 }
+   let!(:pass_through_type) do
+     type = Class.new(PG::SimpleEncoder) do
+       def encode(*v)
+         v.inspect
+       end
+     end.new
+     type.oid = 25
+     type.format = 0
+     type.name = 'pass_through'
+     type
+   end
+
+   let!(:tm) do
+     tm = PG::TypeMapByClass.new
+     tm[Integer] = binaryenc_int
+     tm[Float] = textenc_float
+     tm[Symbol] = pass_through_type
+     tm
+   end
+
+   let!(:raise_class) do
+     Class.new
+   end
+
+   let!(:derived_tm) do
+     tm = Class.new(PG::TypeMapByClass) do
+       def array_type_map_for(value)
+         PG::TextEncoder::Array.new name: '_INT4', oid: 1007, elements_type: PG::TextEncoder::Integer.new
+       end
+     end.new
+     tm[Integer] = proc{|value| textenc_int }
+     tm[raise_class] = proc{|value| /invalid/ }
+     tm[Array] = :array_type_map_for
+     tm
+   end
+
+   it "should retrieve all conversions" do
+     expect( tm.coders ).to eq( {
+       Integer => binaryenc_int,
+       Float => textenc_float,
+       Symbol => pass_through_type,
+     } )
+   end
+
+   it "should retrieve particular conversions" do
+     expect( tm[Integer] ).to eq(binaryenc_int)
+     expect( tm[Float] ).to eq(textenc_float)
+     expect( tm[Bignum] ).to be_nil
+     expect( derived_tm[raise_class] ).to be_kind_of(Proc)
+     expect( derived_tm[Array] ).to eq(:array_type_map_for)
+   end
+
+   it "should allow deletion of coders" do
+     tm[Integer] = nil
+     expect( tm[Integer] ).to be_nil
+     expect( tm.coders ).to eq( {
+       Float => textenc_float,
+       Symbol => pass_through_type,
+     } )
+   end
+
+   it "forwards query param conversions to the #default_type_map" do
+     tm1 = PG::TypeMapByColumn.new( [textenc_int, nil, nil] )
+
+     tm2 = PG::TypeMapByClass.new
+     tm2[Integer] = PG::TextEncoder::Integer.new name: 'INT2', oid: 21
+     tm2.default_type_map = tm1
+
+     res = @conn.exec_params( "SELECT $1, $2, $3::TEXT", ['1', 2, 3], 0, tm2 )
+
+     expect( res.ftype(0) ).to eq( 23 ) # tm1
+     expect( res.ftype(1) ).to eq( 21 ) # tm2
+     expect( res.getvalue(0,2) ).to eq( "3" ) # TypeMapAllStrings
+   end
+
+   #
+   # Decoding Examples
+   #
+
+   it "should raise an error when used for results" do
+     res = @conn.exec_params( "SELECT 1", [], 1 )
+     expect{ res.type_map = tm }.to raise_error(NotImplementedError, /not suitable to map result values/)
+   end
+
+   #
+   # Encoding Examples
+   #
+
+   it "should allow mixed type conversions" do
+     res = @conn.exec_params( "SELECT $1, $2, $3", [5, 1.23, :TestSymbol], 0, tm )
+     expect( res.values ).to eq([['5', '1.23', "[:TestSymbol, #{@conn.internal_encoding.inspect}]"]])
+     expect( res.ftype(0) ).to eq(20)
+   end
+
+   it "should expire the cache after changes to the coders" do
+     res = @conn.exec_params( "SELECT $1", [5], 0, tm )
+     expect( res.ftype(0) ).to eq(20)
+
+     tm[Integer] = textenc_int
+
+     res = @conn.exec_params( "SELECT $1", [5], 0, tm )
+     expect( res.ftype(0) ).to eq(23)
+   end
+
+   it "should allow mixed type conversions with derived type map" do
+     res = @conn.exec_params( "SELECT $1, $2", [6, [7]], 0, derived_tm )
+     expect( res.values ).to eq([['6', '{7}']])
+     expect( res.ftype(0) ).to eq(23)
+     expect( res.ftype(1) ).to eq(1007)
+   end
+
+   it "should raise TypeError with derived type map" do
+     expect{
+       @conn.exec_params( "SELECT $1", [raise_class.new], 0, derived_tm )
+     }.to raise_error(TypeError, /invalid type Regexp/)
+   end
+
+   it "should raise error on invalid coder object" do
+     tm[TrueClass] = "dummy"
+     expect{
+       res = @conn.exec_params( "SELECT $1", [true], 0, tm )
+     }.to raise_error(NoMethodError, /undefined method.*call/)
+   end
+ end