sequel 2.11.0 → 2.12.0

Files changed (162)
  1. data/CHANGELOG +168 -0
  2. data/README.rdoc +77 -95
  3. data/Rakefile +100 -80
  4. data/bin/sequel +2 -1
  5. data/doc/advanced_associations.rdoc +23 -32
  6. data/doc/cheat_sheet.rdoc +23 -40
  7. data/doc/dataset_filtering.rdoc +6 -6
  8. data/doc/prepared_statements.rdoc +22 -22
  9. data/doc/release_notes/2.12.0.txt +534 -0
  10. data/doc/schema.rdoc +3 -1
  11. data/doc/sharding.rdoc +8 -8
  12. data/doc/virtual_rows.rdoc +65 -0
  13. data/lib/sequel.rb +1 -1
  14. data/lib/{sequel_core → sequel}/adapters/ado.rb +3 -3
  15. data/lib/{sequel_core → sequel}/adapters/db2.rb +0 -0
  16. data/lib/{sequel_core → sequel}/adapters/dbi.rb +1 -1
  17. data/lib/{sequel_core → sequel}/adapters/do.rb +9 -5
  18. data/lib/{sequel_core → sequel}/adapters/do/mysql.rb +1 -1
  19. data/lib/{sequel_core → sequel}/adapters/do/postgres.rb +1 -1
  20. data/lib/{sequel_core → sequel}/adapters/do/sqlite.rb +1 -1
  21. data/lib/{sequel_core → sequel}/adapters/firebird.rb +84 -80
  22. data/lib/{sequel_core → sequel}/adapters/informix.rb +1 -1
  23. data/lib/{sequel_core → sequel}/adapters/jdbc.rb +21 -14
  24. data/lib/{sequel_core → sequel}/adapters/jdbc/h2.rb +14 -13
  25. data/lib/{sequel_core → sequel}/adapters/jdbc/mysql.rb +1 -1
  26. data/lib/{sequel_core → sequel}/adapters/jdbc/oracle.rb +1 -1
  27. data/lib/{sequel_core → sequel}/adapters/jdbc/postgresql.rb +1 -1
  28. data/lib/{sequel_core → sequel}/adapters/jdbc/sqlite.rb +1 -1
  29. data/lib/{sequel_core → sequel}/adapters/mysql.rb +60 -39
  30. data/lib/{sequel_core → sequel}/adapters/odbc.rb +8 -4
  31. data/lib/{sequel_core → sequel}/adapters/openbase.rb +0 -0
  32. data/lib/{sequel_core → sequel}/adapters/oracle.rb +38 -7
  33. data/lib/{sequel_core → sequel}/adapters/postgres.rb +24 -24
  34. data/lib/{sequel_core → sequel}/adapters/shared/mssql.rb +5 -5
  35. data/lib/{sequel_core → sequel}/adapters/shared/mysql.rb +126 -71
  36. data/lib/{sequel_core → sequel}/adapters/shared/oracle.rb +7 -10
  37. data/lib/{sequel_core → sequel}/adapters/shared/postgres.rb +159 -125
  38. data/lib/{sequel_core → sequel}/adapters/shared/progress.rb +1 -2
  39. data/lib/{sequel_core → sequel}/adapters/shared/sqlite.rb +72 -67
  40. data/lib/{sequel_core → sequel}/adapters/sqlite.rb +11 -7
  41. data/lib/{sequel_core → sequel}/adapters/utils/date_format.rb +0 -0
  42. data/lib/{sequel_core → sequel}/adapters/utils/stored_procedures.rb +0 -0
  43. data/lib/{sequel_core → sequel}/adapters/utils/unsupported.rb +19 -0
  44. data/lib/{sequel_core → sequel}/connection_pool.rb +7 -5
  45. data/lib/sequel/core.rb +221 -0
  46. data/lib/{sequel_core → sequel}/core_sql.rb +91 -49
  47. data/lib/{sequel_core → sequel}/database.rb +264 -149
  48. data/lib/{sequel_core/schema/generator.rb → sequel/database/schema_generator.rb} +6 -2
  49. data/lib/{sequel_core/database/schema.rb → sequel/database/schema_methods.rb} +12 -12
  50. data/lib/sequel/database/schema_sql.rb +224 -0
  51. data/lib/{sequel_core → sequel}/dataset.rb +78 -236
  52. data/lib/{sequel_core → sequel}/dataset/convenience.rb +99 -61
  53. data/lib/{sequel_core/object_graph.rb → sequel/dataset/graph.rb} +16 -14
  54. data/lib/{sequel_core → sequel}/dataset/prepared_statements.rb +1 -1
  55. data/lib/{sequel_core → sequel}/dataset/sql.rb +150 -99
  56. data/lib/sequel/deprecated.rb +593 -0
  57. data/lib/sequel/deprecated_migration.rb +91 -0
  58. data/lib/sequel/exceptions.rb +48 -0
  59. data/lib/sequel/extensions/blank.rb +42 -0
  60. data/lib/{sequel_model → sequel/extensions}/inflector.rb +8 -1
  61. data/lib/{sequel_core → sequel/extensions}/migration.rb +1 -1
  62. data/lib/{sequel_core/dataset → sequel/extensions}/pagination.rb +0 -0
  63. data/lib/{sequel_core → sequel/extensions}/pretty_table.rb +7 -0
  64. data/lib/{sequel_core/dataset → sequel/extensions}/query.rb +7 -0
  65. data/lib/sequel/extensions/string_date_time.rb +47 -0
  66. data/lib/sequel/metaprogramming.rb +43 -0
  67. data/lib/sequel/model.rb +110 -0
  68. data/lib/sequel/model/associations.rb +1300 -0
  69. data/lib/sequel/model/base.rb +937 -0
  70. data/lib/sequel/model/deprecated.rb +204 -0
  71. data/lib/sequel/model/deprecated_hooks.rb +103 -0
  72. data/lib/sequel/model/deprecated_inflector.rb +335 -0
  73. data/lib/sequel/model/deprecated_validations.rb +388 -0
  74. data/lib/sequel/model/errors.rb +39 -0
  75. data/lib/{sequel_model → sequel/model}/exceptions.rb +4 -4
  76. data/lib/sequel/model/inflections.rb +208 -0
  77. data/lib/sequel/model/plugins.rb +76 -0
  78. data/lib/sequel/plugins/caching.rb +122 -0
  79. data/lib/sequel/plugins/hook_class_methods.rb +122 -0
  80. data/lib/sequel/plugins/schema.rb +53 -0
  81. data/lib/sequel/plugins/serialization.rb +117 -0
  82. data/lib/sequel/plugins/single_table_inheritance.rb +63 -0
  83. data/lib/sequel/plugins/validation_class_methods.rb +384 -0
  84. data/lib/sequel/plugins/validation_helpers.rb +150 -0
  85. data/lib/{sequel_core → sequel}/sql.rb +125 -190
  86. data/lib/{sequel_core → sequel}/version.rb +2 -1
  87. data/lib/sequel_core.rb +1 -172
  88. data/lib/sequel_model.rb +1 -91
  89. data/spec/adapters/firebird_spec.rb +5 -5
  90. data/spec/adapters/informix_spec.rb +1 -1
  91. data/spec/adapters/mysql_spec.rb +128 -42
  92. data/spec/adapters/oracle_spec.rb +47 -19
  93. data/spec/adapters/postgres_spec.rb +64 -52
  94. data/spec/adapters/spec_helper.rb +1 -1
  95. data/spec/adapters/sqlite_spec.rb +12 -17
  96. data/spec/{sequel_core → core}/connection_pool_spec.rb +10 -10
  97. data/spec/{sequel_core → core}/core_ext_spec.rb +19 -19
  98. data/spec/{sequel_core → core}/core_sql_spec.rb +68 -71
  99. data/spec/{sequel_core → core}/database_spec.rb +135 -99
  100. data/spec/{sequel_core → core}/dataset_spec.rb +398 -242
  101. data/spec/{sequel_core → core}/expression_filters_spec.rb +13 -13
  102. data/spec/core/migration_spec.rb +263 -0
  103. data/spec/{sequel_core → core}/object_graph_spec.rb +10 -10
  104. data/spec/{sequel_core → core}/pretty_table_spec.rb +2 -2
  105. data/spec/{sequel_core → core}/schema_generator_spec.rb +0 -0
  106. data/spec/{sequel_core → core}/schema_spec.rb +8 -10
  107. data/spec/{sequel_core → core}/spec_helper.rb +29 -2
  108. data/spec/{sequel_core → core}/version_spec.rb +0 -0
  109. data/spec/extensions/blank_spec.rb +67 -0
  110. data/spec/extensions/caching_spec.rb +201 -0
  111. data/spec/{sequel_model/hooks_spec.rb → extensions/hook_class_methods_spec.rb} +8 -23
  112. data/spec/{sequel_model → extensions}/inflector_spec.rb +3 -0
  113. data/spec/{sequel_core → extensions}/migration_spec.rb +4 -4
  114. data/spec/extensions/pagination_spec.rb +99 -0
  115. data/spec/extensions/pretty_table_spec.rb +91 -0
  116. data/spec/extensions/query_spec.rb +85 -0
  117. data/spec/{sequel_model → extensions}/schema_spec.rb +22 -1
  118. data/spec/extensions/serialization_spec.rb +109 -0
  119. data/spec/extensions/single_table_inheritance_spec.rb +53 -0
  120. data/spec/{sequel_model → extensions}/spec_helper.rb +13 -4
  121. data/spec/extensions/string_date_time_spec.rb +93 -0
  122. data/spec/{sequel_model/validations_spec.rb → extensions/validation_class_methods_spec.rb} +15 -103
  123. data/spec/extensions/validation_helpers_spec.rb +291 -0
  124. data/spec/integration/dataset_test.rb +31 -0
  125. data/spec/integration/eager_loader_test.rb +17 -30
  126. data/spec/integration/schema_test.rb +8 -5
  127. data/spec/integration/spec_helper.rb +17 -0
  128. data/spec/integration/transaction_test.rb +68 -0
  129. data/spec/{sequel_model → model}/association_reflection_spec.rb +0 -0
  130. data/spec/{sequel_model → model}/associations_spec.rb +23 -10
  131. data/spec/{sequel_model → model}/base_spec.rb +29 -20
  132. data/spec/{sequel_model → model}/caching_spec.rb +16 -14
  133. data/spec/{sequel_model → model}/dataset_methods_spec.rb +0 -0
  134. data/spec/{sequel_model → model}/eager_loading_spec.rb +8 -8
  135. data/spec/model/hooks_spec.rb +472 -0
  136. data/spec/model/inflector_spec.rb +126 -0
  137. data/spec/{sequel_model → model}/model_spec.rb +25 -20
  138. data/spec/model/plugins_spec.rb +142 -0
  139. data/spec/{sequel_model → model}/record_spec.rb +121 -62
  140. data/spec/model/schema_spec.rb +92 -0
  141. data/spec/model/spec_helper.rb +124 -0
  142. data/spec/model/validations_spec.rb +1080 -0
  143. metadata +136 -107
  144. data/lib/sequel_core/core_ext.rb +0 -217
  145. data/lib/sequel_core/dataset/callback.rb +0 -13
  146. data/lib/sequel_core/dataset/schema.rb +0 -15
  147. data/lib/sequel_core/deprecated.rb +0 -26
  148. data/lib/sequel_core/exceptions.rb +0 -44
  149. data/lib/sequel_core/schema.rb +0 -2
  150. data/lib/sequel_core/schema/sql.rb +0 -325
  151. data/lib/sequel_model/association_reflection.rb +0 -267
  152. data/lib/sequel_model/associations.rb +0 -499
  153. data/lib/sequel_model/base.rb +0 -539
  154. data/lib/sequel_model/caching.rb +0 -82
  155. data/lib/sequel_model/dataset_methods.rb +0 -26
  156. data/lib/sequel_model/eager_loading.rb +0 -370
  157. data/lib/sequel_model/hooks.rb +0 -101
  158. data/lib/sequel_model/plugins.rb +0 -62
  159. data/lib/sequel_model/record.rb +0 -568
  160. data/lib/sequel_model/schema.rb +0 -49
  161. data/lib/sequel_model/validations.rb +0 -429
  162. data/spec/sequel_model/plugins_spec.rb +0 -80
@@ -1,7 +1,7 @@
  require File.join(File.dirname(__FILE__), "spec_helper")
 
  context "Dataset" do
- setup do
+ before do
  @dataset = Sequel::Dataset.new("db")
  end
 
@@ -63,7 +63,7 @@ context "Dataset" do
  end
 
  context "Dataset" do
- setup do
+ before do
  @dataset = Sequel::Dataset.new("db")
  end
 
@@ -74,14 +74,14 @@ context "Dataset" do
  @dataset.literal(:a).should == 'a'
  end
 
- specify "should have upcase_identifiers= method which changes literalization of identifiers" do
+ deprec_specify "should have upcase_identifiers= method which changes literalization of identifiers" do
  @dataset.upcase_identifiers = true
  @dataset.literal(:a).should == 'A'
  @dataset.upcase_identifiers = false
  @dataset.literal(:a).should == 'a'
  end
 
- specify "should have upcase_identifiers? method which returns whether identifiers are currently upcased" do
+ deprec_specify "should have upcase_identifiers? method which returns whether identifiers are currently upcased" do
  @dataset.upcase_identifiers = true
  @dataset.upcase_identifiers?.should == true
  @dataset.upcase_identifiers = false
@@ -109,18 +109,18 @@ context "Dataset" do
  end
 
  context "Dataset#clone" do
- setup do
+ before do
  @dataset = Sequel::Dataset.new(nil).from(:items)
  end
 
  specify "should create an exact copy of the dataset" do
- @c = Class.new
- @dataset.set_model(@c)
+ @dataset.row_proc = Proc.new{|r| r}
  @clone = @dataset.clone
 
  @clone.should_not === @dataset
  @clone.class.should == @dataset.class
- @clone.model_classes.should == @dataset.model_classes
+ @clone.opts.should == @dataset.opts
+ @clone.row_proc.should == @dataset.row_proc
  end
 
  specify "should deep-copy the dataset opts" do
@@ -164,7 +164,7 @@ context "Dataset#clone" do
  end
 
  context "A simple dataset" do
- setup do
+ before do
  @dataset = Sequel::Dataset.new(nil).from(:test)
  end
 
@@ -225,7 +225,7 @@ context "A simple dataset" do
  end
 
  specify "should be able to return rows for arbitrary SQL" do
- @dataset.select_sql(:sql => 'xxx yyy zzz').should ==
+ @dataset.clone(:sql => 'xxx yyy zzz').select_sql.should ==
  "xxx yyy zzz"
  end
 
@@ -241,16 +241,16 @@ context "A simple dataset" do
  end
 
  context "A dataset with multiple tables in its FROM clause" do
- setup do
+ before do
  @dataset = Sequel::Dataset.new(nil).from(:t1, :t2)
  end
 
  specify "should raise on #update_sql" do
- proc {@dataset.update_sql(:a=>1)}.should raise_error(Sequel::Error::InvalidOperation)
+ proc {@dataset.update_sql(:a=>1)}.should raise_error(Sequel::InvalidOperation)
  end
 
  specify "should raise on #delete_sql" do
- proc {@dataset.delete_sql}.should raise_error(Sequel::Error::InvalidOperation)
+ proc {@dataset.delete_sql}.should raise_error(Sequel::InvalidOperation)
  end
 
  specify "should generate a select query FROM all specified tables" do
@@ -259,7 +259,7 @@ context "A dataset with multiple tables in its FROM clause" do
  end
 
  context "Dataset#exists" do
- setup do
+ before do
  @ds1 = Sequel::Dataset.new(nil).from(:test)
  @ds2 = @ds1.filter(:price.sql_number < 100)
  @ds3 = @ds1.filter(:price.sql_number > 50)
@@ -279,7 +279,7 @@ context "Dataset#exists" do
  end
 
  context "Dataset#where" do
- setup do
+ before do
  @dataset = Sequel::Dataset.new(nil).from(:test)
  @d1 = @dataset.where(:region => 'Asia')
  @d2 = @dataset.where('region = ?', 'Asia')
@@ -444,6 +444,14 @@ context "Dataset#where" do
  "SELECT * FROM test WHERE (((name < 'b') AND (table.id = 1)) OR is_active(blah, xx, x.y_z))"
  end
 
+ specify "should instance_eval the block in the context of a VirtualRow if the block doesn't request an argument" do
+ x = nil
+ @dataset.filter{x = self; false}
+ x.should be_a_kind_of(Sequel::SQL::VirtualRow)
+ @dataset.filter{((name < 'b') & {table__id => 1}) | is_active(blah, xx, x__y_z)}.sql.should ==
+ "SELECT * FROM test WHERE (((name < 'b') AND (table.id = 1)) OR is_active(blah, xx, x.y_z))"
+ end
+
  specify "should raise an error if an invalid argument is used" do
  proc{@dataset.filter(1)}.should raise_error(Sequel::Error)
  end
@@ -455,7 +463,7 @@ context "Dataset#where" do
  end
 
  context "Dataset#or" do
- setup do
+ before do
  @dataset = Sequel::Dataset.new(nil).from(:test)
  @d1 = @dataset.where(:x => 1)
  end
@@ -496,7 +504,7 @@ context "Dataset#or" do
  end
 
  context "Dataset#and" do
- setup do
+ before do
  @dataset = Sequel::Dataset.new(nil).from(:test)
  @d1 = @dataset.where(:x => 1)
  end
@@ -536,7 +544,7 @@ context "Dataset#and" do
  end
 
  context "Dataset#exclude" do
- setup do
+ before do
  @dataset = Sequel::Dataset.new(nil).from(:test)
  end
 
@@ -578,7 +586,7 @@ context "Dataset#exclude" do
  end
 
  context "Dataset#invert" do
- setup do
+ before do
  @d = Sequel::Dataset.new(nil).from(:test)
  end
 
@@ -596,7 +604,7 @@ context "Dataset#invert" do
  end
 
  context "Dataset#having" do
- setup do
+ before do
  @dataset = Sequel::Dataset.new(nil).from(:test)
  @grouped = @dataset.group(:region).select(:region, :sum.sql_function(:population), :avg.sql_function(:gdp))
  @d1 = @grouped.having('sum(population) > 10')
@@ -605,7 +613,7 @@ context "Dataset#having" do
  end
 
  specify "should raise if the dataset is not grouped" do
- proc {@dataset.having('avg(gdp) > 10')}.should raise_error(Sequel::Error::InvalidOperation)
+ proc {@dataset.having('avg(gdp) > 10')}.should raise_error(Sequel::InvalidOperation)
  end
 
  specify "should affect select statements" do
@@ -625,7 +633,7 @@ context "Dataset#having" do
  end
 
  context "a grouped dataset" do
- setup do
+ before do
  @dataset = Sequel::Dataset.new(nil).from(:test).group(:type_id)
  end
 
@@ -650,7 +658,7 @@ context "a grouped dataset" do
  end
 
  context "Dataset#group_by" do
- setup do
+ before do
  @dataset = Sequel::Dataset.new(nil).from(:test).group_by(:type_id)
  end
 
@@ -689,7 +697,7 @@ context "Dataset#as" do
  end
 
  context "Dataset#literal" do
- setup do
+ before do
  @dataset = Sequel::Dataset.new(nil).from(:test)
  end
 
@@ -754,10 +762,10 @@ context "Dataset#literal" do
  @dataset.literal(d).should == s
  end
 
- specify "should not literalize expression strings" do
- @dataset.literal('col1 + 2'.expr).should == 'col1 + 2'
+ specify "should not modify literal strings" do
+ @dataset.literal('col1 + 2'.lit).should == 'col1 + 2'
 
- @dataset.update_sql(:a => 'a + 2'.expr).should ==
+ @dataset.update_sql(:a => 'a + 2'.lit).should ==
  'UPDATE test SET a = a + 2'
  end
 
@@ -774,7 +782,7 @@ context "Dataset#literal" do
  end
 
  context "Dataset#from" do
- setup do
+ before do
  @dataset = Sequel::Dataset.new(nil)
  end
 
@@ -837,7 +845,7 @@ context "Dataset#from" do
  end
 
  context "Dataset#select" do
- setup do
+ before do
  @d = Sequel::Dataset.new(nil).from(:test)
  end
 
@@ -887,21 +895,21 @@ context "Dataset#select" do
 
  specify "should accept a block that yields a virtual row" do
  @d.select{|o| o.a}.sql.should == 'SELECT a FROM test'
- @d.select{|o| o.a(1)}.sql.should == 'SELECT a(1) FROM test'
+ @d.select{a(1)}.sql.should == 'SELECT a(1) FROM test'
  @d.select{|o| o.a(1, 2)}.sql.should == 'SELECT a(1, 2) FROM test'
- @d.select{|o| [o.a, o.a(1, 2)]}.sql.should == 'SELECT a, a(1, 2) FROM test'
+ @d.select{[a, a(1, 2)]}.sql.should == 'SELECT a, a(1, 2) FROM test'
  end
 
  specify "should merge regular arguments with argument returned from block" do
- @d.select(:b){|o| o.a}.sql.should == 'SELECT b, a FROM test'
+ @d.select(:b){a}.sql.should == 'SELECT b, a FROM test'
  @d.select(:b, :c){|o| o.a(1)}.sql.should == 'SELECT b, c, a(1) FROM test'
- @d.select(:b){|o| [o.a, o.a(1, 2)]}.sql.should == 'SELECT b, a, a(1, 2) FROM test'
+ @d.select(:b){[a, a(1, 2)]}.sql.should == 'SELECT b, a, a(1, 2) FROM test'
  @d.select(:b, :c){|o| [o.a, o.a(1, 2)]}.sql.should == 'SELECT b, c, a, a(1, 2) FROM test'
  end
  end
 
  context "Dataset#select_all" do
- setup do
+ before do
  @d = Sequel::Dataset.new(nil).from(:test)
  end
 
@@ -915,7 +923,7 @@ context "Dataset#select_all" do
  end
 
  context "Dataset#select_more" do
- setup do
+ before do
  @d = Sequel::Dataset.new(nil).from(:test)
  end
 
@@ -932,12 +940,12 @@ context "Dataset#select_more" do
932
940
 
933
941
  specify "should accept a block that yields a virtual row" do
934
942
  @d.select(:a).select_more{|o| o.b}.sql.should == 'SELECT a, b FROM test'
935
- @d.select(:a.*).select_more(:b.*){|o| o.b(1)}.sql.should == 'SELECT a.*, b.*, b(1) FROM test'
943
+ @d.select(:a.*).select_more(:b.*){b(1)}.sql.should == 'SELECT a.*, b.*, b(1) FROM test'
936
944
  end
937
945
  end
938
946
 
939
947
  context "Dataset#order" do
940
- setup do
948
+ before do
941
949
  @dataset = Sequel::Dataset.new(nil).from(:test)
942
950
  end
943
951
 
@@ -973,21 +981,21 @@ context "Dataset#order" do
973
981
 
974
982
  specify "should accept a block that yields a virtual row" do
975
983
  @dataset.order{|o| o.a}.sql.should == 'SELECT * FROM test ORDER BY a'
976
- @dataset.order{|o| o.a(1)}.sql.should == 'SELECT * FROM test ORDER BY a(1)'
984
+ @dataset.order{a(1)}.sql.should == 'SELECT * FROM test ORDER BY a(1)'
977
985
  @dataset.order{|o| o.a(1, 2)}.sql.should == 'SELECT * FROM test ORDER BY a(1, 2)'
978
- @dataset.order{|o| [o.a, o.a(1, 2)]}.sql.should == 'SELECT * FROM test ORDER BY a, a(1, 2)'
986
+ @dataset.order{[a, a(1, 2)]}.sql.should == 'SELECT * FROM test ORDER BY a, a(1, 2)'
979
987
  end
980
988
 
981
989
  specify "should merge regular arguments with argument returned from block" do
982
- @dataset.order(:b){|o| o.a}.sql.should == 'SELECT * FROM test ORDER BY b, a'
990
+ @dataset.order(:b){a}.sql.should == 'SELECT * FROM test ORDER BY b, a'
983
991
  @dataset.order(:b, :c){|o| o.a(1)}.sql.should == 'SELECT * FROM test ORDER BY b, c, a(1)'
984
- @dataset.order(:b){|o| [o.a, o.a(1, 2)]}.sql.should == 'SELECT * FROM test ORDER BY b, a, a(1, 2)'
992
+ @dataset.order(:b){[a, a(1, 2)]}.sql.should == 'SELECT * FROM test ORDER BY b, a, a(1, 2)'
985
993
  @dataset.order(:b, :c){|o| [o.a, o.a(1, 2)]}.sql.should == 'SELECT * FROM test ORDER BY b, c, a, a(1, 2)'
986
994
  end
987
995
  end
988
996
 
989
997
  context "Dataset#unfiltered" do
990
- setup do
998
+ before do
991
999
  @dataset = Sequel::Dataset.new(nil).from(:test)
992
1000
  end
993
1001
 
@@ -998,7 +1006,7 @@ context "Dataset#unfiltered" do
998
1006
  end
999
1007
 
1000
1008
  context "Dataset#unordered" do
1001
- setup do
1009
+ before do
1002
1010
  @dataset = Sequel::Dataset.new(nil).from(:test)
1003
1011
  end
1004
1012
 
@@ -1009,7 +1017,7 @@ context "Dataset#unordered" do
1009
1017
  end
1010
1018
 
1011
1019
  context "Dataset#with_sql" do
1012
- setup do
1020
+ before do
1013
1021
  @dataset = Sequel::Dataset.new(nil).from(:test)
1014
1022
  end
1015
1023
 
@@ -1017,14 +1025,13 @@ context "Dataset#with_sql" do
1017
1025
  @dataset.with_sql('SELECT 1 FROM test').sql.should == 'SELECT 1 FROM test'
1018
1026
  end
1019
1027
 
1020
- specify "should keep row_proc and transform" do
1028
+ specify "should keep row_proc" do
1021
1029
  @dataset.with_sql('SELECT 1 FROM test').row_proc.should == @dataset.row_proc
1022
- @dataset.with_sql('SELECT 1 FROM test').instance_variable_get(:@transform).should == @dataset.instance_variable_get(:@transform)
1023
1030
  end
1024
1031
  end
1025
1032
 
1026
1033
  context "Dataset#order_by" do
1027
- setup do
1034
+ before do
1028
1035
  @dataset = Sequel::Dataset.new(nil).from(:test)
1029
1036
  end
1030
1037
 
@@ -1055,7 +1062,7 @@ context "Dataset#order_by" do
1055
1062
  end
1056
1063
 
1057
1064
  context "Dataset#order_more" do
1058
- setup do
1065
+ before do
1059
1066
  @dataset = Sequel::Dataset.new(nil).from(:test)
1060
1067
  end
1061
1068
 
@@ -1071,12 +1078,12 @@ context "Dataset#order_more" do
1071
1078
 
1072
1079
  specify "should accept a block that yields a virtual row" do
1073
1080
  @dataset.order(:a).order_more{|o| o.b}.sql.should == 'SELECT * FROM test ORDER BY a, b'
1074
- @dataset.order(:a, :b).order_more(:c, :d){|o| [o.e, o.f(1, 2)]}.sql.should == 'SELECT * FROM test ORDER BY a, b, c, d, e, f(1, 2)'
1081
+ @dataset.order(:a, :b).order_more(:c, :d){[e, f(1, 2)]}.sql.should == 'SELECT * FROM test ORDER BY a, b, c, d, e, f(1, 2)'
1075
1082
  end
1076
1083
  end
1077
1084
 
1078
1085
  context "Dataset#reverse_order" do
1079
- setup do
1086
+ before do
1080
1087
  @dataset = Sequel::Dataset.new(nil).from(:test)
1081
1088
  end
1082
1089
 
@@ -1119,7 +1126,7 @@ context "Dataset#reverse_order" do
1119
1126
  end
1120
1127
 
1121
1128
  context "Dataset#limit" do
1122
- setup do
1129
+ before do
1123
1130
  @dataset = Sequel::Dataset.new(nil).from(:test)
1124
1131
  end
1125
1132
 
@@ -1158,24 +1165,20 @@ context "Dataset#limit" do
1158
1165
  end
1159
1166
 
1160
1167
  context "Dataset#naked" do
1161
- setup do
1168
+ before do
1162
1169
  @d1 = Sequel::Dataset.new(nil, {1 => 2, 3 => 4})
1163
- @d2 = Sequel::Dataset.new(nil, {1 => 2, 3 => 4}).set_model(Object)
1164
- end
1165
-
1166
- specify "should return a clone with :naked option set" do
1167
- naked = @d1.naked
1168
- naked.opts[:naked].should be_true
1170
+ @d2 = @d1.clone
1171
+ @d2.row_proc = Proc.new{|r| r}
1169
1172
  end
1170
1173
 
1171
- specify "should remove any existing reference to a model class" do
1174
+ specify "should remove any existing row_proc" do
1172
1175
  naked = @d2.naked
1173
- naked.opts[:models].should be_nil
1176
+ naked.row_proc.should be_nil
1174
1177
  end
1175
1178
  end
1176
1179
 
1177
1180
  context "Dataset#qualified_column_name" do
1178
- setup do
1181
+ before do
1179
1182
  @dataset = Sequel::Dataset.new(nil).from(:test)
1180
1183
  end
1181
1184
 
@@ -1206,7 +1209,7 @@ class DummyDataset < Sequel::Dataset
1206
1209
  end
1207
1210
 
1208
1211
  context "Dataset#map" do
1209
- setup do
1212
+ before do
1210
1213
  @d = DummyDataset.new(nil).from(:items)
1211
1214
  end
1212
1215
 
@@ -1224,7 +1227,7 @@ context "Dataset#map" do
1224
1227
  end
1225
1228
 
1226
1229
  context "Dataset#to_hash" do
1227
- setup do
1230
+ before do
1228
1231
  @d = DummyDataset.new(nil).from(:items)
1229
1232
  end
1230
1233
 
@@ -1239,34 +1242,33 @@ context "Dataset#to_hash" do
1239
1242
  end
1240
1243
  end
1241
1244
 
1242
- context "Dataset#uniq" do
1243
- setup do
1245
+ context "Dataset#distinct" do
1246
+ before do
1244
1247
  @db = MockDatabase.new
1245
1248
  @dataset = @db[:test].select(:name)
1246
1249
  end
1247
1250
 
1248
1251
  specify "should include DISTINCT clause in statement" do
1249
- @dataset.uniq.sql.should == 'SELECT DISTINCT name FROM test'
1252
+ @dataset.distinct.sql.should == 'SELECT DISTINCT name FROM test'
1250
1253
  end
1251
1254
 
1252
- specify "should be aliased by Dataset#distinct" do
1253
- @dataset.distinct.sql.should == 'SELECT DISTINCT name FROM test'
1255
+ deprec_specify "should be aliased by Dataset#uniq" do
1256
+ @dataset.uniq.sql.should == 'SELECT DISTINCT name FROM test'
1254
1257
  end
1255
1258
 
1256
1259
  specify "should accept an expression list" do
1257
- @dataset.uniq(:a, :b).sql.should == 'SELECT DISTINCT ON (a, b) name FROM test'
1258
-
1259
- @dataset.uniq(:stamp.cast_as(:integer), :node_id=>nil).sql.should == 'SELECT DISTINCT ON (CAST(stamp AS integer), (node_id IS NULL)) name FROM test'
1260
+ @dataset.distinct(:a, :b).sql.should == 'SELECT DISTINCT ON (a, b) name FROM test'
1261
+ @dataset.distinct(:stamp.cast(:integer), :node_id=>nil).sql.should == 'SELECT DISTINCT ON (CAST(stamp AS integer), (node_id IS NULL)) name FROM test'
1260
1262
  end
1261
1263
 
1262
1264
  specify "should do a subselect for count" do
1263
- @dataset.uniq.count
1265
+ @dataset.distinct.count
1264
1266
  @db.sqls.should == ['SELECT COUNT(*) FROM (SELECT DISTINCT name FROM test) AS t1 LIMIT 1']
1265
1267
  end
1266
1268
  end
1267
1269
 
1268
1270
  context "Dataset#count" do
1269
- setup do
1271
+ before do
1270
1272
  @c = Class.new(Sequel::Dataset) do
1271
1273
  def self.sql
1272
1274
  @@sql
@@ -1286,7 +1288,7 @@ context "Dataset#count" do
1286
1288
  @c.sql.should == 'SELECT COUNT(*) FROM test LIMIT 1'
1287
1289
  end
1288
1290
 
1289
- specify "should be aliased by #size" do
1291
+ deprec_specify "should be aliased by #size" do
1290
1292
  @dataset.size.should == 1
1291
1293
  end
1292
1294
 
@@ -1332,7 +1334,7 @@ end
1332
1334
 
1333
1335
 
1334
1336
  context "Dataset#group_and_count" do
1335
- setup do
1337
+ before do
1336
1338
  @c = Class.new(Sequel::Dataset) do
1337
1339
  def self.sql
1338
1340
  @@sql
@@ -1356,7 +1358,7 @@ context "Dataset#group_and_count" do
1356
1358
  "SELECT a, b, count(*) AS count FROM test GROUP BY a, b ORDER BY count"
1357
1359
  end
1358
1360
 
1359
- specify "should work within query block" do
1361
+ deprec_specify "should work within query block" do
1360
1362
  @ds.query{group_and_count(:a, :b)}.sql.should ==
1361
1363
  "SELECT a, b, count(*) AS count FROM test GROUP BY a, b ORDER BY count"
1362
1364
  end
@@ -1382,7 +1384,7 @@ context "Dataset#empty?" do
1382
1384
  end
1383
1385
 
1384
1386
  context "Dataset#join_table" do
1385
- setup do
1387
+ before do
1386
1388
  @d = MockDataset.new(nil).from(:items)
1387
1389
  @d.quote_identifiers = true
1388
1390
  end
@@ -1518,7 +1520,7 @@ context "Dataset#join_table" do
1518
1520
  ds.filter!(:active => true)
1519
1521
 
1520
1522
  @d.join_table(:left_outer, ds, :item_id => :id).sql.should ==
1521
- 'SELECT * FROM "items" LEFT OUTER JOIN (SELECT * FROM categories WHERE (active = \'t\')) AS "t1" ON ("t1"."item_id" = "items"."id")'
1523
+ 'SELECT * FROM "items" LEFT OUTER JOIN (SELECT * FROM categories WHERE (active IS TRUE)) AS "t1" ON ("t1"."item_id" = "items"."id")'
1522
1524
  end
1523
1525
 
1524
1526
  specify "should support joining datasets and aliasing the join" do
@@ -1647,7 +1649,7 @@ context "Dataset#join_table" do
1647
1649
  end
1648
1650
 
1649
1651
  context "Dataset#[]=" do
1650
- setup do
1652
+ before do
1651
1653
  c = Class.new(Sequel::Dataset) do
1652
1654
  def last_sql
1653
1655
  @@last_sql
@@ -1668,7 +1670,7 @@ context "Dataset#[]=" do
1668
1670
  end
1669
1671
 
1670
1672
  context "Dataset#set" do
1671
- setup do
1673
+ before do
1672
1674
  c = Class.new(Sequel::Dataset) do
1673
1675
  def last_sql
1674
1676
  @@last_sql
@@ -1690,7 +1692,7 @@ end
1690
1692
 
1691
1693
 
1692
1694
  context "Dataset#insert_multiple" do
1693
- setup do
1695
+ before do
1694
1696
  c = Class.new(Sequel::Dataset) do
1695
1697
  attr_reader :inserts
1696
1698
  def insert(arg)
@@ -1715,7 +1717,7 @@ context "Dataset#insert_multiple" do
1715
1717
  end
1716
1718
 
1717
1719
  context "Dataset aggregate methods" do
1718
- setup do
1720
+ before do
1719
1721
  c = Class.new(Sequel::Dataset) do
1720
1722
  def fetch_rows(sql)
1721
1723
  yield({1 => sql})
@@ -1746,7 +1748,7 @@ context "Dataset aggregate methods" do
1746
1748
  end
1747
1749
 
1748
1750
  context "Dataset#range" do
1749
- setup do
1751
+ before do
1750
1752
  c = Class.new(Sequel::Dataset) do
1751
1753
  @@sql = nil
1752
1754
 
@@ -1774,7 +1776,7 @@ context "Dataset#range" do
1774
1776
  end
1775
1777
 
1776
1778
  context "Dataset#interval" do
1777
- setup do
1779
+ before do
1778
1780
  c = Class.new(Sequel::Dataset) do
1779
1781
  @@sql = nil
1780
1782
 
@@ -1802,10 +1804,10 @@ context "Dataset#interval" do
1802
1804
  end
1803
1805
 
1804
1806
  context "Dataset #first and #last" do
1805
- setup do
1807
+ before do
1806
1808
  @c = Class.new(Sequel::Dataset) do
1807
- def each(opts = nil, &block)
1808
- s = select_sql(opts)
1809
+ def each(&block)
1810
+ s = select_sql
1809
1811
  x = [:a,1,:b,2,s]
1810
1812
  i = /LIMIT (\d+)/.match(s)[1].to_i.times{yield x}
1811
1813
  end
@@ -1865,7 +1867,7 @@ context "Dataset #first and #last" do
1865
1867
  end
1866
1868
 
1867
1869
  context "Dataset compound operations" do
1868
- setup do
1870
+ before do
1869
1871
  @a = Sequel::Dataset.new(nil).from(:a).filter(:z => 1)
1870
1872
  @b = Sequel::Dataset.new(nil).from(:b).filter(:z => 2)
1871
1873
  end
@@ -1915,7 +1917,7 @@ context "Dataset compound operations" do
1915
1917
  end
1916
1918
 
1917
1919
  context "Dataset#[]" do
1918
- setup do
1920
+ before do
1919
1921
  @c = Class.new(Sequel::Dataset) do
1920
1922
  @@last_dataset = nil
1921
1923
 
@@ -1923,7 +1925,7 @@ context "Dataset#[]" do
1923
1925
  @@last_dataset
1924
1926
  end
1925
1927
 
1926
- def single_record(opts = nil)
1928
+ def single_record
1927
1929
  @@last_dataset = opts ? clone(opts) : self
1928
1930
  {1 => 2, 3 => 4}
1929
1931
  end
@@ -1941,7 +1943,7 @@ context "Dataset#[]" do
1941
1943
  end
1942
1944
 
1943
1945
  context "Dataset#single_record" do
1944
- setup do
1946
+ before do
1945
1947
  @c = Class.new(Sequel::Dataset) do
1946
1948
  def fetch_rows(sql)
1947
1949
  yield sql
@@ -1959,11 +1961,11 @@ context "Dataset#single_record" do
1959
1961
  @d.single_record.should == 'SELECT * FROM test LIMIT 1'
1960
1962
  end
1961
1963
 
1962
- specify "should pass opts to each" do
1964
+ deprec_specify "should pass opts to each" do
1963
1965
  @d.single_record(:order => [:name]).should == 'SELECT * FROM test ORDER BY name LIMIT 1'
1964
1966
  end
1965
1967
 
1966
- specify "should override the limit if passed as an option" do
1968
+ deprec_specify "should override the limit if passed as an option" do
1967
1969
  @d.single_record(:limit => 3).should == 'SELECT * FROM test LIMIT 1'
1968
1970
  end
1969
1971
 
@@ -1973,7 +1975,7 @@ context "Dataset#single_record" do
1973
1975
  end
1974
1976
 
1975
1977
  context "Dataset#single_value" do
1976
- setup do
1978
+ before do
1977
1979
  @c = Class.new(Sequel::Dataset) do
1978
1980
  def fetch_rows(sql)
1979
1981
  yield({1 => sql})
@@ -1991,7 +1993,7 @@ context "Dataset#single_value" do
1991
1993
  @d.single_value.should == 'SELECT * FROM test LIMIT 1'
1992
1994
  end
1993
1995
 
1994
- specify "should pass opts to each" do
1996
+ deprec_specify "should pass opts to each" do
1995
1997
  @d.single_value(:from => [:blah]).should == 'SELECT * FROM blah LIMIT 1'
1996
1998
  end
1997
1999
 
@@ -2006,7 +2008,7 @@ context "Dataset#single_value" do
2006
2008
  end
2007
2009
 
2008
2010
  context "Dataset#get" do
2009
- setup do
2011
+ before do
2010
2012
  @c = Class.new(Sequel::Dataset) do
2011
2013
  attr_reader :last_sql
2012
2014
 
@@ -2034,7 +2036,7 @@ context "Dataset#get" do
2034
2036
 
2035
2037
  specify "should accept a block that yields a virtual row" do
2036
2038
  @d.get{|o| o.x__b.as(:name)}.should == "SELECT x.b AS name FROM test LIMIT 1"
2037
- @d.get{|o| o.x(1).as(:name)}.should == "SELECT x(1) AS name FROM test LIMIT 1"
2039
+ @d.get{x(1).as(:name)}.should == "SELECT x(1) AS name FROM test LIMIT 1"
2038
2040
  end
2039
2041
 
2040
2042
  specify "should raise an error if both a regular argument and block argument are used" do
@@ -2043,7 +2045,7 @@ context "Dataset#get" do
2043
2045
  end
2044
2046
 
2045
2047
  context "Dataset#set_row_proc" do
2046
- setup do
2048
+ before do
2047
2049
  @c = Class.new(Sequel::Dataset) do
2048
2050
  def fetch_rows(sql, &block)
2049
2051
  # yield a hash with kind as the 1 bit of a number
@@ -2070,7 +2072,7 @@ context "Dataset#set_row_proc" do
2070
2072
  end
2071
2073
 
2072
2074
  context "Dataset#set_model" do
2073
- setup do
2075
+ before do
2074
2076
  @c = Class.new(Sequel::Dataset) do
2075
2077
  def fetch_rows(sql, &block)
2076
2078
  # yield a hash with kind as the 1 bit of a number
@@ -2085,38 +2087,38 @@ context "Dataset#set_model" do
2085
2087
  end
2086
2088
  end
2087
2089
 
2088
- specify "should clear the models hash and restore the stock #each if nil is specified" do
2090
+ deprec_specify "should clear the models hash and restore the stock #each if nil is specified" do
2089
2091
  @dataset.set_model(@m)
2090
2092
  @dataset.set_model(nil)
2091
2093
  @dataset.first.should == {:kind => 1}
2092
2094
  @dataset.model_classes.should be_nil
2093
2095
  end
2094
2096
 
2095
- specify "should clear the models hash and restore the stock #each if nothing is specified" do
2097
+ deprec_specify "should clear the models hash and restore the stock #each if nothing is specified" do
2096
2098
  @dataset.set_model(@m)
2097
2099
  @dataset.set_model(nil)
2098
2100
  @dataset.first.should == {:kind => 1}
2099
2101
  @dataset.model_classes.should be_nil
2100
2102
  end
2101
2103
 
2102
- specify "should alter #each to provide model instances" do
2104
+ deprec_specify "should alter #each to provide model instances" do
2103
2105
  @dataset.first.should == {:kind => 1}
2104
2106
  @dataset.set_model(@m)
2105
2107
  @dataset.first.should == @m.new({:kind => 1})
2106
2108
  end
2107
2109
 
2108
- specify "should set opts[:naked] to nil" do
2110
+ deprec_specify "should set opts[:naked] to nil" do
2109
2111
  @dataset.opts[:naked] = true
2110
2112
  @dataset.set_model(@m)
2111
2113
  @dataset.opts[:naked].should be_nil
2112
2114
  end
2113
2115
 
2114
- specify "should send additional arguments to the models' initialize method" do
2116
+ deprec_specify "should send additional arguments to the models' initialize method" do
2115
2117
  @dataset.set_model(@m, 7, 6, 5)
2116
2118
  @dataset.first.should == @m.new({:kind => 1}, 7, 6, 5)
2117
2119
  end
2118
2120
 
2119
- specify "should provide support for polymorphic model instantiation" do
2121
+ deprec_specify "should provide support for polymorphic model instantiation" do
2120
2122
  @m1 = Class.new(@m)
2121
2123
  @m2 = Class.new(@m)
2122
2124
  @dataset.set_model(:kind, 0 => @m1, 1 => @m2)
@@ -2133,7 +2135,7 @@ context "Dataset#set_model" do
2133
2135
  @dataset.first.should == {:kind => 1}
2134
2136
  end
2135
2137
 
2136
- specify "should send additional arguments for polymorphic models as well" do
2138
+ deprec_specify "should send additional arguments for polymorphic models as well" do
2137
2139
  @m1 = Class.new(@m)
2138
2140
  @m2 = Class.new(@m)
2139
2141
  @dataset.set_model(:kind, {0 => @m1, 1 => @m2}, :hey => :wow)
@@ -2144,7 +2146,7 @@ context "Dataset#set_model" do
2144
2146
  all[3].class.should == @m1; all[3].args.should == [{:hey => :wow}]
2145
2147
  end
2146
2148
 
2147
- specify "should raise for invalid parameters" do
2149
+ deprec_specify "should raise for invalid parameters" do
2148
2150
  proc {@dataset.set_model('kind')}.should raise_error(ArgumentError)
2149
2151
  proc {@dataset.set_model(0)}.should raise_error(ArgumentError)
2150
2152
  proc {@dataset.set_model(:kind)}.should raise_error(ArgumentError) # no hash given
@@ -2152,7 +2154,7 @@ context "Dataset#set_model" do
2152
2154
  end
2153
2155
 
2154
2156
  context "Dataset#model_classes" do
2155
- setup do
2157
+ before do
2156
2158
  @c = Class.new(Sequel::Dataset) do
2157
2159
  # # We don't need that for now
2158
2160
  # def fetch_rows(sql, &block)
@@ -2167,16 +2169,16 @@ context "Dataset#model_classes" do
2167
2169
  end
2168
2170
  end
2169
2171
 
2170
- specify "should return nil for a naked dataset" do
2172
+ deprec_specify "should return nil for a naked dataset" do
2171
2173
  @dataset.model_classes.should == nil
2172
2174
  end
2173
2175
 
2174
- specify "should return a {nil => model_class} hash for a model dataset" do
2176
+ deprec_specify "should return a {nil => model_class} hash for a model dataset" do
2175
2177
  @dataset.set_model(@m)
2176
2178
  @dataset.model_classes.should == {nil => @m}
2177
2179
  end
2178
2180
 
2179
- specify "should return the polymorphic hash for a polymorphic model dataset" do
2181
+ deprec_specify "should return the polymorphic hash for a polymorphic model dataset" do
2180
2182
  @m1 = Class.new(@m)
2181
2183
  @m2 = Class.new(@m)
2182
2184
  @dataset.set_model(:key, 0 => @m1, 1 => @m2)
@@ -2185,7 +2187,7 @@ context "Dataset#model_classes" do
2185
2187
  end
2186
2188
 
2187
2189
  context "Dataset#polymorphic_key" do
2188
- setup do
2190
+ before do
2189
2191
  @c = Class.new(Sequel::Dataset) do
2190
2192
  # # We don't need this for now
2191
2193
  # def fetch_rows(sql, &block)
@@ -2200,18 +2202,18 @@ context "Dataset#polymorphic_key" do
2200
2202
  end
2201
2203
  end
2202
2204
 
2203
- specify "should return nil for a naked dataset" do
2205
+ deprec_specify "should return nil for a naked dataset" do
2204
2206
  @dataset.polymorphic_key.should be_nil
2205
2207
  end
2206
2208
 
2207
- specify "should return the polymorphic key" do
2209
+ deprec_specify "should return the polymorphic key" do
2208
2210
  @dataset.set_model(:id, nil => @m)
2209
2211
  @dataset.polymorphic_key.should == :id
2210
2212
  end
2211
2213
  end
2212
2214
 
2213
2215
  context "A model dataset" do
2214
- setup do
2216
+ before do
2215
2217
  @c = Class.new(Sequel::Dataset) do
2216
2218
  def fetch_rows(sql, &block)
2217
2219
  (1..10).each(&block)
@@ -2223,17 +2225,17 @@ context "A model dataset" do
2223
2225
  def initialize(c); @c = c; end
2224
2226
  def ==(o); @c == o.c; end
2225
2227
  end
2226
- @dataset.set_model(@m)
2228
+ @dataset.row_proc = Proc.new{|r| @m.new(r)}
2227
2229
  end
2228
2230
 
2229
- specify "should supply naked records if the naked option is specified" do
2231
+ deprec_specify "should supply naked records if the naked option is specified" do
2230
2232
  @dataset.each {|r| r.class.should == @m}
2231
- @dataset.each(:naked => true) {|r| r.class.should == Fixnum}
2233
+ @dataset.naked.each(:naked => true) {|r| r.class.should == Fixnum}
2232
2234
  end
2233
2235
  end
2234
2236
 
2235
2237
  context "A polymorphic model dataset" do
2236
- setup do
2238
+ before do
2237
2239
  @c = Class.new(Sequel::Dataset) do
2238
2240
  def fetch_rows(sql, &block)
2239
2241
  (1..10).each {|i| block.call(:bit => i[0])}
@@ -2247,7 +2249,7 @@ context "A polymorphic model dataset" do
2247
2249
  end
2248
2250
  end
2249
2251
 
2250
- specify "should use a nil key in the polymorphic hash to specify the default model class" do
2252
+ deprec_specify "should use a nil key in the polymorphic hash to specify the default model class" do
2251
2253
  @m2 = Class.new(@m)
2252
2254
  @dataset.set_model(:bit, nil => @m, 1 => @m2)
2253
2255
  all = @dataset.all
@@ -2258,20 +2260,20 @@ context "A polymorphic model dataset" do
2258
2260
  #...
2259
2261
  end
2260
2262
 
2261
- specify "should raise Sequel::Error if no suitable class is found in the polymorphic hash" do
2263
+ deprec_specify "should raise Sequel::Error if no suitable class is found in the polymorphic hash" do
2262
2264
  @m2 = Class.new(@m)
2263
2265
  @dataset.set_model(:bit, 1 => @m2)
2264
2266
  proc {@dataset.all}.should raise_error(Sequel::Error)
2265
2267
  end
2266
2268
 
2267
- specify "should supply naked records if the naked option is specified" do
2269
+ deprec_specify "should supply naked records if the naked option is specified" do
2268
2270
  @dataset.set_model(:bit, nil => @m)
2269
2271
  @dataset.each(:naked => true) {|r| r.class.should == Hash}
2270
2272
  end
2271
2273
  end
2272
2274
 
2273
2275
  context "A dataset with associated model class(es)" do
2274
- setup do
2276
+ before do
2275
2277
  @c = Class.new(Sequel::Dataset) do
2276
2278
  def fetch_rows(sql, &block)
2277
2279
  block.call({:x => 1, :y => 2})
@@ -2290,14 +2292,14 @@ context "A dataset with associated model class(es)" do
2290
2292
  @m3 = Class.new(@m2)
2291
2293
  end
2292
2294
 
2293
- specify "should instantiate an instance by passing the record hash as argument" do
2295
+ deprec_specify "should instantiate an instance by passing the record hash as argument" do
2294
2296
  @dataset.set_model(@m1)
2295
2297
  o = @dataset.first
2296
2298
  o.class.should == @m1
2297
2299
  o.v.should == {:x => 1, :y => 2}
2298
2300
  end
2299
2301
 
2300
- specify "should use the .load constructor if available" do
2302
+ deprec_specify "should use the .load constructor if available" do
2301
2303
  @dataset.set_model(@m2)
2302
2304
  o = @dataset.first
2303
2305
  o.class.should == @m2
@@ -2305,7 +2307,7 @@ context "A dataset with associated model class(es)" do
2305
2307
  o.vv.should == {:x => 1, :y => 2}
2306
2308
  end
2307
2309
 
2308
- specify "should use the .load constructor also for polymorphic datasets" do
2310
+ deprec_specify "should use the .load constructor also for polymorphic datasets" do
2309
2311
  @dataset.set_model(:y, 1 => @m2, 2 => @m3)
2310
2312
  o = @dataset.first
2311
2313
  o.class.should == @m3
@@ -2315,7 +2317,7 @@ context "A dataset with associated model class(es)" do
2315
2317
  end
2316
2318
 
2317
2319
  context "Dataset#<<" do
2318
- setup do
2320
+ before do
2319
2321
  @d = Sequel::Dataset.new(nil)
2320
2322
  @d.meta_def(:insert) do |*args|
2321
2323
  1234567890
@@ -2328,75 +2330,75 @@ context "Dataset#<<" do
2328
2330
  end
2329
2331
 
2330
2332
  context "A paginated dataset" do
2331
- setup do
2333
+ before do
2332
2334
  @d = Sequel::Dataset.new(nil)
2333
2335
  @d.meta_def(:count) {153}
2334
2336
 
2335
- @paginated = @d.paginate(1, 20)
2337
+ deprec{@paginated = @d.paginate(1, 20)}
2336
2338
  end
2337
2339
 
2338
- specify "should raise an error if the dataset already has a limit" do
2340
+ deprec_specify "should raise an error if the dataset already has a limit" do
2339
2341
  proc{@d.limit(10).paginate(1,10)}.should raise_error(Sequel::Error)
2340
2342
  proc{@paginated.paginate(2,20)}.should raise_error(Sequel::Error)
2341
2343
  end
2342
2344
 
2343
- specify "should set the limit and offset options correctly" do
2345
+ deprec_specify "should set the limit and offset options correctly" do
2344
2346
  @paginated.opts[:limit].should == 20
2345
2347
  @paginated.opts[:offset].should == 0
2346
2348
  end
2347
2349
 
2348
- specify "should set the page count correctly" do
2350
+ deprec_specify "should set the page count correctly" do
2349
2351
  @paginated.page_count.should == 8
2350
2352
  @d.paginate(1, 50).page_count.should == 4
2351
2353
  end
2352
2354
 
2353
- specify "should set the current page number correctly" do
2355
+ deprec_specify "should set the current page number correctly" do
2354
2356
  @paginated.current_page.should == 1
2355
2357
  @d.paginate(3, 50).current_page.should == 3
2356
2358
  end
2357
2359
 
2358
- specify "should return the next page number or nil if we're on the last" do
2360
+ deprec_specify "should return the next page number or nil if we're on the last" do
2359
2361
  @paginated.next_page.should == 2
2360
2362
  @d.paginate(4, 50).next_page.should be_nil
2361
2363
  end
2362
2364
 
2363
- specify "should return the previous page number or nil if we're on the last" do
2365
+ deprec_specify "should return the previous page number or nil if we're on the last" do
2364
2366
  @paginated.prev_page.should be_nil
2365
2367
  @d.paginate(4, 50).prev_page.should == 3
2366
2368
  end
2367
2369
 
2368
- specify "should return the page range" do
2370
+ deprec_specify "should return the page range" do
2369
2371
  @paginated.page_range.should == (1..8)
2370
2372
  @d.paginate(4, 50).page_range.should == (1..4)
2371
2373
  end
2372
2374
 
2373
- specify "should return the record range for the current page" do
2375
+ deprec_specify "should return the record range for the current page" do
2374
2376
  @paginated.current_page_record_range.should == (1..20)
2375
2377
  @d.paginate(4, 50).current_page_record_range.should == (151..153)
2376
2378
  @d.paginate(5, 50).current_page_record_range.should == (0..0)
2377
2379
  end
2378
2380
 
2379
- specify "should return the record count for the current page" do
2381
+ deprec_specify "should return the record count for the current page" do
2380
2382
  @paginated.current_page_record_count.should == 20
2381
2383
  @d.paginate(3, 50).current_page_record_count.should == 50
2382
2384
  @d.paginate(4, 50).current_page_record_count.should == 3
2383
2385
  @d.paginate(5, 50).current_page_record_count.should == 0
2384
2386
  end
2385
2387
 
2386
- specify "should know if current page is last page" do
2388
+ deprec_specify "should know if current page is last page" do
2387
2389
  @paginated.last_page?.should be_false
2388
2390
  @d.paginate(2, 20).last_page?.should be_false
2389
2391
  @d.paginate(5, 30).last_page?.should be_false
2390
2392
  @d.paginate(6, 30).last_page?.should be_true
2391
2393
  end
2392
2394
 
2393
- specify "should know if current page is first page" do
2395
+ deprec_specify "should know if current page is first page" do
2394
2396
  @paginated.first_page?.should be_true
2395
2397
  @d.paginate(1, 20).first_page?.should be_true
2396
2398
  @d.paginate(2, 20).first_page?.should be_false
2397
2399
  end
2398
2400
 
2399
- specify "should work with fixed sql" do
2401
+ deprec_specify "should work with fixed sql" do
2400
2402
  ds = @d.clone(:sql => 'select * from blah')
2401
2403
  ds.meta_def(:count) {150}
2402
2404
  ds.paginate(2, 50).sql.should == 'SELECT * FROM (select * from blah) AS t1 LIMIT 50 OFFSET 50'
@@ -2404,16 +2406,16 @@ context "A paginated dataset" do
2404
2406
  end
2405
2407
 
2406
2408
  context "Dataset#each_page" do
2407
- setup do
2409
+ before do
2408
2410
  @d = Sequel::Dataset.new(nil).from(:items)
2409
2411
  @d.meta_def(:count) {153}
2410
2412
  end
2411
2413
 
2412
- specify "should raise an error if the dataset already has a limit" do
2414
+ deprec_specify "should raise an error if the dataset already has a limit" do
2413
2415
  proc{@d.limit(10).each_page(10){}}.should raise_error(Sequel::Error)
2414
2416
  end
2415
2417
 
2416
- specify "should iterate over each page in the resultset as a paginated dataset" do
2418
+ deprec_specify "should iterate over each page in the resultset as a paginated dataset" do
2417
2419
  a = []
2418
2420
  @d.each_page(50) {|p| a << p}
2419
2421
  a.map {|p| p.sql}.should == [
@@ -2426,11 +2428,11 @@ context "Dataset#each_page" do
2426
2428
  end
2427
2429
 
2428
2430
  context "Dataset#columns" do
2429
- setup do
2431
+ before do
2430
2432
  @dataset = DummyDataset.new(nil).from(:items)
2431
2433
  @dataset.meta_def(:columns=) {|c| @columns = c}
2432
2434
  i = 'a'
2433
- @dataset.meta_def(:each) {|o| @columns = select_sql(o||@opts) + i; i = i.next}
2435
+ @dataset.meta_def(:each){@columns = select_sql + i; i = i.next}
2434
2436
  end
2435
2437
 
2436
2438
  specify "should return the value of @columns if @columns is not nil" do
@@ -2453,10 +2455,10 @@ context "Dataset#columns" do
2453
2455
  end
2454
2456
 
2455
2457
  context "Dataset#columns!" do
2456
- setup do
2458
+ before do
2457
2459
  @dataset = DummyDataset.new(nil).from(:items)
2458
2460
  i = 'a'
2459
- @dataset.meta_def(:each) {|o| @columns = select_sql(o||@opts) + i; i = i.next}
2461
+ @dataset.meta_def(:each){@columns = select_sql + i; i = i.next}
2460
2462
  end
2461
2463
 
2462
2464
  specify "should always attempt to get a record and return @columns" do
@@ -2470,25 +2472,25 @@ end
2470
2472
  require 'stringio'
2471
2473
 
2472
2474
  context "Dataset#print" do
2473
- setup do
2475
+ before do
2474
2476
  @output = StringIO.new
2475
2477
  @orig_stdout = $stdout
2476
2478
  $stdout = @output
2477
2479
  @dataset = DummyDataset.new(nil).from(:items)
2478
2480
  end
2479
2481
 
2480
- teardown do
2482
+ after do
2481
2483
  $stdout = @orig_stdout
2482
2484
  end
2483
2485
 
2484
- specify "should print out a table with the values" do
2486
+ deprec_specify "should print out a table with the values" do
2485
2487
  @dataset.print(:a, :b)
2486
2488
  @output.rewind
2487
2489
  @output.read.should == \
2488
2490
  "+-+-+\n|a|b|\n+-+-+\n|1|2|\n|3|4|\n|5|6|\n+-+-+\n"
2489
2491
  end
2490
2492
 
2491
- specify "should default to the dataset's columns" do
2493
+ deprec_specify "should default to the dataset's columns" do
2492
2494
  @dataset.meta_def(:columns) {[:a, :b]}
2493
2495
  @dataset.print
2494
2496
  @output.rewind
@@ -2497,8 +2499,8 @@ context "Dataset#print" do
2497
2499
  end
2498
2500
  end
2499
2501
 
2500
- context "Dataset#multi_insert" do
2501
- setup do
2502
+ context "Dataset#import" do
2503
+ before do
2502
2504
  @dbc = Class.new do
2503
2505
  attr_reader :sqls
2504
2506
 
@@ -2508,7 +2510,7 @@ context "Dataset#multi_insert" do
2508
2510
  end
2509
2511
  alias execute_dui execute
2510
2512
 
2511
- def transaction
2513
+ def transaction(opts={})
2512
2514
  @sqls ||= []
2513
2515
  @sqls << 'BEGIN'
2514
2516
  yield
@@ -2522,8 +2524,8 @@ context "Dataset#multi_insert" do
2522
2524
  @list = [{:name => 'abc'}, {:name => 'def'}, {:name => 'ghi'}]
2523
2525
  end
2524
2526
 
2525
- specify "should join all inserts into a single SQL string" do
2526
- @ds.multi_insert(@list)
2527
+ deprec_specify "should issue multiple inserts inside a transaction" do
2528
+ @ds.import(@list)
2527
2529
  @db.sqls.should == [
2528
2530
  'BEGIN',
2529
2531
  "INSERT INTO items (name) VALUES ('abc')",
@@ -2533,8 +2535,41 @@ context "Dataset#multi_insert" do
2533
2535
  ]
2534
2536
  end
2535
2537
 
2536
- specify "should accept the :commit_every option for committing every x records" do
2537
- @ds.multi_insert(@list, :commit_every => 2)
2538
+ deprec_specify "should handle different formats for tables" do
2539
+ @ds = @ds.from(:sch__tab)
2540
+ @ds.import(@list)
2541
+ @db.sqls.should == [
2542
+ 'BEGIN',
2543
+ "INSERT INTO sch.tab (name) VALUES ('abc')",
2544
+ "INSERT INTO sch.tab (name) VALUES ('def')",
2545
+ "INSERT INTO sch.tab (name) VALUES ('ghi')",
2546
+ 'COMMIT'
2547
+ ]
2548
+ @db.sqls.clear
2549
+
2550
+ @ds = @ds.from(:tab.qualify(:sch))
2551
+ @ds.import(@list)
2552
+ @db.sqls.should == [
2553
+ 'BEGIN',
2554
+ "INSERT INTO sch.tab (name) VALUES ('abc')",
2555
+ "INSERT INTO sch.tab (name) VALUES ('def')",
2556
+ "INSERT INTO sch.tab (name) VALUES ('ghi')",
2557
+ 'COMMIT'
2558
+ ]
2559
+ @db.sqls.clear
2560
+ @ds = @ds.from(:sch__tab.identifier)
2561
+ @ds.import(@list)
2562
+ @db.sqls.should == [
2563
+ 'BEGIN',
2564
+ "INSERT INTO sch__tab (name) VALUES ('abc')",
2565
+ "INSERT INTO sch__tab (name) VALUES ('def')",
2566
+ "INSERT INTO sch__tab (name) VALUES ('ghi')",
2567
+ 'COMMIT'
2568
+ ]
2569
+ end
2570
+
2571
+ deprec_specify "should accept the :commit_every option for committing every x records" do
2572
+ @ds.import(@list, :commit_every => 2)
2538
2573
  @db.sqls.should == [
2539
2574
  'BEGIN',
2540
2575
  "INSERT INTO items (name) VALUES ('abc')",
@@ -2546,8 +2581,8 @@ context "Dataset#multi_insert" do
2546
2581
  ]
2547
2582
  end
2548
2583
 
2549
- specify "should accept the :slice option for committing every x records" do
2550
- @ds.multi_insert(@list, :slice => 2)
2584
+ deprec_specify "should accept the :slice option for committing every x records" do
2585
+ @ds.import(@list, :slice => 2)
2551
2586
  @db.sqls.should == [
2552
2587
  'BEGIN',
2553
2588
  "INSERT INTO items (name) VALUES ('abc')",
@@ -2560,18 +2595,17 @@ context "Dataset#multi_insert" do
2560
2595
  end
2561
2596
 
2562
2597
  specify "should accept string keys as column names" do
2563
- @ds.multi_insert([{'x'=>1, 'y'=>2}, {'x'=>3, 'y'=>4}])
2564
- @ds.multi_insert(['x', 'y'], [[1, 2], [3, 4]])
2598
+ @ds.import(['x', 'y'], [[1, 2], [3, 4]])
2565
2599
  @db.sqls.should == [
2566
2600
  'BEGIN',
2567
2601
  "INSERT INTO items (x, y) VALUES (1, 2)",
2568
2602
  "INSERT INTO items (x, y) VALUES (3, 4)",
2569
2603
  'COMMIT'
2570
- ] * 2
2604
+ ]
2571
2605
  end
2572
2606
 
2573
2607
  specify "should accept a columns array and a values array" do
2574
- @ds.multi_insert([:x, :y], [[1, 2], [3, 4]])
2608
+ @ds.import([:x, :y], [[1, 2], [3, 4]])
2575
2609
  @db.sqls.should == [
2576
2610
  'BEGIN',
2577
2611
  "INSERT INTO items (x, y) VALUES (1, 2)",
@@ -2583,16 +2617,16 @@ context "Dataset#multi_insert" do
2583
2617
  specify "should accept a columns array and a dataset" do
2584
2618
  @ds2 = Sequel::Dataset.new(@db).from(:cats).filter(:purr => true).select(:a, :b)
2585
2619
 
2586
- @ds.multi_insert([:x, :y], @ds2)
2620
+ @ds.import([:x, :y], @ds2)
2587
2621
  @db.sqls.should == [
2588
2622
  'BEGIN',
2589
- "INSERT INTO items (x, y) VALUES (SELECT a, b FROM cats WHERE (purr = 't'))",
2623
+ "INSERT INTO items (x, y) VALUES (SELECT a, b FROM cats WHERE (purr IS TRUE))",
2590
2624
  'COMMIT'
2591
2625
  ]
2592
2626
  end
2593
2627
 
2594
2628
  specify "should accept a columns array and a values array with slice option" do
2595
- @ds.multi_insert([:x, :y], [[1, 2], [3, 4], [5, 6]], :slice => 2)
2629
+ @ds.import([:x, :y], [[1, 2], [3, 4], [5, 6]], :slice => 2)
2596
2630
  @db.sqls.should == [
2597
2631
  'BEGIN',
2598
2632
  "INSERT INTO items (x, y) VALUES (1, 2)",
@@ -2604,35 +2638,151 @@ context "Dataset#multi_insert" do
  ]
  end

- specify "should be aliased by #import" do
- @ds.import([:x, :y], [[1, 2], [3, 4], [5, 6]], :slice => 2)
+ deprec_specify "should not do anything if no columns or values are given" do
+ @ds.import
+ @db.sqls.should be_nil
+
+ @ds.import([])
+ @db.sqls.should be_nil
+
+ @ds.import([], [])
+ @db.sqls.should be_nil
+
+ @ds.import([{}, {}])
+ @db.sqls.should be_nil
+
+ @ds.import([:a, :b], [])
+ @db.sqls.should be_nil
+ end
+ end
+
+ context "Dataset#multi_insert" do
+ before do
+ @dbc = Class.new do
+ attr_reader :sqls
+
+ def execute(sql, opts={})
+ @sqls ||= []
+ @sqls << sql
+ end
+ alias execute_dui execute
+
+ def transaction(opts={})
+ @sqls ||= []
+ @sqls << 'BEGIN'
+ yield
+ @sqls << 'COMMIT'
+ end
+ end
+ @db = @dbc.new
+
+ @ds = Sequel::Dataset.new(@db).from(:items)
+
+ @list = [{:name => 'abc'}, {:name => 'def'}, {:name => 'ghi'}]
+ end
+
+ specify "should issue multiple insert statements inside a transaction" do
+ @ds.multi_insert(@list)
+ @db.sqls.should == [
+ 'BEGIN',
+ "INSERT INTO items (name) VALUES ('abc')",
+ "INSERT INTO items (name) VALUES ('def')",
+ "INSERT INTO items (name) VALUES ('ghi')",
+ 'COMMIT'
+ ]
+ end
+
+ specify "should handle different formats for tables" do
+ @ds = @ds.from(:sch__tab)
+ @ds.multi_insert(@list)
+ @db.sqls.should == [
+ 'BEGIN',
+ "INSERT INTO sch.tab (name) VALUES ('abc')",
+ "INSERT INTO sch.tab (name) VALUES ('def')",
+ "INSERT INTO sch.tab (name) VALUES ('ghi')",
+ 'COMMIT'
+ ]
+ @db.sqls.clear
+
+ @ds = @ds.from(:tab.qualify(:sch))
+ @ds.multi_insert(@list)
+ @db.sqls.should == [
+ 'BEGIN',
+ "INSERT INTO sch.tab (name) VALUES ('abc')",
+ "INSERT INTO sch.tab (name) VALUES ('def')",
+ "INSERT INTO sch.tab (name) VALUES ('ghi')",
+ 'COMMIT'
+ ]
+ @db.sqls.clear
+ @ds = @ds.from(:sch__tab.identifier)
+ @ds.multi_insert(@list)
+ @db.sqls.should == [
+ 'BEGIN',
+ "INSERT INTO sch__tab (name) VALUES ('abc')",
+ "INSERT INTO sch__tab (name) VALUES ('def')",
+ "INSERT INTO sch__tab (name) VALUES ('ghi')",
+ 'COMMIT'
+ ]
+ end
+
+ specify "should accept the :commit_every option for committing every x records" do
+ @ds.multi_insert(@list, :commit_every => 2)
+ @db.sqls.should == [
+ 'BEGIN',
+ "INSERT INTO items (name) VALUES ('abc')",
+ "INSERT INTO items (name) VALUES ('def')",
+ 'COMMIT',
+ 'BEGIN',
+ "INSERT INTO items (name) VALUES ('ghi')",
+ 'COMMIT'
+ ]
+ end
+
+ specify "should accept the :slice option for committing every x records" do
+ @ds.multi_insert(@list, :slice => 2)
+ @db.sqls.should == [
+ 'BEGIN',
+ "INSERT INTO items (name) VALUES ('abc')",
+ "INSERT INTO items (name) VALUES ('def')",
+ 'COMMIT',
+ 'BEGIN',
+ "INSERT INTO items (name) VALUES ('ghi')",
+ 'COMMIT'
+ ]
+ end
+
+ specify "should accept string keys as column names" do
+ @ds.multi_insert([{'x'=>1, 'y'=>2}, {'x'=>3, 'y'=>4}])
  @db.sqls.should == [
  'BEGIN',
  "INSERT INTO items (x, y) VALUES (1, 2)",
  "INSERT INTO items (x, y) VALUES (3, 4)",
- 'COMMIT',
+ 'COMMIT'
+ ]
+ end
+
+ deprec_specify "should accept a columns array and a values array" do
+ @ds.multi_insert([:x, :y], [[1, 2], [3, 4]])
+ @db.sqls.should == [
  'BEGIN',
- "INSERT INTO items (x, y) VALUES (5, 6)",
+ "INSERT INTO items (x, y) VALUES (1, 2)",
+ "INSERT INTO items (x, y) VALUES (3, 4)",
  'COMMIT'
  ]
  end

- specify "should not do anything if no columns or values are given" do
- @ds.multi_insert
- @db.sqls.should be_nil
-
- @ds.multi_insert([])
- @db.sqls.should be_nil
+ deprec_specify "should accept a columns array and a dataset" do
+ @ds2 = Sequel::Dataset.new(@db).from(:cats).filter(:purr => true).select(:a, :b)

- @ds.multi_insert([], [])
- @db.sqls.should be_nil
+ @ds.multi_insert([:x, :y], @ds2)
+ @db.sqls.should == [
+ 'BEGIN',
+ "INSERT INTO items (x, y) VALUES (SELECT a, b FROM cats WHERE (purr IS TRUE))",
+ 'COMMIT'
+ ]
+ end

- @ds.multi_insert([{}, {}])
- @db.sqls.should be_nil
-
- @ds.multi_insert([:a, :b], [])
- @db.sqls.should be_nil
-
+ deprec_specify "should accept a columns array and a values array with slice option" do
  @ds.multi_insert([:x, :y], [[1, 2], [3, 4], [5, 6]], :slice => 2)
  @db.sqls.should == [
  'BEGIN',
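The hunks above and below track a split in 2.12: the columns-array form moves to Dataset#import, while multi_insert keeps the array-of-hashes form, and the old combined behaviour now only passes under deprec_specify. A minimal sketch of the import usage these specs assert, assuming the sqlite adapter is available; the table and column names are only for illustration:

  require 'sequel'

  DB = Sequel.sqlite                                        # in-memory SQLite, assumed
  DB.create_table(:items){column :x, :integer; column :y, :integer}

  # Columns array plus values array; :slice => 2 wraps every two rows in its own
  # BEGIN/COMMIT, matching the batched SQL asserted in the specs above.
  DB[:items].import([:x, :y], [[1, 2], [3, 4], [5, 6]], :slice => 2)

  # The array-of-hashes form stays on multi_insert.
  DB[:items].multi_insert([{:x => 7, :y => 8}, {:x => 9, :y => 10}])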
@@ -2645,20 +2795,24 @@ context "Dataset#multi_insert" do
  ]
  end

+ deprec_specify "should not do anything if no hashes are provided" do
+ @ds.multi_insert
+ @db.sqls.should be_nil
+ end
  end

  context "Dataset#query" do
- setup do
+ before do
  @d = Sequel::Dataset.new(nil)
  end

- specify "should support #from" do
+ deprec_specify "should support #from" do
  q = @d.query {from :xxx}
  q.class.should == @d.class
  q.sql.should == "SELECT * FROM xxx"
  end

- specify "should support #select" do
+ deprec_specify "should support #select" do
  q = @d.query do
  select :a, :b___mongo
  from :yyy
@@ -2667,7 +2821,7 @@ context "Dataset#query" do
  q.sql.should == "SELECT a, b AS mongo FROM yyy"
  end

- specify "should support #where" do
+ deprec_specify "should support #where" do
  q = @d.query do
  from :zzz
  where(:x + 2 > :y + 3)
@@ -2688,7 +2842,7 @@ context "Dataset#query" do
  q.sql.should == "SELECT * FROM zzz WHERE (x = 33)"
  end

- specify "should support #group_by and #having" do
+ deprec_specify "should support #group_by and #having" do
  q = @d.query do
  from :abc
  group_by :id
@@ -2698,7 +2852,7 @@ context "Dataset#query" do
  q.sql.should == "SELECT * FROM abc GROUP BY id HAVING (x >= 2)"
  end

- specify "should support #order, #order_by" do
+ deprec_specify "should support #order, #order_by" do
  q = @d.query do
  from :xyz
  order_by :stamp
@@ -2707,11 +2861,11 @@ context "Dataset#query" do
  q.sql.should == "SELECT * FROM xyz ORDER BY stamp"
  end

- specify "should raise on non-chainable method calls" do
+ deprec_specify "should raise on non-chainable method calls" do
  proc {@d.query {first_source}}.should raise_error(Sequel::Error)
  end

- specify "should raise on each, insert, update, delete" do
+ deprec_specify "should raise on each, insert, update, delete" do
  proc {@d.query {each}}.should raise_error(Sequel::Error)
  proc {@d.query {insert(:x => 1)}}.should raise_error(Sequel::Error)
  proc {@d.query {update(:x => 1)}}.should raise_error(Sequel::
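The Dataset#query specs are now wrapped in deprec_specify, so the block-based query DSL they exercise still passes in 2.12 but is deprecated. A rough sketch of plain chaining that produces the same SQL the specs assert, with the dataset built the same way the specs build theirs:

  require 'sequel'

  d = Sequel::Dataset.new(nil)
  # Deprecated block form, still covered above:  d.query{from :xyz; order_by :stamp}
  # Ordinary chaining, not deprecated:
  d.from(:xyz).order(:stamp).sql   # => "SELECT * FROM xyz ORDER BY stamp"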
@@ -2720,7 +2874,7 @@ context "Dataset#query" do
  end

  context "Dataset" do
- setup do
+ before do
  @d = Sequel::Dataset.new(nil).from(:x)
  end

@@ -2767,7 +2921,7 @@ context "Dataset" do
  end

  context "Dataset#transform" do
- setup do
+ before do
  @c = Class.new(Sequel::Dataset) do
  attr_accessor :raw
  attr_accessor :sql
@@ -2786,20 +2940,22 @@ context "Dataset#transform" do
  end

  @ds = @c.new(nil).from(:items)
- @ds.transform(:x => [
- proc {|v| Marshal.load(v)},
- proc {|v| Marshal.dump(v)}
- ])
+ deprec do
+ @ds.transform(:x => [
+ proc {|v| Marshal.load(v)},
+ proc {|v| Marshal.dump(v)}
+ ])
+ end
  end

- specify "should change the dataset to transform values loaded from the database" do
+ deprec_specify "should change the dataset to transform values loaded from the database" do
  @ds.raw = {:x => Marshal.dump([1, 2, 3]), :y => 'hello'}
  @ds.first.should == {:x => [1, 2, 3], :y => 'hello'}
  @ds.raw = {:x => Marshal.dump([1, 2, 3]), :y => 'hello'}
  @ds.all.should == [{:x => [1, 2, 3], :y => 'hello'}]
  end

- specify "should change the dataset to transform values saved to the database" do
+ deprec_specify "should change the dataset to transform values saved to the database" do
  @ds.insert(:x => :toast)
  @ds.sql.should == "INSERT INTO items (x) VALUES ('#{Marshal.dump(:toast)}')"

@@ -2810,7 +2966,7 @@ context "Dataset#transform" do
  @ds.sql.should == "UPDATE items SET x = '#{Marshal.dump(['dream'])}'"
  end

- specify "should be transferred to cloned datasets" do
+ deprec_specify "should be transferred to cloned datasets" do
  @ds2 = @ds.filter(:a => 1)

  @ds2.raw = {:x => Marshal.dump([1, 2, 3]), :y => 'hello'}
@@ -2820,18 +2976,18 @@ context "Dataset#transform" do
  @ds2.sql.should == "INSERT INTO items (x) VALUES ('#{Marshal.dump(:toast)}')"
  end

- specify "should work correctly together with set_row_proc" do
+ deprec_specify "should work correctly together with set_row_proc" do
  @ds.row_proc = proc{|r| r[:z] = r[:x] * 2; r}
  @ds.raw = {:x => Marshal.dump("wow"), :y => 'hello'}
  @ds.first.should == {:x => "wow", :y => 'hello', :z => "wowwow"}

  f = nil
  @ds.raw = {:x => Marshal.dump("wow"), :y => 'hello'}
- @ds.each(:naked => true) {|r| f = r}
+ @ds.naked.each{|r| f = r}
  f.should == {:x => "wow", :y => 'hello'}
  end

- specify "should leave the supplied values intact" do
+ deprec_specify "should leave the supplied values intact" do
  h = {:x => :toast}
  @ds.insert(h)
  h.should == {:x => :toast}
@@ -2839,7 +2995,7 @@ context "Dataset#transform" do
  end

  context "Dataset#transform" do
- setup do
+ before do
  @c = Class.new(Sequel::Dataset) do
  attr_accessor :raw
  attr_accessor :sql
@@ -2860,7 +3016,7 @@ context "Dataset#transform" do
  @ds = @c.new(nil).from(:items)
  end

- specify "should raise Sequel::Error for invalid transformations" do
+ deprec_specify "should raise Sequel::Error for invalid transformations" do
  proc {@ds.transform(:x => 'mau')}.should raise_error(Sequel::Error::InvalidTransform)
  proc {@ds.transform(:x => :mau)}.should raise_error(Sequel::Error::InvalidTransform)
  proc {@ds.transform(:x => [])}.should raise_error(Sequel::Error::InvalidTransform)
@@ -2868,7 +3024,7 @@ context "Dataset#transform" do
  proc {@ds.transform(:x => [proc {|v|}, proc {|v|}])}.should_not raise_error(Sequel::Error::InvalidTransform)
  end

- specify "should support stock YAML transformation" do
+ deprec_specify "should support stock YAML transformation" do
  @ds.transform(:x => :yaml)

  @ds.raw = {:x => [1, 2, 3].to_yaml, :y => 'hello'}
@@ -2892,11 +3048,11 @@ context "Dataset#transform" do
  @ds.first.should == {:x => "wow", :y => 'hello', :z => "wowwow"}
  f = nil
  @ds.raw = {:x => "wow".to_yaml, :y => 'hello'}
- @ds.each(:naked => true) {|r| f = r}
+ @ds.naked.each{|r| f = r}
  f.should == {:x => "wow", :y => 'hello'}
  end

- specify "should support stock Marshal transformation with Base64 encoding" do
+ deprec_specify "should support stock Marshal transformation with Base64 encoding" do
  @ds.transform(:x => :marshal)

  @ds.raw = {:x => [Marshal.dump([1, 2, 3])].pack('m'), :y => 'hello'}
@@ -2920,29 +3076,29 @@ context "Dataset#transform" do
  @ds.first.should == {:x => "wow", :y => 'hello', :z => "wowwow"}
  f = nil
  @ds.raw = {:x => [Marshal.dump("wow")].pack('m'), :y => 'hello'}
- @ds.each(:naked => true) {|r| f = r}
+ @ds.naked.each{|r| f = r}
  f.should == {:x => "wow", :y => 'hello'}
  end

- specify "should support loading of Marshalled values without Base64 encoding" do
+ deprec_specify "should support loading of Marshalled values without Base64 encoding" do
  @ds.transform(:x => :marshal)

  @ds.raw = {:x => Marshal.dump([1,2,3]), :y => nil}
  @ds.first.should == {:x => [1,2,3], :y => nil}
  end

- specify "should return self" do
+ deprec_specify "should return self" do
  @ds.transform(:x => :marshal).should be(@ds)
  end
  end

  context "A dataset with a transform" do
- setup do
+ before do
  @ds = Sequel::Dataset.new(nil).from(:items)
- @ds.transform(:x => :marshal)
+ deprec{@ds.transform(:x => :marshal)}
  end

- specify "should automatically transform hash filters" do
+ deprec_specify "should automatically transform hash filters" do
  @ds.filter(:y => 2).sql.should == 'SELECT * FROM items WHERE (y = 2)'

  @ds.filter(:x => 2).sql.should == "SELECT * FROM items WHERE (x = '#{[Marshal.dump(2)].pack('m')}')"
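Every Dataset#transform spec above now runs under deprec/deprec_specify, so transform is deprecated in 2.12 while still behaving as asserted. A minimal sketch of that behaviour, taken directly from the assertions above, with the dataset built the same way the specs build theirs:

  require 'sequel'

  ds = Sequel::Dataset.new(nil).from(:items)
  ds.transform(:x => :marshal)   # deprecated in 2.12 (run under deprec in the specs)
  # Hash filters on the transformed column are Marshal.dump'ed and Base64 packed:
  ds.filter(:x => 2).sql
  # => "SELECT * FROM items WHERE (x = '#{[Marshal.dump(2)].pack('m')}')"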
@@ -2950,7 +3106,7 @@ context "A dataset with a transform" do
  end

  context "Dataset#to_csv" do
- setup do
+ before do
  @c = Class.new(Sequel::Dataset) do
  attr_accessor :data
  attr_accessor :columns
@@ -2982,7 +3138,7 @@ context "Dataset#to_csv" do
  end

  context "Dataset#create_view" do
- setup do
+ before do
  @dbc = Class.new(Sequel::Database) do
  attr_reader :sqls

@@ -2996,14 +3152,14 @@ context "Dataset#create_view" do
  @ds = @db[:items].order(:abc).filter(:category => 'ruby')
  end

- specify "should create a view with the dataset's sql" do
+ deprec_specify "should create a view with the dataset's sql" do
  @ds.create_view(:xyz)
  @db.sqls.should == ["CREATE VIEW xyz AS #{@ds.sql}"]
  end
  end

  context "Dataset#create_or_replace_view" do
- setup do
+ before do
  @dbc = Class.new(Sequel::Database) do
  attr_reader :sqls

@@ -3017,14 +3173,14 @@ context "Dataset#create_or_replace_view" do
  @ds = @db[:items].order(:abc).filter(:category => 'ruby')
  end

- specify "should create a view with the dataset's sql" do
+ deprec_specify "should create a view with the dataset's sql" do
  @ds.create_or_replace_view(:xyz)
  @db.sqls.should == ["CREATE OR REPLACE VIEW xyz AS #{@ds.sql}"]
  end
  end

  context "Dataset#update_sql" do
- setup do
+ before do
  @ds = Sequel::Dataset.new(nil).from(:items)
  end

@@ -3037,12 +3193,12 @@ context "Dataset#update_sql" do
  end

  specify "should accept array subscript references" do
- @ds.update_sql((:day|1) => 'd').should == "UPDATE items SET day[1] = 'd'"
+ @ds.update_sql((:day.sql_subscript(1)) => 'd').should == "UPDATE items SET day[1] = 'd'"
  end
  end

  context "Dataset#insert_sql" do
- setup do
+ before do
  @ds = Sequel::Dataset.new(nil).from(:items)
  end

@@ -3055,7 +3211,7 @@ context "Dataset#insert_sql" do
  end

  specify "should accept array subscript references" do
- @ds.insert_sql((:day|1) => 'd').should == "INSERT INTO items (day[1]) VALUES ('d')"
+ @ds.insert_sql((:day.sql_subscript(1)) => 'd').should == "INSERT INTO items (day[1]) VALUES ('d')"
  end
  end
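The two hunks above swap the old (:day|1) subscript syntax for Symbol#sql_subscript when building update and insert SQL. A minimal sketch matching the exact SQL the specs assert; items and day are simply the names the specs use:

  require 'sequel'

  ds = Sequel::Dataset.new(nil).from(:items)
  ds.update_sql(:day.sql_subscript(1) => 'd')
  # => "UPDATE items SET day[1] = 'd'"
  ds.insert_sql(:day.sql_subscript(1) => 'd')
  # => "INSERT INTO items (day[1]) VALUES ('d')"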

@@ -3082,7 +3238,7 @@ class DummyMummyDatabase < Sequel::Database
  end

  context "Dataset#table_exists?" do
- setup do
+ before do
  @db = DummyMummyDatabase.new
  @db.instance_variable_set(:@schemas, {:a=>[]})
  @db2 = DummyMummyDatabase.new
@@ -3111,7 +3267,7 @@ context "Dataset#table_exists?" do
  end

  context "Dataset#inspect" do
- setup do
+ before do
  @ds = Sequel::Dataset.new(nil).from(:blah)
  end

@@ -3121,7 +3277,7 @@ context "Dataset#inspect" do
  end

  context "Dataset#all" do
- setup do
+ before do
  @c = Class.new(Sequel::Dataset) do
  def fetch_rows(sql, &block)
  block.call({:x => 1, :y => 2})
@@ -3140,7 +3296,7 @@ context "Dataset#all" do
  ]
  end

- specify "should accept options and pass them to #each" do
+ deprec_specify "should accept options and pass them to #each" do
  @dataset.all(:limit => 33).should == [
  {:x => 1, :y => 2},
  {:x => 3, :y => 4},
@@ -3160,7 +3316,7 @@ context "Dataset#all" do
  end

  context "Dataset#grep" do
- setup do
+ before do
  @ds = Sequel::Dataset.new(nil).from(:posts)
  end

@@ -3199,23 +3355,23 @@ context "Dataset#grep" do
  end

  context "Sequel.use_parse_tree" do
- specify "be false" do
+ deprec_specify "be false" do
  Sequel.use_parse_tree.should == false
  end
  end

  context "Sequel.use_parse_tree=" do
- specify "raise an error if true" do
+ deprec_specify "raise an error if true" do
  proc{Sequel.use_parse_tree = true}.should raise_error(Sequel::Error)
  end

- specify "do nothing if false" do
+ deprec_specify "do nothing if false" do
  proc{Sequel.use_parse_tree = false}.should_not raise_error
  end
  end

  context "Dataset.dataset_classes" do
- specify "should be an array of dataset subclasses" do
+ deprec_specify "should be an array of dataset subclasses" do
  ds_class = Class.new(Sequel::Dataset)
  Sequel::Dataset.dataset_classes.should be_a_kind_of(Array)
  Sequel::Dataset.dataset_classes.should include(ds_class)
@@ -3223,7 +3379,7 @@ context "Dataset.dataset_classes" do
  end

  context "Dataset default #fetch_rows, #insert, #update, and #delete, #execute" do
- setup do
+ before do
  @db = Sequel::Database.new
  @ds = @db[:items]
  end
@@ -3254,9 +3410,9 @@ context "Dataset default #fetch_rows, #insert, #update, and #delete, #execute" d
  end

  context "Dataset prepared statements and bound variables " do
- setup do
+ before do
  @db = Sequel::Database.new
- @db.meta_eval{attr_accessor :sqls}
+ @db.send :metaattr_accessor, :sqls
  @db.sqls = []
  def @db.execute(sql, opts={})
  @sqls << sql
@@ -3342,9 +3498,9 @@ context "Dataset prepared statements and bound variables " do
  end

  context Sequel::Dataset::UnnumberedArgumentMapper do
- setup do
+ before do
  @db = Sequel::Database.new
- @db.meta_eval{attr_accessor :sqls}
+ @db.send :metaattr_accessor, :sqls
  @db.sqls = []
  def @db.execute(sql, opts={})
  @sqls << [sql, *opts[:arguments]]
@@ -3445,7 +3601,7 @@ context "Sequel::Dataset#each" do
  end
  end

- specify "should not set the columns if passing an option that modifies them" do
+ deprec_specify "should not set the columns if passing an option that modifies them" do
  @ds.each(:select=>[:count]){}
  @ds.columns.should == [:a]
  @ds.each(:from=>[:count]){}
@@ -3456,7 +3612,7 @@ context "Sequel::Dataset#each" do
  @ds.columns.should == [:a]
  end

- specify "should have the correct columns inside the block regardless" do
+ deprec_specify "should have the correct columns inside the block regardless" do
  @ds.each(:select=>[:count]) do |x|
  x[:count].should == 'SELECT count FROM items'
  @ds.columns.should == [:count]