dataset 1.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/CHANGELOG +59 -0
- data/LICENSE +19 -0
- data/README +111 -0
- data/Rakefile +31 -0
- data/TODO +15 -0
- data/VERSION.yml +4 -0
- data/lib/dataset.rb +128 -0
- data/lib/dataset/base.rb +157 -0
- data/lib/dataset/collection.rb +19 -0
- data/lib/dataset/database/base.rb +30 -0
- data/lib/dataset/database/mysql.rb +34 -0
- data/lib/dataset/database/postgresql.rb +34 -0
- data/lib/dataset/database/sqlite3.rb +32 -0
- data/lib/dataset/extensions/cucumber.rb +20 -0
- data/lib/dataset/extensions/rspec.rb +21 -0
- data/lib/dataset/extensions/test_unit.rb +60 -0
- data/lib/dataset/instance_methods.rb +10 -0
- data/lib/dataset/load.rb +47 -0
- data/lib/dataset/record/fixture.rb +73 -0
- data/lib/dataset/record/meta.rb +66 -0
- data/lib/dataset/record/model.rb +50 -0
- data/lib/dataset/resolver.rb +110 -0
- data/lib/dataset/session.rb +51 -0
- data/lib/dataset/session_binding.rb +317 -0
- data/lib/dataset/version.rb +9 -0
- data/plugit/descriptor.rb +25 -0
- data/spec/dataset/cucumber_spec.rb +54 -0
- data/spec/dataset/database/base_spec.rb +21 -0
- data/spec/dataset/record/meta_spec.rb +14 -0
- data/spec/dataset/resolver_spec.rb +110 -0
- data/spec/dataset/rspec_spec.rb +133 -0
- data/spec/dataset/session_binding_spec.rb +198 -0
- data/spec/dataset/session_spec.rb +299 -0
- data/spec/dataset/test_unit_spec.rb +210 -0
- data/spec/fixtures/datasets/constant_not_defined.rb +0 -0
- data/spec/fixtures/datasets/ending_with_dataset.rb +2 -0
- data/spec/fixtures/datasets/exact_name.rb +2 -0
- data/spec/fixtures/datasets/not_a_dataset_base.rb +2 -0
- data/spec/fixtures/more_datasets/in_another_directory.rb +2 -0
- data/spec/models.rb +18 -0
- data/spec/schema.rb +26 -0
- data/spec/spec_helper.rb +47 -0
- data/spec/stubs/mini_rails.rb +18 -0
- data/spec/stubs/test_help.rb +1 -0
- data/tasks/dataset.rake +19 -0
- metadata +120 -0
data/lib/dataset/collection.rb
ADDED
@@ -0,0 +1,19 @@
+require 'set'
+
+module Dataset
+  class Collection < Array # :nodoc:
+    def initialize(parent)
+      concat parent
+    end
+
+    def <<(dataset)
+      super
+      uniq!
+      self
+    end
+
+    def subset?(other)
+      Set.new(self).subset?(Set.new(other))
+    end
+  end
+end
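
A minimal usage sketch of the collection semantics above, assuming the gem is on the load path; the two stand-in classes are hypothetical and only serve to show the deduplication and subset behaviour:

    require 'dataset'

    PeopleDataset = Class.new   # stand-ins for real dataset classes
    PlacesDataset = Class.new

    collection = Dataset::Collection.new([PeopleDataset])
    collection << PlacesDataset
    collection << PlacesDataset   # << calls uniq!, so the duplicate collapses
    collection.size               # => 2
    collection.subset?([PeopleDataset, PlacesDataset, Class.new])   # => true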
data/lib/dataset/database/base.rb
ADDED
@@ -0,0 +1,30 @@
+require 'fileutils'
+
+module Dataset
+  module Database # :nodoc:
+
+    # Provides Dataset a way to clear, dump and load databases.
+    class Base
+      include FileUtils
+
+      def clear
+        connection = ActiveRecord::Base.connection
+        ActiveRecord::Base.silence do
+          connection.tables.each do |table_name|
+            connection.delete "DELETE FROM #{connection.quote_table_name(table_name)}",
+              "Dataset::Database#clear" unless table_name == ActiveRecord::Migrator.schema_migrations_table_name
+          end
+        end
+      end
+
+      def record_meta(record_class)
+        record_metas[record_class] ||= Dataset::Record::Meta.new(record_class)
+      end
+
+      protected
+        def record_metas
+          @record_metas ||= Hash.new
+        end
+    end
+  end
+end
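
For illustration, a sketch of how this adapter base behaves at a console, assuming an established ActiveRecord connection and a Person model (both assumptions, not part of the diff):

    adapter = Dataset::Database::Base.new

    meta_one = adapter.record_meta(Person)   # builds a Dataset::Record::Meta for Person
    meta_two = adapter.record_meta(Person)   # second call hits the memoized hash
    meta_one.equal?(meta_two)                # => true, one Meta per record class

    adapter.clear   # issues DELETE FROM for every table except schema_migrations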
data/lib/dataset/database/mysql.rb
ADDED
@@ -0,0 +1,34 @@
+module Dataset
+  module Database # :nodoc:
+
+    # The interface to a mySQL database, this will capture by creating a dump
+    # file and restore by loading one of the same.
+    #
+    class Mysql < Base
+      def initialize(database_spec, storage_path)
+        @database = database_spec[:database]
+        @username = database_spec[:username]
+        @password = database_spec[:password]
+        @storage_path = storage_path
+        FileUtils.mkdir_p(@storage_path)
+      end
+
+      def capture(datasets)
+        return if datasets.nil? || datasets.empty?
+        `mysqldump -u #{@username} --password=#{@password} --compact --extended-insert --no-create-db --add-drop-table --quick --quote-names #{@database} > #{storage_path(datasets)}`
+      end
+
+      def restore(datasets)
+        store = storage_path(datasets)
+        if File.file?(store)
+          `mysql -u #{@username} --password=#{@password} --database=#{@database} < #{store}`
+          true
+        end
+      end
+
+      def storage_path(datasets)
+        "#{@storage_path}/#{datasets.collect {|c| c.__id__}.join('_')}.sql"
+      end
+    end
+  end
+end
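
A hedged sketch of the capture/restore round trip; the connection spec and paths here are hypothetical and mirror the relevant keys of a database.yml entry:

    spec = { :database => 'app_test', :username => 'root', :password => 'secret' }
    db   = Dataset::Database::Mysql.new(spec, '/tmp/dataset_dumps')

    datasets = [Class.new, Class.new]   # stand-ins; only their __id__ values matter
    db.storage_path(datasets)           # => "/tmp/dataset_dumps/<id>_<id>.sql"
    db.capture(datasets)                # shells out: mysqldump ... app_test > that file
    db.restore(datasets)                # pipes the dump back through mysql, true if the file exists

Each distinct combination of dataset classes gets its own dump file, keyed by the classes' object ids.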
data/lib/dataset/database/postgresql.rb
ADDED
@@ -0,0 +1,34 @@
+module Dataset
+  module Database # :nodoc:
+
+    # The interface to a PostgreSQL database, this will capture by creating a dump
+    # file and restore by loading one of the same.
+    #
+    class Postgresql < Base
+      def initialize(database_spec, storage_path)
+        @database = database_spec[:database]
+        @username = database_spec[:username]
+        @password = database_spec[:password]
+        @storage_path = storage_path
+        FileUtils.mkdir_p(@storage_path)
+      end
+
+      def capture(datasets)
+        return if datasets.nil? || datasets.empty?
+        `pg_dump -c #{@database} > #{storage_path(datasets)}`
+      end
+
+      def restore(datasets)
+        store = storage_path(datasets)
+        if File.file?(store)
+          `psql -U #{@username} -p #{@password} -e #{@database} < #{store}`
+          true
+        end
+      end
+
+      def storage_path(datasets)
+        "#{@storage_path}/#{datasets.collect {|c| c.__id__}.join('_')}.sql"
+      end
+    end
+  end
+end
data/lib/dataset/database/sqlite3.rb
ADDED
@@ -0,0 +1,32 @@
+module Dataset
+  module Database # :nodoc:
+
+    # The interface to a sqlite3 database, this will capture by copying the db
+    # file and restore by replacing and reconnecting to one of the same.
+    #
+    class Sqlite3 < Base
+      def initialize(database_spec, storage_path)
+        @database_path, @storage_path = database_spec[:database], storage_path
+        FileUtils.mkdir_p(@storage_path)
+      end
+
+      def capture(datasets)
+        return if datasets.nil? || datasets.empty?
+        cp @database_path, storage_path(datasets)
+      end
+
+      def restore(datasets)
+        store = storage_path(datasets)
+        if File.file?(store)
+          mv store, @database_path
+          ActiveRecord::Base.establish_connection 'test'
+          true
+        end
+      end
+
+      def storage_path(datasets)
+        "#{@storage_path}/#{datasets.collect {|c| c.__id__}.join('_')}.sqlite3.db"
+      end
+    end
+  end
+end
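
The sqlite3 adapter snapshots by copying the database file rather than shelling out to a dump tool. A sketch of the round trip, assuming a test database at db/test.sqlite3 and a 'test' entry in database.yml (restore reconnects through it); both are assumptions:

    spec = { :database => 'db/test.sqlite3' }   # hypothetical path
    db   = Dataset::Database::Sqlite3.new(spec, 'tmp/dataset')

    key = [Class.new]   # any objects; only their __id__ values name the snapshot
    db.capture(key)     # cp db/test.sqlite3 tmp/dataset/<id>.sqlite3.db

    # ...tests mutate the database...

    db.restore(key)     # mv the snapshot back over the db file and reconnect to 'test'

Note that restore moves (not copies) the snapshot back, so a given snapshot file is consumed by the restore.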
data/lib/dataset/extensions/cucumber.rb
ADDED
@@ -0,0 +1,20 @@
+module Dataset
+  module Extensions # :nodoc:
+
+    module CucumberWorld # :nodoc:
+      def dataset(*datasets, &block)
+        add_dataset(*datasets, &block)
+
+        load = nil
+        $__cucumber_toplevel.Before do
+          load = dataset_session.load_datasets_for(self.class)
+          extend_from_dataset_load(load)
+        end
+        # Makes sure the datasets are reloaded after each scenario
+        Cucumber::Rails.use_transactional_fixtures
+      end
+    end
+
+  end
+end
+Cucumber::Rails::World.extend Dataset::Extensions::CucumberWorld
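
A hedged wiring sketch, not the documented setup: with cucumber-rails loaded and this extension required, the World gains a dataset class method; calling it registers the datasets, and the Before hook above loads them into each scenario's world. PeopleDataset and the :people symbol are assumptions about the application's dataset definitions:

    # features/support/env.rb (sketch)
    require 'dataset'
    require 'dataset/extensions/cucumber'

    Cucumber::Rails::World.dataset :people   # resolved by the gem's resolver; loaded
                                             # before each scenario by the Before hook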
data/lib/dataset/extensions/rspec.rb
ADDED
@@ -0,0 +1,21 @@
+module Dataset
+  module Extensions # :nodoc:
+
+    module RSpecExampleGroup # :nodoc:
+      def dataset(*datasets, &block)
+        add_dataset(*datasets, &block)
+
+        load = nil
+        before(:all) do
+          load = dataset_session.load_datasets_for(self.class)
+          extend_from_dataset_load(load)
+        end
+        before(:each) do
+          extend_from_dataset_load(load)
+        end
+      end
+    end
+
+  end
+end
+Spec::Example::ExampleGroup.extend Dataset::Extensions::RSpecExampleGroup
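
In an RSpec 1.x example group this reads as below; a sketch that assumes the spec helper requires the gem and its rspec extension, and that a PeopleDataset is defined where the resolver looks. The :founder symbolic name and the people finder are illustrative; finders of that shape are generated from the table name by the session binding:

    describe Person do
      dataset :people   # loaded once for the group in before(:all)

      it "exposes loaded records through generated finders" do
        people(:founder).should be_an_instance_of(Person)
      end
    end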
data/lib/dataset/extensions/test_unit.rb
ADDED
@@ -0,0 +1,60 @@
+module Dataset
+  class TestSuite # :nodoc:
+    def initialize(suite, test_class)
+      @suite = suite
+      @test_class = test_class
+    end
+
+    def dataset_session
+      @test_class.dataset_session
+    end
+
+    def run(result, &progress_block)
+      if dataset_session
+        load = dataset_session.load_datasets_for(@test_class)
+        @suite.tests.each { |e| e.extend_from_dataset_load(load) }
+      end
+      @suite.run(result, &progress_block)
+    end
+
+    def method_missing(method_symbol, *args)
+      @suite.send(method_symbol, *args)
+    end
+  end
+
+  module Extensions # :nodoc:
+
+    module TestUnitTestCase # :nodoc:
+      def self.extended(test_case)
+        class << test_case
+          alias_method_chain :suite, :dataset
+        end
+      end
+
+      def suite_with_dataset
+        Dataset::TestSuite.new(suite_without_dataset, self)
+      end
+
+      def dataset(*datasets, &block)
+        add_dataset(*datasets, &block)
+
+        # Unfortunately, if we have rspec loaded, TestCase has it's suite method
+        # modified for the test/unit runners, but uses a different mechanism to
+        # collect tests if the rspec runners are used.
+        if included_modules.find {|m| m.name =~ /ExampleMethods\Z/}
+          load = nil
+          before(:all) do
+            load = dataset_session.load_datasets_for(self.class)
+            extend_from_dataset_load(load)
+          end
+          before(:each) do
+            extend_from_dataset_load(load)
+          end
+        end
+      end
+    end
+
+  end
+end
+
+Test::Unit::TestCase.extend Dataset::Extensions::TestUnitTestCase
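
A Test::Unit usage sketch under the same assumptions (gem required by the test helper, PeopleDataset resolvable, :founder a hypothetical symbolic name); the wrapped suite loads the datasets once before the class's tests run:

    class PersonTest < Test::Unit::TestCase
      dataset :people   # registered here; Dataset::TestSuite#run performs the load

      def test_people_are_loaded
        assert_not_nil people(:founder)   # finders mixed in by extend_from_dataset_load
      end
    end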
data/lib/dataset/instance_methods.rb
ADDED
@@ -0,0 +1,10 @@
+module Dataset
+  module InstanceMethods # :nodoc:
+    def extend_from_dataset_load(load)
+      load.dataset_binding.install_block_variables(self)
+      self.extend load.dataset_binding.record_methods
+      self.extend load.dataset_binding.model_finders
+      self.extend load.helper_methods
+    end
+  end
+end
data/lib/dataset/load.rb
ADDED
@@ -0,0 +1,47 @@
+module Dataset
+  class Load # :nodoc:
+    attr_reader :datasets, :dataset_binding, :helper_methods
+
+    def initialize(datasets, parent_binding)
+      @datasets = datasets
+      @dataset_binding = SessionBinding.new(parent_binding)
+      @helper_methods = Module.new
+    end
+
+    def execute(loaded_datasets, dataset_resolver)
+      (datasets - loaded_datasets).each do |dataset|
+        instance = dataset.new
+        instance.extend dataset_binding.record_methods
+        instance.extend dataset_binding.model_finders
+        used_datasets(dataset, dataset_resolver).each do |ds|
+          next unless ds.helper_methods
+          instance.extend ds.helper_methods
+          helper_methods.module_eval do
+            include ds.helper_methods
+          end
+        end
+        instance.load
+      end
+    end
+
+    def used_datasets(dataset, dataset_resolver, collector = [])
+      dataset.used_datasets.each do |used|
+        ds = dataset_resolver.resolve(used)
+        used_datasets(ds, dataset_resolver, collector)
+        collector << ds
+      end if dataset.used_datasets
+      collector << dataset
+      collector.uniq
+    end
+  end
+
+  class Reload # :nodoc:
+    attr_reader :dataset_binding, :load
+    delegate :datasets, :helper_methods, :to => :load
+
+    def initialize(load)
+      @load = load
+      @dataset_binding = SessionBinding.new(@load.dataset_binding)
+    end
+  end
+end
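
Load#execute walks each dataset's used_datasets depth-first, so dependencies have their helper methods mixed in before the dependent dataset loads. A small sketch of the ordering, using stand-in classes and a one-entry resolver (all hypothetical); allocate is used only because used_datasets touches no instance state:

    class OrganizationsDataset
      def self.used_datasets; nil; end
    end

    class PeopleDataset
      def self.used_datasets; [:organizations]; end
    end

    resolver = Object.new
    def resolver.resolve(name); OrganizationsDataset; end   # stand-in for Dataset::Resolver

    Dataset::Load.allocate.used_datasets(PeopleDataset, resolver)
    # => [OrganizationsDataset, PeopleDataset]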
data/lib/dataset/record/fixture.rb
ADDED
@@ -0,0 +1,73 @@
+require 'active_record/fixtures'
+
+module Dataset
+  module Record # :nodoc:
+
+    class Fixture # :nodoc:
+      attr_reader :meta, :symbolic_name, :session_binding
+
+      def initialize(meta, attributes, symbolic_name, session_binding)
+        @meta = meta
+        @attributes = attributes.stringify_keys
+        @symbolic_name = symbolic_name || object_id
+        @session_binding = session_binding
+
+        install_default_attributes!
+      end
+
+      def create
+        record_class.connection.insert_fixture to_fixture, meta.table_name
+        id
+      end
+
+      def id
+        @attributes['id']
+      end
+
+      def record_class
+        meta.record_class
+      end
+
+      def to_fixture
+        ::Fixture.new(to_hash, meta.class_name)
+      end
+
+      def to_hash
+        hash = @attributes.dup
+        hash[meta.inheritance_column] = meta.sti_name if meta.inheriting_record?
+        record_class.reflections.each do |name, reflection|
+          name = name.to_s
+          add_reflection_attributes(hash, name, reflection) if hash[name]
+        end
+        hash
+      end
+
+      def install_default_attributes!
+        @attributes['id'] ||= symbolic_name.to_s.hash.abs
+        install_timestamps!
+      end
+
+      def install_timestamps!
+        meta.timestamp_columns.each do |column|
+          @attributes[column.name] = now(column) unless @attributes.key?(column.name)
+        end
+      end
+
+      def now(column)
+        (ActiveRecord::Base.default_timezone == :utc ? Time.now.utc : Time.now).to_s(:db)
+      end
+
+      private
+        def add_reflection_attributes(hash, name, reflection)
+          value = hash.delete(name)
+          case value
+          when Symbol
+            hash[reflection.primary_key_name] = session_binding.find_id(reflection.klass, value)
+          else
+            hash[reflection.primary_key_name] = value
+          end
+        end
+    end
+
+  end
+end
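
A hedged sketch of the defaulting behaviour, assuming an established ActiveRecord connection and a hypothetical Note model; nil stands in for the session binding, which is only consulted when an association value is given as a symbol:

    meta    = Dataset::Record::Meta.new(Note)
    fixture = Dataset::Record::Fixture.new(meta, { :body => 'hello' }, :first_note, nil)

    fixture.id        # => :first_note.to_s.hash.abs, the deterministic default id
    fixture.to_hash   # includes the timestamp defaults installed at construction time
    fixture.create    # inserts the row with connection.insert_fixture and returns the id

When an association is supplied symbolically (say :person => :founder), to_hash swaps the name for the foreign key by asking session_binding.find_id(reflection.klass, :founder).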
data/lib/dataset/record/meta.rb
ADDED
@@ -0,0 +1,66 @@
+module Dataset
+  module Record # :nodoc:
+
+    # A mechanism to cache information about an ActiveRecord class to speed
+    # things up a bit for insertions, finds, and method generation.
+    class Meta # :nodoc:
+      attr_reader :class_name, :columns, :record_class, :table_name
+
+      # Provides information necessary to insert STI classes correctly for
+      # later reading.
+      delegate :name, :inheritance_column, :sti_name, :to => :record_class
+
+      def initialize(record_class)
+        @record_class = record_class
+        @class_name = record_class.name
+        @table_name = record_class.table_name
+        @columns = record_class.columns
+      end
+
+      def id_cache_key
+        @id_cache_key ||= table_name
+      end
+
+      def inheriting_record?
+        !record_class.descends_from_active_record?
+      end
+
+      def timestamp_columns
+        @timestamp_columns ||= begin
+          timestamps = %w(created_at created_on updated_at updated_on)
+          columns.select do |column|
+            timestamps.include?(column.name)
+          end
+        end
+      end
+
+      def id_finder_names
+        @id_finder_names ||= begin
+          names = descendants.collect {|c| finder_name c}
+          names.uniq.collect {|n| "#{n}_id".to_sym}
+        end
+      end
+
+      def model_finder_names
+        @record_finder_names ||= descendants.collect {|c| finder_name(c).pluralize.to_sym}.uniq
+      end
+
+      def to_s
+        "#<RecordMeta: #{table_name}>"
+      end
+
+      def descendants
+        if record_class.respond_to?(:self_and_descendents_from_active_record)
+          record_class.self_and_descendents_from_active_record
+        else
+          record_class.self_and_descendants_from_active_record
+        end
+      end
+
+      def finder_name(klass)
+        klass.name.underscore.gsub('/', '_').sub(/^(\w)_/, '\1').gsub(/_(\w)_/, '_\1')
+      end
+    end
+
+  end
+end
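
To make the caching and name derivation concrete, a sketch assuming an established connection with a Person model and an STI subclass Admin < Person (both assumptions):

    meta = Dataset::Record::Meta.new(Person)

    meta.table_name                              # => "people"
    meta.inheriting_record?                      # => false; a Meta for Admin would return true
    meta.timestamp_columns.collect {|c| c.name}  # whichever of created_at/created_on/updated_at/updated_on exist
    meta.model_finder_names                      # e.g. [:people, :admins], one pluralized finder per descendant
    meta.id_finder_names                         # e.g. [:person_id, :admin_id]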