massive_record 0.1.1 → 0.2.0.beta

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. data/CHANGELOG.md +28 -5
  2. data/Gemfile.lock +12 -12
  3. data/README.md +29 -1
  4. data/lib/massive_record/adapters/initialize.rb +18 -0
  5. data/lib/massive_record/adapters/thrift/adapter.rb +25 -0
  6. data/lib/massive_record/adapters/thrift/column_family.rb +24 -0
  7. data/lib/massive_record/adapters/thrift/connection.rb +73 -0
  8. data/lib/massive_record/{thrift → adapters/thrift/hbase}/hbase.rb +0 -0
  9. data/lib/massive_record/{thrift → adapters/thrift/hbase}/hbase_constants.rb +0 -0
  10. data/lib/massive_record/{thrift → adapters/thrift/hbase}/hbase_types.rb +0 -0
  11. data/lib/massive_record/adapters/thrift/row.rb +150 -0
  12. data/lib/massive_record/adapters/thrift/scanner.rb +59 -0
  13. data/lib/massive_record/adapters/thrift/table.rb +169 -0
  14. data/lib/massive_record/orm/attribute_methods/read.rb +2 -1
  15. data/lib/massive_record/orm/base.rb +61 -3
  16. data/lib/massive_record/orm/coders/chained.rb +71 -0
  17. data/lib/massive_record/orm/coders/json.rb +17 -0
  18. data/lib/massive_record/orm/coders/yaml.rb +15 -0
  19. data/lib/massive_record/orm/coders.rb +3 -0
  20. data/lib/massive_record/orm/errors.rb +15 -2
  21. data/lib/massive_record/orm/finders/scope.rb +166 -0
  22. data/lib/massive_record/orm/finders.rb +45 -24
  23. data/lib/massive_record/orm/persistence.rb +4 -4
  24. data/lib/massive_record/orm/relations/interface.rb +170 -0
  25. data/lib/massive_record/orm/relations/metadata.rb +150 -0
  26. data/lib/massive_record/orm/relations/proxy/references_many.rb +229 -0
  27. data/lib/massive_record/orm/relations/proxy/references_one.rb +40 -0
  28. data/lib/massive_record/orm/relations/proxy/references_one_polymorphic.rb +49 -0
  29. data/lib/massive_record/orm/relations/proxy.rb +174 -0
  30. data/lib/massive_record/orm/relations.rb +6 -0
  31. data/lib/massive_record/orm/schema/column_interface.rb +1 -1
  32. data/lib/massive_record/orm/schema/field.rb +62 -27
  33. data/lib/massive_record/orm/single_table_inheritance.rb +21 -0
  34. data/lib/massive_record/version.rb +1 -1
  35. data/lib/massive_record/wrapper/adapter.rb +6 -0
  36. data/lib/massive_record/wrapper/base.rb +6 -7
  37. data/lib/massive_record/wrapper/cell.rb +9 -32
  38. data/lib/massive_record/wrapper/column_families_collection.rb +2 -2
  39. data/lib/massive_record/wrapper/errors.rb +10 -0
  40. data/lib/massive_record/wrapper/tables_collection.rb +1 -1
  41. data/lib/massive_record.rb +5 -12
  42. data/spec/orm/cases/attribute_methods_spec.rb +5 -1
  43. data/spec/orm/cases/base_spec.rb +77 -4
  44. data/spec/orm/cases/column_spec.rb +1 -1
  45. data/spec/orm/cases/finder_default_scope.rb +53 -0
  46. data/spec/orm/cases/finder_scope_spec.rb +288 -0
  47. data/spec/orm/cases/finders_spec.rb +56 -13
  48. data/spec/orm/cases/persistence_spec.rb +20 -5
  49. data/spec/orm/cases/single_table_inheritance_spec.rb +26 -0
  50. data/spec/orm/cases/table_spec.rb +1 -1
  51. data/spec/orm/cases/timestamps_spec.rb +16 -16
  52. data/spec/orm/coders/chained_spec.rb +73 -0
  53. data/spec/orm/coders/json_spec.rb +6 -0
  54. data/spec/orm/coders/yaml_spec.rb +6 -0
  55. data/spec/orm/models/best_friend.rb +7 -0
  56. data/spec/orm/models/friend.rb +4 -0
  57. data/spec/orm/models/person.rb +20 -6
  58. data/spec/orm/models/{person_with_timestamps.rb → person_with_timestamp.rb} +1 -1
  59. data/spec/orm/models/test_class.rb +3 -0
  60. data/spec/orm/relations/interface_spec.rb +207 -0
  61. data/spec/orm/relations/metadata_spec.rb +202 -0
  62. data/spec/orm/relations/proxy/references_many_spec.rb +624 -0
  63. data/spec/orm/relations/proxy/references_one_polymorphic_spec.rb +106 -0
  64. data/spec/orm/relations/proxy/references_one_spec.rb +111 -0
  65. data/spec/orm/relations/proxy_spec.rb +13 -0
  66. data/spec/orm/schema/field_spec.rb +101 -2
  67. data/spec/shared/orm/coders/an_orm_coder.rb +14 -0
  68. data/spec/shared/orm/relations/proxy.rb +154 -0
  69. data/spec/shared/orm/relations/singular_proxy.rb +68 -0
  70. data/spec/spec_helper.rb +1 -0
  71. data/spec/thrift/cases/encoding_spec.rb +28 -7
  72. data/spec/wrapper/cases/adapter_spec.rb +9 -0
  73. data/spec/wrapper/cases/connection_spec.rb +13 -10
  74. data/spec/wrapper/cases/table_spec.rb +85 -85
  75. metadata +74 -22
  76. data/TODO.md +0 -8
  77. data/lib/massive_record/exceptions.rb +0 -11
  78. data/lib/massive_record/wrapper/column_family.rb +0 -22
  79. data/lib/massive_record/wrapper/connection.rb +0 -71
  80. data/lib/massive_record/wrapper/row.rb +0 -173
  81. data/lib/massive_record/wrapper/scanner.rb +0 -61
  82. data/lib/massive_record/wrapper/table.rb +0 -149
  83. data/spec/orm/cases/hbase/connection_spec.rb +0 -13
module MassiveRecord
  module Adapters
    module Thrift
      #
      # Wraps a server-side HBase scanner obtained through the Thrift API.
      # A scanner is opened on a table over a set of column families,
      # fetches raw TRowResult objects in batches of +limit+, and maps
      # them into wrapper Row objects.
      #
      class Scanner

        attr_accessor :connection, :table_name, :column_family_names, :opened_scanner
        attr_accessor :start_key, :offset_key, :created_at, :limit
        attr_accessor :formatted_column_family_names
        # NOTE: the original declared :column_family_names twice; the
        # duplicate accessor has been removed.

        #
        # connection          - an open Thrift HBase client.
        # table_name          - name of the table to scan.
        # column_family_names - family names, with or without a trailing
        #                       ":<column>" part (it is stripped).
        #
        # opts:
        #   :columns    - overrides column_family_names entirely when given.
        #   :start_key  - row key to start scanning from.
        #   :offset_key - row key which, when present, takes precedence
        #                 over :start_key (see #key).
        #   :created_at - timestamp; when present scannerOpenTs is used.
        #   :limit      - batch size for each fetch (defaults to 10).
        #
        def initialize(connection, table_name, column_family_names, opts = {})
          @connection = connection
          @table_name = table_name
          @column_family_names = column_family_names.collect { |family| family.split(":").first }
          @column_family_names = opts[:columns] unless opts[:columns].nil?
          @formatted_column_family_names = @column_family_names.collect { |family| "#{family.split(":").first}:" }
          @start_key = opts[:start_key].to_s
          @offset_key = opts[:offset_key].to_s
          @created_at = opts[:created_at].to_s
          @limit = opts[:limit] || 10
        end

        # Row key the scan starts from: the offset key when given,
        # otherwise the start key.
        def key
          offset_key.empty? ? start_key : offset_key
        end

        # Opens the scanner on the server and remembers its id in
        # opened_scanner. Uses the timestamp-scoped variant when
        # created_at is present.
        def open
          self.opened_scanner =
            if created_at.empty?
              connection.scannerOpen(table_name, key, formatted_column_family_names)
            else
              connection.scannerOpenTs(table_name, key, formatted_column_family_names, created_at)
            end
        end

        # Releases the server-side scanner.
        def close
          connection.scannerClose(opened_scanner)
        end

        # Fetches up to +limit+ raw TRowResult objects.
        # (opts is accepted for interface compatibility but is currently unused.)
        def fetch_trows(opts = {})
          connection.scannerGetList(opened_scanner, limit)
        end

        # Fetches the next batch and maps it to wrapper rows.
        def fetch_rows(opts = {})
          populate_rows(fetch_trows(opts))
        end

        # Maps raw results to rows, keeping only rows whose key begins with
        # start_key.
        #
        # Fix: the original interpolated start_key into a regexp escaping
        # only "|", so keys containing other regexp metacharacters
        # (".", "*", "(", ...) could match incorrectly. A plain prefix
        # check expresses the intent exactly and safely.
        def populate_rows(results)
          results.collect do |result|
            populate_row(result) if result.row.start_with?(start_key)
          end.compact
        end

        def populate_row(result)
          Row.populate_from_trow_result(result, connection, table_name, column_family_names)
        end

      end
    end
  end
end
module MassiveRecord
  module Adapters
    module Thrift
      #
      # Represents one HBase table accessed over the Thrift API: schema
      # operations (create / disable / destroy, column family handling)
      # and read operations (scanner, all / first / find, batched reads).
      #
      class Table

        attr_accessor :connection, :name, :column_families

        def initialize(connection, table_name)
          @connection = connection
          @name = table_name.to_s
          init_column_families
        end

        # Resets column_families to an empty collection bound to this table.
        def init_column_families
          @column_families = MassiveRecord::Wrapper::ColumnFamiliesCollection.new
          @column_families.table = self
        end

        # Builds a table object with the given column families and saves it.
        def self.create(connection, table_name, column_families = [])
          table = new(connection, table_name)
          table.column_families = column_families
          table.save
        end

        # Creates the table on the server.
        # Returns true when creation succeeded, or the string
        # "The table already exists." when it does.
        #
        # Fix: dropped the original's redundant "rescue => ex; raise ex"
        # clause, which only re-raised what it caught.
        def save
          client.createTable(name, @column_families.collect { |family| family.descriptor }).nil?
        rescue Apache::Hadoop::Hbase::Thrift::AlreadyExists
          "The table already exists."
        end

        # The underlying Thrift client (currently the connection itself).
        def client
          connection
        end

        def disable
          client.disableTable(name).nil?
        end

        # Disables and then deletes the table.
        def destroy
          disable
          client.deleteTable(name).nil?
        end

        # Pushes new ColumnFamily objects for each given name onto the
        # collection. (Fix: the block parameter used to be called +name+,
        # shadowing the table's own #name.)
        def create_column_families(column_family_names)
          column_family_names.each { |family_name| @column_families.push(ColumnFamily.new(family_name)) }
        end

        # Replaces the in-memory column family list with what the server
        # currently reports for this table.
        def fetch_column_families
          @column_families.clear
          client.getColumnDescriptors(name).each do |column_name, _description|
            @column_families.push(ColumnFamily.new(column_name.split(":").first))
          end
          @column_families
        end

        def column_family_names
          @column_families.collect { |column_family| column_family.name.to_s }
        end

        def fetch_column_family_names
          fetch_column_families
          column_family_names
        end

        # Column names as seen on the first row of the table.
        def column_names
          first.column_names
        end

        # Builds a Scanner for this table. With a block, the scanner is
        # opened, yielded and always closed again; without a block the
        # (unopened) scanner is returned.
        def scanner(opts = {})
          scanner = Scanner.new(connection, name, column_family_names, format_options_for_scanner(opts))

          if block_given?
            begin
              scanner.open
              yield scanner
            ensure
              scanner.close
            end
          else
            scanner
          end
        end

        # Translates the public finder option names to the Scanner's
        # option names.
        def format_options_for_scanner(opts = {})
          {
            :start_key => opts[:start],
            :offset_key => opts[:offset],
            :created_at => opts[:created_at],
            :columns => opts[:select], # list of column families to fetch from hbase
            :limit => opts[:limit] || opts[:batch_size]
          }
        end

        # All rows matching opts, accumulated batch by batch.
        # Uses |= so duplicate rows across batches are not repeated.
        def all(opts = {})
          rows = []

          find_in_batches(opts) do |batch|
            rows |= batch
          end

          rows
        end

        def first(opts = {})
          all(opts.merge(:limit => 1)).first
        end

        #
        # Fast way of fetching the value of the cell
        # table.get("my_id", :info, :name) # => "Bob"
        #
        def get(id, column_family_name, column_name)
          MassiveRecord::Wrapper::Cell.new(:value => connection.get(name, id, "#{column_family_name.to_s}:#{column_name.to_s}").first.value).value
        end

        # Finds one row (or, for an array argument, several rows) starting
        # at the given key(s).
        def find(*args)
          what_to_find = args[0]
          opts = args[1] || {}

          if what_to_find.is_a?(Array)
            what_to_find.collect { |id| first(opts.merge(:start => id)) }
          else
            # TODO: replace with
            # connection.getRowWithColumns("companies_development", "NO0000000812676342", ["info:name", "info:org_num"]).first
            first(opts.merge(:start => what_to_find))
          end
        end

        # Yields rows in batches. opts[:limit] (removed from opts before
        # the scan) caps the total number of rows yielded; the batch size
        # is shrunk on the last iteration so the cap is never exceeded.
        def find_in_batches(opts = {})
          results_limit = opts.delete(:limit)
          results_found = 0

          scanner(opts) do |s|
            loop do
              s.limit = results_limit - results_found if !results_limit.nil? && results_limit <= results_found + s.limit

              rows = s.fetch_rows
              break if rows.empty?

              results_found += rows.size
              yield rows
            end
          end
        end

        # True when the server lists a table with this name.
        def exists?
          connection.tables.include?(name)
        end

        # Region metadata for this table as an array of hashes.
        def regions
          connection.getTableRegions(name).collect do |region|
            {
              :start_key => region.startKey,
              :end_key => region.endKey,
              :id => region.id,
              :name => region.name,
              :version => region.version
            }
          end
        end

      end
    end
  end
end
@@ -9,7 +9,8 @@ module MassiveRecord
9
9
  end
10
10
 
11
11
  def read_attribute(attr_name)
12
- attributes_schema[attr_name].nil? ? @attributes[attr_name.to_s] : attributes_schema[attr_name].decode(@attributes[attr_name.to_s])
12
+ attr_name = attr_name.to_s
13
+ attributes_schema[attr_name].nil? ? @attributes[attr_name] : attributes_schema[attr_name].decode(@attributes[attr_name])
13
14
  end
14
15
 
15
16
  private
@@ -7,23 +7,31 @@ require 'active_support/core_ext/string'
7
7
  require 'active_support/memoizable'
8
8
 
9
9
  require 'massive_record/orm/schema'
10
+ require 'massive_record/orm/coders'
10
11
  require 'massive_record/orm/errors'
11
12
  require 'massive_record/orm/config'
13
+ require 'massive_record/orm/relations'
12
14
  require 'massive_record/orm/finders'
15
+ require 'massive_record/orm/finders/scope'
13
16
  require 'massive_record/orm/attribute_methods'
14
17
  require 'massive_record/orm/attribute_methods/write'
15
18
  require 'massive_record/orm/attribute_methods/read'
16
19
  require 'massive_record/orm/attribute_methods/dirty'
20
+ require 'massive_record/orm/single_table_inheritance'
17
21
  require 'massive_record/orm/validations'
18
22
  require 'massive_record/orm/callbacks'
19
23
  require 'massive_record/orm/timestamps'
20
24
  require 'massive_record/orm/persistence'
21
25
 
26
+
22
27
  module MassiveRecord
23
28
  module ORM
24
29
  class Base
25
30
  include ActiveModel::Conversion
26
31
 
32
+ class_attribute :coder, :instance_writer => false
33
+ self.coder = Coders::JSON.new
34
+
27
35
  # Accepts a logger conforming to the interface of Log4r or the default Ruby 1.8+ Logger class,
28
36
  cattr_accessor :logger, :instance_writer => false
29
37
 
@@ -42,7 +50,11 @@ module MassiveRecord
42
50
 
43
51
  class << self
44
52
  def table_name
45
- @table_name ||= table_name_prefix + (table_name_overriden.blank? ? self.to_s.demodulize.underscore.pluralize : table_name_overriden) + table_name_suffix
53
+ @table_name ||= table_name_prefix + table_name_without_pre_and_suffix + table_name_suffix
54
+ end
55
+
56
+ def table_name_without_pre_and_suffix
57
+ (table_name_overriden.blank? ? base_class.to_s.demodulize.underscore.pluralize : table_name_overriden)
46
58
  end
47
59
 
48
60
  def table_name=(name)
@@ -54,6 +66,36 @@ module MassiveRecord
54
66
  @table_name = self.table_name_overriden = nil
55
67
  self.table_name_prefix = self.table_name_suffix = ""
56
68
  end
69
+
70
+ def base_class
71
+ class_of_descendant(self)
72
+ end
73
+
74
+
75
+ def inheritance_attribute
76
+ @inheritance_attribute ||= "type"
77
+ end
78
+
79
+ def set_inheritance_attribute(value = nil, &block)
80
+ define_attr_method :inheritance_attribute, value, &block
81
+ end
82
+ alias :inheritance_attribute= :set_inheritance_attribute
83
+
84
+
85
+ def ===(other)
86
+ other.is_a? self
87
+ end
88
+
89
+
90
+ private
91
+
92
+ def class_of_descendant(klass)
93
+ if klass.superclass.superclass == Base
94
+ klass
95
+ else
96
+ class_of_descendant(klass.superclass)
97
+ end
98
+ end
57
99
  end
58
100
 
59
101
  #
@@ -64,10 +106,14 @@ module MassiveRecord
64
106
  # for describing column families and fields are in place
65
107
  #
66
108
  def initialize(attributes = {})
67
- self.attributes_raw = attributes_from_field_definition.merge(attributes)
68
- self.attributes = attributes
69
109
  @new_record = true
70
110
  @destroyed = @readonly = false
111
+ @relation_proxy_cache = {}
112
+
113
+ attributes = {} if attributes.nil?
114
+
115
+ self.attributes_raw = attributes_from_field_definition.merge(attributes)
116
+ self.attributes = attributes
71
117
 
72
118
  _run_initialize_callbacks
73
119
  end
@@ -88,11 +134,14 @@ module MassiveRecord
88
134
  def init_with(coder)
89
135
  @new_record = false
90
136
  @destroyed = @readonly = false
137
+ @relation_proxy_cache = {}
91
138
 
92
139
  self.attributes_raw = coder['attributes']
93
140
 
94
141
  _run_find_callbacks
95
142
  _run_initialize_callbacks
143
+
144
+ self
96
145
  end
97
146
 
98
147
 
@@ -139,6 +188,13 @@ module MassiveRecord
139
188
  end
140
189
 
141
190
 
191
+ def clone
192
+ object = self.class.new
193
+ object.init_with('attributes' => attributes.select{|k| !['id', 'created_at', 'updated_at'].include?(k)})
194
+ object
195
+ end
196
+
197
+
142
198
  private
143
199
 
144
200
  #
@@ -173,6 +229,7 @@ module MassiveRecord
173
229
 
174
230
  Base.class_eval do
175
231
  include Config
232
+ include Relations::Interface
176
233
  include Persistence
177
234
  include Finders
178
235
  include ActiveModel::Translation
@@ -182,6 +239,7 @@ module MassiveRecord
182
239
  include Validations
183
240
  include Callbacks
184
241
  include Timestamps
242
+ include SingleTableInheritance
185
243
 
186
244
 
187
245
  alias [] read_attribute
module MassiveRecord
  module ORM
    module Coders
      # Error classes for coder failures. They are (currently) plain
      # aliases of StandardError, so "rescue StandardError" keeps working
      # for all callers.
      CoderError = ::StandardError
      ParseError = CoderError
      EncodeError = CoderError

      #
      # If you ever need support for multiple coders, this class can help you out.
      # Lets say you have YAML serialized data in your attributes, but want to migrate
      # over to JSON, you can:
      #
      #   MassiveRecord::ORM::Coders::Chained.new(
      #     MassiveRecord::ORM::Coders::JSON.new,
      #     MassiveRecord::ORM::Coders::YAML.new
      #   )
      #
      # or
      #
      #   MassiveRecord::ORM::Base.coder = MassiveRecord::ORM::Coders::Chained.new({
      #     :load_with => [MassiveRecord::ORM::Coders::JSON.new, MassiveRecord::ORM::Coders::YAML.new],
      #     :dump_with => MassiveRecord::ORM::Coders::JSON.new
      #   })
      #
      # With this set we'll first try the JSON coder, and if it fails with an
      # encoding error we'll try the next one in the chain.
      #
      # (Fix: the second example used to list the JSON coder twice in
      # :load_with; the scenario described clearly means JSON then YAML.)
      #
      class Chained
        attr_reader :loaders, :dumpers

        # Coders may be given as positional arguments (used for both
        # loading and dumping) and/or via :load_with / :dump_with, each of
        # which accepts a single coder or an array of coders.
        def initialize(*args)
          # Plain-Ruby equivalent of ActiveSupport's Array#extract_options!
          # (instance_of?, not is_a?, to mirror extractable_options?) —
          # removes this file's only hard dependency on active_support.
          coders = args.last.instance_of?(::Hash) ? args.pop : {}

          @loaders = args.flatten
          @dumpers = args.flatten

          @loaders = [coders[:load_with]].flatten if coders[:load_with]
          @dumpers = [coders[:dump_with]].flatten if coders[:dump_with]
        end


        # Dumps +object+ with the first dumper that succeeds.
        # Raises EncodeError when no dumper is configured or all fail.
        # (Fix: the original raised bare RuntimeErrors although the
        # EncodeError constant was defined for exactly this purpose.)
        def dump(object)
          raise EncodeError, "We have no coders to dump with" if dumpers.empty?

          dumpers.each do |coder|
            begin
              return coder.dump(object)
            rescue StandardError
              # deliberately swallowed: fall through to the next coder
            end
          end

          raise EncodeError, "Unable to encode #{object}. Tried encode it with: #{dumpers.collect(&:class).to_sentence}"
        end


        # Loads +data+ with the first loader that succeeds.
        # Raises ParseError when no loader is configured or all fail.
        def load(data)
          raise ParseError, "We have no coders to load with" if loaders.empty?

          loaders.each do |coder|
            begin
              return coder.load(data)
            rescue StandardError
              # deliberately swallowed: fall through to the next coder
            end
          end

          raise ParseError, "Unable to parse #{data}. Tried loading it with: #{loaders.collect(&:class).to_sentence}"
        end
      end
    end
  end
end