bigrecord 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/MIT-LICENSE +20 -0
- data/README.rdoc +44 -0
- data/Rakefile +17 -0
- data/VERSION +1 -0
- data/doc/bigrecord_specs.rdoc +36 -0
- data/doc/getting_started.rdoc +157 -0
- data/examples/bigrecord.yml +25 -0
- data/generators/bigrecord/bigrecord_generator.rb +17 -0
- data/generators/bigrecord/templates/bigrecord.rake +47 -0
- data/generators/bigrecord_migration/bigrecord_migration_generator.rb +13 -0
- data/generators/bigrecord_migration/templates/migration.rb +9 -0
- data/generators/bigrecord_model/bigrecord_model_generator.rb +28 -0
- data/generators/bigrecord_model/templates/migration.rb +13 -0
- data/generators/bigrecord_model/templates/model.rb +7 -0
- data/generators/bigrecord_model/templates/model_spec.rb +12 -0
- data/init.rb +9 -0
- data/install.rb +22 -0
- data/lib/big_record/abstract_base.rb +1088 -0
- data/lib/big_record/action_view_extensions.rb +266 -0
- data/lib/big_record/ar_associations/association_collection.rb +194 -0
- data/lib/big_record/ar_associations/association_proxy.rb +158 -0
- data/lib/big_record/ar_associations/belongs_to_association.rb +57 -0
- data/lib/big_record/ar_associations/belongs_to_many_association.rb +57 -0
- data/lib/big_record/ar_associations/has_and_belongs_to_many_association.rb +164 -0
- data/lib/big_record/ar_associations/has_many_association.rb +191 -0
- data/lib/big_record/ar_associations/has_one_association.rb +80 -0
- data/lib/big_record/ar_associations.rb +1608 -0
- data/lib/big_record/ar_reflection.rb +223 -0
- data/lib/big_record/attribute_methods.rb +75 -0
- data/lib/big_record/base.rb +618 -0
- data/lib/big_record/br_associations/association_collection.rb +194 -0
- data/lib/big_record/br_associations/association_proxy.rb +153 -0
- data/lib/big_record/br_associations/belongs_to_association.rb +52 -0
- data/lib/big_record/br_associations/belongs_to_many_association.rb +293 -0
- data/lib/big_record/br_associations/cached_item_proxy.rb +194 -0
- data/lib/big_record/br_associations/cached_item_proxy_factory.rb +62 -0
- data/lib/big_record/br_associations/has_and_belongs_to_many_association.rb +168 -0
- data/lib/big_record/br_associations/has_one_association.rb +80 -0
- data/lib/big_record/br_associations.rb +978 -0
- data/lib/big_record/br_reflection.rb +151 -0
- data/lib/big_record/callbacks.rb +367 -0
- data/lib/big_record/connection_adapters/abstract/connection_specification.rb +279 -0
- data/lib/big_record/connection_adapters/abstract/database_statements.rb +175 -0
- data/lib/big_record/connection_adapters/abstract/quoting.rb +58 -0
- data/lib/big_record/connection_adapters/abstract_adapter.rb +190 -0
- data/lib/big_record/connection_adapters/column.rb +491 -0
- data/lib/big_record/connection_adapters/hbase_adapter.rb +432 -0
- data/lib/big_record/connection_adapters/view.rb +27 -0
- data/lib/big_record/connection_adapters.rb +10 -0
- data/lib/big_record/deletion.rb +73 -0
- data/lib/big_record/dynamic_schema.rb +92 -0
- data/lib/big_record/embedded.rb +71 -0
- data/lib/big_record/embedded_associations/association_proxy.rb +148 -0
- data/lib/big_record/family_span_columns.rb +89 -0
- data/lib/big_record/fixtures.rb +1025 -0
- data/lib/big_record/migration.rb +380 -0
- data/lib/big_record/routing_ext.rb +65 -0
- data/lib/big_record/timestamp.rb +51 -0
- data/lib/big_record/validations.rb +830 -0
- data/lib/big_record.rb +125 -0
- data/lib/bigrecord.rb +1 -0
- data/rails/init.rb +9 -0
- data/spec/connections/bigrecord.yml +13 -0
- data/spec/connections/cassandra/connection.rb +2 -0
- data/spec/connections/hbase/connection.rb +2 -0
- data/spec/debug.log +281 -0
- data/spec/integration/br_associations_spec.rb +80 -0
- data/spec/lib/animal.rb +12 -0
- data/spec/lib/book.rb +10 -0
- data/spec/lib/broken_migrations/duplicate_name/20090706182535_add_animals_table.rb +14 -0
- data/spec/lib/broken_migrations/duplicate_name/20090706193019_add_animals_table.rb +9 -0
- data/spec/lib/broken_migrations/duplicate_version/20090706190623_add_books_table.rb +9 -0
- data/spec/lib/broken_migrations/duplicate_version/20090706190623_add_companies_table.rb +9 -0
- data/spec/lib/company.rb +14 -0
- data/spec/lib/embedded/web_link.rb +12 -0
- data/spec/lib/employee.rb +33 -0
- data/spec/lib/migrations/20090706182535_add_animals_table.rb +13 -0
- data/spec/lib/migrations/20090706190623_add_books_table.rb +15 -0
- data/spec/lib/migrations/20090706193019_add_companies_table.rb +14 -0
- data/spec/lib/migrations/20090706194512_add_employees_table.rb +13 -0
- data/spec/lib/migrations/20090706195741_add_zoos_table.rb +13 -0
- data/spec/lib/novel.rb +5 -0
- data/spec/lib/zoo.rb +17 -0
- data/spec/spec.opts +4 -0
- data/spec/spec_helper.rb +55 -0
- data/spec/unit/abstract_base_spec.rb +287 -0
- data/spec/unit/adapters/abstract_adapter_spec.rb +56 -0
- data/spec/unit/adapters/adapter_shared_spec.rb +51 -0
- data/spec/unit/adapters/hbase_adapter_spec.rb +15 -0
- data/spec/unit/ar_associations_spec.rb +8 -0
- data/spec/unit/base_spec.rb +6 -0
- data/spec/unit/br_associations_spec.rb +58 -0
- data/spec/unit/embedded_spec.rb +43 -0
- data/spec/unit/find_spec.rb +34 -0
- data/spec/unit/hash_helper_spec.rb +44 -0
- data/spec/unit/migration_spec.rb +144 -0
- data/spec/unit/model_spec.rb +315 -0
- data/spec/unit/validations_spec.rb +182 -0
- data/tasks/bigrecord_tasks.rake +47 -0
- data/tasks/data_store.rb +46 -0
- data/tasks/gem.rb +22 -0
- data/tasks/rdoc.rb +8 -0
- data/tasks/spec.rb +34 -0
- metadata +189 -0
|
@@ -0,0 +1,432 @@
|
|
|
1
|
+
require 'rubygems'
require 'set'
require 'drb'

# Load the BigRecord driver unless it is already present. The driver talks to
# HBase through a DRb server running under JRuby.
unless defined?(BigRecordDriver)
  begin
    # Bigrecord's source is included with Bigrecord-Driver; that's why we check
    # for a sibling checkout first.
    require File.join(File.dirname(__FILE__), "..", "..", "..", "..", "bigrecord-driver", "lib", "big_record_driver")
  rescue LoadError
    begin
      gem 'bigrecord-driver'
      require 'bigrecord_driver'
    rescue Gem::LoadError
      puts "bigrecord-driver not available. Install it with: sudo gem install bigrecord-driver -s http://gemcutter.org"
    end
  end
end
|
|
18
|
+
|
|
19
|
+
module BigRecord
|
|
20
|
+
class Base
  # Establishes a connection to the database that's used by all Active Record objects.
  #
  # Recognized +config+ keys (symbolized): :zookeeper_host,
  # :zookeeper_client_port, :drb_host, :drb_port. The DRb host/port are read
  # by the driver client itself (and echoed in connection-failure messages),
  # so they are not extracted here.
  def self.hbase_connection(config) # :nodoc:
    config = config.symbolize_keys

    zookeeper_host        = config[:zookeeper_host]
    zookeeper_client_port = config[:zookeeper_client_port]

    hbase = BigRecordDriver::Client.new(config)

    ConnectionAdapters::HbaseAdapter.new(hbase, logger, [zookeeper_host, zookeeper_client_port], config)
  end
end
|
|
35
|
+
|
|
36
|
+
module ConnectionAdapters
|
|
37
|
+
class HbaseAdapter < AbstractAdapter
  @@emulate_booleans = true
  cattr_accessor :emulate_booleans

  # Messages that indicate the connection to the HBase server was lost.
  LOST_CONNECTION_ERROR_MESSAGES = [
    "Server shutdown in progress",
    "Broken pipe",
    "Lost connection to HBase server during query",
    "HBase server has gone away"
  ]

  # Data types. Serialized cell values are prefixed with a one-byte type
  # header; types without an explicit header below are delegated to YAML.
  TYPE_NULL = 0x00
  TYPE_STRING = 0x01 # utf-8 strings
  # TYPE_INTEGER = 0x02; # delegate to YAML
  # TYPE_FLOAT = 0x03; # fixed 1 byte
  TYPE_BOOLEAN = 0x04 # delegate to YAML
  # TYPE_MAP = 0x05; # delegate to YAML
  # TYPE_DATETIME = 0x06; # delegate to YAML
  TYPE_BINARY = 0x07 # byte[] => no conversion

  # string charset
  CHARSET = "utf-8"

  # utility constants
  NULL = "\000"
  # TRUE = "\001"
  # FALSE = "\000"

  # +connection+ is a BigRecordDriver::Client and +connection_options+ is
  # [zookeeper_host, zookeeper_client_port].
  def initialize(connection, logger, connection_options, config)
    super(connection, logger)
    @connection_options, @config = connection_options, config

    connect
  end

  # Returns a copy of the configuration hash this adapter was created with.
  def configuration
    @config.clone
  end

  def adapter_name #:nodoc:
    'HBase'
  end

  def supports_migrations? #:nodoc:
    true
  end

  # CONNECTION MANAGEMENT ====================================

  def active?
    @connection.ping
  rescue BigRecordError
    false
  end

  def reconnect!
    disconnect!
    connect
  end

  def disconnect!
    @connection.close rescue nil
  end


  # DATABASE STATEMENTS ======================================

  # Write the already-serialized +values+ hash into +row+ of +table_name+.
  def update_raw(table_name, row, values, timestamp)
    result = nil
    log "UPDATE #{table_name} SET #{values.inspect if values} WHERE ROW=#{row};" do
      result = @connection.update(table_name, row, values, timestamp)
    end
    result
  end

  # Serialize each value and delegate to #update_raw.
  def update(table_name, row, values, timestamp)
    serialized_collection = {}
    values.each do |column, value|
      serialized_collection[column] = serialize(value)
    end
    update_raw(table_name, row, serialized_collection, timestamp)
  end

  def get_raw(table_name, row, column, options={})
    result = nil
    log "SELECT (#{column}) FROM #{table_name} WHERE ROW=#{row};" do
      result = @connection.get(table_name, row, column, options)
    end
    result
  end

  # Fetch a cell (or a list of cell versions) and deserialize it.
  def get(table_name, row, column, options={})
    serialized_result = get_raw(table_name, row, column, options)
    if serialized_result.is_a?(Array)
      serialized_result.collect{|e| deserialize(e)}
    else
      deserialize(serialized_result)
    end
  end

  def get_columns_raw(table_name, row, columns, options={})
    result = {}
    log "SELECT (#{columns.join(", ")}) FROM #{table_name} WHERE ROW=#{row};" do
      result = @connection.get_columns(table_name, row, columns, options)
    end
    result
  end

  # Fetch several columns of a row and deserialize them. Returns nil when the
  # row doesn't exist. The 'id' cell is stored raw and is not deserialized.
  def get_columns(table_name, row, columns, options={})
    row_cols = get_columns_raw(table_name, row, columns, options)
    return nil unless row_cols

    result = {}
    row_cols.each do |key, col|
      result[key] = (key == 'id') ? col : deserialize(col)
    end
    result
  end

  def get_consecutive_rows_raw(table_name, start_row, limit, columns, stop_row = nil)
    result = nil
    log "SCAN (#{columns.join(", ")}) FROM #{table_name} WHERE START_ROW=#{start_row} AND STOP_ROW=#{stop_row} LIMIT=#{limit};" do
      result = @connection.get_consecutive_rows(table_name, start_row, limit, columns, stop_row)
    end
    result
  end

  # Scan a range of rows and deserialize every cell. A cell that fails to
  # deserialize is skipped (with a warning) instead of aborting the scan.
  def get_consecutive_rows(table_name, start_row, limit, columns, stop_row = nil)
    rows = get_consecutive_rows_raw(table_name, start_row, limit, columns, stop_row)
    rows.collect do |row_cols|
      cols = {}
      row_cols.each do |key, col|
        begin
          cols[key] = (key == 'id') ? col : deserialize(col)
        rescue StandardError => e
          # Was "rescue Exception", which also swallowed interrupts and
          # SystemExit; restrict the rescue to ordinary runtime errors.
          puts "Could not load column value #{key} for row=#{row_cols['id']}"
        end
      end
      cols
    end
  end

  def delete(table_name, row, timestamp = nil)
    timestamp ||= Time.now.to_bigrecord_timestamp
    result = nil
    log "DELETE FROM #{table_name} WHERE ROW=#{row};" do
      result = @connection.delete(table_name, row, timestamp)
    end
    result
  end

  def truncate_table(table_name)
    result = nil
    log "TRUNCATE TABLE #{table_name}" do
      result = @connection.truncate_table(table_name)
    end
    result
  end


  # SCHEMA STATEMENTS ========================================

  # Create the schema-migrations table (a single 'attribute' family keeping
  # one version per cell) if it does not exist yet.
  def initialize_schema_migrations_table
    sm_table = BigRecord::Migrator.schema_migrations_table_name

    unless table_exists?(sm_table)
      create_table(sm_table) do |t|
        t.family :attribute, :versions => 1
      end
    end
  end

  # All migration versions recorded in the schema-migrations table.
  def get_all_schema_versions
    sm_table = BigRecord::Migrator.schema_migrations_table_name

    get_consecutive_rows(sm_table, nil, nil, ["attribute:version"]).map{|version| version["attribute:version"]}
  end

  def table_exists?(table_name)
    log "TABLE EXISTS? #{table_name};" do
      @connection.table_exists?(table_name)
    end
  end

  # Create +table_name+ with the column families declared in the block (see
  # TableDefinition). With :force => true, an existing table is dropped first.
  def create_table(table_name, options = {})
    table_definition = TableDefinition.new

    yield table_definition if block_given?

    if options[:force] && table_exists?(table_name)
      drop_table(table_name)
    end

    result = nil
    log "CREATE TABLE #{table_name} (#{table_definition.column_families_list});" do
      result = @connection.create_table(table_name, table_definition.to_adapter_format)
    end
    result
  end

  def drop_table(table_name)
    result = nil
    log "DROP TABLE #{table_name};" do
      result = @connection.drop_table(table_name)
    end
    result
  end

  def add_column_family(table_name, column_name, options = {})
    column = BigRecordDriver::ColumnDescriptor.new(column_name.to_s, options)

    result = nil
    log "ADD COLUMN TABLE #{table_name} COLUMN #{column_name} (#{options.inspect});" do
      result = @connection.add_column(table_name, column)
    end
    result
  end

  alias :add_family :add_column_family

  def remove_column_family(table_name, column_name)
    result = nil
    log "REMOVE COLUMN TABLE #{table_name} COLUMN #{column_name};" do
      result = @connection.remove_column(table_name, column_name)
    end
    result
  end

  alias :remove_family :remove_column_family

  def modify_column_family(table_name, column_name, options = {})
    column = BigRecordDriver::ColumnDescriptor.new(column_name.to_s, options)

    result = nil
    log "MODIFY COLUMN TABLE #{table_name} COLUMN #{column_name} (#{options.inspect});" do
      result = @connection.modify_column(table_name, column)
    end
    result
  end

  alias :modify_family :modify_column_family

  # Serialize the given value: nil maps to the NULL byte, strings get the
  # TYPE_STRING header, everything else is delegated to YAML.
  def serialize(value)
    case value
    when NilClass then NULL
    when String then build_serialized_value(TYPE_STRING, value)
    else value.to_yaml
    end
  end

  # Serialize an object in a given type, i.e. prefix it with the one-byte
  # type header.
  def build_serialized_value(type, value)
    type.chr + value
  end

  # Deserialize the given string. This method supports both the pure YAML format and
  # the type header format.
  def deserialize(str)
    return unless str

    # stay compatible with the old serialization code:
    # YAML documents start with "--- " so if we find that sequence at the
    # beginning we consider it a serialized YAML value, else it's the new
    # format with the type header
    if str[0..3] == "--- "
      YAML::load(str)
    else
      deserialize_with_header(str)
    end
  end

  # Deserialize the given string assumed to be in the type header format.
  def deserialize_with_header(data)
    return unless data and data.size >= 2

    # The type of the data is encoded in the first byte. String#[] returns an
    # Integer on Ruby 1.8 but a one-character String on 1.9+, so normalize to
    # an Integer before comparing with the TYPE_* constants.
    type = data[0]
    type = type.ord if type.is_a?(String)

    case type
    when TYPE_NULL then nil
    when TYPE_STRING then data[1..-1]
    when TYPE_BINARY then data[1..-1]
    else nil
    end
  end

  private
    # Configure the DRb client; translate a DRb connection failure into a
    # BigRecord::ConnectionFailed naming the configured host/port.
    def connect
      @connection.configure(@config)
    rescue DRb::DRbConnError
      raise BigRecord::ConnectionFailed, "Failed to connect to the DRb server (jruby) " +
                                         "at #{@config[:drb_host]}:#{@config[:drb_port]}."
    end

  protected
    # Log +str+ and, when a block is given, time it and return its result.
    # Any exception raised by the block is logged and re-raised.
    def log(str, name = nil)
      if block_given?
        if @logger and @logger.level <= Logger::INFO
          result = nil
          seconds = Benchmark.realtime { result = yield }
          @runtime += seconds
          log_info(str, name, seconds)
          result
        else
          yield
        end
      else
        log_info(str, name, 0)
        nil
      end
    rescue Exception => e
      # Log message and raise exception.
      # Set last_verification to 0, so that connection gets verified
      # upon reentering the request loop
      @last_verification = 0
      message = "#{e.class.name}: #{e.message}: #{str}"
      log_info(message, name, 0)
      raise e
    end

    def log_info(str, name, runtime)
      return unless @logger

      @logger.debug(
        format_log_entry(
          "#{name.nil? ? "HBASE" : name} (#{sprintf("%f", runtime)})",
          str.gsub(/ +/, " ")
        )
      )
    end

    # Colorize the log entry (alternating row colors) when enabled.
    # NOTE(review): relies on @@row_even being initialized by AbstractAdapter.
    def format_log_entry(message, dump = nil)
      if BigRecord::Base.colorize_logging
        if @@row_even
          @@row_even = false
          message_color, dump_color = "4;36;1", "0;1"
        else
          @@row_even = true
          message_color, dump_color = "4;35;1", "0"
        end

        log_entry = " \e[#{message_color}m#{message}\e[0m "
        log_entry << "\e[#{dump_color}m%#{String === dump ? 's' : 'p'}\e[0m" % dump if dump
        log_entry
      else
        "%s %s" % [message, dump]
      end
    end
end
|
|
400
|
+
|
|
401
|
+
# Collects the column-family definitions used when creating an HBase table.
class TableDefinition

  def initialize
    @column_families = []
  end

  # Returns the column family named +name+, or nil if none was declared.
  def [](name)
    wanted = name.to_s
    @column_families.detect { |family| family.name.to_s == wanted }
  end

  # Declare a column family (no-op if one with the same name already exists).
  # Returns self so declarations can be chained.
  def column_family(name, options = {})
    family = self[name] || BigRecordDriver::ColumnDescriptor.new(name.to_s, options)
    @column_families << family unless @column_families.include?(family)
    self
  end

  alias :family :column_family

  # The raw list of column descriptors, in the form the driver expects.
  def to_adapter_format
    @column_families
  end

  # Comma-separated family names, used for logging.
  def column_families_list
    names = @column_families.collect { |family| family.name }
    names.join(", ")
  end

end
|
|
430
|
+
|
|
431
|
+
end
|
|
432
|
+
end
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
module BigRecord
|
|
2
|
+
module ConnectionAdapters
|
|
3
|
+
# A named subset of a model's columns.
class View
  attr_reader :name, :owner

  # +name+ and each entry of +column_names+ are normalized to strings;
  # +owner+ is the model class this view belongs to. A nil +column_names+
  # means "every column of the owner".
  def initialize(name, column_names, owner)
    @name = name.to_s
    @column_names = column_names ? column_names.collect { |c| c.to_s } : nil
    @owner = owner
  end

  # Return the column objects associated with this view. By default the views 'all' and 'default' return every column.
  def columns
    return owner.columns unless @column_names
    @column_names.collect { |cn| owner.columns_hash[cn] }
  end

  # Return the name of the column objects associated with this view. By default the views 'all' and 'default' return every column.
  def column_names
    @column_names || owner.column_names
  end
end
|
|
26
|
+
end
|
|
27
|
+
end
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
dir = File.expand_path(File.join(File.dirname(__FILE__), "connection_adapters"))

# Load the adapter layer in dependency order: support classes first, then the
# abstract pieces, then the concrete HBase adapter.
%w(
  column
  view
  abstract/database_statements
  abstract/quoting
  abstract/connection_specification
  abstract_adapter
  hbase_adapter
).each { |file| require "#{dir}/#{file}" }
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
module BigRecord
|
|
2
|
+
# Soft-delete support: records responding to #deleted are flagged instead of
# destroyed, and the finders skip flagged records by default.
module Deletion
  def self.included(base) #:nodoc:
    base.alias_method_chain :destroy_without_callbacks, :flag_deleted
    base.extend ClassMethods

    base.class_eval do
      class << self
        alias_method_chain :find_one, :flag_deleted
        alias_method_chain :find_every, :flag_deleted
      end
    end
  end

  # Flag the record as "deleted" if it responds to "deleted", else destroy it
  def destroy_without_callbacks_with_flag_deleted #:nodoc:
    if self.respond_to?(:deleted)
      # mark as deleted
      self.deleted = true

      # set the timestamp
      if record_timestamps
        t = self.class.default_timezone == :utc ? Time.now.utc : Time.now
        self.send(:updated_at=, t) if respond_to?(:updated_at)
        self.send(:updated_on=, t) if respond_to?(:updated_on)
      end

      self.update_without_callbacks
    else
      destroy_without_callbacks_without_flag_deleted
    end
  end

  module ClassMethods
    # Wraps find_one: raises BigRecord::RecordNotFound for records flagged as
    # deleted unless options[:include_deleted] is set.
    def find_one_with_flag_deleted(*args)
      options = args.last.is_a?(Hash) ? args.last : {}
      records = find_one_without_flag_deleted(*args)
      unless options[:include_deleted]
        if records.is_a?(Array)
          records.each{|record| check_not_deleted(record)}
        else
          check_not_deleted(records)
        end
      end
      records
    end

    # Wraps find_every: silently filters out records flagged as deleted
    # unless options[:include_deleted] is set.
    def find_every_with_flag_deleted(*args)
      options = args.last.is_a?(Hash) ? args.last : {}
      records = find_every_without_flag_deleted(*args)

      if options[:include_deleted]
        records
      else
        records.select do |record|
          begin
            check_not_deleted(record)
            true
          rescue
            false
          end
        end
      end
    end

    # Raises BigRecord::RecordNotFound if the record is flagged as deleted.
    def check_not_deleted(record)
      # '&&' instead of 'and' to avoid the low-precedence pitfall inside the
      # modifier-if condition.
      raise BigRecord::RecordNotFound, "The record (id=#{record.id}) is marked as deleted." if record.respond_to?(:deleted) && record.deleted
    end
  end

end
|
|
73
|
+
end
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
# Replace the anonymous classes
|
|
2
|
+
module BigRecord
|
|
3
|
+
# Allows per-instance ("dynamic") columns on top of the class-level schema.
module DynamicSchema

  def self.included(base) #:nodoc:
    super

    base.alias_method_chain :column_for_attribute, :dynamic_schema
    base.alias_method_chain :attributes_from_column_definition, :dynamic_schema
    base.alias_method_chain :inspect, :dynamic_schema
    base.alias_method_chain :define_read_methods, :dynamic_schema
  end

  # Stub of the callback for setting the dynamic columns. Override this to add dynamic columns
  def initialize_columns(options={})
  end

  # Create and add a dynamic column to this record
  def dynamic_column(name, type, options={})
    add_dynamic_column ConnectionAdapters::Column.new(name.to_s, type, options)
  end

  # Add an existing dynamic column to this record
  def add_dynamic_column(c)
    columns_hash[c.name] = c
    # Reset the memoized collections so #columns and #column_names see the new
    # column. (This previously cleared @columns_name — a typo — which left
    # #column_names stale after a dynamic column was added.)
    @column_names = nil
    @columns = nil
    c
  end

  # Per-instance columns hash: the class columns plus any dynamic columns
  # installed by #initialize_columns.
  def columns_hash
    unless @columns_hash
      @columns_hash = self.class.columns_hash.dup
      initialize_columns
    end
    @columns_hash
  end

  def columns
    @columns ||= columns_hash.values
  end

  def column_names
    @column_names ||= columns_hash.keys
  end

  # Returns the column object for the named attribute.
  def column_for_attribute_with_dynamic_schema(name)
    self.columns_hash[name.to_s]
  end

  # Initializes the attributes array with keys matching the columns from the linked table and
  # the values matching the corresponding default value of that column, so
  # that a new instance, or one populated from a passed-in Hash, still has all the attributes
  # that instances loaded from the database would.
  def attributes_from_column_definition_with_dynamic_schema
    self.columns.inject({}) do |attributes, column|
      unless column.name == self.class.primary_key
        attributes[column.name] = column.default
      end
      attributes
    end
  end

  # Returns the contents of the record as a nicely formatted string.
  def inspect_with_dynamic_schema
    attributes_as_nice_string = self.column_names.collect { |name|
      if has_attribute?(name) || new_record?
        "#{name}: #{attribute_for_inspect(name)}"
      end
    }.compact.join(", ")
    "#<#{self.class} #{attributes_as_nice_string}>"
  end

  # Called on first read access to any given column and generates reader
  # methods for all columns in the columns_hash if
  # ActiveRecord::Base.generate_read_methods is set to true.
  def define_read_methods_with_dynamic_schema
    columns_hash.each do |name, column|
      unless respond_to_without_attributes?(name)
        define_read_method(name.to_sym, name, column)
      end

      unless respond_to_without_attributes?("#{name}?")
        define_question_method(name)
      end
    end
  end

end
|
|
92
|
+
end
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
module BigRecord
|
|
2
|
+
# Base class for records stored inline inside another record's cell rather
# than in a table of their own.
class Embedded < AbstractBase

  def initialize(attrs = nil)
    super
    # Regenerate the id unless it's already there (i.e. we're instantiating an existing property)
    @attributes["id"] ||= generate_id
  end

  def connection
    self.class.connection
  end

  # Lazily assigns a generated id if none has been set yet.
  def id
    super || (self.id = generate_id)
  end

  protected
    # Random UUID string used as this embedded record's primary key.
    def generate_id
      UUIDTools::UUID.random_create.to_s
    end

  public
  class << self
    def store_primary_key?
      true
    end

    def primary_key
      "id"
    end

    # Borrow the default connection of BigRecord
    def connection
      BigRecord::Base.connection
    end

    def base_class
      (superclass == BigRecord::Embedded) ? self : superclass.base_class
    end

    # Class attribute that holds the name of the embedded type for display
    def pretty_name
      @pretty_name || self.to_s
    end

    def set_pretty_name(new_name)
      @pretty_name = new_name
    end

    # Mark this embedded type as hidden from end users (see #show_to_users?).
    def hide_to_users
      @hide_to_user = true
    end

    def show_to_users?
      !@hide_to_user
    end

    # Default the pretty name of subclasses to their demodulized class name.
    def inherited(child) #:nodoc:
      child.set_pretty_name child.name.split("::").last
      super
    end

    def default_columns
      {primary_key => ConnectionAdapters::Column.new(primary_key, 'string')}
    end

  end

end
|
|
71
|
+
end
|