bigrecord 0.0.11 → 0.1.0
This diff shows the content of publicly released package versions as it appears in their public registries, and is provided for informational purposes only.
- data/Rakefile +5 -5
- data/VERSION +1 -1
- data/lib/big_record/base.rb +2 -3
- data/lib/big_record/connection_adapters/cassandra_adapter.rb +349 -0
- data/lib/big_record/connection_adapters/column.rb +1 -1
- data/lib/big_record/connection_adapters.rb +1 -0
- data/lib/big_record/dynamic_schema.rb +3 -1
- data/lib/big_record/embedded.rb +1 -1
- data/lib/big_record/family_span_columns.rb +1 -1
- data/lib/big_record/{abstract_base.rb → model.rb} +2 -2
- data/lib/big_record.rb +1 -1
- data/spec/connections/bigrecord.yml +6 -6
- data/spec/unit/adapters/hbase_adapter_spec.rb +0 -2
- data/spec/unit/attributes_spec.rb +2 -2
- data/spec/unit/columns_spec.rb +1 -1
- data/spec/unit/deletion_spec.rb +35 -0
- data/spec/unit/embedded_spec.rb +11 -1
- data/tasks/{data_store.rake → data_store.rb} +0 -0
- data/tasks/{gem.rake → gem.rb} +1 -1
- data/tasks/{rdoc.rake → rdoc.rb} +0 -0
- data/tasks/{spec.rake → spec.rb} +0 -0
- metadata +46 -27
data/Rakefile
CHANGED
@@ -1,9 +1,9 @@
 require 'rubygems'
 
-DATA_STORES = ["hbase"]
+DATA_STORES = ["hbase", "cassandra"]
 ROOT = File.expand_path(File.dirname(__FILE__)) + '/'
 
-
-
-
-
+require ROOT + 'tasks/data_store.rb'
+require ROOT + 'tasks/gem.rb'
+require ROOT + 'tasks/rdoc.rb'
+require ROOT + 'tasks/spec.rb'
data/VERSION
CHANGED
@@ -1 +1 @@
-0.0
+0.1.0
data/lib/big_record/base.rb
CHANGED
@@ -1,10 +1,9 @@
 module BigRecord
 
-  class Base <
+  class Base < Model
 
     attr_accessor :modified_attributes
 
-
     def self.inherited(child) #:nodoc:
      @@subclasses[self] ||= []
      @@subclasses[self] << child
@@ -388,7 +387,7 @@ module BigRecord
     end
 
     def table_name
-
+      @table_name || superclass.table_name
     end
 
     def set_table_name(name)
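The one-line change to table_name above makes a class without its own table name fall through to its parent. A minimal illustrative sketch, not part of the gem: the Animal/Dog classes are invented here, and it assumes table_name and set_table_name are class-level methods, as the use of superclass suggests.

  class Animal < BigRecord::Base
    set_table_name 'animals'
  end

  class Dog < Animal
  end

  Animal.table_name   # => "animals"
  Dog.table_name      # => "animals"  (falls back to superclass.table_name)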
data/lib/big_record/connection_adapters/cassandra_adapter.rb
ADDED
@@ -0,0 +1,349 @@
+module BigRecord
+  class Base
+    def self.cassandra_connection(config) # :nodoc:
+      begin
+        require 'cassandra'
+      rescue LoadError => e
+        puts "[BigRecord] The 'cassandra' gem is needed for CassandraAdapter. Install it with: gem install cassandra"
+        raise e
+      end
+
+      config = config.symbolize_keys
+
+      client = Cassandra.new(config[:keyspace], config[:servers])
+      ConnectionAdapters::CassandraAdapter.new(client, logger, [], config)
+    end
+  end
+
+  module ConnectionAdapters
+    class CassandraAdapter < AbstractAdapter
+      @@emulate_booleans = true
+      cattr_accessor :emulate_booleans
+
+      LOST_CONNECTION_ERROR_MESSAGES = [
+        "Server shutdown in progress",
+        "Broken pipe",
+        "Lost connection to HBase server during query",
+        "HBase server has gone away"
+      ]
+
+      # data types
+      TYPE_NULL = 0x00;
+      TYPE_STRING = 0x01; # utf-8 strings
+      TYPE_BOOLEAN = 0x04; # delegate to YAML
+      TYPE_BINARY = 0x07; # byte[] => no conversion
+
+      # string charset
+      CHARSET = "utf-8"
+
+      # utility constants
+      NULL = "\000"
+
+      def initialize(connection, logger, connection_options, config)
+        super(connection, logger)
+        @connection_options, @config = connection_options, config
+      end
+
+      def configuration
+        @config.clone
+      end
+
+      def adapter_name #:nodoc:
+        'Cassandra'
+      end
+
+      def supports_migrations? #:nodoc:
+        false
+      end
+
+      # CONNECTION MANAGEMENT ====================================
+
+      def disconnect!
+        @connection.disconnect!
+        super
+      end
+
+      # DATABASE STATEMENTS ======================================
+
+      def update_raw(table_name, row, values, timestamp)
+        result = nil
+        log "UPDATE #{table_name} SET #{values.inspect if values} WHERE ROW=#{row};" do
+          result = @connection.insert(table_name, row, data_to_cassandra_format(values), {:consistency => Cassandra::Consistency::QUORUM})
+        end
+        result
+      end
+
+      def update(table_name, row, values, timestamp)
+        serialized_collection = {}
+        values.each do |column, value|
+          serialized_collection[column] = serialize(value)
+        end
+        update_raw(table_name, row, serialized_collection, timestamp)
+      end
+
+      def get_raw(table_name, row, column, options={})
+        result = nil
+        log "SELECT (#{column}) FROM #{table_name} WHERE ROW=#{row};" do
+          super_column, name = column.split(":")
+          result = @connection.get(table_name, row, super_column, name)
+        end
+        result
+      end
+
+      def get(table_name, row, column, options={})
+        serialized_result = get_raw(table_name, row, column, options)
+        result = nil
+        if serialized_result.is_a?(Array)
+          result = serialized_result.collect{|e| deserialize(e)}
+        else
+          result = deserialize(serialized_result)
+        end
+        result
+      end
+
+      def get_columns_raw(table_name, row, columns, options={})
+        result = {}
+
+        log "SELECT (#{columns.join(", ")}) FROM #{table_name} WHERE ROW=#{row};" do
+          requested_columns = columns_to_cassandra_format(columns)
+          super_columns = requested_columns.keys
+
+          if super_columns.size == 1 && requested_columns[super_columns.first].size > 0
+            column_names = requested_columns[super_columns.first]
+
+            values = @connection.get_columns(table_name, row, super_columns.first, column_names)
+
+            result["id"] = row if values && values.compact.size > 0
+            column_names.each_index do |id|
+              full_key = super_columns.first + ":" + column_names[id].to_s
+              result[full_key] = values[id] unless values[id].nil?
+            end
+          else
+            values = @connection.get_columns(table_name, row, super_columns)
+            result["id"] = row if values && values.compact.size > 0
+            super_columns.each_index do |id|
+              next if values[id].nil?
+
+              values[id].each do |column_name, value|
+                next if value.nil?
+
+                full_key = super_columns[id] + ":" + column_name
+                result[full_key] = value
+              end
+            end
+          end
+        end
+        result
+      end
+
+      def get_columns(table_name, row, columns, options={})
+        row_cols = get_columns_raw(table_name, row, columns, options)
+        return nil unless row_cols && !row_cols.empty?
+
+        result = {}
+        row_cols.each do |key,value|
+          begin
+            result[key] =
+              if key == 'id'
+                value
+              else
+                deserialize(value)
+              end
+          rescue Exception => e
+            puts "Could not load column value #{key} for row=#{row.name}"
+          end
+        end
+        result
+      end
+
+      def get_consecutive_rows_raw(table_name, start_row, limit, columns, stop_row = nil)
+        result = []
+        log "SCAN (#{columns.join(", ")}) FROM #{table_name} WHERE START_ROW=#{start_row} AND STOP_ROW=#{stop_row} LIMIT=#{limit};" do
+          options = {}
+          options[:start] = start_row if start_row
+          options[:finish] = stop_row if stop_row
+          options[:count] = limit if limit
+
+          keys = @connection.get_range(table_name, options)
+
+          # This will be refactored. Don't make fun of me yet.
+          if !keys.empty?
+            keys.each do |key|
+              row = {}
+              row["id"] = key.key
+
+              key.columns.each do |s_col|
+                super_column = s_col.super_column
+                super_column_name = super_column.name
+
+                super_column.columns.each do |column|
+                  full_key = super_column_name + ":" + column.name
+                  row[full_key] = column.value
+                end
+              end
+
+              result << row if row.keys.size > 1
+            end
+          end
+        end
+        result
+      end
+
+      def get_consecutive_rows(table_name, start_row, limit, columns, stop_row = nil)
+        rows = get_consecutive_rows_raw(table_name, start_row, limit, columns, stop_row)
+
+        result = rows.collect do |row|
+          cols = {}
+          row.each do |key,value|
+            begin
+              cols[key] = (key == "id") ? value : deserialize(value)
+            rescue Exception => e
+              puts "Could not load column value #{key} for row=#{row.name}"
+            end
+          end
+          cols
+        end
+        result
+      end
+
+      def delete(table_name, row)
+        result = nil
+        log "DELETE FROM #{table_name} WHERE ROW=#{row};" do
+          result = @connection.remove(table_name.to_s, row, {:consistency => Cassandra::Consistency::QUORUM})
+        end
+        result
+      end
+
+      def delete_all(table_name)
+        raise NotImplementedError
+      end
+
+      # SERIALIZATION STATEMENTS =================================
+
+      # Serialize the given value
+      def serialize(value)
+        case value
+        when NilClass then NULL
+        when String then build_serialized_value(TYPE_STRING, value)
+        else value.to_yaml
+        end
+      end
+
+      # Serialize an object in a given type
+      def build_serialized_value(type, value)
+        type.chr + value
+      end
+
+      # Deserialize the given string. This method supports both the pure YAML format and
+      # the type header format.
+      def deserialize(str)
+        return unless str
+
+        # stay compatible with the old serialization code
+        # YAML documents start with "--- " so if we find that sequence at the beginning we
+        # consider it as a serialized YAML value, else it's the new format with the type header
+        if str[0..3] == "--- "
+          YAML::load(str) if str
+        else
+          deserialize_with_header(str)
+        end
+      end
+
+      # Deserialize the given string assumed to be in the type header format.
+      def deserialize_with_header(data)
+        return unless data and data.size >= 2
+
+        # the type of the data is encoded in the first byte
+        type = data[0];
+
+        case type
+        when TYPE_NULL then nil
+        when TYPE_STRING then data[1..-1]
+        when TYPE_BINARY then data[1..-1]
+        else data
+        end
+      end
+
+      protected
+
+      def data_to_cassandra_format(data = {})
+        super_columns = {}
+
+        data.each do |name, value|
+          super_column, column = name.split(":")
+          super_columns[super_column.to_s] = {} unless super_columns.has_key?(super_column.to_s)
+          super_columns[super_column.to_s][column.to_s] = value
+        end
+
+        return super_columns
+      end
+
+      def columns_to_cassandra_format(column_names = [])
+        super_columns = {}
+
+        column_names.each do |name|
+          super_column, sub_column = name.split(":")
+
+          super_columns[super_column.to_s] = [] unless super_columns.has_key?(super_column.to_s)
+          super_columns[super_column.to_s] << sub_column
+        end
+
+        return super_columns
+      end
+
+      def log(str, name = nil)
+        if block_given?
+          if @logger and @logger.level <= Logger::INFO
+            result = nil
+            seconds = Benchmark.realtime { result = yield }
+            @runtime += seconds
+            log_info(str, name, seconds)
+            result
+          else
+            yield
+          end
+        else
+          log_info(str, name, 0)
+          nil
+        end
+      rescue Exception => e
+        # Log message and raise exception.
+        # Set last_verfication to 0, so that connection gets verified
+        # upon reentering the request loop
+        @last_verification = 0
+        message = "#{e.class.name}: #{e.message}: #{str}"
+        log_info(message, name, 0)
+        raise e
+      end
+
+      def log_info(str, name, runtime)
+        return unless @logger
+
+        @logger.debug(
+          format_log_entry(
+            "#{name.nil? ? "CASSANDRA" : name} (#{sprintf("%f", runtime)})",
+            str.gsub(/ +/, " ")
+          )
+        )
+      end
+
+      def format_log_entry(message, dump = nil)
+        if BigRecord::Base.colorize_logging
+          if @@row_even
+            @@row_even = false
+            message_color, dump_color = "4;36;1", "0;1"
+          else
+            @@row_even = true
+            message_color, dump_color = "4;35;1", "0"
+          end
+
+          log_entry = " \e[#{message_color}m#{message}\e[0m "
+          log_entry << "\e[#{dump_color}m%#{String === dump ? 's' : 'p'}\e[0m" % dump if dump
+          log_entry
+        else
+          "%s %s" % [message, dump]
+        end
+      end
+    end
+  end
+end
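A brief usage sketch of the new adapter, not part of the diff: it assumes a reachable Cassandra node on localhost:9160 with a keyspace named Bigrecord (matching the spec config further down) and a 'Books' super-column family; the row key and the 'attribute:title' column name are invented for illustration.

  adapter = BigRecord::Base.cassandra_connection('keyspace' => 'Bigrecord',
                                                  'servers'  => ['localhost:9160'])

  # Values are written with a one-byte type header; non-string values fall back to YAML.
  adapter.serialize("legend")   # => "\001legend"          (TYPE_STRING header)
  adapter.serialize(nil)        # => "\000"                (NULL marker)
  adapter.serialize(42)         # => roughly "--- 42\n"    (YAML fallback)

  # Column names follow the "super_column:column" convention split by
  # data_to_cassandra_format / columns_to_cassandra_format.
  adapter.update('Books', 'row-1', {'attribute:title' => 'I Am Legend'}, Time.now.to_i)
  adapter.get('Books', 'row-1', 'attribute:title')   # => "I Am Legend"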
data/lib/big_record/dynamic_schema.rb
CHANGED
@@ -44,7 +44,9 @@ module BigRecord
 
     # Returns the column object for the named attribute.
     def column_for_attribute_with_dynamic_schema(name)
-
+      name_string = name.to_s
+      self.columns_hash[name_string] || self.columns_hash["#{self.class.default_column_prefix}#{name_string}"] ||
+        self.columns.select{|c|c.alias==name_string}.first
     end
 
     # Initializes the attributes array with keys matching the columns from the linked table and
data/lib/big_record/embedded.rb
CHANGED
@@ -64,7 +64,7 @@ module BigRecord
     # ignore methods '=' and '?' (e.g. 'normalized_srf_ief:231=')
     return if name =~ /=|\?$/
 
-    column =
+    column = column_for_attribute_without_family_span_columns(name)
     unless column
       family = BigRecord::ConnectionAdapters::Column.extract_family(name)
       column = self.columns_hash[family] if family
data/lib/big_record/{abstract_base.rb → model.rb}
RENAMED
@@ -52,7 +52,7 @@ module BigRecord
     end
   end
 
-  class
+  class Model
     require 'rubygems'
     require 'uuidtools'
 
@@ -1069,7 +1069,7 @@ private
     end
 
     def default_columns
-
+      {}
     end
 
     def default_views
data/spec/connections/bigrecord.yml
CHANGED
@@ -1,7 +1,7 @@
-
+hbase_rest:
   adapter: hbase_rest
   api_address: http://localhost:8080
-
+hbase:
   adapter: hbase
   zookeeper_quorum: localhost
   zookeeper_client_port: 2181
@@ -9,7 +9,7 @@ hbase_brd:
   drb_port: 50001
 cassandra:
   adapter: cassandra
-
-
-
-
+  keyspace: Bigrecord
+  servers:
+    - localhost:9160
+    - 127.0.0.1:9160
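For reference only (this snippet is not part of the diff), the cassandra entry above has exactly the shape the new BigRecord::Base.cassandra_connection factory expects, so a spec or script could open a connection along these lines; the file path is the one shown in this section.

  require 'yaml'
  require 'big_record'

  # Read the spec connection settings and hand the 'cassandra' entry
  # to the adapter factory added in this release.
  settings = YAML.load_file('spec/connections/bigrecord.yml')
  adapter  = BigRecord::Base.cassandra_connection(settings['cassandra'])
  adapter.adapter_name   # => "Cassandra"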
data/spec/unit/attributes_spec.rb
CHANGED
@@ -1,5 +1,5 @@
 # Defined as a shared spec because embedded_spec uses it as well
-describe BigRecord::
+describe BigRecord::Model, :shared => true do
 
   before(:all) do
     Book.delete_all
@@ -174,7 +174,7 @@ describe BigRecord::AbstractBase, :shared => true do
     end
 
     it 'should be handled properly' do
-      pending "this still needs to be implemented in BigRecord::
+      pending "this still needs to be implemented in BigRecord::Model"
 
       # readonly is the readonly attribute here
       @company = Company.new(:name => "The Company", :address => "Unknown", :readonly => "secret")
data/spec/unit/columns_spec.rb
CHANGED
@@ -2,7 +2,7 @@ require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper'))
 require File.expand_path(File.join(File.dirname(__FILE__), 'attributes_spec'))
 
 describe BigRecord::Base do
-  it_should_behave_like "BigRecord::
+  it_should_behave_like "BigRecord::Model"
 
   describe 'column functionality' do
 
data/spec/unit/deletion_spec.rb
ADDED
@@ -0,0 +1,35 @@
+require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper'))
+
+describe BigRecord::Base do
+
+  class FlaggedDeletionBook < Book
+    column :deleted, :boolean
+  end
+
+  before(:all) do
+    FlaggedDeletionBook.delete_all
+    @book = FlaggedDeletionBook.create(:title => "I Am Legend", :author => "Richard Matheson")
+    @book.destroy
+  end
+
+  after(:all) do
+    FlaggedDeletionBook.delete_all
+  end
+
+  describe "flagged deletion functionality" do
+
+    it "should not be found by normal finders" do
+      lambda {
+        FlaggedDeletionBook.find(@book)
+      }.should raise_error BigRecord::RecordNotFound
+    end
+
+    it "should be found using the :include_deleted option" do
+      lambda {
+        FlaggedDeletionBook.find(@book, :include_deleted => true)
+      }.should_not raise_error
+    end
+
+  end
+
+end
data/spec/unit/embedded_spec.rb
CHANGED
@@ -12,7 +12,7 @@ describe BigRecord::Embedded do
     Zoo.delete_all
   end
 
-  it_should_behave_like "BigRecord::
+  it_should_behave_like "BigRecord::Model"
 
   describe "embedded within a BigRecord::Base model" do
 
@@ -40,4 +40,14 @@ describe BigRecord::Embedded do
 
   end
 
+  describe "standalone behavior" do
+
+    it "should generate the id in the constructor" do
+      link = Embedded::WebLink.new
+      link.attributes['id'].should_not be_nil
+      link.id.should_not be_nil
+    end
+
+  end
+
 end
data/tasks/{data_store.rake → data_store.rb}
RENAMED
File without changes
data/tasks/{gem.rake → gem.rb}
RENAMED
@@ -6,7 +6,7 @@ begin
   gemspec.authors = ["openplaces.org"]
   gemspec.email = "bigrecord@openplaces.org"
   gemspec.homepage = "http://www.bigrecord.org"
-  gemspec.summary = "Object mapper for supporting column-oriented data stores (supports #{DATA_STORES.join(" ")}) in Ruby on Rails."
+  gemspec.summary = "Object mapper for supporting column-oriented data stores (supports #{DATA_STORES.join(", ")}) in Ruby on Rails."
   gemspec.description = "BigRecord is built from ActiveRecord, and intended to seamlessly integrate into your Ruby on Rails applications."
   gemspec.files = FileList["{examples,guides,generators,lib,rails,spec,tasks}/**/*","init.rb","install.rb","Rakefile","VERSION"].to_a
   gemspec.extra_rdoc_files = FileList["guides/**/*","LICENSE","README.rdoc"].to_a
data/tasks/{rdoc.rake → rdoc.rb}
RENAMED
File without changes

data/tasks/{spec.rake → spec.rb}
RENAMED
File without changes
metadata
CHANGED
@@ -1,7 +1,12 @@
 --- !ruby/object:Gem::Specification
 name: bigrecord
 version: !ruby/object:Gem::Version
-
+  prerelease: false
+  segments:
+  - 0
+  - 1
+  - 0
+  version: 0.1.0
 platform: ruby
 authors:
 - openplaces.org
@@ -9,49 +14,59 @@ autorequire:
 bindir: bin
 cert_chain: []
 
-date: 2010-
+date: 2010-04-27 00:00:00 -04:00
 default_executable:
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rspec
-
-
-  version_requirements: !ruby/object:Gem::Requirement
+  prerelease: false
+  requirement: &id001 !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
+        segments:
+        - 0
         version: "0"
-
+  type: :development
+  version_requirements: *id001
 - !ruby/object:Gem::Dependency
   name: uuidtools
-
-
-  version_requirements: !ruby/object:Gem::Requirement
+  prerelease: false
+  requirement: &id002 !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
+        segments:
+        - 2
+        - 0
+        - 0
         version: 2.0.0
-
+  type: :runtime
+  version_requirements: *id002
 - !ruby/object:Gem::Dependency
   name: activesupport
-
-
-  version_requirements: !ruby/object:Gem::Requirement
+  prerelease: false
+  requirement: &id003 !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
+        segments:
+        - 0
         version: "0"
-
+  type: :runtime
+  version_requirements: *id003
 - !ruby/object:Gem::Dependency
   name: activerecord
-
-
-  version_requirements: !ruby/object:Gem::Requirement
+  prerelease: false
+  requirement: &id004 !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
+        segments:
+        - 0
         version: "0"
-
+  type: :runtime
+  version_requirements: *id004
 description: BigRecord is built from ActiveRecord, and intended to seamlessly integrate into your Ruby on Rails applications.
 email: bigrecord@openplaces.org
 executables: []
@@ -82,7 +97,6 @@ files:
 - init.rb
 - install.rb
 - lib/big_record.rb
-- lib/big_record/abstract_base.rb
 - lib/big_record/action_view_extensions.rb
 - lib/big_record/ar_associations.rb
 - lib/big_record/ar_associations/association_collection.rb
@@ -111,6 +125,7 @@ files:
 - lib/big_record/connection_adapters/abstract/database_statements.rb
 - lib/big_record/connection_adapters/abstract/quoting.rb
 - lib/big_record/connection_adapters/abstract_adapter.rb
+- lib/big_record/connection_adapters/cassandra_adapter.rb
 - lib/big_record/connection_adapters/column.rb
 - lib/big_record/connection_adapters/hbase_adapter.rb
 - lib/big_record/connection_adapters/hbase_rest_adapter.rb
@@ -122,6 +137,7 @@ files:
 - lib/big_record/family_span_columns.rb
 - lib/big_record/fixtures.rb
 - lib/big_record/migration.rb
+- lib/big_record/model.rb
 - lib/big_record/routing_ext.rb
 - lib/big_record/timestamp.rb
 - lib/big_record/validations.rb
@@ -158,6 +174,7 @@ files:
 - spec/unit/attributes_spec.rb
 - spec/unit/br_associations_spec.rb
 - spec/unit/columns_spec.rb
+- spec/unit/deletion_spec.rb
 - spec/unit/embedded_spec.rb
 - spec/unit/find_spec.rb
 - spec/unit/hash_helper_spec.rb
@@ -166,10 +183,10 @@ files:
 - spec/unit/scanner_spec.rb
 - spec/unit/validations_spec.rb
 - tasks/bigrecord_tasks.rake
-- tasks/data_store.
-- tasks/gem.
-- tasks/rdoc.
-- tasks/spec.
+- tasks/data_store.rb
+- tasks/gem.rb
+- tasks/rdoc.rb
+- tasks/spec.rb
 - LICENSE
 - README.rdoc
 has_rdoc: true
@@ -185,20 +202,22 @@ required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
+      segments:
+      - 0
       version: "0"
-  version:
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
+      segments:
+      - 0
       version: "0"
-  version:
 requirements: []
 
 rubyforge_project:
-rubygems_version: 1.3.
+rubygems_version: 1.3.6
 signing_key:
 specification_version: 3
-summary: Object mapper for supporting column-oriented data stores (supports hbase) in Ruby on Rails.
+summary: Object mapper for supporting column-oriented data stores (supports hbase, cassandra) in Ruby on Rails.
 test_files: []
 