ok_hbase 0.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. data/.gitignore +18 -0
  2. data/.rspec +2 -0
  3. data/.rvmrc +1 -0
  4. data/Gemfile +17 -0
  5. data/LICENSE.txt +22 -0
  6. data/README.md +47 -0
  7. data/Rakefile +22 -0
  8. data/examples/README.md +46 -0
  9. data/examples/advanced/README.md +36 -0
  10. data/examples/advanced/perf_read.rb +146 -0
  11. data/examples/advanced/perf_write.rb +143 -0
  12. data/examples/advanced/table_read.rb +115 -0
  13. data/examples/advanced/table_write.rb +128 -0
  14. data/examples/table_scan.rb +97 -0
  15. data/examples/table_write.rb +97 -0
  16. data/lib/ok_hbase/active_model.rb +35 -0
  17. data/lib/ok_hbase/client.rb +42 -0
  18. data/lib/ok_hbase/concerns/custom_row/class_methods.rb +13 -0
  19. data/lib/ok_hbase/concerns/custom_row.rb +40 -0
  20. data/lib/ok_hbase/concerns/indexable/class_methods.rb +13 -0
  21. data/lib/ok_hbase/concerns/indexable.rb +101 -0
  22. data/lib/ok_hbase/concerns/row.rb +85 -0
  23. data/lib/ok_hbase/concerns/table/batch.rb +95 -0
  24. data/lib/ok_hbase/concerns/table/class_methods.rb +13 -0
  25. data/lib/ok_hbase/concerns/table/instrumentation.rb +48 -0
  26. data/lib/ok_hbase/concerns/table.rb +241 -0
  27. data/lib/ok_hbase/concerns.rb +13 -0
  28. data/lib/ok_hbase/connection.rb +157 -0
  29. data/lib/ok_hbase/row.rb +21 -0
  30. data/lib/ok_hbase/table.rb +10 -0
  31. data/lib/ok_hbase/version.rb +3 -0
  32. data/lib/ok_hbase.rb +39 -0
  33. data/lib/thrift/hbase/hbase.rb +2643 -0
  34. data/lib/thrift/hbase/hbase_constants.rb +14 -0
  35. data/lib/thrift/hbase/hbase_types.rb +252 -0
  36. data/ok-hbase.gemspec +23 -0
  37. data/spec/ok_hbase/connection_spec.rb +99 -0
  38. data/spec/ok_hbase/table_spec.rb +149 -0
  39. data/spec/ok_hbase_spec.rb +24 -0
  40. data/spec/spec_helper.rb +20 -0
  41. data/tasks/bump.rb +30 -0
  42. metadata +122 -0
data/lib/ok_hbase/concerns/table/batch.rb ADDED
@@ -0,0 +1,95 @@
+ module OkHbase
+   module Concerns
+     module Table
+       class Batch
+
+         attr_accessor :batch_wrapper
+         attr_reader :table, :batch_size, :timestamp
+
+         def initialize(table, timestamp = nil, batch_size = nil, transaction = false, &batch_wrapper)
+           raise TypeError.new "'timestamp' must be an integer or nil" if timestamp && !timestamp.is_a?(Integer)
+
+           if batch_size
+             raise ArgumentError.new "'transaction' cannot be used when 'batch_size' is specified" if transaction
+             raise ArgumentError.new "'batch_size' must be > 0" unless batch_size > 0
+           end
+
+           @table = table
+           @batch_size = batch_size
+           @timestamp = timestamp
+           @transaction = transaction
+           @batch_wrapper = batch_wrapper
+           @families = nil
+
+           _reset_mutations
+         end
+
+         def send_batch
+           if batch_wrapper
+             batch_wrapper.call(@mutations) do
+               _send_batch
+             end
+           else
+             _send_batch
+           end
+         end
+
+         def put(row_key, data)
+           @mutations[row_key] ||= []
+
+           data.each_pair do |column, value|
+             @mutations[row_key] << Apache::Hadoop::Hbase::Thrift::Mutation.new(
+               isDelete: false, column: column, value: value
+             )
+           end
+
+           @mutation_count += data.size
+
+           send_batch if @batch_size && @mutation_count > @batch_size
+         end
+
+         def delete(row_key, columns = nil)
+           columns ||= @families ||= @table.send(:_column_family_names)
+
+           @mutations[row_key] ||= []
+
+           columns.each do |column|
+             @mutations[row_key] << Apache::Hadoop::Hbase::Thrift::Mutation.new(isDelete: true, column: column)
+           end
+
+           @mutation_count += columns.size
+           send_batch if @batch_size && @mutation_count > @batch_size
+         end
+
+         def transaction
+           yield self
+           send_batch
+         end
+
+         private
+
+         def _reset_mutations
+           @mutations = {}
+           @mutation_count = 0
+         end
+
+         def _send_batch
+           batch_mutations = @mutations.map do |row_key, mutations|
+             Apache::Hadoop::Hbase::Thrift::BatchMutation.new(row: row_key.dup.force_encoding(Encoding::UTF_8), mutations: mutations)
+           end
+
+           return if batch_mutations.blank?
+
+           if @timestamp
+             @table.connection.client.mutateRowsTs(@table.connection.table_name(@table.table_name), batch_mutations, @timestamp)
+           else
+             @table.connection.client.mutateRows(@table.connection.table_name(@table.table_name), batch_mutations)
+           end
+
+           _reset_mutations
+         end
+       end
+     end
+   end
+ end
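For orientation, Batch buffers Thrift Mutation objects per row key and flushes them through mutateRows/mutateRowsTs. A minimal usage sketch (not part of the gem), assuming a reachable HBase Thrift server and a hypothetical 'events' table with a 'd' column family:

require 'ok_hbase'

# Hypothetical connection details; adjust for your cluster.
conn = OkHbase::Connection.new(host: 'localhost', port: 9090, auto_connect: true)
table = conn.table('events')

# Size-bounded batch: puts/deletes are buffered and flushed automatically
# once more than 500 mutations have accumulated.
batch = table.batch(nil, 500)
batch.put('row-1', { 'd:status' => 'ok' })
batch.delete('row-2', ['d:stale'])
batch.send_batch # flush whatever is still buffered

# Transactional form: the block's mutations are flushed once at the end.
table.batch.transaction do |b|
  b.put('row-3', { 'd:status' => 'ok' })
end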
data/lib/ok_hbase/concerns/table/class_methods.rb ADDED
@@ -0,0 +1,13 @@
+ module OkHbase
+   module Concerns
+     module Table
+       module ClassMethods
+         extend ActiveSupport::Concern
+
+         module ClassMethods
+           include OkHbase::Concerns::Table
+         end
+       end
+     end
+   end
+ end
data/lib/ok_hbase/concerns/table/instrumentation.rb ADDED
@@ -0,0 +1,48 @@
+ module OkHbase
+   module Concerns
+     module Table
+       module Instrumentation
+         extend ActiveSupport::Concern
+
+         module ClassMethods
+           instrumented_methods = {
+             load: %w[row rows cells scan],
+           }
+
+           instrumented_methods.each_pair do |method_type, methods|
+             methods.each do |method_to_instrument|
+               module_eval <<-RUBY, __FILE__, __LINE__
+                 def #{method_to_instrument}(*args)
+                   opts = {
+                     name: [table_name, '#{method_type}'].join(' '),
+                     description: "#{method_to_instrument}",
+                     options: args
+                   }
+                   ActiveSupport::Notifications.instrument("#{method_type}.ok_hbase", opts) do
+                     super
+                   end
+                 end
+               RUBY
+             end
+           end
+
+           def batch(timestamp = nil, batch_size = nil, transaction = false)
+             batch_wrapper = Proc.new do |*args, &block|
+               opts = {
+                 name: [table_name, 'write'].join(' '),
+                 description: "send_batch",
+                 options: args
+               }
+               ActiveSupport::Notifications.instrument("write.ok_hbase", opts, &block)
+             end
+
+             batch = Batch.new(self, timestamp, batch_size, transaction)
+             batch.batch_wrapper = batch_wrapper
+             batch
+           end
+         end
+       end
+     end
+   end
+ end
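The Instrumentation concern wraps each read (row, rows, cells, scan) in a 'load.ok_hbase' notification and wraps batch flushes in 'write.ok_hbase' via the batch_wrapper proc. A minimal subscriber sketch (illustrative, not part of the gem):

require 'active_support/notifications'

# Log every instrumented read with its duration.
ActiveSupport::Notifications.subscribe('load.ok_hbase') do |_event, started, finished, _id, payload|
  ms = ((finished - started) * 1000).round(1)
  OkHbase.logger.info "#{payload[:name]} #{payload[:description]} (#{ms} ms)"
end

# Batch flushes fire the write event once per _send_batch call.
ActiveSupport::Notifications.subscribe('write.ok_hbase') do |_event, started, finished, _id, payload|
  OkHbase.logger.info "#{payload[:name]} flushed in #{((finished - started) * 1000).round(1)} ms"
end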
data/lib/ok_hbase/concerns/table.rb ADDED
@@ -0,0 +1,241 @@
+ require 'ok_hbase/concerns/table/batch'
+
+ module OkHbase
+   module Concerns
+     module Table
+       extend ActiveSupport::Concern
+
+       SCANNER_DEFAULTS = {
+         start_row: nil,
+         stop_row: nil,
+         row_prefix: nil,
+         columns: nil,
+         filter_string: nil,
+         timestamp: nil,
+         include_timestamp: false,
+         caching: 1000,
+         limit: nil,
+       }.freeze
+
+       attr_accessor :table_name, :connection
+
+       def table_name
+         @table_name
+       end
+
+       def table_name=(val)
+         @table_name = val
+       end
+
+       def self.connection
+         @connection
+       end
+
+       def self.connection=(val)
+         @connection = val
+       end
+
+       def families
+         descriptors = self.connection.client.getColumnDescriptors(self.connection.table_name(table_name))
+
+         families = {}
+
+         descriptors.each_pair do |name, descriptor|
+           name = name[0...-1] # remove trailing ':'
+           families[name] = OkHbase.thrift_type_to_dict(descriptor)
+         end
+         families
+       end
+
+       def regions
+         regions = self.connection.client.getTableRegions(self.connection.table_name(table_name))
+         regions.map { |r| OkHbase.thrift_type_to_dict(r) }
+       end
+
+       def row(row_key, columns = nil, timestamp = nil, include_timestamp = false)
+         raise TypeError.new "'columns' must be a tuple or list" if columns && !columns.is_a?(Array)
+
+         row_key.force_encoding(Encoding::UTF_8)
+
+         rows = if timestamp
+           raise TypeError.new "'timestamp' must be an integer" unless timestamp.is_a? Integer
+
+           self.connection.client.getRowWithColumnsTs(self.connection.table_name(table_name), row_key, columns, timestamp)
+         else
+           self.connection.client.getRowWithColumns(self.connection.table_name(table_name), row_key, columns)
+         end
+
+         rows.empty? ? {} : _make_row(rows[0].columns, include_timestamp)
+       end
+
+       def rows(row_keys, columns = nil, timestamp = nil, include_timestamp = false)
+         raise TypeError.new "'columns' must be a tuple or list" if columns && !columns.is_a?(Array)
+
+         row_keys.map! { |r| r.force_encoding(Encoding::UTF_8) }
+
+         return {} if row_keys.blank?
+
+         rows = if timestamp
+           raise TypeError.new "'timestamp' must be an integer" unless timestamp.is_a? Integer
+
+           columns = _column_family_names unless columns
+
+           self.connection.client.getRowsWithColumnsTs(self.connection.table_name(table_name), row_keys, columns, timestamp)
+         else
+           self.connection.client.getRowsWithColumns(self.connection.table_name(table_name), row_keys, columns)
+         end
+
+         rows.map { |row| _make_row(row.columns, include_timestamp) }
+       end
+
+       def cells(row_key, column, versions = nil, timestamp = nil, include_timestamp = nil)
+         row_key.force_encoding(Encoding::UTF_8)
+
+         versions ||= (2 ** 31) - 1
+
+         raise TypeError.new "'versions' parameter must be a number or nil" unless versions.is_a? Integer
+         raise ArgumentError.new "'versions' parameter must be >= 1" unless versions >= 1
+
+         cells = if timestamp
+           raise TypeError.new "'timestamp' must be an integer" unless timestamp.is_a? Integer
+
+           self.connection.client.getVerTs(self.connection.table_name(table_name), row_key, column, timestamp, versions)
+         else
+           self.connection.client.getVer(self.connection.table_name(table_name), row_key, column, versions)
+         end
+
+         cells.map { |cell| include_timestamp ? [cell.value, cell.timestamp] : cell.value }
+       end
+
+       def scan(opts = {})
+         rows = [] unless block_given?
+         opts = SCANNER_DEFAULTS.merge opts.select { |k| SCANNER_DEFAULTS.keys.include? k }
+
+         raise ArgumentError.new "'caching' must be >= 1" unless opts[:caching] && opts[:caching] >= 1
+         raise ArgumentError.new "'limit' must be >= 1" if opts[:limit] && opts[:limit] < 1
+
+         if opts[:row_prefix]
+           raise ArgumentError.new "'row_prefix' cannot be combined with 'start_row' or 'stop_row'" if opts[:start_row] || opts[:stop_row]
+
+           opts[:start_row] = opts[:row_prefix]
+           opts[:stop_row] = OkHbase::increment_string opts[:start_row]
+         end
+         opts[:start_row] ||= ''
+
+         scanner = _scanner(opts)
+
+         scanner_id = self.connection.client.scannerOpenWithScan(self.connection.table_name(table_name), scanner)
+
+         fetched_count = returned_count = 0
+
+         begin
+           while true
+             how_many = opts[:limit] ? [opts[:caching], opts[:limit] - returned_count].min : opts[:caching]
+
+             items = if how_many == 1
+               self.connection.client.scannerGet(scanner_id)
+             else
+               self.connection.client.scannerGetList(scanner_id, how_many)
+             end
+
+             fetched_count += items.length
+
+             items.map.with_index do |item, index|
+               if block_given?
+                 yield item.row, _make_row(item.columns, opts[:include_timestamp])
+               else
+                 rows << [item.row, _make_row(item.columns, opts[:include_timestamp])]
+               end
+               return rows if opts[:limit] && index + 1 + returned_count == opts[:limit]
+             end
+
+             returned_count += items.length
+             break if items.length < how_many
+           end
+         ensure
+           self.connection.client.scannerClose(scanner_id)
+         end
+         rows
+       end
+
+       def put(row_key, data, timestamp = nil)
+         batch = self.batch(timestamp)
+
+         batch.transaction do |batch|
+           batch.put(row_key, data)
+         end
+       end
+
+       def delete(row_key, columns = nil, timestamp = nil)
+         if columns
+           batch = self.batch(timestamp)
+           batch.transaction do |batch|
+             batch.delete(row_key, columns)
+           end
+         else
+           timestamp ? self.connection.client.deleteAllRowTs(self.connection.table_name(table_name), row_key, timestamp) : self.connection.client.deleteAllRow(self.connection.table_name(table_name), row_key)
+         end
+       end
+
+       def batch(timestamp = nil, batch_size = nil, transaction = false)
+         Batch.new(self, timestamp, batch_size, transaction)
+       end
+
+       def counter_get(row_key, column)
+         counter_inc(row_key, column, 0)
+       end
+
+       def counter_set(row_key, column, value = 0)
+         self.batch.transaction do |batch|
+           batch.put(row_key, { column => [value].pack('Q>') })
+         end
+       end
+
+       def counter_inc(row_key, column, value = 1)
+         self.connection.client.atomicIncrement(self.connection.table_name(table_name), row_key, column, value)
+       end
+
+       def counter_dec(row_key, column, value = 1)
+         counter_inc(row_key, column, -value)
+       end
+
+       alias_method :find, :scan
+
+       def _column_family_names
+         self.connection.client.getColumnDescriptors(self.connection.table_name(table_name)).keys
+       end
+
+       def _scanner(opts)
+         scanner = Apache::Hadoop::Hbase::Thrift::TScan.new
+         scanner_fields = Apache::Hadoop::Hbase::Thrift::TScan::FIELDS
+
+         opts.each_pair do |k, v|
+           const = k.to_s.upcase.gsub('_', '')
+           const_value = Apache::Hadoop::Hbase::Thrift::TScan.const_get(const) rescue nil
+
+           if const_value
+             v.force_encoding(Encoding::UTF_8) if v.is_a?(String)
+             OkHbase.logger.info "setting scanner.#{scanner_fields[const_value][:name]}: #{v}"
+             scanner.send("#{scanner_fields[const_value][:name]}=", v)
+           end
+         end
+         scanner
+       end
+
+       def _make_row(cell_map, include_timestamp)
+         row = {}
+         cell_map.each_pair do |cell_name, cell|
+           row[cell_name] = include_timestamp ? [cell.value, cell.timestamp] : cell.value
+         end
+         row
+       end
+     end
+   end
+ end
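A minimal read/write sketch against the Table concern above (not part of the gem), assuming a connected OkHbase::Table for a hypothetical 'events' table with a 'd' column family:

# Single put and read back (column keys are 'family:qualifier' strings).
table.put('user|42|2014-01-01', { 'd:name' => 'Alice' })
table.row('user|42|2014-01-01')                        # => { "d:name" => "Alice" }
table.row('user|42|2014-01-01', ['d:name'], nil, true) # values become [value, timestamp]

# Prefix scan: row_prefix is expanded into start_row/stop_row via
# OkHbase.increment_string, and results are fetched in caching-sized pages.
table.scan(row_prefix: 'user|42|', limit: 10) do |row_key, row|
  puts "#{row_key}: #{row.inspect}"
end

# Atomic counters (stored as big-endian 64-bit values).
table.counter_inc('user|42|2014-01-01', 'd:hits')
table.counter_get('user|42|2014-01-01', 'd:hits') # => 1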
data/lib/ok_hbase/concerns.rb ADDED
@@ -0,0 +1,13 @@
+ module OkHbase
+   module Concerns
+
+   end
+ end
+ require 'ok_hbase/concerns/table'
+ require 'ok_hbase/concerns/table/class_methods'
+ require 'ok_hbase/concerns/table/instrumentation'
+ require 'ok_hbase/concerns/row'
+ require 'ok_hbase/concerns/custom_row'
+ require 'ok_hbase/concerns/custom_row/class_methods'
+ require 'ok_hbase/concerns/indexable'
+ require 'ok_hbase/concerns/indexable/class_methods'
data/lib/ok_hbase/connection.rb ADDED
@@ -0,0 +1,157 @@
+ require 'thrift'
+ require 'thrift/transport/socket'
+ require 'thrift/protocol/binary_protocol'
+
+ require 'thrift/hbase/hbase_constants'
+ require 'thrift/hbase/hbase_types'
+ require 'thrift/hbase/hbase'
+
+ require 'ok_hbase/client'
+
+ module OkHbase
+   class Connection
+
+     DEFAULT_OPTS = {
+       host: 'localhost',
+       port: 9090,
+       timeout: 5,
+       auto_connect: false,
+       table_prefix: nil,
+       table_prefix_separator: '_',
+       transport: :buffered,
+       max_tries: 3
+     }.freeze
+
+     THRIFT_TRANSPORTS = {
+       buffered: Thrift::BufferedTransport,
+       framed: Thrift::FramedTransport,
+     }
+
+     attr_accessor :host, :port, :timeout, :auto_connect, :table_prefix, :table_prefix_separator, :max_tries
+     attr_reader :client
+
+     def initialize(opts = {})
+       opts = DEFAULT_OPTS.merge opts
+
+       raise ArgumentError.new ":transport must be one of: #{THRIFT_TRANSPORTS.keys}" unless THRIFT_TRANSPORTS.keys.include?(opts[:transport])
+       raise TypeError.new ":table_prefix must be a string" if opts[:table_prefix] && !opts[:table_prefix].is_a?(String)
+       raise TypeError.new ":table_prefix_separator must be a string" unless opts[:table_prefix_separator].is_a?(String)
+
+       @host = opts[:host]
+       @port = opts[:port]
+       @timeout = opts[:timeout]
+       @max_tries = opts[:max_tries]
+       @auto_connect = opts[:auto_connect]
+       @table_prefix = opts[:table_prefix]
+       @table_prefix_separator = opts[:table_prefix_separator]
+       @transport_class = THRIFT_TRANSPORTS[opts[:transport]]
+
+       _refresh_thrift_client
+       open if @auto_connect
+     end
+
+     def open
+       return if open?
+       @transport.open
+
+       OkHbase.logger.info "OkHbase connected"
+     end
+
+     def open?
+       @transport && @transport.open?
+     end
+
+     def close
+       return unless open?
+       @transport.close
+     end
+
+     def table(name, use_prefix = true)
+       name = table_name(name) if use_prefix
+       OkHbase::Table.new(name, self)
+     end
+
+     def tables
+       names = client.getTableNames
+       if table_prefix
+         names = names.map do |n|
+           n["#{table_prefix}#{table_prefix_separator}".size..-1] if n.start_with?(table_prefix)
+         end
+       end
+       names
+     end
+
+     def create_table(name, families)
+       name = table_name(name)
+
+       raise ArgumentError.new "Can't create table #{name}. (no column families specified)" unless families
+       raise TypeError.new "'families' arg must be a hash" unless families.respond_to?(:[])
+
+       column_descriptors = []
+
+       families.each_pair do |family_name, options|
+         options ||= {}
+
+         args = {}
+         options.each_pair do |option_name, value|
+           args[option_name.to_s.camelcase(:lower)] = value
+         end
+
+         family_name = "#{family_name}:" unless family_name.to_s.end_with? ':'
+         args[:name] = family_name
+
+         column_descriptors << Apache::Hadoop::Hbase::Thrift::ColumnDescriptor.new(args)
+       end
+
+       client.createTable(name, column_descriptors)
+       table(name)
+     end
+
+     def delete_table(name, disable = false)
+       name = table_name(name)
+
+       disable_table(name) if disable && table_enabled?(name)
+       client.deleteTable(name)
+     end
+
+     def enable_table(name)
+       name = table_name(name)
+
+       client.enableTable(name)
+     end
+
+     def disable_table(name)
+       name = table_name(name)
+
+       client.disableTable(name)
+     end
+
+     def table_enabled?(name)
+       name = table_name(name)
+
+       client.isTableEnabled(name)
+     end
+
+     def compact_table(name, major = false)
+       name = table_name(name)
+
+       major ? client.majorCompact(name) : client.compact(name)
+     end
+
+     def table_name(name)
+       table_prefix && !name.start_with?(table_prefix) ? [table_prefix, name].join(table_prefix_separator) : name
+     end
+
+     private
+
+     def _refresh_thrift_client
+       socket = Thrift::Socket.new(host, port, timeout)
+       @transport = @transport_class.new(socket)
+       protocol = Thrift::BinaryProtocolAccelerated.new(@transport)
+       @client = OkHbase::Client.new(protocol, nil, max_tries)
+     end
+   end
+ end
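A minimal connection sketch (not part of the gem), assuming an HBase Thrift server on the default port; the host name and 'myapp'/'metrics' names are hypothetical:

require 'ok_hbase'

conn = OkHbase::Connection.new(
  host: 'hbase-thrift.internal',   # hypothetical host
  port: 9090,
  timeout: 5,
  table_prefix: 'myapp',           # tables are addressed as 'myapp_<name>'
  transport: :buffered,
  auto_connect: true
)

# Create the table with a 'd' column family if it is missing, then use it.
conn.create_table('metrics', 'd' => {}) unless conn.tables.include?('metrics')
table = conn.table('metrics')      # OkHbase::Table bound to 'myapp_metrics'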
data/lib/ok_hbase/row.rb ADDED
@@ -0,0 +1,21 @@
+ module OkHbase
+   class Row
+     include OkHbase::Concerns::Row
+
+     def initialize(opts = {})
+       opts = opts.with_indifferent_access
+
+       raise ArgumentError.new "'table' must be an OkHbase::Concerns::Table, got #{opts[:table].class}" unless opts[:table] && opts[:table].is_a?(OkHbase::Concerns::Table)
+       @default_column_family = opts[:default_column_family]
+
+       @table = opts[:table]
+
+       @row_key = opts[:row_key]
+       @raw_data = {}.with_indifferent_access
+       opts[:raw_data].each_pair do |k, v|
+         send(:"#{k}=", v)
+       end
+     end
+   end
+ end
data/lib/ok_hbase/table.rb ADDED
@@ -0,0 +1,10 @@
+ module OkHbase
+   class Table
+     include OkHbase::Concerns::Table
+
+     def initialize(name, connection)
+       @connection = connection
+       @table_name = name
+     end
+   end
+ end
data/lib/ok_hbase/version.rb ADDED
@@ -0,0 +1,3 @@
+ module OkHbase
+   VERSION = "0.0.5"
+ end
data/lib/ok_hbase.rb ADDED
@@ -0,0 +1,39 @@
+ require 'active_support/all'
+
+ module OkHbase
+
+   mattr_accessor :logger
+
+   def self.root
+     ::Pathname.new File.expand_path('../../', __FILE__)
+   end
+
+   def self.logger
+     @@logger ||= init_logger
+   end
+
+   def self.init_logger
+     Logger.new("/dev/null")
+   end
+
+   def self.increment_string(string)
+     bytes = string.bytes.to_a
+     (0...bytes.length).to_a.reverse.each do |i|
+       return (bytes[0...i] << bytes[i] + 1).pack('C*').force_encoding(Encoding::UTF_8) unless bytes[i] == 255
+     end
+     nil
+   end
+
+   def self.thrift_type_to_dict(obj)
+     Hash[obj.class::FIELDS.map { |k, v| [v[:name].underscore, obj.send(v[:name])] }]
+   end
+ end
+
+ require 'ok_hbase/version'
+ require 'ok_hbase/client'
+ require 'ok_hbase/connection'
+ require 'ok_hbase/concerns'
+ require 'ok_hbase/table'
+ require 'ok_hbase/row'
+ require 'ok_hbase/active_model'
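For reference, OkHbase.increment_string computes the exclusive stop row used by row_prefix scans: it increments the last byte that is not 0xFF and drops everything after it. A few illustrative calls (values follow directly from the definition above):

OkHbase.increment_string('user|42|')  # => "user|42}"  ('|' is 0x7C, so the last byte becomes 0x7D)
OkHbase.increment_string("abc\xFF")   # => "abd"       (trailing 0xFF bytes are dropped)
OkHbase.increment_string("\xFF\xFF")  # => nil         (no upper bound exists)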