hbase-jruby 0.4.4-java → 0.4.5-java

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 2dfaa38a2fb45434fd395443b5f28494851f14da
- data.tar.gz: ca32b60a6e041d476414f155ef7a507f9f7dd77b
+ metadata.gz: 317e3299f449c9c5f847ae8118c631d22ff91872
+ data.tar.gz: 092879ce25e3448c5a777310782c7fffab0ba586
  SHA512:
- metadata.gz: cb3f85a89139153976e241009e20b8c05058891ba388855110e072d13b83e36170be3ae95bf4dffbcb99ab46ad0bf4e24299ed4406ad4f868cacae90cdd4244c
- data.tar.gz: b5f50fa4a779ab31d1d827793aef548ff5d7582ed66f339eb889c6b1d00d3ecf6276cd96f358f5969048187e6bf706b821e347c8c98d6736b7ff5273cf0c74ac
+ metadata.gz: 3454275b00e7636a6d1bedf6b4172ad75f74da839f7bbdb91ac8882604c2e128cef248289abbb276629ed78d4da17834d59844c8fe949cf24bd5d180d3e42433
+ data.tar.gz: 1bb37e732100751e1861605af88f0cf0e1aef988a396a95478bc10af5966c8223bcc9f03bf31529c44846b6cc7b47aa64d2796e5852964e40528ac0a851bba43
@@ -1,9 +1,19 @@
  Changelog
  =========

+ 0.4.5
+ -----
+ - Fixed HBase 0.96 compatibility issues and tested on HBase 0.96 and 0.98
+ - Added `:split_policy` table property
+ - `Table#properties` and `Table#families` now include previously unknown
+   properties as String-String pairs
+ - Added `Row#byte_array` which returns an instance of `HBase::ByteArray`
+   - Equivalent to `HBase::ByteArray[row.raw(col)]`
+
+
  0.4.4
  -----
- - Fixed HBase::Table#raw_families/raw_properties on HBase shell
+ - Fixed `HBase::Table#raw_families/raw_properties` on HBase shell

  0.4.3
  -----
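A quick sketch of the two user-facing 0.4.5 additions (the connection, table, and column names below are illustrative, not taken from the gem itself):

```ruby
# Sketch only: assumes an `hbase` connection and the README's book table
table = hbase.table(:book)

# New :split_policy table property (a String or a Class)
table.alter! :split_policy =>
  'org.apache.hadoop.hbase.regionserver.IncreasingToUpperBoundRegionSplitPolicy'

# New Row#byte_array accessor; equivalent to HBase::ByteArray[row.raw(col)]
row = table.get(1)
row.byte_array('cf1:image')   # => HBase::ByteArray instance
```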
data/README.md CHANGED
@@ -58,15 +58,16 @@ hbase.schema = {
    book: {
      # Columns in cf1 family
      cf1: {
-       title: :string, # String (UTF-8)
-       author: :string,
-       category: :string,
-       year: :short, # Short integer (2-byte)
-       pages: :int, # Integer (4-byte)
-       price: :bigdecimal, # BigDecimal
-       weight: :float, # Double-precision floating-point number
-       in_print: :boolean, # Boolean (true | false)
-       image: :raw # Java byte array; no automatic type conversion
+       title:     :string,     # String (UTF-8)
+       author:    :string,
+       category:  :string,
+       year:      :short,      # Short integer (2-byte)
+       pages:     :int,        # Integer (4-byte)
+       price:     :bigdecimal, # BigDecimal
+       weight:    :float,      # Double-precision floating-point number
+       in_print:  :boolean,    # Boolean (true | false)
+       image:     :raw,        # Java byte array; no automatic type conversion
+       thumbnail: :byte_array  # HBase::ByteArray
      },
      # Columns in cf2 family
      cf2: {
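With the schema above, a read might look like this (a sketch; the rowkey and stored values are hypothetical):

```ruby
book = hbase.table(:book).get(1)
book[:title]      # => String, decoded according to the schema
book[:in_print]   # => true or false
book[:thumbnail]  # => HBase::ByteArray; no further decoding is applied
```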
@@ -151,21 +152,23 @@ or by `require`ing relevant JAR files after launching JRuby.
  Well, there's an easier way.
  Call `HBase.resolve_dependency!` helper method passing one of the arguments listed below.

- | Argument   | Dependency               | Default version | Required executable |
- | ---------- | ------------------------ | --------------- | ------------------- |
- | cdh4.5[.*] | Cloudera CDH4.5          | cdh4.5.0        | mvn                 |
- | cdh4.4[.*] | Cloudera CDH4.4          | cdh4.4.0        | mvn                 |
- | cdh4.3[.*] | Cloudera CDH4.3          | cdh4.3.2        | mvn                 |
- | cdh4.2[.*] | Cloudera CDH4.2          | cdh4.2.2        | mvn                 |
- | cdh4.1[.*] | Cloudera CDH4.1          | cdh4.1.5        | mvn                 |
- | cdh3[u*]   | Cloudera CDH3            | cdh3u6          | mvn                 |
- | 0.94[.*]   | Apache HBase 0.94        | 0.94.13         | mvn                 |
- | 0.92[.*]   | Apache HBase 0.92        | 0.92.2          | mvn                 |
- | *POM PATH* | Custom Maven POM file    | -               | mvn                 |
- | `:local`   | Local HBase installation | -               | hbase               |
+ | Argument   | Dependency               | Default version  | Required executable |
+ | ---------- | ------------------------ | ---------------- | ------------------- |
+ | cdh4.5[.*] | Cloudera CDH4.5          | cdh4.5.0         | mvn                 |
+ | cdh4.4[.*] | Cloudera CDH4.4          | cdh4.4.0         | mvn                 |
+ | cdh4.3[.*] | Cloudera CDH4.3          | cdh4.3.2         | mvn                 |
+ | cdh4.2[.*] | Cloudera CDH4.2          | cdh4.2.2         | mvn                 |
+ | cdh4.1[.*] | Cloudera CDH4.1          | cdh4.1.5         | mvn                 |
+ | cdh3[u*]   | Cloudera CDH3            | cdh3u6           | mvn                 |
+ | 0.98[.*]   | Apache HBase 0.98        | 0.98.0-hadoop2   | mvn                 |
+ | 0.96[.*]   | Apache HBase 0.96        | 0.96.1.1-hadoop2 | mvn                 |
+ | 0.94[.*]   | Apache HBase 0.94        | 0.94.16          | mvn                 |
+ | 0.92[.*]   | Apache HBase 0.92        | 0.92.2           | mvn                 |
+ | *POM PATH* | Custom Maven POM file    | -                | mvn                 |
+ | `:local`   | Local HBase installation | -                | hbase               |

  (Default version is used when an argument prefix is given without specific patch version.
-  e.g. `cdh4.2` defaults to `cdh4.2.0`)
+  e.g. `cdh4.2` defaults to `cdh4.2.2`)

  #### Examples

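For example, to pull in the client JARs for one of the newly supported versions (a sketch; `mvn` must be on the PATH, or use a local HBase installation with `:local`):

```ruby
require 'hbase-jruby'

HBase.resolve_dependency! '0.98'    # or '0.96', 'cdh4.5', a POM path, ...
# HBase.resolve_dependency! :local  # use the JARs of a local HBase installation

hbase = HBase.new 'hbase.zookeeper.quorum' => 'localhost'
```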
@@ -1016,13 +1019,15 @@ Some of the properties are only available on recent versions of HBase.

  http://hbase.apache.org/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html

- | Property              | Type    | Description |
- | --------------------- | ------- | ----------- |
- | `:max_filesize`       | Fixnum  | The maximum size upto which a region can grow to after which a region split is triggered |
- | `:readonly`           | Boolean | If the table is read-only |
- | `:memstore_flushsize` | Fixnum  | The maximum size of the memstore after which the contents of the memstore are flushed to the filesystem |
- | `:deferred_log_flush` | Boolean | Defer the log edits syncing to the file system |
- | `:splits`             | Array   | Region split points |
+ | Property              | Type          | Description |
+ | --------------------- | ------------- | ----------- |
+ | `:max_filesize`       | Fixnum        | The maximum size up to which a region can grow, after which a region split is triggered |
+ | `:readonly`           | Boolean       | If the table is read-only |
+ | `:memstore_flushsize` | Fixnum        | The maximum size of the memstore after which the contents of the memstore are flushed to the filesystem |
+ | `:deferred_log_flush` | Boolean       | Defer the log edits syncing to the file system (deprecated in 0.96) |
+ | `:durability`         | Symbol/String | Durability setting of the table |
+ | `:split_policy`       | String/Class  | Region split policy |
+ | `:splits`             | Array         | Region split points |

  ### Managing column families

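A sketch of setting the new properties on an existing table (values are illustrative; `:durability` and `:split_policy` require HBase 0.96 or above):

```ruby
table.alter!(
  :max_filesize => 512 * 1024 ** 2,
  :durability   => :async_wal,  # mapped onto org.apache.hadoop.hbase.client.Durability
  :split_policy => 'org.apache.hadoop.hbase.regionserver.IncreasingToUpperBoundRegionSplitPolicy'
)
table.properties[:split_policy]  # now included in Table#properties
```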
@@ -119,7 +119,9 @@ class Cell
  # @param [Cell] other
  # @return [Fixnum] -1, 0, or 1
  def <=> other
-   KeyValue.COMPARATOR.compare(@java, other.java)
+   (@comparator ||=
+     KeyValue.const_defined?(:COMPARATOR) ?
+       KeyValue::COMPARATOR : KeyValue.COMPARATOR).compare(@java, other.java)
  end

  # Checks if the cells are the same
@@ -18,9 +18,10 @@ class HBase
  'cdh4.2' => 'cdh4.2.2',
  'cdh4.1' => 'cdh4.1.5',
  'cdh3' => 'cdh3u6',
+ '0.98' => '0.98.0-hadoop2',
  '0.96' => '0.96.1.1-hadoop2',
  '0.95' => '0.95.2-hadoop2',
- '0.94' => '0.94.15',
+ '0.94' => '0.94.16',
  '0.92' => '0.92.2',
  }

@@ -173,8 +174,8 @@ class HBase
  org.apache.hadoop.hbase.client.Put
  org.apache.hadoop.hbase.client.RowMutations
  org.apache.hadoop.hbase.io.hfile.Compression
- org.apache.hadoop.hbase.regionserver.StoreFile
- ],
+ org.apache.hadoop.hbase.io.compress.Compression
+ ], # hfile.Compression <= 0.94
  HBase::Scoped => %w[
  org.apache.hadoop.hbase.client.Get
  org.apache.hadoop.hbase.client.Scan
@@ -89,7 +89,12 @@ class HBase
  unless @closed
  @closed = true
  close_table_pool
- HConnectionManager.deleteConnection(@config, true)
+ begin
+   HConnectionManager.deleteConnection(@config)
+ rescue ArgumentError
+   # HBase 0.92 or below
+   HConnectionManager.deleteConnection(@config, true)
+ end
  end
  end
  end
@@ -154,7 +154,7 @@
  </dependencies>
  </profile>

- <% %w[0.95 0.96].each do |version| %>
+ <% %w[0.95 0.96 0.98].each do |version| %>
  <profile>
  <id><%= version %></id>
  <properties>
@@ -170,14 +170,14 @@
  <version>2.2.0</version>
  <% else %>
  <artifactId>hadoop-core</artifactId>
- <version>1.1.2</version>
+ <version>1.2.1</version>
  <% end %>
  <scope>compile</scope>
  </dependency>

  <dependency>
  <groupId>org.apache.hbase</groupId>
- <artifactId>hbase</artifactId>
+ <artifactId>hbase-client</artifactId>
  <version>${hbase.version}</version>
  <scope>compile</scope>
  </dependency>
@@ -190,7 +190,7 @@
  <id><%= version %></id>
  <properties>
  <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- <hadoop.version>1.1.2</hadoop.version>
+ <hadoop.version>1.2.1</hadoop.version>
  <hbase.version><%= profiles[version] %></hbase.version>
  </properties>

@@ -80,191 +80,165 @@ class Row
  end
  alias to_hash_with_versions to_H

- # Returns column values as byte arrays
- # @overload raw(column)
- #   Returns the latest column value as a byte array
- #   @param [String, Array] col Column name as String or 2-element Array of family and qualifier
- #   @return [byte[]] Byte array representation of the latest value
+ # Returns the latest column value as a Java byte array
+ # @param [String, Array] col Column name as String or 2-element Array of family and qualifier
+ # @return [byte[]] Byte array representation of the latest value
  def raw col
    get_value col
  end

- # Returns all versions of column values as byte arrays in a Hash indexed by their timestamps
- # @overload raws(column)
- #   Returns all versions of column values as byte arrays in a Hash indexed by their timestamps
- #   @param [String, Array] col Column name as String or 2-element Array of family and qualifier
- #   @return [Hash<Fixnum, byte[]>]
+ # Returns all versions of column values as Java byte arrays in a Hash indexed by their timestamps
+ # @param [String, Array] col Column name as String or 2-element Array of family and qualifier
+ # @return [Hash<Fixnum, byte[]>]
  def raws col
    get_value col, true
  end

- # Returns column values as Strings
- # @overload string(column)
- #   Returns the latest column value as a String
- #   @param [String, Array] col Column name as String or 2-element Array of family and qualifier
- #   @return [String]
+ # Returns the latest column value as an HBase::ByteArray instance
+ # @param [String, Array] col Column name as String or 2-element Array of family and qualifier
+ # @return [HBase::ByteArray] ByteArray wrapper around the latest value
+ def byte_array col
+   decode_value :byte_array, col
+ end
+
+ # Returns all versions of column values as HBase::ByteArray instances in a Hash indexed by their timestamps
+ # @param [String, Array] col Column name as String or 2-element Array of family and qualifier
+ # @return [Hash<Fixnum, HBase::ByteArray>]
+ def byte_arrays col
+   decode_value :byte_array, col, true
+ end
+
+ # Returns the latest column value as a String
+ # @param [String, Array] col Column name as String or 2-element Array of family and qualifier
+ # @return [String]
  def string col
    decode_value :string, col
  end
  alias str string

  # Returns all versions of column values as Strings in a Hash indexed by their timestamps
- # @overload strings(column)
- #   Returns all versions of column values as Strings in a Hash indexed by their timestamps
- #   @param [String, Array] column Column name as String or 2-element Array of family and qualifier
- #   @return [Hash<Fixnum, String>]
+ # @param [String, Array] column Column name as String or 2-element Array of family and qualifier
+ # @return [Hash<Fixnum, String>]
  def strings col
    decode_value :string, col, true
  end
  alias strs strings

- # Returns column values as Symbols
- # @overload symbol(column)
- #   Returns the latest column value as a Symbol
- #   @param [String, Array] column Column name as String or 2-element Array of family and qualifier
- #   @return [Symbol]
+ # Returns the latest column value as a Symbol
+ # @param [String, Array] column Column name as String or 2-element Array of family and qualifier
+ # @return [Symbol]
  def symbol col
    decode_value :symbol, col
  end
  alias sym symbol

  # Returns all versions of column values as Symbols in a Hash indexed by their timestamps
- # @overload symbols(column)
- #   Returns all versions of column values as Symbols in a Hash indexed by their timestamps
- #   @param [String, Array] column Column name as String or 2-element Array of family and qualifier
- #   @return [Hash<Fixnum, Symbol>]
+ # @param [String, Array] column Column name as String or 2-element Array of family and qualifier
+ # @return [Hash<Fixnum, Symbol>]
  def symbols col
    decode_value :symbol, col, true
  end
  alias syms symbols

- # Returns 1-byte column values as Fixnums
- # @overload byte(column)
- #   Returns the latest column value as a Fixnum
- #   @param [String, Array] column Column name as String or 2-element Array of family and qualifier
- #   @return [Fixnum]
+ # Returns the latest 1-byte column value as a Fixnum
+ # @param [String, Array] column Column name as String or 2-element Array of family and qualifier
+ # @return [Fixnum]
  def byte col
    decode_value :byte, col
  end

  # Returns all versions of 1-byte column values as Fixnums in a Hash indexed by their timestamps
- # @overload bytes(column)
- #   Returns all versions of column values as Fixnums in a Hash indexed by their timestamps
- #   @param [String, Array] column Column name as String or 2-element Array of family and qualifier
- #   @return [Hash<Fixnum, Fixnum>]
+ # @param [String, Array] column Column name as String or 2-element Array of family and qualifier
+ # @return [Hash<Fixnum, Fixnum>]
  def bytes col
    decode_value :byte, col, true
  end

- # Returns 2-byte column values as Fixnums
- # @overload short(column)
- #   Returns the latest 2-byte column value as a Fixnum
- #   @param [String, Array] column Column name as String or 2-element Array of family and qualifier
- #   @return [Fixnum]
+ # Returns the latest 2-byte column value as a Fixnum
+ # @param [String, Array] column Column name as String or 2-element Array of family and qualifier
+ # @return [Fixnum]
  def short col
    decode_value :short, col
  end

  # Returns all versions of 2-byte column values as Fixnums in a Hash indexed by their timestamps
- # @overload shorts(column)
- #   Returns all versions of 2-byte column values as Fixnums in a Hash indexed by their timestamps
- #   @param [String, Array] column Column name as String or 2-element Array of family and qualifier
- #   @return [Hash<Fixnum, Fixnum>]
+ # @param [String, Array] column Column name as String or 2-element Array of family and qualifier
+ # @return [Hash<Fixnum, Fixnum>]
  def shorts col
    decode_value :short, col, true
  end

- # Returns 4-byte column values as Fixnums
- # @overload int(column)
- #   Returns the latest 4-byte column value as a Fixnum
- #   @param [String, Array] column Column name as String or 2-element Array of family and qualifier
- #   @return [Fixnum]
+ # Returns the latest 4-byte column value as a Fixnum
+ # @param [String, Array] column Column name as String or 2-element Array of family and qualifier
+ # @return [Fixnum]
  def int col
    decode_value :int, col
  end

  # Returns all versions of 4-byte column values as Fixnums in a Hash indexed by their timestamps
- # @overload ints(column)
- #   Returns all versions of 4-byte column values as Fixnums in a Hash indexed by their timestamps
- #   @param [String, Array] column Column name as String or 2-element Array of family and qualifier
- #   @return [Hash<Fixnum, Fixnum>]
+ # @param [String, Array] column Column name as String or 2-element Array of family and qualifier
+ # @return [Hash<Fixnum, Fixnum>]
  def ints col
    decode_value :int, col, true
  end

- # Returns 8-byte column values as Fixnums
- # @overload fixnum(column)
- #   Returns the latest 8-byte column value as a Fixnum
- #   @param [String, Array] column Column name as String or 2-element Array of family and qualifier
- #   @return [Fixnum]
+ # Returns the latest 8-byte column value as a Fixnum
+ # @param [String, Array] column Column name as String or 2-element Array of family and qualifier
+ # @return [Fixnum]
  def fixnum col
    decode_value :fixnum, col
  end
  alias long fixnum

  # Returns all versions of 8-byte column values as Fixnums in a Hash indexed by their timestamps
- # @overload fixnums(column)
- #   Returns all versions of 8-byte column values as Fixnums in a Hash indexed by their timestamps
- #   @param [String, Array] column Column name as String or 2-element Array of family and qualifier
- #   @return [Hash<Fixnum, Fixnum>]
+ # @param [String, Array] column Column name as String or 2-element Array of family and qualifier
+ # @return [Hash<Fixnum, Fixnum>]
  def fixnums col
    decode_value :fixnum, col, true
  end
  alias longs fixnums

- # Returns column values as Bigdecimals
- # @overload bigdecimal(column)
- #   Returns the latest column value as a BigDecimal
- #   @param [String, Array] column Column name as String or 2-element Array of family and qualifier
- #   @return [BigDecimal]
+ # Returns the latest column value as a BigDecimal
+ # @param [String, Array] column Column name as String or 2-element Array of family and qualifier
+ # @return [BigDecimal]
  def bigdecimal col
    decode_value :bigdecimal, col
  end

  # Returns all versions of column values as BigDecimals in a Hash indexed by their timestamps
- # @overload bigdecimals(column)
- #   Returns all versions of column values as BigDecimals in a Hash indexed by their timestamps
- #   @param [String, Array] column Column name as String or 2-element Array of family and qualifier
- #   @return [Hash<Fixnum, BigDecimal>]
+ # @param [String, Array] column Column name as String or 2-element Array of family and qualifier
+ # @return [Hash<Fixnum, BigDecimal>]
  def bigdecimals col
    decode_value :bigdecimal, col, true
  end

- # Returns column values as Floats
- # @overload float(column)
- #   Returns the latest column value as a Float
- #   @param [String, Array] column Column name as String or 2-element Array of family and qualifier
- #   @return [Float]
+ # Returns the latest column value as a Float
+ # @param [String, Array] column Column name as String or 2-element Array of family and qualifier
+ # @return [Float]
  def float col
    decode_value :float, col
  end
  alias double float

  # Returns all versions of column values as Floats in a Hash indexed by their timestamps
- # @overload floats(column)
- #   Returns all versions of column values as Floats in a Hash indexed by their timestamps
- #   @param [String, Array] column Column name as String or 2-element Array of family and qualifier
- #   @return [Hash<Fixnum, Float>]
+ # @param [String, Array] column Column name as String or 2-element Array of family and qualifier
+ # @return [Hash<Fixnum, Float>]
  def floats col
    decode_value :float, col, true
  end
  alias doubles floats

- # Returns column values as Booleans
- # @overload boolean(column)
- #   Returns the latest column value as a boolean value
- #   @param [String, Array] column Column name as String or 2-element Array of family and qualifier
- #   @return [true, false]
+ # Returns the latest column value as a boolean value
+ # @param [String, Array] column Column name as String or 2-element Array of family and qualifier
+ # @return [true, false]
  def boolean col
    decode_value :boolean, col
  end
  alias bool boolean

- # Returns all versions of column values as Booleans in a Hash indexed by their timestamps
- # @overload booleans(column)
- #   Returns all versions of column values as boolean values in a Hash indexed by their timestamps
- #   @param [String, Array] column Column name as String or 2-element Array of family and qualifier
- #   @return [Hash<Fixnum, true|false>]
+ # Returns all versions of column values as boolean values in a Hash indexed by their timestamps
+ # @param [String, Array] column Column name as String or 2-element Array of family and qualifier
+ # @return [Hash<Fixnum, true|false>]
  def booleans col
    decode_value :boolean, col, true
  end
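As a usage sketch of the accessors above (the column names are hypothetical):

```ruby
row = table.get(rowkey)
row.string('cf1:b')                  # => String
row.fixnum('cf1:a')                  # => Fixnum (8-byte)
row.byte_array('cf1:b')              # => HBase::ByteArray
row.byte_array('cf1:b').as(:string)  # decode later via ByteArray#as
row.byte_arrays('cf1:b')             # => { timestamp => HBase::ByteArray, ... }
```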
@@ -28,7 +28,7 @@ class Scoped

  scan = block_given? ? filtered_scan : filtered_scan_minimum
  scan.cache_blocks = options[:cache_blocks]
- if options[:caching] && (@mlimit.nil? || options[:caching] < @mlimit)
+ if options[:caching] && (@limit.nil? || options[:caching] < @limit)
    scan.caching = options[:caching]
  end

@@ -166,7 +166,7 @@ class Scoped
  unless (rows.is_a?(Fixnum) && rows >= 0) || rows.nil?
    raise ArgumentError, "Invalid limit. Must be a non-negative integer or nil."
  end
- spawn :@limit, rows, :@mlimit, nil
+ spawn :@limit, rows
  end

  # Returns an HBase::Scoped object with the specified time range
@@ -264,7 +264,6 @@ private
  @dcaching = default_caching
  @caching = nil
  @limit = nil
- @mlimit = nil
  @trange = nil
  @scan_cbs = []
  @get_cbs = []
@@ -529,14 +528,8 @@ private

  # Limit
  if @limit
-   # setMaxResultSize not yet implemented in 0.94
-   if scan.respond_to?(:setMaxResultSize)
-     scan.setMaxResultSize(@limit)
-   else
-     @mlimit = @limit
-     if [@caching, @dcaching].compact.all? { |c| @mlimit < c }
-       scan.caching = @mlimit
-     end
+   if [@caching, @dcaching].compact.all? { |c| @limit < c }
+     scan.caching = @limit
    end
  end

@@ -649,10 +642,10 @@ private

  def iterate scan
    scanner = htable.getScanner(scan)
-   if @mlimit
+   if @limit
      scanner.each_with_index do |result, idx|
        yield result
-       break if idx == @mlimit - 1
+       break if idx == @limit - 1
      end
    else
      scanner.each do |result|
@@ -211,7 +211,8 @@ class Table
  # m.put e: 3.14
  # end
  def mutate(rowkey, &block)
-   htable.mutateRow @mutation.mutate(rowkey, &block)
+   ms = @mutation.mutate(rowkey, &block)
+   htable.mutateRow ms if ms
  end

  # Scan through the table
@@ -281,6 +281,29 @@ private
  :readonly => { :get => :isReadOnly, :set => :setReadOnly },
  :memstore_flushsize => { :get => :getMemStoreFlushSize, :set => :setMemStoreFlushSize },
  :deferred_log_flush => { :get => :isDeferredLogFlush, :set => :setDeferredLogFlush },
+ :split_policy => { :get => :getRegionSplitPolicyClassName,
+                    :set => proc { |htd, v|
+                      htd.setValue HTableDescriptor::SPLIT_POLICY,
+                        case v
+                        when String
+                          v
+                        when Class
+                          v.java_class.name
+                        when Java::JavaClass
+                          v.name
+                        else
+                          raise ArgumentError, "Invalid type for region split policy"
+                        end
+                    }
+ },
+ :durability => { :get => :getDurability,
+                  :set => proc { |htd, v|
+                    const = const_shortcut(
+                      org.apache.hadoop.hbase.client.Durability, v,
+                      "Invalid durability setting")
+                    htd.setDurability const
+                  }
+ }
  }

  MAX_SPLIT_WAIT = 30
@@ -330,7 +353,7 @@ private
  end
  end

- def const_shortcut base, v, message
+ def self.const_shortcut base, v, message
  # Match by constant value
  # - const_get doesn't work with symbols in 1.8 compatibility mode
  if base.constants.map { |c| base.const_get c }.any? { |cv| v == cv }
@@ -343,12 +366,20 @@ private
  end
  end

+ def const_shortcut *args
+   Table.const_shortcut(*args)
+ end
+
  def patch_table_descriptor! htd, props
  props.each do |key, value|
  next if key == :splits

  if method = TABLE_PROPERTIES[key] && TABLE_PROPERTIES[key][:set]
-   htd.send method, value
+   if method.is_a? Symbol
+     htd.send method, value
+   else
+     method.call htd, value
+   end
  elsif key.is_a?(String)
  htd.setValue key, value.to_s
  else
@@ -359,7 +390,7 @@ private
  end

  def _alter props, bang, &block
-   raise ArgumentError, ":split not supported" if props[:splits]
+   raise ArgumentError, ":splits not supported" if props[:splits]
  with_admin do |admin|
  htd = admin.get_table_descriptor(@name.to_java_bytes)
  patch_table_descriptor! htd, props
@@ -10,13 +10,19 @@ class Table
  # @return [Hash]
  def properties
  desc = descriptor
- {}.tap { |props|
+ parse_raw_map(descriptor.values).tap { |props|
    TABLE_PROPERTIES.each do |prop, gs|
      get = gs[:get]
      if get && desc.respond_to?(get)
+       props.delete(prop.to_s.upcase)
        props[prop] = parse_property desc.send get
      end
    end
+
+   # deferred_log_flush is deprecated in 0.96
+   if props.has_key?(:durability) && props.has_key?(:deferred_log_flush)
+     props.delete :deferred_log_flush
+   end
  }
  end

@@ -33,10 +39,11 @@ class Table
  descriptor.families.each do |family|
  name = family.name_as_string
  ret[name] =
-   {}.tap { |props|
+   parse_raw_map(family.values).tap { |props|
      COLUMN_PROPERTIES.each do |prop, gs|
        get = gs[:get]
        if get && family.respond_to?(get)
+         props.delete(prop.to_s.upcase)
          props[prop] = parse_property family.send get
        end
      end
@@ -83,7 +90,9 @@ private
  r[:id] = ri.region_id
  r[:start_key] = nil_if_empty ri.start_key
  r[:end_key] = nil_if_empty ri.end_key
- r[:root] = ri.is_root_region
+ if ri.respond_to?(:is_root_region)
+   r[:root] = ri.is_root_region
+ end
  r[:meta] = ri.is_meta_region
  r[:online] = !ri.is_offline
  }
@@ -102,12 +102,16 @@ class Mutation
  rm.mutations.each do |action|
    m.add action
  end
- }
+ } unless rm.empty?
  end

  class Mutator
  attr_reader :mutations

+ def empty?
+   @mutations.empty?
+ end
+
  def initialize mutation, rowkey
  @mutation = mutation
  @rowkey = rowkey
@@ -128,6 +128,8 @@ module Util
  Bytes.to_double val
  when :boolean, :bool
  Bytes.to_boolean val
+ when :byte_array
+ ByteArray[val]
  when :raw, nil
  val
  else
@@ -1,5 +1,5 @@
  class HBase
  module JRuby
-   VERSION = '0.4.4'
+   VERSION = '0.4.5'
  end
  end
@@ -33,19 +33,20 @@ class TestHBaseJRubyBase < Test::Unit::TestCase

  def connect
  HBase.new('hbase.zookeeper.quorum' => ZK,
-   'hbase.client.retries.number' => 10,
+   'hbase.client.retries.number' => 5,
    'hbase.client.scanner.caching' => 100)
  end

  def setup
  @hbase = connect
  @table = @hbase.table(TABLE)
+ @aggregation = defined?(org.apache.hadoop.hbase.client.coprocessor.AggregationClient)

  # Drop & Create
  @table.drop! if RECREATE && @table.exists?
  @table.create!(
-   :cf1 => { :compression => :none, :bloomfilter => :row },
-   :cf2 => { :bloomfilter => :rowcol },
+   :cf1 => { :compression => :none, :bloomfilter => :row, :versions => 3 },
+   :cf2 => { :bloomfilter => :rowcol, :versions => 3 },
    :cf3 => { :versions => 1, :bloomfilter => :rowcol }
  ) unless @table.exists?
  @table.enable! if @table.disabled?
@@ -5,6 +5,9 @@ require 'helper'

  class TestAggregation < TestHBaseJRubyBase
  def test_aggregation
+   # AggregationClient is removed in 0.96
+   return unless @aggregation
+
  (1..100).each do |idx|
  @table.put idx, 'cf1:a' => idx, 'cf1:b' => idx * 2
  end
@@ -279,11 +279,13 @@ class TestSchema < TestHBaseJRubyBase
  assert_equal data[:stars] + 5, inc2[:stars]

  # Coprocessor
- table.enable_aggregation!
- table.put next_rowkey, :reviews => 100, :stars => 500
- assert_equal data[:reviews] + 1 + data[:stars] + 5 + 100 + 500,
-   table.project(:reviews, :stars).aggregate(:sum)
- #table.disable_aggregation!
+ if @aggregation
+   table.enable_aggregation!
+   table.put next_rowkey, :reviews => 100, :stars => 500
+   assert_equal data[:reviews] + 1 + data[:stars] + 5 + 100 + 500,
+     table.project(:reviews, :stars).aggregate(:sum)
+   #table.disable_aggregation!
+ end

  # Undefined columns
  table.put rk, 'cf1:x' => 1000
@@ -330,24 +332,48 @@ class TestSchema < TestHBaseJRubyBase
  assert_equal 'great', table.get(rk)[:comment4]

  # Batch
+ # FIXME: Mutation in batch hangs on 0.96, temporarily using @aggregation
+ # here to see if the version is 0.96 (no AggregationClient) or not
+ mutation_in_batch = @aggregation
  ret = table.batch do |b|
  b.put rk, :comment5 => 'gnarly'
  b.delete rk, :comment4
- b.increment rk, :stars => 100, :reviews => 200
- b.mutate(rk) do |m|
-   m.put :comment6 => 'rad'
-   m.delete :image
- end
+
+ # https://issues.apache.org/jira/browse/HBASE-10384
+ # Due to the bug introduced in 0.96 we have to order the columns
+ # b.increment rk, :stars => 100, :reviews => 200
+ b.increment rk, :reviews => 200, :stars => 100
  b.append rk, :category => '/Etc'
  b.get rk
+
+ if mutation_in_batch
+   b.mutate(rk) do |m|
+     m.put :comment6 => 'rad'
+     m.delete :image
+   end
+ else
+   table.mutate(rk) do |m|
+     m.put :comment6 => 'rad'
+     m.delete :image
+   end
+ end
+ end
+
+ if mutation_in_batch
+   assert_equal 6, ret.length
+   assert_equal [true] * 3, ret.values_at(0, 1, 5).map { |r| r[:result] }
+ else
+   assert_equal 5, ret.length
+   assert_equal [true] * 2, ret.values_at(0, 1).map { |r| r[:result] }
  end
- assert_equal 6, ret.length
- assert_equal [true] * 3, ret.values_at(0, 1, 3).map { |r| r[:result] }
- assert_equal data[:stars] + 5 + 100, ret[2][:result][:stars]
+
+ assert_equal data[:stars] + 5 + 100, ret[2][:result][:stars]
  assert_equal data[:reviews] + 1 + 200, ret[2][:result][:reviews]
- assert_equal data[:category] + '/Etc', ret[4][:result][:category]
- assert_instance_of HBase::Row, ret[5][:result]
- assert_equal 1890, ret[5][:result][:year]
+ assert_equal data[:category] + '/Etc', ret[3][:result][:category]
+ assert_instance_of HBase::Row, ret[4][:result]
+ assert_equal 1890, ret[4][:result][:year]
+ assert_equal nil, table.get(rk)[:image]
+ assert_equal 'rad', table.get(rk)[:comment6]

  # Delete :title column of book 1
  table.delete rk, :title
@@ -77,6 +77,7 @@ class TestTable < TestHBaseJRubyBase
  assert_equal 1, @table.get(row1).fixnum('cf1:a')
  assert_equal 'a', @table.get(row1).string('cf1:b')
  assert_equal 'a', String.from_java_bytes(@table.get(row1).raw('cf1:b'))
+ assert_equal 'a', @table.get(row1).byte_array('cf1:b').as(:string)
  assert_equal 3.14, @table.get(row1).float('cf1:c')
  assert_equal true, @table.get(row1).boolean('cf1:d')
  assert_equal :sym, @table.get(row1).symbol('cf1:f')
@@ -93,6 +94,7 @@ class TestTable < TestHBaseJRubyBase
  assert_equal [1, 2], @table.get(row1).fixnums('cf1:a').values
  assert_equal %w[a b], @table.get(row1).strings('cf1:b').values
  assert_equal %w[a b], @table.get(row1).raws('cf1:b').values.map { |v| String.from_java_bytes v }
+ assert_equal %w[a b], @table.get(row1).byte_arrays('cf1:b').values.map { |v| v.as :string }
  assert_equal [3.14, 6.28], @table.get(row1).floats('cf1:c').values
  assert_equal [true, false], @table.get(row1).booleans('cf1:d').values
  assert_equal [:sym, :bol], @table.get(row1).symbols('cf1:f').values
@@ -479,42 +481,56 @@ class TestTable < TestHBaseJRubyBase
  assert_equal true, ret[1][:result]
  assert_equal true, ret[2][:result]

+ # FIXME: Mutation in batch hangs on 0.96
+ mutation_in_batch = @aggregation
  ret = @table.batch { |b|
  b.put rk3, 'cf1:c' => 5
  b.delete rk1, 'cf1:a'
  b.increment rk2, 'cf1:a' => 10, 'cf1:b' => 20
  b.append rk2, 'cf2:c' => ' world'
- b.mutate(rk3) do |m|
-   m.put 'cf2:d' => 'hola'
-   m.put 'cf2:e' => 'mundo'
-   m.delete 'cf1:b'
- end
  b.get(rk1)
  b.filter('cf1:a' => 0).get(rk1)
  b.versions(1).project('cf2').get(rk1)
+ if mutation_in_batch
+   b.mutate(rk3) do |m|
+     m.put 'cf2:d' => 'hola'
+     m.put 'cf2:e' => 'mundo'
+     m.delete 'cf1:b'
+   end
+ else
+   @table.mutate(rk3) do |m|
+     m.put 'cf2:d' => 'hola'
+     m.put 'cf2:e' => 'mundo'
+     m.delete 'cf1:b'
+   end
+ end
  }
- assert_equal 8, ret.length
- assert_equal [:put, :delete, :increment, :append, :mutate, :get, :get, :get],
-   ret.map { |r| r[:type] }
- assert_equal [true, true, true],
-   ret.values_at(0, 1, 4).map { |r| r[:result] }
+ if mutation_in_batch
+   assert_equal 8, ret.length
+   assert_equal [:put, :delete, :increment, :append, :get, :get, :get, :mutate], ret.map { |r| r[:type] }
+   assert_equal [true, true, true], ret.values_at(0, 1, 7).map { |r| r[:result] }
+ else
+   assert_equal 7, ret.length
+   assert_equal [:put, :delete, :increment, :append, :get, :get, :get], ret.map { |r| r[:type] }
+   assert_equal [true, true], ret.values_at(0, 1).map { |r| r[:result] }
+ end
  assert_equal 12, ret[2][:result]['cf1:a']
  assert_equal 23, ret[2][:result]['cf1:b']
  assert_equal 'hello world', ret[3][:result]['cf2:c'].to_s
  # assert_equal nil, ret[5][:result].long('cf1:a') # No guarantee
- assert_equal 2, ret[5][:result].long('cf1:b')
- assert_equal nil, ret[6][:result]
- assert_equal nil, ret[7][:result].fixnum('cf1:b')
- assert_equal 'hello', ret[7][:result].string('cf2:c')
-
- assert_equal nil, @table.get(rk1)['cf1:a']
- assert_equal 12, @table.get(rk2).long('cf1:a')
- assert_equal 23, @table.get(rk2).long('cf1:b')
- assert_equal 5, @table.get(rk3).long('cf1:c')
+ assert_equal 2, ret[4][:result].long('cf1:b')
+ assert_equal nil, ret[5][:result]
+ assert_equal nil, ret[6][:result].fixnum('cf1:b')
+ assert_equal 'hello', ret[6][:result].string('cf2:c')
+
+ assert_equal nil, @table.get(rk1)['cf1:a']
+ assert_equal 12, @table.get(rk2).long('cf1:a')
+ assert_equal 23, @table.get(rk2).long('cf1:b')
+ assert_equal 5, @table.get(rk3).long('cf1:c')
  assert_equal 'hello world', @table.get(rk2).string('cf2:c')
- assert_equal 'hola', @table.get(rk3).string('cf2:d')
- assert_equal 'mundo', @table.get(rk3).string('cf2:e')
- assert_equal nil, @table.get(rk3).string('cf2:b')
+ assert_equal 'hola', @table.get(rk3).string('cf2:d')
+ assert_equal 'mundo', @table.get(rk3).string('cf2:e')
+ assert_equal nil, @table.get(rk3).string('cf2:b')
  end

  def test_batch_exception
@@ -143,6 +143,8 @@ class TestTableAdmin < TestHBaseJRubyBase
  end

  def test_add_coprocessor!
+   return unless @aggregation
+
  coproc = 'org.apache.hadoop.hbase.coprocessor.AggregateImplementation'
  assert_false @table.has_coprocessor? coproc
  assert_raise(ArgumentError) {
@@ -163,10 +165,31 @@ class TestTableAdmin < TestHBaseJRubyBase
  @table.drop!
  assert @table.inspect.is_a?(String)

+ gz = begin
+   org.apache.hadoop.hbase.io.hfile.Compression::Algorithm::GZ
+ rescue Exception
+   org.apache.hadoop.hbase.io.compress.Compression::Algorithm::GZ
+ end
+
+ table_props = {
+   :max_filesize => 512 * 1024 ** 2,
+   :memstore_flushsize => 64 * 1024 ** 2,
+   :readonly => false,
+   :splits => [10, 20, 30, 40],
+   :split_policy =>
+     'org.apache.hadoop.hbase.regionserver.IncreasingToUpperBoundRegionSplitPolicy',
+   'hello' => 'world'
+ }
+ if org.apache.hadoop.hbase.HTableDescriptor.method_defined? :setDurability
+   table_props[:durability] = :async_wal
+ else
+   table_props[:deferred_log_flush] = true
+ end
+
  [
  'GZ',
  :gz,
- org.apache.hadoop.hbase.io.hfile.Compression::Algorithm::GZ
+ gz
  ].each do |cmp|
  @table.create!({
  :cf => {
@@ -184,14 +207,9 @@ class TestTableAdmin < TestHBaseJRubyBase
  :replication_scope => 0,
  :ttl => 100,
  :versions => 10,
+ 'whatever' => 'works',
  }
- },
- :max_filesize => 512 * 1024 ** 2,
- :memstore_flushsize => 64 * 1024 ** 2,
- :readonly => false,
- :deferred_log_flush => true,
- :splits => [10, 20, 30, 40]
- )
+ }, table_props)

  # Initial region count
  regions = @table.regions
@@ -199,16 +217,23 @@ class TestTableAdmin < TestHBaseJRubyBase

  # Table properties
  props = @table.properties
- assert_equal true, props[:deferred_log_flush]
+ assert props[:deferred_log_flush] || props[:durability] == 'ASYNC_WAL'
  assert_equal false, props[:readonly]
  assert_equal 64 * 1024 ** 2, props[:memstore_flushsize]
  assert_equal 512 * 1024 ** 2, props[:max_filesize]
+ assert_equal 'world', props['hello']
+ assert_equal 'org.apache.hadoop.hbase.regionserver.IncreasingToUpperBoundRegionSplitPolicy',
+   props[:split_policy]

  rprops = @table.raw_properties
- assert_equal true.to_s, rprops['DEFERRED_LOG_FLUSH']
+ assert rprops['DEFERRED_LOG_FLUSH'] == 'true' || rprops['DURABILITY'] == 'ASYNC_WAL'
+
  assert_equal false.to_s, rprops['READONLY']
  assert_equal((64 * 1024 ** 2).to_s, rprops['MEMSTORE_FLUSHSIZE'])
  assert_equal((512 * 1024 ** 2).to_s, rprops['MAX_FILESIZE'])
+ assert_equal 'world', rprops['hello']
+ assert_equal 'org.apache.hadoop.hbase.regionserver.IncreasingToUpperBoundRegionSplitPolicy',
+   rprops['SPLIT_POLICY']

  # Column family properties
  cf = @table.families['cf']
@@ -221,6 +246,7 @@ class TestTableAdmin < TestHBaseJRubyBase
  assert_equal 131072, cf[:blocksize]
  assert_equal true, cf[:in_memory]
  assert_equal true, cf[:blockcache]
+ assert_equal 'works', cf['whatever']

  rcf = @table.raw_families['cf']
  assert_equal 'ROW', rcf['BLOOMFILTER']
@@ -232,27 +258,16 @@ class TestTableAdmin < TestHBaseJRubyBase
  assert_equal 131072.to_s, rcf['BLOCKSIZE']
  assert_equal true.to_s, rcf['IN_MEMORY']
  assert_equal true.to_s, rcf['BLOCKCACHE']
+ assert_equal 'works', rcf['whatever']

  @table.put 31, 'cf:a' => 100
  @table.put 37, 'cf:a' => 100
  @table.split!(35)
-
- # FIXME
- 10.times do |i|
-   break if @table.regions.count == 6
-   sleep 1
-   assert false, "Region not split" if i == 9
- end
+ wait_for_regions 6

  @table.put 39, 'cf:a' => 100
  @table.split!(38)
-
- # FIXME
- 10.times do |i|
-   break if @table.regions.count == 7
-   sleep 1
-   assert false, "Region not split" if i == 9
- end
+ wait_for_regions 7

  regions = @table.regions
  assert_equal [10, 20, 30, 35, 38, 40], regions.map { |r| HBase::Util.from_bytes :fixnum, r[:start_key] }.compact.sort
@@ -293,5 +308,17 @@ class TestTableAdmin < TestHBaseJRubyBase
  rescue Exception
  # TODO: Only works on HBase 0.94 or above
  end
+
+ private
+ def wait_for_regions rnum, max_tries = 30
+   sleep 5
+   max_tries.times do |i|
+     if @table.regions.count == rnum && @table.regions.all? { |r| r[:online] }
+       return
+     end
+     sleep 1
+   end
+   assert false, "Region not split"
+ end
  end unless ENV['HBASE_JRUBY_TEST_SKIP_ADMIN']

@@ -49,6 +49,10 @@ class TestUtil < Test::Unit::TestCase

  assert_equal 0, Util.to_bytes(nil).length

+ byte_array = Util.from_bytes :byte_array, "1234".to_java_bytes
+ assert_instance_of HBase::ByteArray, byte_array
+ assert_equal "1234", byte_array.as(:string)
+
  assert_raise(ArgumentError) { Util.from_bytes(:xxx, [].to_java(Java::byte)) }
  assert_raise(ArgumentError) { Util.to_bytes({}) }
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: hbase-jruby
  version: !ruby/object:Gem::Version
- version: 0.4.4
+ version: 0.4.5
  platform: java
  authors:
  - Junegunn Choi
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2014-01-20 00:00:00.000000000 Z
+ date: 2014-02-24 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: test-unit