tablestore-ruby-sdk 0.0.0 → 0.0.1

@@ -1,14 +1,11 @@
-syntax = "proto3";
 
 enum FilterType {
-    FT_DEFAULT = 0;
     FT_SINGLE_COLUMN_VALUE = 1;
     FT_COMPOSITE_COLUMN_VALUE = 2;
     FT_COLUMN_PAGINATION = 3;
 }
 
 enum ComparatorType {
-    CT_DEFAULT = 0;
     CT_EQUAL = 1;
     CT_NOT_EQUAL = 2;
     CT_GREATER_THAN = 3;
@@ -18,31 +15,30 @@ enum ComparatorType {
 }
 
 message SingleColumnValueFilter {
-    ComparatorType comparator = 1;
-    string column_name = 2;
-    bytes column_value = 3; // Serialized SQLVariant
-    bool filter_if_missing = 4;
-    bool latest_version_only = 5;
+    required ComparatorType comparator = 1;
+    required string column_name = 2;
+    required bytes column_value = 3; // Serialized SQLVariant
+    required bool filter_if_missing = 4;
+    required bool latest_version_only = 5;
 }
 
 enum LogicalOperator {
-    LO_DEFAULT = 0;
     LO_NOT = 1;
     LO_AND = 2;
     LO_OR = 3;
 }
 
 message CompositeColumnValueFilter {
-    LogicalOperator combinator = 1;
+    required LogicalOperator combinator = 1;
    repeated Filter sub_filters = 2;
 }
 
 message ColumnPaginationFilter {
-    int32 offset = 1;
-    int32 limit = 2;
+    required int32 offset = 1;
+    required int32 limit = 2;
 }
 
 message Filter {
-    FilterType type = 1;
-    bytes filter = 2; // Serialized string of filter of the type
+    required FilterType type = 1;
+    required bytes filter = 2; // Serialized string of filter of the type
 }
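This first hunk moves the filter schema from proto3 back to proto2: the *_DEFAULT = 0 entries that proto3 requires are dropped and every field becomes required, matching the proto2-style generated classes used by the client code later in this diff. As a minimal sketch of how these definitions get used, assuming the generated Ruby classes expose the serialize_to_string API seen below (the enum constant paths, column name and value bytes are illustrative assumptions, not taken from the gem):

    # Build a single-column filter and wrap it in the generic Filter envelope,
    # mirroring make_relation_condition / make_column_condition further down.
    single = SingleColumnValueFilter.new
    single.comparator          = ComparatorType::CT_EQUAL   # assumed constant path
    single.column_name         = 'col1'                     # placeholder column
    single.column_value        = "\x00"                     # placeholder serialized SQLVariant bytes
    single.filter_if_missing   = false
    single.latest_version_only = true

    filter = Filter.new
    filter.type   = FilterType::FT_SINGLE_COLUMN_VALUE      # assumed constant path
    filter.filter = single.serialize_to_string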
@@ -2,7 +2,7 @@ require 'rest-client'
 require 'openssl'
 require 'base64'
 require 'tablestore/error'
-require 'tablestore/ots'
+require 'tablestore/table_store_client'
 require 'tablestore/metadata'
 
 class TableStore
@@ -13,12 +13,12 @@ class TableStore
 
 
   def initialize(end_point, access_key_id, access_key_secret, instance_name, **kwargs)
-    # Initialize an OTSClient instance.
-    # end_point is the address of the OTS service (e.g. 'http://instance.cn-hangzhou.ots.aliyun.com'); it must start with 'http://' or 'https://'.
-    # access_key_id is the access id for the OTS service, applied for on the official website or obtained from an administrator.
-    # access_key_secret is the access key for the OTS service, applied for on the official website or obtained from an administrator.
+    # Initialize a TableStoreClient instance.
+    # end_point is the address of the TableStoreClient service (e.g. 'http://instance.cn-hangzhou.TableStoreClient.aliyun.com'); it must start with 'http://' or 'https://'.
+    # access_key_id is the access id for the TableStoreClient service, applied for on the official website or obtained from an administrator.
+    # access_key_secret is the access key for the TableStoreClient service, applied for on the official website or obtained from an administrator.
     # instance_name is the name of the instance to access, created in the official web console or obtained from an administrator.
-    # sts_token is the STS token for the OTS service, obtained from the STS service; it expires and must be re-fetched after its validity period.
+    # sts_token is the STS token for the TableStoreClient service, obtained from the STS service; it expires and must be re-fetched after its validity period.
     # encoding is the string encoding of request parameters; the default is utf8.
     # socket_timeout is the socket timeout in seconds for each connection in the pool, as an int or float. The default is 50.
     # max_connection is the maximum number of connections in the pool. The default is 50.
@@ -28,21 +28,21 @@ class TableStore
     #self.validate_parameter(end_point, access_key_id, access_key_secret, instance_name)
     #sts_token = kwargs.get('sts_token')
 
-    # Example: create an OTSClient instance
-    # from tablestore.client import OTSClient
-    # client = OTSClient('your_instance_endpoint', 'your_user_id', 'your_user_key', 'your_instance_name')
+    # Example: create a TableStoreClient instance
+    # from tablestore.client import TableStoreClient
+    # client = TableStoreClient('your_instance_endpoint', 'your_user_id', 'your_user_key', 'your_instance_name')
   end
 
   def _get_range(request)
     api_name = 'GetRange'
-    body = OTS.new.encode_get_range_request(request)
+    body = TableStoreClient.new.encode_get_range_request(request)
     response = post_request(body, api_name)
-    OTS.new.decode_get_range_request(api_name, response.headers, response.body)
+    TableStoreClient.new.decode_get_range_request(api_name, response.headers, response.body)
   end
 
   def _put_row(table_name, row, condition)
     api_name = 'PutRow'
-    body = OTS.new.encode_put_row(table_name, row, condition)
+    body = TableStoreClient.new.encode_put_row(table_name, row, condition)
     response = post_request(body, api_name)
     if response.code == 200
       'write succeed!'
@@ -51,21 +51,21 @@ class TableStore
 
   def _get_row(table_name, primary_key, columns_to_get=nil, column_filter=nil, max_version=1)
     api_name = 'GetRow'
-    body = OTS.new.encode_get_row(table_name, primary_key, columns_to_get, column_filter, max_version)
+    body = TableStoreClient.new.encode_get_row(table_name, primary_key, columns_to_get, column_filter, max_version)
     response = post_request(body, api_name)
-    OTS.new.decode_get_row(response.body)
+    TableStoreClient.new.decode_get_row(response.body)
   end
 
   def _batch_get_row(request)
     api_name = 'BatchGetRow'
-    body = OTS.new.make_batch_get_row(request)
+    body = TableStoreClient.new.make_batch_get_row(request)
     response = post_request(body, api_name)
-    OTS.new.decode_batch_get_row(response.body)
+    TableStoreClient.new.decode_batch_get_row(response.body)
   end
 
   def _batch_write_row(request)
     api_name = 'BatchWriteRow'
-    body = OTS.new.make_batch_write_row(request)
+    body = TableStoreClient.new.make_batch_write_row(request)
     response = post_request(body, api_name)
     if response.code == 200
       'write succeed!'
@@ -82,7 +82,7 @@ class TableStore
 
   def get_headers(md5, api_name)
     headers = {
-      "x-ots-date": Time.now.getutc.strftime('%Y-%m-%dT%H:%M:%S.000Z'),
+      "x-ots-date": '2018-01-09T14:48:12.000Z',#Time.now.getutc.strftime('%Y-%m-%dT%H:%M:%S.000Z'),
       "x-ots-apiversion": '2015-12-31',
       "x-ots-accesskeyid": UserID,
       "x-ots-contentmd5": md5,
@@ -1,5 +1,5 @@
 
-class OTSError
+class TableStoreError
 
 end
 
@@ -110,7 +110,7 @@ module Metadata
 
     set_row_existence_expectation(row_existence_expectation)
 
-    set_column_condition(column_condition) if @column_condition.present?
+    set_column_condition(column_condition) if @column_condition
   end
 
   def set_row_existence_expectation(row_existence_expectation)
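This is the first of several hunks that swap ActiveSupport's .present? for a bare truthiness check, presumably so the gem no longer depends on ActiveSupport. The two are not strictly equivalent; a minimal sketch of the difference (not code from the gem):

    # .present? is false for nil, false, and anything blank: '', '   ', [], {}.
    # A bare `if value` guard is false only for nil and false, so empty strings,
    # arrays and hashes now pass the guard.
    value = []
    value.present?   # => false  (old guard, requires ActiveSupport)
    !!value          # => true   (new guard)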
@@ -156,38 +156,25 @@ class PlainBufferCodedOutputStream
     write_tag(TAG_CELL)
     cell_check_sum = write_cell_name(column_name, cell_check_sum)
     timestamp = nil
-    if column_value.present?
+    if column_value
       if column_value.is_a?(Array)
-        if column_value[0].present?
+        if column_value[0]
           cell_check_sum = write_column_value_with_checksum(column_value[0], cell_check_sum)
         end
-        if column_value[1].present?
+        if column_value[1]
           timestamp = column_value[1]
         end
       else
         cell_check_sum = write_column_value_with_checksum(column_value, cell_check_sum)
       end
     end
-    if update_type == UpdateType.DELETE
-      write_tag(TAG_CELL_TYPE)
-      @output_stream.write_raw_byte(const.DELETE_ONE_VERSION)
-    elsif update_type == UpdateType.DELETE_ALL
-      write_tag(TAG_CELL_TYPE)
-      @output_stream.write_raw_byte(const.DELETE_ALL_VERSION)
-    end
-    if timestamp.present?
+    if timestamp
       write_tag(TAG_CELL_TIMESTAMP)
       @output_stream.write_raw_little_endian64(timestamp)
     end
-    if timestamp.present?
+    if timestamp
       cell_check_sum = PlainBufferCrc8.crc_int64(cell_check_sum, timestamp)
     end
-    if update_type == UpdateType.DELETE
-      cell_check_sum = PlainBufferCrc8.crc_int8(cell_check_sum, const.DELETE_ONE_VERSION)
-    end
-    if update_type == UpdateType.DELETE_ALL
-      cell_check_sum = PlainBufferCrc8.crc_int8(cell_check_sum, const.DELETE_ALL_VERSION)
-    end
     write_tag(TAG_CELL_CHECKSUM)
     @output_stream.write_raw_byte(cell_check_sum)
     PlainBufferCrc8.crc_int8(row_check_sum, cell_check_sum)
@@ -202,7 +189,7 @@ class PlainBufferCodedOutputStream
   end
 
   def write_columns(columns, row_check_sum)
-    if columns.present? and columns.length != 0
+    if columns and columns.length != 0
       write_tag(TAG_ROW_DATA)
       columns.each do |column|
         if column.length == 2
@@ -62,7 +62,7 @@ class OTSProtocol
       'x-ots-contentmd5': md5,
     }
 
-    headers['x-ots-ststoken'] = @sts_token if @sts_token.present?
+    headers['x-ots-ststoken'] = @sts_token if @sts_token
 
     signature = make_request_signature(query, headers)
     headers['x-ots-signature'] = signature
@@ -1,14 +1,13 @@
-require 'protobuf'
 require 'os'
-require 'protobuf/ots_pb'
-require 'protobuf/ots_filiter_pb'
+require 'protobuf/table_store.pb'
+require 'protobuf/table_store_filiter.pb'
 require 'consts'
 require 'tablestore/plain_buffer_coded_output_stream'
 require 'tablestore/plain_buffer_output_stream'
 require 'tablestore/plain_buffer_coded_input_stream'
 require 'tablestore/plain_buffer_input_stream'
 
-class OTS
+class TableStoreClient
   def encode_get_range_request(request)
     proto = GetRangeRequest.new
     proto.table_name = request[:table_name]
@@ -17,15 +16,12 @@ class OTS
     proto.exclusive_end_primary_key = serialize_primary_key(request[:exclusive_end_primary_key])
     proto.max_versions = request[:max_version]
     proto.limit = request[:limit]
-    proto_string = GetRangeRequest.encode(proto)
-    if proto_string.match("#{request[:table_name]}\x10\x02")
-      proto_string.sub("#{request[:table_name]}\x10\x02", "#{request[:table_name]}\x10\x00")
-    end
-    proto_string.sub("#{request[:table_name]}\x10\x02", "#{request[:table_name]}\x10\x00")
+    proto.serialize_to_string
   end
 
   def decode_get_range_request(api_name, headers, body)
-    proto = GetRangeResponse.decode(body)
+    proto = GetRangeResponse.new
+    proto.parse_from_string(body)
     #capacity_unit = parse_capacity_unit(proto.consumed.capacity_unit)
 
     next_start_pk = nil
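The encode/decode hunks above, and the matching ones below, move every message from the class-level Message.encode / Message.decode calls to the instance-level serialize_to_string / parse_from_string API of the regenerated proto2 classes; the byte-patching workarounds on the removed side (the \x10\x02 substitution here, the gsub and pack fix-ups below) disappear with them. A minimal sketch of the new pattern, assuming the generated classes behave as this diff shows (the table name is a placeholder):

    proto = GetRangeRequest.new
    proto.table_name = 'test_table'       # placeholder
    bytes = proto.serialize_to_string     # replaces GetRangeRequest.encode(proto)

    round_trip = GetRangeRequest.new
    round_trip.parse_from_string(bytes)   # replaces GetRangeRequest.decode(bytes)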
@@ -55,26 +51,24 @@ class OTS
     contion_proto = Condition.new
     proto.condition = make_condition(contion_proto, condition)
     proto.row = serialize_for_put_row(row.primary_key, row.attribute_columns)
-    proto_string = PutRowRequest.encode(proto)
-    proto_string = proto_string[0..-2] + [0].pack('C') if proto_string[-1] == "\x03"
-    proto_string
+    proto.serialize_to_string
   end
 
   def encode_get_row(table_name, primary_key, columns_to_get, column_filter, max_version)
     proto = GetRowRequest.new
     proto.table_name = table_name
     make_repeated_column_names(proto.columns_to_get, columns_to_get)
-    if column_filter.present?
-      pb_filter = make_column_condition(column_filter)
-      proto.filter = Filter.encode(pb_filter)
+    if column_filter
+      proto.filter = make_column_condition(column_filter).serialize_to_string
     end
     proto.primary_key = serialize_primary_key(primary_key)
-    proto.max_versions = max_version if max_version.present?
-    GetRowRequest.encode(proto)
+    proto.max_versions = max_version if max_version
+    proto.serialize_to_string
   end
 
   def decode_get_row(body)
-    proto = GetRowResponse.decode(body)
+    proto = GetRowResponse.new
+    proto.parse_from_string(body)
 
     return_row = nil
     if proto.row.length > 0
@@ -86,7 +80,8 @@ class OTS
   end
 
   def decode_put_row(body)
-    proto = PutRowResponse.decode(body)
+    proto = PutRowResponse.new
+    proto.parse_from_string(body)
     return_row = nil
     if proto.row.length != 0
       inputStream = PlainBufferInputStream.new(proto.row)
@@ -104,24 +99,23 @@ class OTS
       table_item.table_name = table_value.table_name
       make_repeated_column_names(table_item.columns_to_get, table_value.columns_to_get)
 
-      if table_value.column_filter.present?
-        pb_filter = make_column_condition(table_value.column_filter)
-        table_item.filter = Filter.encode(pb_filter)
+      if table_value.column_filter
+        table_item.filter = make_column_condition(table_value.column_filter).serialize_to_string
       end
 
       table_value.primary_keys.each do |pk|
         table_item.primary_key << serialize_primary_key(pk)
       end
 
-      if table_value.token.present?
+      if table_value.token
         table_value.token.each do |tk|
           table_item.token << tk
         end
       end
-      if table_value.max_version.present?
+      if table_value.max_version
         table_item.max_versions = table_value.max_version
       end
-      if table_value.time_range.present?
+      if table_value.time_range
         if table_value.time_range.is_a?(Array)
           table_item.time_range.start_time = table_value.time_range[0]
           table_item.time_range.end_time = table_value.time_range[1]
@@ -129,19 +123,20 @@ class OTS
          table_item.time_range.specific_time = table_value.time_range
        end
      end
-      if table_value.start_column.present?
+      if table_value.start_column
        table_item.start_column = table_value.start_column
      end
-      if table_value.end_column.present?
+      if table_value.end_column
        table_item.end_column = table_value.end_column
      end
      proto.tables << table_item
    end
-    BatchGetRowRequest.encode(proto)
+    proto.serialize_to_string
   end
 
   def decode_batch_get_row(body)
-    proto = BatchGetRowResponse.decode(body)
+    proto = BatchGetRowResponse.new
+    proto.parse_from_string(body)
     rows = []
     proto.tables.each do |table_item|
       rows << parse_get_row_item(table_item.rows)
@@ -168,8 +163,7 @@ class OTS
       end
       proto.tables << table_item
     end
-    batch_string = BatchWriteRowRequest.encode(proto)
-    batch_string.gsub("\x08\x03", "\x08\x00")
+    proto.serialize_to_string
   end
 
   def make_put_row_item(proto, put_row_item)
@@ -180,28 +174,28 @@ class OTS
     condition_proto = Condition.new
     proto.condition = make_condition(condition_proto, condition)
     if put_row_item.return_type == ReturnType::RT_PK
-      proto.return_content.return_type = :RT_PK
+      proto.return_content.return_type = RT_PK
     end
 
     proto.row_change = serialize_for_put_row(put_row_item.row.primary_key, put_row_item.row.attribute_columns)
-    proto.type = :PUT
+    proto.type = PUT
     proto
   end
 
   def make_update_row_item(proto, update_row_item)
     condition = update_row_item.condition
     if condition.nil?
-      condition = Metadata::Condition(Metadata::RowExistenceExpectation::IGNORE, nil)
+      condition = Condition.new(RowExistenceExpectation::IGNORE, nil)
     end
+    condition_proto = Condition.new
+    proto.condition = make_condition(condition_proto, condition)
 
-    make_condition(proto.condition, condition)
-
-    if update_row_item.return_type == Metadata::ReturnType::RT_PK
-      proto.return_content.return_type = :RT_PK
+    if update_row_item.return_type == ReturnType::RT_PK
+      proto.return_content.return_type = RT_PK
     end
 
     proto.row_change = serialize_for_update_row(update_row_item.row.primary_key, update_row_item.row.attribute_columns)
-    proto.type = :UPDATE
+    proto.type = UPDATE
     proto
   end
 
@@ -221,9 +215,8 @@ class OTS
     proto.row_existence = expectation_str
     raise TableStoreClientError.new("row_existence_expectation should be one of [#{join(', ')}], not #{expectation_str}") if proto.row_existence.nil?
 
-    if condition.get_column_condition.present?
-      pb_filter = make_column_condition(condition.column_condition)
-      proto.column_condition = Filter.encode(pb_filter)
+    if condition.get_column_condition
+      proto.column_condition = make_column_condition(condition.column_condition).serialize_to_string
     end
     proto
   end
@@ -252,7 +245,7 @@ class OTS
       proto.sub_filters << make_column_condition(sub)
     end
 
-    CompositeColumnValueFilter.encode(proto)
+    proto.serialize_to_string
   end
 
   def make_relation_condition(condition)
@@ -263,17 +256,8 @@ class OTS
     proto.column_value = serialize_column_value(condition.get_column_value)
     proto.filter_if_missing = !condition.pass_if_missing
     proto.latest_version_only = condition.latest_version_only
-    filter_string = SingleColumnValueFilter.encode(proto)
-
-    if proto.filter_if_missing.blank? && proto.latest_version_only.blank?
-      filter_string += [32, 0, 40, 0].pack("C*")
-    elsif proto.filter_if_missing.blank?
-      filter_string.insert(-3, [32, 0].pack("C*"))
-    elsif proto.latest_version_only.blank?
-      filter_string += [40, 0].pack("C*")
-    end
+    proto.serialize_to_string
 
-    filter_string
   end
 
   def parse_get_row_item(proto)
@@ -302,7 +286,7 @@ class OTS
        # end
      end
 
-      row_list << {pk: primary_key_columns, attr: attribute_columns} if primary_key_columns.present?
+      row_list << {pk: primary_key_columns, attr: attribute_columns} if primary_key_columns
     end
     row_list
   end
@@ -365,6 +349,34 @@ class OTS
     stream.get_buffer.join('')
   end
 
+  def serialize_for_update_row(primary_key, attribute_columns)
+    unless attribute_columns.is_a?(Hash)
+      raise TableStoreClientError.new("the attribute columns of UpdateRow is not hash, but is #{attribute_columns.class}")
+    end
+
+    attribute_columns.keys.each do |key|
+      if attribute_columns[key] && !attribute_columns[key].is_a?(Hash)
+        raise TableStoreClientError.new("the columns value of update-row must be hash, but is #{attribute_columns[key].class}")
+      end
+      attribute_columns[key].each do |cell|
+        # if cell.is_a?(Array)
+        #   raise TableStoreClientError.new("the cell of update-row must be array, but is #{cell.class}")
+        # end
+      end
+    end
+
+    buf_size = compute_update_row_size(primary_key, attribute_columns)
+    output_stream = PlainBufferOutputStream.new(buf_size)
+    coded_output_stream = PlainBufferCodedOutputStream.new(output_stream)
+    row_checksum = 0
+    coded_output_stream.write_header
+    row_checksum = coded_output_stream.write_primary_key(primary_key, row_checksum)
+    row_checksum = coded_output_stream.write_update_columns(attribute_columns, row_checksum)
+    row_checksum = PlainBufferCrc8.crc_int8(row_checksum, 0)
+    coded_output_stream.write_row_checksum(row_checksum)
+    output_stream.get_buffer.join('')
+  end
+
   def compute_variant_value_size(value)
     compute_primary_key_value_size(value) - LITTLE_ENDIAN_SIZE - 1
   end
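The new serialize_for_update_row mirrors serialize_for_put_row but sizes the buffer with compute_update_row_size and writes the cells through write_update_columns. A minimal sketch of a call, assuming (per the Hash check above) that attribute_columns maps an update type to a hash of column name => value; the 'PUT' key and the primary-key shape are assumptions, not taken from the gem:

    primary_key       = [{'pk0' => 1}]                   # illustrative primary-key shape
    attribute_columns = { 'PUT' => { 'col1' => 'v1' } }  # assumed update-type key

    row_change = TableStoreClient.new.serialize_for_update_row(primary_key, attribute_columns)
    # row_change is the plain-buffer binary that make_update_row_item assigns to proto.row_change.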
@@ -474,8 +486,40 @@ class OTS
     elsif value.is_a?(Float)
       size += LITTLE_ENDIAN_64_SIZE
     else
-      raise TableStoreClientError("Unsupported column type: " + value.class)
+      raise TableStoreClientError.new("Unsupported column type: " + value.class)
+    end
+    size
+  end
+
+  def compute_update_row_size(primary_key, attribute_columns)
+    size = LITTLE_ENDIAN_SIZE
+    size += compute_primary_key_size(primary_key)
+
+    if attribute_columns.length != 0
+      size += 1
+      attribute_columns.keys.each do |update_type|
+        columns = attribute_columns[update_type]
+        if columns.is_a?(String)
+          size += compute_column_size2(column, nil, update_type)
+        elsif columns.is_a?(Array)
+          columns.each do |column|
+            if column.length == 1
+              size += compute_column_size2(column[0], nil, update_type)
+            elsif columns.length >= 2
+              compute_column_size2(column[0], column[1], update_type)
+            else
+              #raise OTSClientError("Unsupported column type:" + str(type(columns)))
+            end
+          end
+        end
+      end
     end
+    size += 2
     size
   end
+
+  def compute_column_size2(column_name, column_value, update_type)
+    compute_column_size(column_name, column_value)
+  end
+
 end