td 0.11.8.2 → 0.11.9

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: 3254397bd4682922aecaf0d08242c2219e16f76e
4
- data.tar.gz: ac9686878f407ad6c950a9907f85b695ffadef04
3
+ metadata.gz: 35121e210f89e8af8d88f3de09c5da1a46f465f5
4
+ data.tar.gz: 2ed8a007214c1861d9653ad88a6e204b11959487
5
5
  SHA512:
6
- metadata.gz: 881fdc4e2dcdc2a72c545ff7520e88ed507a348932e58996058d865b9e070d59efbbf93ee7d13fe97ffe0f0af65a1047ae5644c1ed0ef8371f3ba2d7c44ab5f9
7
- data.tar.gz: ee2a8080e1b490981a23dd2ce50314817c2abf1552cd8f09acf3d798485a5b8c872379eff7bda7edd3c84f126b7c410ad9df5fec063aa073230a3df46c9b7916
6
+ metadata.gz: e29254a97c4170455119d3d0d11a8c026d05b431f3fe58ad69bbaeb7f6837805fcdbe51fdd91c8ebe3ae6a9c7077f3312cd681a0f865a3bf2eddb43399446be0
7
+ data.tar.gz: 908f511b6ed2f6b293cdc809b203a367869607f7fd0d130c5f85919cc5d0462fec372a214c31df2ecde30cde248804c173fb9a5d32136d84f605d5557fc09cb3
data/ChangeLog CHANGED
@@ -1,12 +1,21 @@
1
- == 2015-03-24 version 0.11.8.2
2
-
3
- * Experimental: table:export with tsv.gz option.
4
-
5
- == 2015-03-19 version 0.11.8.1
6
-
7
- * Fix: exclude dist directory from gem package.
8
- This release is gem, not Toolbelt. It only includes td.gemspec change from
9
- 0.11.8.
1
+ == 2015-04-17 version 0.11.9
2
+
3
+ * add connector commands.
4
+ * add tsv.gz format to export command.
5
+ * add --null option to job:show that specifies what string represents a
6
+ null value in csv or tsv format. It emits 'null' by default for backward
7
+ compatibility, but you can specify '--null ""' to use an empty string
8
+ instead of 'null'.
9
+ * fix: remove illegal TAB character at the end of the header record. Thanks y-ken!
10
+ * fix: apply fancy number formatting in table:list only if the format is 'table'.
11
+ * fix: job:show to convert NaN to a String if the format is csv or tsv. The
12
+ column values NaN, Infinity and -Infinity caused an error in previous
13
+ versions. If the json format is specified it still raises an error because
14
+ JSON has no representation of those values.
15
+ * fix: internal cwd handling for auto jar update that caused import:upload to
16
+ fail because the cwd was changed before reading the specified file.
17
+ * fix: table:list to consider import_only databases. It now skips such
18
+ databases and no longer crashes due to insufficient permissions.
10
19
 
11
20
  == 2015-02-20 version 0.11.8
12
21
 
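The '--null' and NaN/Infinity entries above are implemented by the dump_column and sanitize_infinite_value changes shown later in this diff. A condensed Ruby sketch of the resulting csv/tsv cell rendering (illustration only; method names are shortened here):

    require 'yajl'

    def sanitize(v)
      (v.is_a?(Float) && !v.finite?) ? v.to_s : v    # NaN/Infinity become the strings "NaN"/"Infinity"
    end

    def render_cell(v, null_expr = nil)
      v = null_expr if v.nil? && null_expr           # --null "" substitutes nil before encoding
      v.is_a?(String) ? v : Yajl.dump(sanitize(v))
    end

    render_cell(nil)        # => "null"     (backward-compatible default)
    render_cell(nil, '')    # => ""         (with --null "")
    render_cell(0.0 / 0.0)  # => "\"NaN\""  (a quoted string instead of an encoder error)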
data/Gemfile CHANGED
@@ -1,3 +1,5 @@
1
1
  source 'https://rubygems.org'
2
2
 
3
+ #gem 'td-client', :git => 'https://github.com/treasure-data/td-client-ruby.git'
4
+
3
5
  gemspec
data/lib/td/command/connector.rb ADDED
@@ -0,0 +1,299 @@
1
+ require 'td/command/common'
2
+ require 'td/command/job'
3
+ require 'json'
4
+ require 'uri'
5
+ require 'yaml'
6
+
7
+ module TreasureData
8
+ module Command
9
+
10
+ def required(opt, value)
11
+ if value.nil?
12
+ raise ParameterConfigurationError, "#{opt} option required"
13
+ end
14
+ end
15
+
16
+ def connector_guess(op)
17
+ type = 's3'
18
+ id = secret = source = nil
19
+ out = 'td-bulkload.yml'
20
+
21
+ op.on('--type[=TYPE]', "connector type; only 's3' is supported") { |s| type = s }
22
+ op.on('--access-id ID', "access ID (S3 access key id for type: s3)") { |s| id = s }
23
+ op.on('--access-secret SECRET', "access secret (S3 secret access key for type: s3)") { |s| secret = s }
24
+ op.on('--source SOURCE', "resource(s) URI to be imported (e.g. https://s3-us-west-1.amazonaws.com/bucketname/path/prefix/to/import/)") { |s| source = s }
25
+ op.on('--out FILE_NAME', "configuration file") { |s| out = s }
26
+
27
+ config = op.cmd_parse
28
+ if config
29
+ job = prepare_bulkload_job_config(config)
30
+ out ||= config
31
+ else
32
+ required('--access-id', id)
33
+ required('--access-secret', secret)
34
+ required('--source', source)
35
+ required('--out', out)
36
+
37
+ uri = URI.parse(source)
38
+ endpoint = uri.host
39
+ path_components = uri.path.scan(/\/[^\/]*/)
40
+ bucket = path_components.shift.sub(/\//, '')
41
+ path_prefix = path_components.join.sub(/\//, '')
42
+
43
+ job = API::BulkLoad::BulkLoad.from_hash(
44
+ :config => {
45
+ :type => type,
46
+ :access_key_id => id,
47
+ :secret_access_key => secret,
48
+ :endpoint => endpoint,
49
+ :bucket => bucket,
50
+ :path_prefix => path_prefix,
51
+ }
52
+ ).validate
53
+ end
54
+
55
+ client = get_client
56
+ job = client.bulk_load_guess(job)
57
+
58
+ create_bulkload_job_file_backup(out)
59
+ if /\.json\z/ =~ out
60
+ config_str = JSON.pretty_generate(job.to_h)
61
+ else
62
+ config_str = YAML.dump(job.to_h)
63
+ end
64
+ File.open(out, 'w') do |f|
65
+ f << config_str
66
+ end
67
+
68
+ puts "Created #{out} file."
69
+ puts "Use '#{$prog} " + Config.cl_options_string + "connector:preview #{out}' to see bulk load preview."
70
+ end
71
+
72
+ def connector_preview(op)
73
+ set_render_format_option(op)
74
+ config_file = op.cmd_parse
75
+ job = prepare_bulkload_job_config(config_file)
76
+ client = get_client()
77
+ preview = client.bulk_load_preview(job)
78
+
79
+ cols = preview.schema.sort_by { |col|
80
+ col['index']
81
+ }
82
+ fields = cols.map { |col| col['name'] + ':' + col['type'] }
83
+ types = cols.map { |col| col['type'] }
84
+ rows = preview.records.map { |row|
85
+ cols = {}
86
+ row.each_with_index do |col, idx|
87
+ cols[fields[idx]] = col.inspect
88
+ end
89
+ cols
90
+ }
91
+
92
+ puts cmd_render_table(rows, :fields => fields, :render_format => op.render_format)
93
+
94
+ puts "Update #{config_file} and use '#{$prog} " + Config.cl_options_string + "connector:preview #{config_file}' to preview again."
95
+ puts "Use '#{$prog} " + Config.cl_options_string + "connector:issue #{config_file}' to run Server-side bulk load."
96
+ end
97
+
98
+ def connector_issue(op)
99
+ database = table = nil
100
+ time_column = nil
101
+ wait = exclude = false
102
+ op.on('--database DB_NAME', "destination database") { |s| database = s }
103
+ op.on('--table TABLE_NAME', "destination table") { |s| table = s }
104
+ op.on('--time-column COLUMN_NAME', "data partitioning key") { |s| time_column = s }
105
+ op.on('-w', '--wait', 'wait for finishing the job', TrueClass) { |b| wait = b }
106
+ op.on('-x', '--exclude', 'do not automatically retrieve the job result', TrueClass) { |b| exclude = b }
107
+
108
+ config_file = op.cmd_parse
109
+
110
+ required('--database', database)
111
+ required('--table', table)
112
+
113
+ job = prepare_bulkload_job_config(config_file)
114
+ job['time_column'] = time_column if time_column
115
+
116
+ client = get_client()
117
+ job_id = client.bulk_load_issue(database, table, job)
118
+
119
+ puts "Job #{job_id} is queued."
120
+ puts "Use '#{$prog} " + Config.cl_options_string + "job:show #{job_id}' to show the status."
121
+
122
+ if wait
123
+ wait_connector_job(client, job_id, exclude)
124
+ end
125
+ end
126
+
127
+ def connector_list(op)
128
+ set_render_format_option(op)
129
+ op.cmd_parse
130
+
131
+ client = get_client()
132
+ # TODO database and table is empty at present. Fix API or Client.
133
+ keys = ['name', 'cron', 'timezone', 'delay', 'database', 'table', 'config']
134
+ fields = keys.map { |e| e.capitalize.to_sym }
135
+ rows = client.bulk_load_list().sort_by { |e|
136
+ e['name']
137
+ }.map { |e|
138
+ Hash[fields.zip(e.to_h.values_at(*keys))]
139
+ }
140
+
141
+ puts cmd_render_table(rows, :fields => fields, :render_format => op.render_format)
142
+ end
143
+
144
+ def connector_create(op)
145
+ # TODO it's a must parameter at this moment but API should be fixed
146
+ opts = {:timezone => 'UTC'}
147
+ op.on('--time-column COLUMN_NAME', "data partitioning key") {|s|
148
+ opts[:time_column] = s
149
+ }
150
+ op.on('-t', '--timezone TZ', "name of the timezone.",
151
+ " Only extended timezones like 'Asia/Tokyo', 'America/Los_Angeles' are supported,",
152
+ " (no 'PST', 'PDT', etc...).",
153
+ " When a timezone is specified, the cron schedule is referred to that timezone.",
154
+ " Otherwise, the cron schedule is referred to the UTC timezone.",
155
+ " E.g. cron schedule '0 12 * * *' will execute daily at 5 AM without timezone option",
156
+ " and at 12PM with the -t / --timezone 'America/Los_Angeles' timezone option") {|s|
157
+ opts[:timezone] = s
158
+ }
159
+ op.on('-D', '--delay SECONDS', 'delay time of the schedule', Integer) {|i|
160
+ opts[:delay] = i
161
+ }
162
+
163
+ name, cron, database, table, config_file = op.cmd_parse
164
+
165
+ job = prepare_bulkload_job_config(config_file)
166
+ opts[:cron] = cron
167
+
168
+ client = get_client()
169
+ get_table(client, database, table)
170
+
171
+ session = client.bulk_load_create(name, database, table, job, opts)
172
+ dump_connector_session(session)
173
+ end
174
+
175
+ def connector_show(op)
176
+ name = op.cmd_parse
177
+
178
+ client = get_client()
179
+ session = client.bulk_load_show(name)
180
+ dump_connector_session(session)
181
+ end
182
+
183
+ def connector_update(op)
184
+ name, config_file = op.cmd_parse
185
+
186
+ job = prepare_bulkload_job_config(config_file)
187
+
188
+ client = get_client()
189
+ session = client.bulk_load_update(name, job)
190
+ dump_connector_session(session)
191
+ end
192
+
193
+ def connector_delete(op)
194
+ name = op.cmd_parse
195
+
196
+ client = get_client()
197
+ session = client.bulk_load_delete(name)
198
+ puts 'Deleted session'
199
+ puts '--'
200
+ dump_connector_session(session)
201
+ end
202
+
203
+ def connector_history(op)
204
+ set_render_format_option(op)
205
+ name = op.cmd_parse
206
+
207
+ fields = [:JobID, :Status, :Records, :Database, :Table, :Priority, :Started, :Duration]
208
+ client = get_client()
209
+ rows = client.bulk_load_history(name).map { |e|
210
+ {
211
+ :JobID => e.job_id,
212
+ :Status => e.status,
213
+ :Records => e.records,
214
+ # TODO: td-client-ruby should return only name
215
+ :Database => e.database['name'],
216
+ :Table => e.table['name'],
217
+ :Priority => e.priority,
218
+ :Started => Time.at(e.start_at),
219
+ :Duration => (e.end_at.nil? ? Time.now.to_i : e.end_at) - e.start_at,
220
+ }
221
+ }
222
+ puts cmd_render_table(rows, :fields => fields, :render_format => op.render_format)
223
+ end
224
+
225
+ def connector_run(op)
226
+ wait = exclude = false
227
+ op.on('-w', '--wait', 'wait for finishing the job', TrueClass) { |b| wait = b }
228
+ op.on('-x', '--exclude', 'do not automatically retrieve the job result', TrueClass) { |b| exclude = b }
229
+
230
+ name, scheduled_time = op.cmd_parse
231
+
232
+ client = get_client()
233
+ job_id = client.bulk_load_run(name)
234
+ puts "Job #{job_id} is queued."
235
+ puts "Use '#{$prog} " + Config.cl_options_string + "job:show #{job_id}' to show the status."
236
+
237
+ if wait
238
+ wait_connector_job(client, job_id, exclude)
239
+ end
240
+ end
241
+
242
+ private
243
+
244
+ def file_type(str)
245
+ begin
246
+ YAML.load(str)
247
+ return :yaml
248
+ rescue
249
+ end
250
+ begin
251
+ JSON.parse(str)
252
+ return :json
253
+ rescue
254
+ end
255
+ nil
256
+ end
257
+
258
+ def prepare_bulkload_job_config(config_file)
259
+ unless File.exist?(config_file)
260
+ raise ParameterConfigurationError, "configuration file: #{config_file} not found"
261
+ end
262
+ config_str = File.read(config_file)
263
+ if file_type(config_str) == :yaml
264
+ config_str = JSON.pretty_generate(YAML.load(config_str))
265
+ end
266
+ API::BulkLoad::BulkLoad.from_json(config_str)
267
+ end
268
+
269
+ def create_bulkload_job_file_backup(out)
270
+ return unless File.exist?(out)
271
+ 0.upto(100) do |idx|
272
+ backup = "#{out}.#{idx}"
273
+ unless File.exist?(backup)
274
+ FileUtils.mv(out, backup)
275
+ return
276
+ end
277
+ end
278
+ raise "backup file creation failed"
279
+ end
280
+
281
+ def dump_connector_session(session)
282
+ puts "Name : #{session.name}"
283
+ puts "Cron : #{session.cron}"
284
+ puts "Timezone : #{session.timezone}"
285
+ puts "Delay : #{session.delay}"
286
+ puts "Database : #{session.database}"
287
+ puts "Table : #{session.table}"
288
+ puts "Config"
289
+ puts YAML.dump(session.config.to_h)
290
+ end
291
+
292
+ def wait_connector_job(client, job_id, exclude)
293
+ job = client.job(job_id)
294
+ wait_job(job, true)
295
+ puts "Status : #{job.status}"
296
+ end
297
+
298
+ end
299
+ end
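For reference, connector:guess above derives the S3 endpoint, bucket and path prefix from the --source URI with plain URI parsing. A standalone sketch of that decomposition, using the example URI from the option help text:

    require 'uri'

    source = 'https://s3-us-west-1.amazonaws.com/bucketname/path/prefix/to/import/'
    uri = URI.parse(source)

    endpoint        = uri.host                             # "s3-us-west-1.amazonaws.com"
    path_components = uri.path.scan(/\/[^\/]*/)            # ["/bucketname", "/path", "/prefix", ...]
    bucket          = path_components.shift.sub(/\//, '')  # "bucketname"
    path_prefix     = path_components.join.sub(/\//, '')   # "path/prefix/to/import/"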
@@ -1,4 +1,3 @@
1
-
2
1
  module TreasureData
3
2
  module Command
4
3
 
@@ -143,6 +142,10 @@ module Command
143
142
  exclude = b
144
143
  }
145
144
 
145
+ op.on('--null STRING', "null expression in csv or tsv") {|s|
146
+ render_opts[:null_expr] = s.to_s
147
+ }
148
+
146
149
  job_id = op.cmd_parse
147
150
 
148
151
  # parameter concurrency validation
@@ -166,8 +169,38 @@ module Command
166
169
  "Option -l / --limit is only valid when not outputting to file (no -o / --output option provided)"
167
170
  end
168
171
 
172
+ get_and_show_result(job_id, wait, exclude, output, limit, format, render_opts, verbose)
173
+ end
174
+
175
+ def job_status(op)
176
+ job_id = op.cmd_parse
169
177
  client = get_client
170
178
 
179
+ puts client.job_status(job_id)
180
+ end
181
+
182
+ def job_kill(op)
183
+ job_id = op.cmd_parse
184
+
185
+ client = get_client
186
+
187
+ former_status = client.kill(job_id)
188
+ if TreasureData::Job::FINISHED_STATUS.include?(former_status)
189
+ $stderr.puts "Job #{job_id} is already finished (#{former_status})"
190
+ exit 0
191
+ end
192
+
193
+ if former_status == TreasureData::Job::STATUS_RUNNING
194
+ $stderr.puts "Job #{job_id} is killed."
195
+ else
196
+ $stderr.puts "Job #{job_id} is canceled."
197
+ end
198
+ end
199
+
200
+ private
201
+
202
+ def get_and_show_result(job_id, wait, exclude = false, output = nil, limit = nil, format = nil, render_opts = {}, verbose = false)
203
+ client = get_client
171
204
  job = client.job(job_id)
172
205
 
173
206
  puts "JobID : #{job.job_id}"
@@ -194,61 +227,14 @@ module Command
194
227
  end
195
228
  end
196
229
 
197
- # up to 7 retries with exponential (base 2) back-off starting at 'retry_delay'
198
- retry_delay = 5
199
- max_cumul_retry_delay = 200
200
- cumul_retry_delay = 0
201
-
202
230
  if wait && !job.finished?
203
231
  wait_job(job)
204
232
  if [:hive, :pig, :impala, :presto].include?(job.type) && !exclude
205
- puts "Result :"
206
-
207
- begin
208
- show_result(job, output, limit, format, render_opts)
209
- rescue TreasureData::NotFoundError => e
210
- # Got 404 because result not found.
211
- rescue TreasureData::APIError, # HTTP status code 500 or more
212
- Errno::ECONNREFUSED, Errno::ECONNRESET, Timeout::Error, EOFError,
213
- OpenSSL::SSL::SSLError, SocketError => e
214
- # don't retry on 300 and 400 errors
215
- if e.class == TreasureData::APIError && e.message !~ /^5\d\d:\s+/
216
- raise e
217
- end
218
- if cumul_retry_delay > max_cumul_retry_delay
219
- raise e
220
- end
221
- $stderr.puts "Error #{e.class}: #{e.message}. Retrying after #{retry_delay} seconds..."
222
- sleep retry_delay
223
- cumul_retry_delay += retry_delay
224
- retry_delay *= 2
225
- retry
226
- end
233
+ show_result_with_retry(job, output, limit, format, render_opts)
227
234
  end
228
-
229
235
  else
230
236
  if [:hive, :pig, :impala, :presto].include?(job.type) && !exclude && job.finished?
231
- puts "Result :"
232
- begin
233
- show_result(job, output, limit, format, render_opts)
234
- rescue TreasureData::NotFoundError => e
235
- # Got 404 because result not found.
236
- rescue TreasureData::APIError,
237
- Errno::ECONNREFUSED, Errno::ECONNRESET, Timeout::Error, EOFError,
238
- OpenSSL::SSL::SSLError, SocketError => e
239
- # don't retry on 300 and 400 errors
240
- if e.class == TreasureData::APIError && e.message !~ /^5\d\d:\s+/
241
- raise e
242
- end
243
- if cumul_retry_delay > max_cumul_retry_delay
244
- raise e
245
- end
246
- $stderr.puts "Error #{e.class}: #{e.message}. Retrying after #{retry_delay} seconds..."
247
- sleep retry_delay
248
- cumul_retry_delay += retry_delay
249
- retry_delay *= 2
250
- retry
251
- end
237
+ show_result_with_retry(job, output, limit, format, render_opts)
252
238
  end
253
239
 
254
240
  if verbose
@@ -272,32 +258,6 @@ module Command
272
258
  puts "\rUse '-v' option to show detailed messages." + " " * 20 unless verbose
273
259
  end
274
260
 
275
- def job_status(op)
276
- job_id = op.cmd_parse
277
- client = get_client
278
-
279
- puts client.job_status(job_id)
280
- end
281
-
282
- def job_kill(op)
283
- job_id = op.cmd_parse
284
-
285
- client = get_client
286
-
287
- former_status = client.kill(job_id)
288
- if TreasureData::Job::FINISHED_STATUS.include?(former_status)
289
- $stderr.puts "Job #{job_id} is already finished (#{former_status})"
290
- exit 0
291
- end
292
-
293
- if former_status == TreasureData::Job::STATUS_RUNNING
294
- $stderr.puts "Job #{job_id} is killed."
295
- else
296
- $stderr.puts "Job #{job_id} is canceled."
297
- end
298
- end
299
-
300
- private
301
261
  def wait_job(job, first_call = false)
302
262
  $stderr.puts "queued..."
303
263
 
@@ -328,6 +288,35 @@ module Command
328
288
  end
329
289
  end
330
290
 
291
+ def show_result_with_retry(job, output, limit, format, render_opts)
292
+ # up to 7 retries with exponential (base 2) back-off starting at 'retry_delay'
293
+ retry_delay = 5
294
+ max_cumul_retry_delay = 200
295
+ cumul_retry_delay = 0
296
+
297
+ puts "Result :"
298
+ begin
299
+ show_result(job, output, limit, format, render_opts)
300
+ rescue TreasureData::NotFoundError => e
301
+ # Got 404 because result not found.
302
+ rescue TreasureData::APIError, # HTTP status code 500 or more
303
+ Errno::ECONNREFUSED, Errno::ECONNRESET, Timeout::Error, EOFError,
304
+ OpenSSL::SSL::SSLError, SocketError => e
305
+ # don't retry on 300 and 400 errors
306
+ if e.class == TreasureData::APIError && e.message !~ /^5\d\d:\s+/
307
+ raise e
308
+ end
309
+ if cumul_retry_delay > max_cumul_retry_delay
310
+ raise e
311
+ end
312
+ $stderr.puts "Error #{e.class}: #{e.message}. Retrying after #{retry_delay} seconds..."
313
+ sleep retry_delay
314
+ cumul_retry_delay += retry_delay
315
+ retry_delay *= 2
316
+ retry
317
+ end
318
+ end
319
+
331
320
  def show_result(job, output, limit, format, render_opts={})
332
321
  if output
333
322
  write_result(job, output, limit, format, render_opts)
@@ -387,7 +376,7 @@ module Command
387
376
  job.result_each_with_compr_size {|row, compr_size|
388
377
  # TODO limit the # of columns
389
378
  writer << row.map {|col|
390
- dump_column(col)
379
+ dump_column(col, render_opts[:null_expr])
391
380
  }
392
381
  n_rows += 1
393
382
  if n_rows % 100 == 0 # flush every 100 recods
@@ -405,10 +394,7 @@ module Command
405
394
  open_file(output, "w") {|f|
406
395
  # output headers
407
396
  if render_opts[:header] && job.hive_result_schema
408
- job.hive_result_schema.each {|name,type|
409
- f.write name + "\t"
410
- }
411
- f.write "\n"
397
+ f.write job.hive_result_schema.map {|name, type| name}.join("\t") + "\n"
412
398
  end
413
399
  # output data
414
400
  n_rows = 0
@@ -417,8 +403,9 @@ module Command
417
403
  "NOTE: the job result is being written to #{output} in tsv format",
418
404
  job.result_size, 0.1, 1)
419
405
  end
406
+
420
407
  job.result_each_with_compr_size {|row, compr_size|
421
- f.write row.map {|col| dump_column(col)}.join("\t") + "\n"
408
+ f.write row.map {|col| dump_column(col, render_opts[:null_expr])}.join("\t") + "\n"
422
409
  n_rows += 1
423
410
  if n_rows % 100 == 0
424
411
  f.flush # flush every 100 recods
@@ -493,7 +480,7 @@ module Command
493
480
  job.result_each_with_compr_size {|row, compr_size|
494
481
  indicator.update(compr_size)
495
482
  rows << row.map {|v|
496
- dump_column_safe_utf8(v)
483
+ dump_column_safe_utf8(v, render_opts[:null_expr])
497
484
  }
498
485
  n_rows += 1
499
486
  break if !limit.nil? and n_rows == limit
@@ -514,15 +501,17 @@ module Command
514
501
  end
515
502
  end
516
503
 
517
- def dump_column(v)
518
- s = v.is_a?(String) ? v.to_s : Yajl.dump(v)
504
+ def dump_column(v, null_expr = nil)
505
+ v = null_expr if v.nil? && null_expr
506
+
507
+ s = v.is_a?(String) ? v.to_s : Yajl.dump(sanitize_infinite_value(v))
519
508
  # CAUTION: msgpack-ruby populates byte sequences as Encoding.default_internal which should be BINARY
520
509
  s = s.force_encoding('BINARY') if s.respond_to?(:encode)
521
510
  s
522
511
  end
523
512
 
524
- def dump_column_safe_utf8(v)
525
- s = dump_column(v)
513
+ def dump_column_safe_utf8(v, null_expr = false)
514
+ s = dump_column(v, null_expr)
526
515
  # Here does UTF-8 -> UTF-16LE -> UTF8 conversion:
527
516
  # a) to make sure the string doesn't include invalid byte sequence
528
517
  # b) to display multi-byte characters as it is
@@ -532,6 +521,10 @@ module Command
532
521
  s
533
522
  end
534
523
 
524
+ def sanitize_infinite_value(v)
525
+ (v.is_a?(Float) && !v.finite?) ? v.to_s : v
526
+ end
527
+
535
528
  def job_priority_name_of(id)
536
529
  PRIORITY_FORMAT_MAP[id] || 'NORMAL'
537
530
  end
@@ -311,7 +311,7 @@ module List
311
311
  # TODO acl:test
312
312
 
313
313
  add_list 'server:status', %w[], 'Show status of the Treasure Data server'
314
- add_list 'server:endpoint', %w[api_endpoint], "Set the Treasure Data API server's endpoint (must be a valid URI)", ["td server:endpoint 'https://api.treasuredata.com'"]
314
+ add_list 'server:endpoint', %w[api_endpoint], "Set the Treasure Data API server's endpoint (must be a valid URI)", ["server:endpoint 'https://api.treasuredata.com'"]
315
315
 
316
316
  add_list 'sample:apache', %w[path.json], 'Create a sample log file', [], false
317
317
 
@@ -320,6 +320,19 @@ module List
320
320
 
321
321
  add_list 'update', %w[], 'Update td and related libraries for TreasureData toolbelt'
322
322
 
323
+ add_list 'connector:guess', %w[config?], 'Run guess to generate connector config file', ['connector:guess td-bulkload.yml', 'connector:guess --access-id s3accessId --access-secret s3AccessKey --source https://s3.amazonaws.com/bucketname/path/prefix --database connector_database --table connector_table']
324
+ add_list 'connector:preview', %w[config], 'Show preview of connector execution', ['connector:preview td-bulkload.yml']
325
+
326
+ add_list 'connector:issue', %w[config], 'Run one time connector execution', ['connector:issue td-bulkload.yml']
327
+
328
+ add_list 'connector:list', %w[], 'Show list of connector sessions', ['connector:list']
329
+ add_list 'connector:create', %w[name cron database table config], 'Create new connector session', ['connector:create connector1 "0 * * * *" connector_database connector_table td-bulkload.yml']
330
+ add_list 'connector:show', %w[name], 'Show connector session', ['connector:show connector1']
331
+ add_list 'connector:update', %w[name config], 'Modify connector session', ['connector:update connector1 td-bulkload.yml']
332
+ add_list 'connector:delete', %w[name], 'Delete connector session', ['connector:delete connector1']
333
+ add_list 'connector:history', %w[name], 'Show job history of connector session', ['connector:history connector1']
334
+ add_list 'connector:run', %w[name time], 'Run connector session for the specified time', ['connector:run connector1 "2016-01-01 00:00:00"']
335
+
323
336
  # aliases
324
337
  add_alias 'db', 'db:show'
325
338
  add_alias 'dbs', 'db:list'
@@ -377,6 +390,8 @@ module List
377
390
 
378
391
  add_alias 's', 'status'
379
392
 
393
+ add_alias 'connector', 'connector:guess'
394
+
380
395
  # backward compatibility
381
396
  add_alias 'show-databases', 'db:list'
382
397
  add_alias 'show-dbs', 'db:list'
@@ -46,6 +46,7 @@ Basic commands:
46
46
  result # create/delete/list result URLs
47
47
  sched # create/delete/list schedules that run a query periodically
48
48
  schema # create/delete/modify schemas of tables
49
+ connector # manage connectors
49
50
 
50
51
  Additional commands:
51
52
 
@@ -144,18 +144,25 @@ module Command
144
144
  databases = client.databases
145
145
  end
146
146
 
147
- has_item = databases.select {|db| db.tables.select {|table| table.type == :item}.length > 0 }.length > 0
147
+ has_item = databases.select {|db|
148
+ db.permission != :import_only ? (db.tables.select {|table| table.type == :item}.length > 0) : false
149
+ }.length > 0
148
150
 
151
+ # ref. https://github.com/treasure-data/td/issues/26
152
+ should_number_format = [nil, "table"].include?(op.render_format)
149
153
  rows = []
150
154
  ::Parallel.each(databases, :in_threads => num_threads) {|db|
151
155
  begin
156
+ if db.permission == :import_only
157
+ next
158
+ end
152
159
  db.tables.each {}
153
160
  db.tables.each {|table|
154
161
  pschema = table.schema.fields.map {|f|
155
162
  "#{f.name}:#{f.type}"
156
163
  }.join(', ')
157
164
  new_row = {
158
- :Database => db.name, :Table => table.name, :Type => table.type.to_s, :Count => TreasureData::Helpers.format_with_delimiter(table.count),
165
+ :Database => db.name, :Table => table.name, :Type => table.type.to_s, :Count => (should_number_format ? TreasureData::Helpers.format_with_delimiter(table.count) : table.count),
159
166
  :Size => show_size_in_bytes ? TreasureData::Helpers.format_with_delimiter(table.estimated_storage_size) : table.estimated_storage_size_string,
160
167
  'Last import' => table.last_import ? table.last_import.localtime : nil,
161
168
  'Last log timestamp' => table.last_log_timestamp ? table.last_log_timestamp.localtime : nil,
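The should_number_format guard above keeps the thousands separator for the default 'table' render format only, so csv/tsv output stays machine-readable. A minimal sketch of the behavior (this is not TreasureData::Helpers.format_with_delimiter itself, just an equivalent illustration):

    def format_with_delimiter(count)
      count.to_s.reverse.gsub(/(\d{3})(?=\d)/, '\1,').reverse
    end

    [nil, 'table', 'csv'].each do |render_format|
      should_number_format = [nil, 'table'].include?(render_format)
      puts(should_number_format ? format_with_delimiter(1234567) : 1234567)
    end
    # prints "1,234,567" twice (no --format, --format table), then 1234567 (--format csv)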
@@ -188,8 +195,12 @@ module Command
188
195
 
189
196
  if rows.empty?
190
197
  if db_name
191
- $stderr.puts "Database '#{db_name}' has no tables."
192
- $stderr.puts "Use '#{$prog} " + Config.cl_options_string + "table:create <db> <table>' to create a table."
198
+ if databases.first.permission == :import_only
199
+ $stderr.puts "Database '#{db_name}' is import only, cannot list or create tables."
200
+ else
201
+ $stderr.puts "Database '#{db_name}' has no tables."
202
+ $stderr.puts "Use '#{$prog} " + Config.cl_options_string + "table:create <db> <table>' to create a table."
203
+ end
193
204
  elsif databases.empty?
194
205
  $stderr.puts "There are no databases."
195
206
  $stderr.puts "Use '#{$prog} " + Config.cl_options_string + "db:create <db>' to create a database."
@@ -120,7 +120,7 @@ module Command
120
120
  client.add_user(name, nil, email, password)
121
121
 
122
122
  $stderr.puts "User '#{name}' is created."
123
- $stderr.puts "Use '#{$prog} " + Config.cl_options_string + "user:apikeys #{name}' to show the API key."
123
+ $stderr.puts "Use '#{$prog} " + Config.cl_options_string + "user:apikeys #{name}' to show the API key."
124
124
  end
125
125
 
126
126
  def user_delete(op)
data/lib/td/updater.rb CHANGED
@@ -330,35 +330,35 @@ end # module ModuleDefinition
330
330
 
331
331
  if updated > last_updated
332
332
  FileUtils.mkdir_p(Updater.jarfile_dest_path) unless File.exists?(Updater.jarfile_dest_path)
333
- Dir.chdir Updater.jarfile_dest_path
333
+ Dir.chdir(Updater.jarfile_dest_path) do
334
+ File.open('VERSION', 'w') {|f|
335
+ if hourly
336
+ f.print "#{version} via hourly jar auto-update"
337
+ else
338
+ f.print "#{version} via import:jar_update command"
339
+ end
340
+ }
341
+ File.open('td-import-java.version', 'w') {|f|
342
+ f.print "#{version} #{updated}"
343
+ }
334
344
 
335
- File.open('VERSION', 'w') {|f|
336
- if hourly
337
- f.print "#{version} via hourly jar auto-update"
338
- else
339
- f.print "#{version} via import:jar_update command"
340
- end
341
- }
342
- File.open('td-import-java.version', 'w') {|f|
343
- f.print "#{version} #{updated}"
344
- }
345
-
346
- status = nil
347
- indicator = Command::TimeBasedDownloadProgressIndicator.new(
348
- "Updating td-import.jar", Time.new.to_i, 2)
349
- File.open('td-import.jar.new', 'wb') {|binfile|
350
- status = Updater.stream_fetch("#{maven_repo}/#{version}/td-import-#{version}-jar-with-dependencies.jar", binfile) {
351
- indicator.update
345
+ status = nil
346
+ indicator = Command::TimeBasedDownloadProgressIndicator.new(
347
+ "Updating td-import.jar", Time.new.to_i, 2)
348
+ File.open('td-import.jar.new', 'wb') {|binfile|
349
+ status = Updater.stream_fetch("#{maven_repo}/#{version}/td-import-#{version}-jar-with-dependencies.jar", binfile) {
350
+ indicator.update
351
+ }
352
352
  }
353
- }
354
- indicator.finish()
353
+ indicator.finish()
355
354
 
356
- if status
357
- puts "Installed td-import.jar v#{version} in '#{Updater.jarfile_dest_path}'.\n"
358
- File.rename 'td-import.jar.new', 'td-import.jar'
359
- else
360
- puts "Update of td-import.jar failed." unless ENV['TD_TOOLBELT_DEBUG'].nil?
361
- File.delete 'td-import.jar.new' if File.exists? 'td-import.jar.new'
355
+ if status
356
+ puts "Installed td-import.jar v#{version} in '#{Updater.jarfile_dest_path}'.\n"
357
+ File.rename 'td-import.jar.new', 'td-import.jar'
358
+ else
359
+ puts "Update of td-import.jar failed." unless ENV['TD_TOOLBELT_DEBUG'].nil?
360
+ File.delete 'td-import.jar.new' if File.exists? 'td-import.jar.new'
361
+ end
362
362
  end
363
363
  else
364
364
  puts 'Installed td-import.jar is already at the latest version.' unless hourly
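The re-indentation above moves the jar download into a Dir.chdir block; unlike a bare Dir.chdir call, the block form restores the previous working directory when it exits, which is what keeps a subsequent import:upload able to resolve relative file paths. A minimal illustration:

    require 'tmpdir'

    before = Dir.pwd
    Dir.chdir(Dir.tmpdir) do
      # jar download and VERSION file writes happen here in the updater
    end
    Dir.pwd == before  # => true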
data/lib/td/version.rb CHANGED
@@ -1,3 +1,3 @@
1
1
  module TreasureData
2
- TOOLBELT_VERSION = '0.11.8.2'
2
+ TOOLBELT_VERSION = '0.11.9'
3
3
  end
@@ -13,6 +13,8 @@ module TreasureData::Command
13
13
  end
14
14
 
15
15
  describe 'write_result' do
16
+ let(:file) { Tempfile.new("job_spec") }
17
+
16
18
  let :job do
17
19
  job = TreasureData::Job.new(nil, 12345, 'hive', 'select * from employee')
18
20
  job.instance_eval do
@@ -23,22 +25,216 @@ module TreasureData::Command
23
25
  job
24
26
  end
25
27
 
26
- it 'supports json output' do
27
- file = Tempfile.new("job_spec")
28
- command.send(:show_result, job, file, nil, 'json')
29
- File.read(file.path).should == %Q([["1",2.0,{"key":3}],\n["4",5.0,{"key":6}],\n["7",8.0,{"key":9}]])
28
+ context 'result without nil' do
29
+ it 'supports json output' do
30
+ command.send(:show_result, job, file, nil, 'json')
31
+ File.read(file.path).should == %Q([["1",2.0,{"key":3}],\n["4",5.0,{"key":6}],\n["7",8.0,{"key":9}]])
32
+ end
33
+
34
+ it 'supports csv output' do
35
+ command.send(:show_result, job, file, nil, 'csv')
36
+ File.read(file.path).should == %Q(1,2.0,"{""key"":3}"\n4,5.0,"{""key"":6}"\n7,8.0,"{""key"":9}"\n)
37
+ end
38
+
39
+ it 'supports tsv output' do
40
+ command.send(:show_result, job, file, nil, 'tsv')
41
+ File.read(file.path).should == %Q(1\t2.0\t{"key":3}\n4\t5.0\t{"key":6}\n7\t8.0\t{"key":9}\n)
42
+ end
30
43
  end
31
44
 
32
- it 'supports csv output' do
33
- file = Tempfile.new("job_spec")
34
- command.send(:show_result, job, file, nil, 'csv')
35
- File.read(file.path).should == %Q(1,2.0,"{""key"":3}"\n4,5.0,"{""key"":6}"\n7,8.0,"{""key"":9}"\n)
45
+ context 'result with nil' do
46
+ let :job_id do
47
+ 12345
48
+ end
49
+
50
+ let :job do
51
+ job = TreasureData::Job.new(nil, job_id, 'hive', 'select * from employee')
52
+ job.instance_eval do
53
+ @result = [[[nil, 2.0, {key:3}], 1]]
54
+ @result_size = 3
55
+ @status = 'success'
56
+ end
57
+ job
58
+ end
59
+
60
+ context 'with --column-header option' do
61
+ before do
62
+ job.stub(:hive_result_schema).and_return([['c0', 'time'], ['c1', 'double'], ['v', nil], ['c3', 'long']])
63
+ client = Object.new
64
+ client.stub(:job).with(job_id).and_return(job)
65
+ command.stub(:get_client).and_return(client)
66
+ end
67
+
68
+ it 'supports json output' do
69
+ command.send(:show_result, job, file, nil, 'json', { header: true })
70
+ File.read(file.path).should == %Q([[null,2.0,{"key":3}]])
71
+ end
72
+
73
+ it 'supports csv output' do
74
+ command.send(:show_result, job, file, nil, 'csv', { header: true })
75
+ File.read(file.path).should == %Q(c0,c1,v,c3\nnull,2.0,"{""key"":3}"\n)
76
+ end
77
+
78
+ it 'supports tsv output' do
79
+ command.send(:show_result, job, file, nil, 'tsv', { header: true })
80
+ File.read(file.path).should == %Q(c0\tc1\tv\tc3\nnull\t2.0\t{"key":3}\n)
81
+ end
82
+ end
83
+
84
+ context 'without --null option' do
85
+ it 'supports json output' do
86
+ command.send(:show_result, job, file, nil, 'json')
87
+ File.read(file.path).should == %Q([[null,2.0,{"key":3}]])
88
+ end
89
+
90
+ it 'supports csv output' do
91
+ command.send(:show_result, job, file, nil, 'csv')
92
+ File.read(file.path).should == %Q(null,2.0,"{""key"":3}"\n)
93
+ end
94
+
95
+ it 'supports tsv output' do
96
+ command.send(:show_result, job, file, nil, 'tsv')
97
+ File.read(file.path).should == %Q(null\t2.0\t{"key":3}\n)
98
+ end
99
+ end
100
+
101
+ context 'with --null option' do
102
+ it 'does not affect json output (nil will be shown as null)' do
103
+ command.send(:show_result, job, file, nil, 'json', { null_expr: "NULL" })
104
+ File.read(file.path).should == %Q([[null,2.0,{"key":3}]])
105
+ end
106
+
107
+ context 'csv format' do
108
+ context 'specified string is NULL' do
109
+ let!(:null_expr) { "NULL" }
110
+
111
+ it 'shows nil as specified string' do
112
+ command.send(:show_result, job, file, nil, 'csv', { null_expr: null_expr })
113
+ File.read(file.path).should == %Q(NULL,2.0,"{""key"":3}"\n)
114
+ end
115
+ end
116
+
117
+ context 'specified string is empty string' do
118
+ let!(:null_expr) { '' }
119
+
120
+ it 'shows nil as empty string' do
121
+ command.send(:show_result, job, file, nil, 'csv', { null_expr: null_expr })
122
+ File.read(file.path).should == %Q("",2.0,"{""key"":3}"\n)
123
+ end
124
+ end
125
+ end
126
+
127
+ it 'supports tsv output' do
128
+ command.send(:show_result, job, file, nil, 'tsv', { null_expr: "\"\"" })
129
+ File.read(file.path).should == %Q(""\t2.0\t{"key":3}\n)
130
+ end
131
+ end
36
132
  end
37
133
 
38
- it 'supports tsv output' do
39
- file = Tempfile.new("job_spec")
40
- command.send(:show_result, job, file, nil, 'tsv')
41
- File.read(file.path).should == %Q(1\t2.0\t{"key":3}\n4\t5.0\t{"key":6}\n7\t8.0\t{"key":9}\n)
134
+ context 'without NaN/Infinity' do
135
+
136
+ it 'supports json output' do
137
+ command.send(:show_result, job, file, nil, 'json')
138
+ File.read(file.path).should == %Q([["1",2.0,{"key":3}],\n["4",5.0,{"key":6}],\n["7",8.0,{"key":9}]])
139
+ end
140
+
141
+ it 'supports csv output' do
142
+ command.send(:show_result, job, file, nil, 'csv')
143
+ File.read(file.path).should == %Q(1,2.0,"{""key"":3}"\n4,5.0,"{""key"":6}"\n7,8.0,"{""key"":9}"\n)
144
+ end
145
+
146
+ it 'supports tsv output' do
147
+ command.send(:show_result, job, file, nil, 'tsv')
148
+ File.read(file.path).should == %Q(1\t2.0\t{"key":3}\n4\t5.0\t{"key":6}\n7\t8.0\t{"key":9}\n)
149
+ end
150
+ end
151
+
152
+ context 'with NaN/Infinity' do
153
+ let :job do
154
+ job = TreasureData::Job.new(nil, 12345, 'hive', 'select * from employee')
155
+ job.instance_eval do
156
+ @result = [[[0.0/0.0, 1.0/0.0, 1.0/-0.0], 1], [["4", 5.0, {key:6}], 2], [["7", 8.0, {key:9}], 3]]
157
+ @result_size = 3
158
+ @status = 'success'
159
+ end
160
+ job
161
+ end
162
+
163
+ it 'does not support json output' do
164
+ expect { command.send(:show_result, job, file, nil, 'json') }.to raise_error Yajl::EncodeError
165
+ end
166
+
167
+ it 'supports csv output' do
168
+ command.send(:show_result, job, file, nil, 'csv')
169
+ File.read(file.path).should == %Q("""NaN""","""Infinity""","""-Infinity"""\n4,5.0,"{""key"":6}"\n7,8.0,"{""key"":9}"\n)
170
+ end
171
+
172
+ it 'supports tsv output' do
173
+ command.send(:show_result, job, file, nil, 'tsv')
174
+ File.read(file.path).should == %Q("NaN"\t"Infinity"\t"-Infinity"\n4\t5.0\t{"key":6}\n7\t8.0\t{"key":9}\n)
175
+ end
176
+ end
177
+ end
178
+
179
+ describe '#job_show' do
180
+ let(:job_id) { "12345" }
181
+
182
+ let :job_class do
183
+ Struct.new(:job_id,
184
+ :status,
185
+ :type,
186
+ :db_name,
187
+ :priority,
188
+ :retry_limit,
189
+ :result_url,
190
+ :query,
191
+ :cpu_time,
192
+ :result_size
193
+ )
194
+ end
195
+
196
+ let :job do
197
+ job_class.new(job_id,
198
+ nil,
199
+ :hive,
200
+ "db_name",
201
+ 1,
202
+ 1,
203
+ "test_url",
204
+ "test_qury",
205
+ 1,
206
+ 3
207
+ )
208
+ end
209
+
210
+ before do
211
+ job.stub(:finished?).and_return(true)
212
+
213
+ client = Object.new
214
+ client.stub(:job).with(job_id).and_return(job)
215
+ command.stub(:get_client).and_return(client)
216
+ end
217
+
218
+ context 'without --null option' do
219
+ it 'calls #show_result without null_expr option' do
220
+ command.stub(:show_result).with(job, nil, nil, nil, {:header=>false})
221
+ op = List::CommandParser.new("job:show", %w[job_id], %w[], nil, ["12345"], true)
222
+ command.job_show(op)
223
+ end
224
+ end
225
+
226
+ context 'with --null option' do
227
+ it 'calls #show_result with null_expr option' do
228
+ command.stub(:show_result).with(job, nil, nil, nil, {:header=>false, :null_expr=>"NULL"} )
229
+ op = List::CommandParser.new("job:show", %w[job_id], %w[], nil, ["12345", "--null", "NULL"], true)
230
+ command.job_show(op)
231
+ end
232
+
233
+ it 'calls #show_result with null_expr option' do
234
+ command.stub(:show_result).with(job, nil, nil, nil, {:header=>false, :null_expr=>'""'} )
235
+ op = List::CommandParser.new("job:show", %w[job_id], %w[], nil, ["12345", "--null", '""'], true)
236
+ command.job_show(op)
237
+ end
42
238
  end
43
239
  end
44
240
 
data/spec/td/command/table_spec.rb ADDED
@@ -0,0 +1,186 @@
1
+ require 'spec_helper'
2
+ require 'td/command/common'
3
+ require 'td/config'
4
+ require 'td/command/list'
5
+ require 'td/command/table'
6
+ require 'td/client/model'
7
+
8
+ module TreasureData::Command
9
+
10
+ describe 'table command' do
11
+ describe 'table_list' do
12
+ it 'lists tables in a database' do
13
+ client = Object.new
14
+
15
+ db = TreasureData::Database.new(client, 'full_access_db', nil, 1000, Time.now.to_i, Time.now.to_i, nil, 'full_access')
16
+
17
+ create_tables = lambda {|db_name|
18
+ (1..6).map {|i|
19
+ schema = TreasureData::Schema.new.from_json(JSON.parse('[]'))
20
+ TreasureData::Table.new(client, db_name, db_name + "_table_#{i}", 'log', schema, 500, Time.now.to_i, Time.now.to_i, 0, nil, nil, nil, nil, nil)
21
+ }
22
+ }
23
+ db_tables = create_tables.call(db.name)
24
+
25
+ client.stub(:tables).with(db.name).and_return(db_tables)
26
+
27
+ command = Class.new { include TreasureData::Command }.new
28
+ command.stub(:get_client).and_return(client)
29
+ command.stub(:get_database).and_return(db)
30
+
31
+ op = List::CommandParser.new('table:list', %w[], %w[db], false, %w(full_access_db), true)
32
+ expect {
33
+ command.table_list(op)
34
+ }.to_not raise_exception
35
+ end
36
+
37
+ it 'lists all tables in all databases' do
38
+ client = Object.new
39
+
40
+ qo_db = TreasureData::Database.new(client, 'query_only_db', nil, 2000, Time.now.to_i, Time.now.to_i, nil, 'query_only')
41
+ fa_db = TreasureData::Database.new(client, 'full_access_db', nil, 3000, Time.now.to_i, Time.now.to_i, nil, 'full_access')
42
+ own_db = TreasureData::Database.new(client, 'owner_db', nil, 4000, Time.now.to_i, Time.now.to_i, nil, 'owner')
43
+
44
+ create_tables = lambda {|db_name|
45
+ (1..6).map {|i|
46
+ schema = TreasureData::Schema.new.from_json(JSON.parse('[]'))
47
+ TreasureData::Table.new(client, db_name, db_name + "_table_#{i}", 'log', schema, 500, Time.now.to_i, Time.now.to_i, 0, nil, nil, nil, nil, nil)
48
+ }
49
+ }
50
+ qo_db_tables = create_tables.call(qo_db.name)
51
+ fa_db_tables = create_tables.call(fa_db.name)
52
+ own_db_tables = create_tables.call(own_db.name)
53
+
54
+ client.stub(:databases).and_return([qo_db, fa_db, own_db])
55
+
56
+ client.stub(:tables).with(qo_db.name).and_return(qo_db_tables)
57
+ client.stub(:tables).with(fa_db.name).and_return(fa_db_tables)
58
+ client.stub(:tables).with(own_db.name).and_return(own_db_tables)
59
+
60
+ command = Class.new { include TreasureData::Command }.new
61
+ command.stub(:get_client).and_return(client)
62
+
63
+ op = List::CommandParser.new('table:list', %w[], %w[db], false, %w(), true)
64
+ expect {
65
+ command.table_list(op)
66
+ }.to_not raise_exception
67
+ end
68
+
69
+ it 'avoids listing tables of an \'import_only\' database' do
70
+ client = Object.new
71
+
72
+ db = TreasureData::Database.new(client, 'import_only_db', nil, 1234, Time.now.to_i, Time.now.to_i, nil, 'import_only')
73
+
74
+ command = Class.new { include TreasureData::Command }.new
75
+ command.stub(:get_client).and_return(client)
76
+ command.stub(:get_database).and_return(db)
77
+
78
+ op = List::CommandParser.new('table:list', %w[], %w[db], false, %w(import_only_db), true)
79
+ expect {
80
+ command.table_list(op)
81
+ }.to_not raise_exception
82
+ end
83
+
84
+ it 'avoids listing tables of the \'import_only\' databases in the list' do
85
+ client = Object.new
86
+
87
+ io_db = TreasureData::Database.new(client, 'import_only_db', nil, 1000, Time.now.to_i, Time.now.to_i, nil, 'import_only')
88
+ qo_db = TreasureData::Database.new(client, 'query_only_db', nil, 2000, Time.now.to_i, Time.now.to_i, nil, 'query_only')
89
+ fa_db = TreasureData::Database.new(client, 'full_access_db', nil, 3000, Time.now.to_i, Time.now.to_i, nil, 'full_access')
90
+ own_db = TreasureData::Database.new(client, 'owner_db', nil, 4000, Time.now.to_i, Time.now.to_i, nil, 'owner')
91
+
92
+ create_tables = lambda {|db_name|
93
+ (1..6).map {|i|
94
+ schema = TreasureData::Schema.new.from_json(JSON.parse('[]'))
95
+ TreasureData::Table.new(client, db_name, db_name + "_table_#{i}", 'log', schema, 500, Time.now.to_i, Time.now.to_i, 0, nil, nil, nil, nil, nil)
96
+ }
97
+ }
98
+ qo_db_tables = create_tables.call(qo_db.name)
99
+ fa_db_tables = create_tables.call(fa_db.name)
100
+ own_db_tables = create_tables.call(own_db.name)
101
+
102
+ client.stub(:databases).and_return([io_db, qo_db, fa_db, own_db])
103
+
104
+ client.stub(:tables).with(io_db.name).and_raise("not permitted")
105
+ client.stub(:tables).with(qo_db.name).and_return(qo_db_tables)
106
+ client.stub(:tables).with(fa_db.name).and_return(fa_db_tables)
107
+ client.stub(:tables).with(own_db.name).and_return(own_db_tables)
108
+
109
+ command = Class.new { include TreasureData::Command }.new
110
+ command.stub(:get_client).and_return(client)
111
+
112
+ op = List::CommandParser.new('table:list', %w[], %w[db], false, %w(), true)
113
+ expect {
114
+ command.table_list(op)
115
+ }.to_not raise_exception
116
+ end
117
+
118
+ describe "number format" do
119
+ let(:number_raw) { "1234567" }
120
+ let(:number_format) { "1,234,567" }
121
+ let(:client) { double('null object').as_null_object }
122
+ let(:db) { TreasureData::Database.new(client, 'full_access_db', nil, 1000, Time.now.to_i, Time.now.to_i, nil, 'full_access') }
123
+ let(:command) do
124
+ command = Class.new { include TreasureData::Command }.new
125
+ command.stub(:get_client).and_return(client)
126
+ command.stub(:get_database).and_return(db)
127
+ command
128
+ end
129
+
130
+ before do
131
+ create_tables = lambda {|db_name|
132
+ (1..6).map {|i|
133
+ # NOTE: TreasureData::Helpers.format_with_delimiter uses `gsub!` to their argument
134
+ # the argument (in our case, `number_raw`) will be rewritten by them
135
+ # To avoid that behavior, pass `number_raw.dup` instead of `number_raw`
136
+ schema = TreasureData::Schema.new.from_json(JSON.parse('[]'))
137
+ TreasureData::Table.new(client, db_name, db_name + "_table_#{i}", 'log', schema, number_raw.dup, Time.now.to_i, Time.now.to_i, 0, nil, nil, nil, nil, nil)
138
+ }
139
+ }
140
+ db_tables = create_tables.call(db.name)
141
+ client.stub(:tables).with(db.name).and_return(db_tables)
142
+ end
143
+
144
+ subject do
145
+ # command.table_list uses `puts` to display its result,
146
+ # so temporarily swap $stdout with a StringIO to capture the output
147
+ backup = $stdout.dup
148
+ buf = StringIO.new
149
+ op = List::CommandParser.new('table:list', [], %w[db], false, options + %w(full_access_db), true)
150
+ begin
151
+ $stdout = buf
152
+ command.table_list(op)
153
+ $stdout.rewind
154
+ $stdout.read
155
+ ensure
156
+ $stdout = backup
157
+ end
158
+ end
159
+
160
+ context "without --format" do
161
+ let(:options) { [] }
162
+ it { should include(number_format) }
163
+ it { should_not include(number_raw) }
164
+ end
165
+
166
+ context "with --format table" do
167
+ let(:options) { %w(--format table) }
168
+ it { should include(number_format) }
169
+ it { should_not include(number_raw) }
170
+ end
171
+
172
+ context "with --format csv" do
173
+ let(:options) { %w(--format csv) }
174
+ it { should_not include(number_format) }
175
+ it { should include(number_raw) }
176
+ end
177
+
178
+ context "with --format tsv" do
179
+ let(:options) { %w(--format tsv) }
180
+ it { should_not include(number_format) }
181
+ it { should include(number_raw) }
182
+ end
183
+ end
184
+ end
185
+ end
186
+ end
@@ -105,6 +105,42 @@ module TreasureData::Updater
105
105
  end
106
106
  end
107
107
 
108
+ describe "current working directory doesn't change after call `jar_update`" do
109
+ shared_examples_for("jar_update behavior") do
110
+ it "doesn't change cwd" do
111
+ with_env('TD_TOOLBELT_JARUPDATE_ROOT', "https://localhost:#{@server.config[:Port]}") do
112
+ pwd = Dir.pwd
113
+ subject
114
+ expect(Dir.pwd).to eq pwd
115
+ end
116
+ end
117
+
118
+ it "don't exists td-import.jar.new" do
119
+ with_env('TD_TOOLBELT_JARUPDATE_ROOT', "https://localhost:#{@server.config[:Port]}") do
120
+ subject
121
+ end
122
+ tmpfile = File.join(TreasureData::Updater.jarfile_dest_path, 'td-import.jar.new')
123
+ expect(File.exists?(tmpfile)).to eq false
124
+ end
125
+ end
126
+
127
+ let(:updater) { JarUpdateTester.new }
128
+
129
+ subject { updater.kick }
130
+
131
+ context "not updated" do
132
+ before { updater.stub(:existent_jar_updated_time).and_return(Time.now) }
133
+
134
+ it_behaves_like "jar_update behavior"
135
+ end
136
+
137
+ context "updated" do
138
+ before { updater.stub(:existent_jar_updated_time).and_return(Time.at(0)) }
139
+
140
+ it_behaves_like "jar_update behavior"
141
+ end
142
+ end
143
+
108
144
  def with_proxy
109
145
  with_env('HTTP_PROXY', "http://localhost:#{@proxy_server.config[:Port]}") do
110
146
  yield
@@ -185,7 +221,7 @@ module TreasureData::Updater
185
221
  res.body = '<metadata><versioning><lastUpdated>20141204123456</lastUpdated><release>version</release></versioning></metadata>'
186
222
  end
187
223
 
188
- def jar
224
+ def jar(req, res)
189
225
  res['content-type'] = 'application/octet-stream'
190
226
  res.body = File.read(fixture_file('tmp.zip'))
191
227
  end
data/td.gemspec CHANGED
@@ -21,7 +21,7 @@ Gem::Specification.new do |gem|
21
21
  gem.add_dependency "yajl-ruby", "~> 1.1"
22
22
  gem.add_dependency "hirb", ">= 0.4.5"
23
23
  gem.add_dependency "parallel", "~> 0.6.1"
24
- gem.add_dependency "td-client", "~> 0.8.68"
24
+ gem.add_dependency "td-client", "~> 0.8.69"
25
25
  gem.add_dependency "td-logger", "~> 0.3.21"
26
26
  gem.add_dependency "rubyzip", "~> 0.9.9"
27
27
  gem.add_development_dependency "rake", "~> 0.9"
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: td
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.11.8.2
4
+ version: 0.11.9
5
5
  platform: ruby
6
6
  authors:
7
7
  - Treasure Data, Inc.
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2015-03-24 00:00:00.000000000 Z
11
+ date: 2015-04-16 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: msgpack
@@ -102,14 +102,14 @@ dependencies:
102
102
  requirements:
103
103
  - - "~>"
104
104
  - !ruby/object:Gem::Version
105
- version: 0.8.68
105
+ version: 0.8.69
106
106
  type: :runtime
107
107
  prerelease: false
108
108
  version_requirements: !ruby/object:Gem::Requirement
109
109
  requirements:
110
110
  - - "~>"
111
111
  - !ruby/object:Gem::Version
112
- version: 0.8.68
112
+ version: 0.8.69
113
113
  - !ruby/object:Gem::Dependency
114
114
  name: td-logger
115
115
  requirement: !ruby/object:Gem::Requirement
@@ -204,6 +204,7 @@ files:
204
204
  - lib/td/command/apikey.rb
205
205
  - lib/td/command/bulk_import.rb
206
206
  - lib/td/command/common.rb
207
+ - lib/td/command/connector.rb
207
208
  - lib/td/command/db.rb
208
209
  - lib/td/command/export.rb
209
210
  - lib/td/command/help.rb
@@ -239,7 +240,8 @@ files:
239
240
  - spec/spec_helper.rb
240
241
  - spec/td/command/import_spec.rb
241
242
  - spec/td/command/job_spec.rb
242
- - spec/td/command_sched_spec.rb
243
+ - spec/td/command/sched_spec.rb
244
+ - spec/td/command/table_spec.rb
243
245
  - spec/td/common_spec.rb
244
246
  - spec/td/fixture/ca.cert
245
247
  - spec/td/fixture/server.cert
@@ -282,7 +284,8 @@ test_files:
282
284
  - spec/spec_helper.rb
283
285
  - spec/td/command/import_spec.rb
284
286
  - spec/td/command/job_spec.rb
285
- - spec/td/command_sched_spec.rb
287
+ - spec/td/command/sched_spec.rb
288
+ - spec/td/command/table_spec.rb
286
289
  - spec/td/common_spec.rb
287
290
  - spec/td/fixture/ca.cert
288
291
  - spec/td/fixture/server.cert