td 0.10.72 → 0.10.73

data/.gitignore CHANGED
@@ -1,5 +1,5 @@
 .bundle
-build/td-java
+build/td-bulk-import-java
 Gemfile.lock
 vendor/*
 *~
data/ChangeLog CHANGED
@@ -1,3 +1,11 @@
+== 2013-03-15 version 0.10.73
+
+* Add --sampling option to query subcommand
+* Add --show-bytes option to table:list subcommand
+* Update README and Rakefile for td-toolbelt building
+* Show warn message if table name is Hive reserved keyword
+
+
 == 2013-02-27 version 0.10.72
 
 * Add -q(--query) option to query subcommand
@@ -45,7 +45,7 @@ Install following binary packages:
 
 * MinGW with MSYS Basic System and using mingw-get-inst
 * Git for Windows, with Windows Command Prompt support
-* Ruby 1.9.2 using RubyInstaller for Windows, with PATH update
+* Ruby ruby-1.9.3p327 using RubyInstaller for Windows, with PATH update
 * Inno Setup 5
 
 Then run following commands on MinGW Shell:
data/Rakefile CHANGED
@@ -37,7 +37,7 @@ def install_use_gems(target_dir)
   ENV['GEM_HOME'] = target_dir
   ENV['GEM_PATH'] = ''
   USE_GEMS.each {|gem|
-    system "gem install '#{gem}' --no-rdoc --no-ri"
+    system "gem install '#{gem}' --no-rdoc --no-ri" || (exit 1)
   }
 end
 
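Note on the Rakefile change above: Kernel#system returns false (or nil) when the spawned command fails, which is what makes an exit guard on its return value useful. A minimal sketch of that pattern, with explicit parentheses so the guard unambiguously applies to system's return value rather than to its argument (the gem list below is illustrative only):

    # Abort the build as soon as a shelled-out command fails; system() returns
    # false when the child exits non-zero and nil when it cannot be started.
    %w[msgpack yajl-ruby].each do |gem|
      system("gem install '#{gem}' --no-rdoc --no-ri") or exit(1)
    end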
@@ -1 +1 @@
-95b0662ffcf4f2407c6044478abd288a98be448d Sat Feb 23 22:43:05 2013 +0900
+5be770a4fc06c741ab70716e819c8930f134db41 Thu Feb 28 14:22:13 2013 +0900
@@ -409,8 +409,7 @@ module Command
   end
 
   def bulk_import_upload_parts2(op)
-    upload_opts = upload_parts2_config(op)
-    prepare_opts = prepare_parts2_config(op)
+    opts = upload_parts2_config(op)
 
     # java command
     javacmd = 'java'
@@ -432,13 +431,14 @@ module Command
     # make system properties
     sysprops = []
     sysprops.concat(upload_parts2_sysprops(opts))
-    sysprops.concat(prepare_parts2_sysprops(opts))
 
     # make application arguments
+    app_args = []
     app_args << 'com.treasure_data.tools.BulkImportTool'
     app_args << 'upload_parts'
-    app_args << files
+    app_args << opts[21]
 
+    # TODO consider parameters including spaces; don't use join(' ')
     command = "#{javacmd} #{jvm_opts.join(' ')} #{app_opts.join(' ')} #{sysprops.join(' ')} #{app_args.join(' ')}"
 
     exec command
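The TODO added above notes that joining arguments with ' ' breaks once a value contains spaces. A hedged sketch of the alternative it hints at: pass the argv as separate elements to Kernel#exec, so no shell word-splitting happens (all values below are illustrative stand-ins for what the surrounding method computes):

    # Each element reaches the JVM untouched, spaces and all.
    javacmd  = 'java'
    jvm_opts = ['-Xmx1024m']                                      # illustrative
    sysprops = ['-Dtd.bulk_import.prepare_parts.format=csv']      # illustrative
    files    = ['data with spaces.csv']                           # illustrative
    argv = [javacmd] + jvm_opts + sysprops +
           ['com.treasure_data.tools.BulkImportTool', 'upload_parts'] + files
    exec(*argv)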
@@ -472,7 +472,7 @@ module Command
     app_args = []
     app_args << 'com.treasure_data.tools.BulkImportTool'
     app_args << 'prepare_parts'
-    app_args << files
+    app_args << opts[18]
 
     # TODO consider parameters including spaces; don't use join(' ')
     command = "#{javacmd} #{jvm_opts.join(' ')} #{app_opts.join(' ')} #{sysprops.join(' ')} #{app_args.join(' ')}"
@@ -484,53 +484,95 @@ module Command
   def prepare_parts2_sysprops(opts)
     sysprops = []
     sysprops << "-Dtd.bulk_import.prepare_parts.format=#{opts[0]}"
-    sysprops << "-Dtd.bulk_import.prepare_parts.columns=#{opts[1]}" if opts[1]
-    sysprops << "-Dtd.bulk_import.prepare_parts.column-types=#{opts[2]}" if opts[2]
-    sysprops << "-Dtd.bulk_import.prepare_parts.column-header=#{opts[3]}" if opts[3]
-    sysprops << "-Dtd.bulk_import.prepare_parts.time-column=#{opts[4]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.compression=#{opts[1]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.encoding=#{opts[2]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.time-column=#{opts[3]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.time-format=#{opts[4]}"
     sysprops << "-Dtd.bulk_import.prepare_parts.time-value=#{opts[5].to_s}" if opts[5]
-    sysprops << "-Dtd.bulk_import.prepare_parts.split-size=#{opts[6]}"
-    sysprops << "-Dtd.bulk_import.prepare_parts.output-dir=#{opts[7]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.output-dir=#{opts[6]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.split-size=#{opts[7]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.error-record-output=#{opts[8]}" if opts[8]
+    sysprops << "-Dtd.bulk_import.prepare_parts.dry-run=#{opts[9]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.delimiter=#{opts[10]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.newline=#{opts[11]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.column-header=#{opts[12]}" if opts[3]
+    sysprops << "-Dtd.bulk_import.prepare_parts.columns=#{opts[13]}" if opts[1]
+    sysprops << "-Dtd.bulk_import.prepare_parts.column-types=#{opts[14]}" if opts[2]
+    sysprops << "-Dtd.bulk_import.prepare_parts.type-conversion-error=#{opts[15]}" if opts[15]
+    sysprops << "-Dtd.bulk_import.prepare_parts.exclude-columns=#{opts[16]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.only-columns=#{opts[17]}"
     sysprops
   end
 
   private
   def upload_parts2_sysprops(opts)
     sysprops = []
-    sysprops << "td.bulk_import.upload_parts.parallel=#{opts[0]}"
-    sysprops << "td.bulk_import.upload_parts.auto-perform=#{opts[1]}"
-    sysprops << "td.bulk_import.upload_parts.auto-commit=#{opts[2]}"
-    sysprops << "td.bulk_import.upload_parts.retrycount=10"
-    sysprops << "td.bulk_import.upload_parts.waitsec=1"
+    sysprops << "-Dtd.bulk_import.prepare_parts.format=#{opts[0]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.compression=#{opts[1]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.encoding=#{opts[2]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.time-column=#{opts[3]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.time-format=#{opts[4]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.time-value=#{opts[5].to_s}" if opts[5]
+    sysprops << "-Dtd.bulk_import.prepare_parts.output-dir=#{opts[6]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.split-size=#{opts[7]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.error-record-output=#{opts[8]}" if opts[8]
+    sysprops << "-Dtd.bulk_import.prepare_parts.dry-run=#{opts[9]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.delimiter=#{opts[10]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.newline=#{opts[11]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.column-header=#{opts[12]}" if opts[3]
+    sysprops << "-Dtd.bulk_import.prepare_parts.columns=#{opts[13]}" if opts[1]
+    sysprops << "-Dtd.bulk_import.prepare_parts.column-types=#{opts[14]}" if opts[2]
+    sysprops << "-Dtd.bulk_import.prepare_parts.type-conversion-error=#{opts[15]}" if opts[15]
+    sysprops << "-Dtd.bulk_import.prepare_parts.exclude-columns=#{opts[16]}"
+    sysprops << "-Dtd.bulk_import.prepare_parts.only-columns=#{opts[17]}"
+
+    sysprops << "-Dtd.bulk_import.upload_parts.auto-perform=#{opts[18]}"
+    sysprops << "-Dtd.bulk_import.upload_parts.auto-commit=#{opts[19]}"
+    sysprops << "-Dtd.bulk_import.upload_parts.parallel=#{opts[20]}"
+    sysprops << "-Dtd.bulk_import.upload_parts.retrycount=10"
+    sysprops << "-Dtd.bulk_import.upload_parts.waitsec=1"
+    sysprops << "-Dtd.api.key=#{get_client.apikey}"
     sysprops
   end
 
   private
   def prepare_parts2_config(op)
     format = 'csv'
-    columns = nil
-    column_types = nil
-    has_header = nil
+    compress = 'none'
+    encoding = 'utf-8'
     time_column = 'time'
+    time_format = nil
     time_value = nil
-    split_size_kb = PART_SPLIT_SIZE / 1024 # kb
     outdir = nil
+    split_size_kb = PART_SPLIT_SIZE / 1024 # kb
+    error_record_output = nil
+    dry_run = false
+
+    delimiter = ','
+    newline = 'CRLF'
+    column_header = nil
+    columns = nil
+    column_types = nil
+    type_conversion_error = 'skip'
+    exclude_columns = nil
+    only_columns = nil
 
-    op.on('-f', '--format NAME', 'source file format [csv]') {|s|
+    # prepare_parts
+    op.on('-f', '--format NAME', 'source file format [csv, tsv]; default=csv') {|s|
       format = s
     }
-    op.on('-h', '--columns NAME,NAME,...', 'column names (use --column-header instead if the first line has column names)') {|s|
-      columns = s
-    }
-    op.on('--column-types TYPE,TYPE,...', 'column types [string, int, long]') {|s|
-      column_types = s
+    op.on('-C', '--compress TYPE', 'compressed type [gzip, none]; default=auto detect') {|s|
+      compress = s
     }
-    op.on('-H', '--column-header', 'first line includes column names', TrueClass) {|b|
-      has_header = b
+    op.on('-e', '--encoding TYPE', 'encoding type [utf-8]') {|s|
+      encoding = s
     }
     op.on('-t', '--time-column NAME', 'name of the time column') {|s|
       time_column = s
     }
+    op.on('-T', '--time-format', 'STRF_FORMAT; default=auto detect') {|s|
+      time_format = s
+    }
     op.on('--time-value TIME', 'long value of the time column') {|s|
       if s.to_i.to_s == s
         time_value = s.to_i
@@ -539,46 +581,179 @@ module Command
         time_value = Time.parse(s).to_i
       end
     }
+    op.on('-o', '--output DIR', 'output directory') {|s|
+      outdir = s
+    }
     op.on('-s', '--split-size SIZE_IN_KB', "size of each parts (default: #{split_size_kb})", Integer) {|i|
       split_size_kb = i
     }
-    op.on('-o', '--output DIR', 'output directory') {|s|
-      outdir = s
+    op.on('--error-record-output FILE', 'error records output file; default=NULL output stream') {|s|
+      error_record_outdir = s
+    }
+    op.on('--dry-run', 'show samples as JSON and exit', FalseClass) {|b|
+      dry_run = b
+    }
+    op.on('--delimiter CHAR', 'delimiter CHAR; default="," at csv, "\t" at tsv') {|s|
+      delimiter = s
+    }
+    op.on('--newline', 'newline [CRLR, LR, CR]; default=CRLF') {|s|
+      newline = s
+    }
+    op.on('-H', '--column-header', 'first line includes column names', TrueClass) {|b|
+      column_header = b
+    }
+    op.on('-h', '--columns NAME,NAME,...', 'column names (use --column-header instead if the first line has column names)') {|s|
+      columns = s
+    }
+    op.on('--column-types TYPE,TYPE,...', 'column types [string, int, long]') {|s|
+      column_types = s
+    }
+    op.on('--type-conversion-error TYPE', 'type conversion error [skip,null]; default=skip') {|s|
+      type_conversion_error = s
+    }
+    op.on('--exclude-columns NAME,NAME,...', 'exclude columns') {|s|
+      exclude_columns = s
+    }
+    op.on('--only-columns NAME,NAME,...', 'only columns') {|s|
+      only_columns = s
     }
 
     files = op.cmd_parse
     files = [files] unless files.is_a?(Array) # TODO ruby 1.9
 
     # options validation
-    unless column_types
-      $stderr.puts "--column-types TYPE,TYPE,... option is required."
-      exit 1
-    end
+    #unless column_types
+    #  $stderr.puts "--column-types TYPE,TYPE,... option is required."
+    #  exit 1
+    #end
     unless outdir
       $stderr.puts "-o, --output DIR option is required."
       exit 1
     end
 
-    return [ format, columns, column_types, has_header, time_column, time_value, split_size_kb, outdir ]
+    return [ format, compress, encoding,
+             time_column, time_format, time_value,
+             outdir, split_size_kb, error_record_output, dry_run,
+             delimiter, newline, column_header, columns, column_types,
+             type_conversion_error, exclude_columns, only_columns, files ]
   end
 
   private
   def upload_parts2_config(op)
-    auto_perform = true
+    format = 'csv'
+    compress = 'none'
+    encoding = 'utf-8'
+    time_column = 'time'
+    time_format = nil
+    time_value = nil
+    outdir = nil
+    split_size_kb = PART_SPLIT_SIZE / 1024 # kb
+    error_record_output = nil
+    dry_run = false
+
+    delimiter = ','
+    newline = 'CRLF'
+    column_header = nil
+    columns = nil
+    column_types = nil
+    type_conversion_error = 'skip'
+    exclude_columns = nil
+    only_columns = nil
+
+    auto_perform = false
     auto_commit = false
-    paraallel = 2
+    parallel = 2
 
+    # prepare_parts
+    op.on('-f', '--format NAME', 'source file format [csv, tsv]; default=csv') {|s|
+      format = s
+    }
+    op.on('-C', '--compress TYPE', 'compressed type [gzip, none]; default=auto detect') {|s|
+      compress = s
+    }
+    op.on('-e', '--encoding TYPE', 'encoding type [utf-8]') {|s|
+      encoding = s
+    }
+    op.on('-t', '--time-column NAME', 'name of the time column') {|s|
+      time_column = s
+    }
+    op.on('-T', '--time-format', 'STRF_FORMAT; default=auto detect') {|s|
+      time_format = s
+    }
+    op.on('--time-value TIME', 'long value of the time column') {|s|
+      if s.to_i.to_s == s
+        time_value = s.to_i
+      else
+        require 'time'
+        time_value = Time.parse(s).to_i
+      end
+    }
+    op.on('-o', '--output DIR', 'output directory') {|s|
+      outdir = s
+    }
+    op.on('-s', '--split-size SIZE_IN_KB', "size of each parts (default: #{split_size_kb})", Integer) {|i|
+      split_size_kb = i
+    }
+    op.on('--error-record-output FILE', 'error records output file; default=NULL output stream') {|s|
+      error_record_outdir = s
+    }
+    op.on('--dry-run', 'show samples as JSON and exit', FalseClass) {|b|
+      dry_run = b
+    }
+    op.on('--delimiter CHAR', 'delimiter CHAR; default="," at csv, "\t" at tsv') {|s|
+      delimiter = s
+    }
+    op.on('--newline', 'newline [CRLR, LR, CR]; default=CRLF') {|s|
+      newline = s
+    }
+    op.on('-H', '--column-header', 'first line includes column names', TrueClass) {|b|
+      column_header = b
+    }
+    op.on('-h', '--columns NAME,NAME,...', 'column names (use --column-header instead if the first line has column names)') {|s|
+      columns = s
+    }
+    op.on('--column-types TYPE,TYPE,...', 'column types [string, int, long]') {|s|
+      column_types = s
+    }
+    op.on('--type-conversion-error TYPE', 'type conversion error [skip,null]; default=skip') {|s|
+      type_conversion_error = s
+    }
+    op.on('--exclude-columns NAME,NAME,...', 'exclude columns') {|s|
+      exclude_columns = s
+    }
+    op.on('--only-columns NAME,NAME,...', 'only columns') {|s|
+      only_columns = s
+    }
+    # upload_parts
     op.on('--auto-perform', 'perform bulk import job automatically', TrueClass) {|b|
       auto_perform = b
     }
-    op.on('--auto-commit', 'perform bulk import job automatically', FalseClass) {|b|
-      auto_perform = b
+    op.on('--auto-commit', 'commit bulk import job automatically', FalseClass) {|b|
+      auto_commit = b
     }
-    op.on('--parallel NUM', 'perform uploading in parallel (default: 2; max 8)', Integer) {|i|
+    op.on('--parallel NUM', 'upload in parallel (default: 2; max 8)', Integer) {|i|
       parallel = i
     }
 
-    return [ auto_perform, auto_commit, parallel ]
+    files = op.cmd_parse
+    files = [files] unless files.is_a?(Array) # TODO ruby 1.9
+
+    # options validation
+    #unless column_types
+    #  $stderr.puts "--column-types TYPE,TYPE,... option is required."
+    #  exit 1
+    #end
+    unless outdir
+      $stderr.puts "-o, --output DIR option is required."
+      exit 1
+    end
+
+    return [ format, compress, encoding,
+             time_column, time_format, time_value,
+             outdir, split_size_kb, error_record_output, dry_run,
+             delimiter, newline, column_header, columns, column_types,
+             type_conversion_error, exclude_columns, only_columns,
+             auto_perform, auto_commit, parallel, files ]
   end
 
   private
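Because the command methods index these returned arrays numerically (opts[18] and opts[21] above), a small cross-reference can help when reading the hunks; it is derived only from the two return statements, and the constant names below are not part of the gem:

    PREPARE_OPTS = %w[
      format compress encoding time_column time_format time_value
      outdir split_size_kb error_record_output dry_run
      delimiter newline column_header columns column_types
      type_conversion_error exclude_columns only_columns files
    ]
    UPLOAD_OPTS = PREPARE_OPTS[0..17] + %w[auto_perform auto_commit parallel files]

    p PREPARE_OPTS.index('files')  # => 18, as used by bulk_import_prepare_parts2
    p UPLOAD_OPTS.index('files')   # => 21, as used by bulk_import_upload_parts2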
@@ -15,6 +15,7 @@ module Command
     priority = nil
     retry_limit = nil
     query = nil
+    sampling_all = nil
 
     op.on('-g', '--org ORGANIZATION', "issue the query under this organization") {|s|
       org = s
@@ -58,6 +59,9 @@ module Command
     op.on('-q', '--query PATH', 'use file instead of inline query') {|s|
       query = File.open(s) { |f| f.read.strip }
     }
+    op.on('--sampling DENOMINATOR', 'enable random sampling to reduce records 1/DENOMINATOR', Integer) {|i|
+      sampling_all = i
+    }
 
     sql = op.cmd_parse
 
@@ -89,6 +93,7 @@ module Command
 
     opts = {}
     opts['organization'] = org if org
+    opts['sampling_all'] = sampling_all if sampling_all
     job = client.query(db_name, sql, result_url, priority, retry_limit, opts)
 
     $stderr.puts "Job #{job.job_id} is queued."
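The new --sampling flag simply forwards its integer argument as opts['sampling_all'] to Client#query, as the hunk above shows. A hedged sketch of the equivalent direct client call; the require path, database name, and query text are assumptions, while the #query signature mirrors the diff:

    require 'td-client'   # assumed entry point of the td-client gem

    client = TreasureData::Client.new(ENV['TD_API_KEY'])
    opts = { 'sampling_all' => 100 }   # keep roughly 1 in 100 records
    job = client.query('mydb', 'SELECT COUNT(1) FROM www_access', nil, nil, nil, opts)
    $stderr.puts "Job #{job.job_id} is queued."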
@@ -1,6 +1,15 @@
 
 module TreasureData
 module Command
+  HIVE_RESERVED_KEYWORDS = %W[
+    TRUE FALSE ALL AND OR NOT LIKE ASC DESC ORDER BY GROUP WHERE FROM AS SELECT DISTINCT INSERT OVERWRITE
+    OUTER JOIN LEFT RIGHT FULL ON PARTITION PARTITIONS TABLE TABLES TBLPROPERTIES SHOW MSCK DIRECTORY LOCAL
+    TRANSFORM USING CLUSTER DISTRIBUTE SORT UNION LOAD DATA INPATH IS NULL CREATE EXTERNAL ALTER DESCRIBE
+    DROP REANME TO COMMENT BOOLEAN TINYINT SMALLINT INT BIGINT FLOAT DOUBLE DATE DATETIME TIMESTAMP STRING
+    BINARY ARRAY MAP REDUCE PARTITIONED CLUSTERED SORTED INTO BUCKETS ROW FORMAT DELIMITED FIELDS TERMINATED
+    COLLECTION ITEMS KEYS LINES STORED SEQUENCEFILE TEXTFILE INPUTFORMAT OUTPUTFORMAT LOCATION TABLESAMPLE BUCKET OUT
+    OF CAST ADD REPLACE COLUMNS RLIKE REGEXP TEMPORARY FUNCTION EXPLAIN EXTENDED SERDE WITH SERDEPROPERTIES LIMIT SET TBLPROPERTIES
+  ]
 
   def table_create(op)
     db_name, table_name = op.cmd_parse
@@ -8,6 +17,12 @@ module Command
     #API.validate_database_name(db_name)
     API.validate_table_name(table_name)
 
+    if HIVE_RESERVED_KEYWORDS.include?(table_name.upcase)
+      $stderr.puts "* WARNING *"
+      $stderr.puts "  '#{table_name}' is a reserved keyword in Hive. We recommend renaming the table."
+      $stderr.puts "  For a list of all reserved keywords, see our FAQ: http://docs.treasure-data.com/articles/faq"
+    end
+
     client = get_client
 
     begin
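A standalone sketch of the warning logic added above (the keyword list is abbreviated here, and warn_if_reserved is a hypothetical helper, not part of the gem):

    HIVE_RESERVED_KEYWORDS = %W[ SELECT TABLE ORDER GROUP WHERE FROM LIMIT ]

    def warn_if_reserved(table_name)
      return unless HIVE_RESERVED_KEYWORDS.include?(table_name.upcase)
      $stderr.puts "* WARNING *"
      $stderr.puts "  '#{table_name}' is a reserved keyword in Hive. We recommend renaming the table."
    end

    warn_if_reserved('order')    # prints the warning
    warn_if_reserved('events')   # no output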
@@ -72,9 +87,14 @@ module Command
     require 'parallel'
 
     num_threads = 4
+    show_size_in_bytes = false
+
     op.on('-n', '--num_threads VAL', 'number of threads to get list in parallel') { |i|
       num_threads = Integer(i)
     }
+    op.on('--show-bytes', 'show estimated table size in bytes') {
+      show_size_in_bytes = true
+    }
 
     db_name = op.cmd_parse
 
@@ -93,8 +113,12 @@ module Command
         pschema = table.schema.fields.map {|f|
           "#{f.name}:#{f.type}"
         }.join(', ')
-        rows << {:Database => db.name, :Table => table.name, :Type => table.type.to_s, :Count => table.count.to_s,
-                 :Size => table.estimated_storage_size_string, 'Last import' => table.last_import ? table.last_import.localtime : nil, :Schema => pschema}
+        rows << {
+          :Database => db.name, :Table => table.name, :Type => table.type.to_s, :Count => table.count.to_s.gsub(/(?<=\d)(?=(?:\d{3})+(?!\d))/, ','),
+          :Size => show_size_in_bytes ? table.estimated_storage_size.to_s.gsub(/(?<=\d)(?=(?:\d{3})+(?!\d))/, ',') : table.estimated_storage_size_string,
+          'Last import' => table.last_import ? table.last_import.localtime : nil,
+          :Schema => pschema
+        }
       }
     }
     rows = rows.sort_by {|map|
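The Count and Size formatting above relies on a zero-width regex that inserts a comma before every group of three trailing digits. In isolation (Ruby 1.9+, which supports the lookbehind):

    def with_thousands_separator(n)
      n.to_s.gsub(/(?<=\d)(?=(?:\d{3})+(?!\d))/, ',')
    end

    puts with_thousands_separator(1234567)   # => 1,234,567
    puts with_thousands_separator(512)       # => 512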
@@ -1,5 +1,5 @@
 module TreasureData
 
-VERSION = '0.10.72'
+VERSION = '0.10.73'
 
 end
metadata CHANGED
@@ -1,18 +1,20 @@
 --- !ruby/object:Gem::Specification
 name: td
 version: !ruby/object:Gem::Version
-  version: 0.10.72
+  version: 0.10.73
+  prerelease:
 platform: ruby
 authors:
 - Treasure Data, Inc.
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2013-02-27 00:00:00.000000000 Z
+date: 2013-03-15 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: msgpack
   requirement: !ruby/object:Gem::Requirement
+    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
@@ -20,6 +22,7 @@ dependencies:
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
+    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
@@ -27,6 +30,7 @@ dependencies:
 - !ruby/object:Gem::Dependency
   name: yajl-ruby
   requirement: !ruby/object:Gem::Requirement
+    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
@@ -34,6 +38,7 @@ dependencies:
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
+    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
@@ -41,20 +46,23 @@ dependencies:
 - !ruby/object:Gem::Dependency
   name: hirb
   requirement: !ruby/object:Gem::Requirement
+    none: false
     requirements:
-    - - '>='
+    - - ! '>='
       - !ruby/object:Gem::Version
         version: 0.4.5
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
+    none: false
     requirements:
-    - - '>='
+    - - ! '>='
       - !ruby/object:Gem::Version
         version: 0.4.5
 - !ruby/object:Gem::Dependency
   name: parallel
   requirement: !ruby/object:Gem::Requirement
+    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
@@ -62,6 +70,7 @@ dependencies:
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
+    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
@@ -69,6 +78,7 @@ dependencies:
 - !ruby/object:Gem::Dependency
   name: td-client
   requirement: !ruby/object:Gem::Requirement
+    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
@@ -76,6 +86,7 @@ dependencies:
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
+    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
@@ -83,6 +94,7 @@ dependencies:
 - !ruby/object:Gem::Dependency
   name: td-logger
   requirement: !ruby/object:Gem::Requirement
+    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
@@ -90,6 +102,7 @@ dependencies:
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
+    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
@@ -97,6 +110,7 @@ dependencies:
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement
+    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
@@ -104,6 +118,7 @@ dependencies:
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
+    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
@@ -111,6 +126,7 @@ dependencies:
 - !ruby/object:Gem::Dependency
   name: rspec
   requirement: !ruby/object:Gem::Requirement
+    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
@@ -118,6 +134,7 @@ dependencies:
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
+    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
@@ -125,6 +142,7 @@ dependencies:
 - !ruby/object:Gem::Dependency
   name: simplecov
   requirement: !ruby/object:Gem::Requirement
+    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
@@ -132,6 +150,7 @@ dependencies:
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
+    none: false
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
@@ -208,26 +227,33 @@ files:
 - td.gemspec
 homepage: http://treasure-data.com/
 licenses: []
-metadata: {}
 post_install_message:
 rdoc_options: []
 require_paths:
 - lib
 required_ruby_version: !ruby/object:Gem::Requirement
+  none: false
   requirements:
-  - - '>='
+  - - ! '>='
     - !ruby/object:Gem::Version
       version: '0'
+      segments:
+      - 0
+      hash: 4066201097881457752
 required_rubygems_version: !ruby/object:Gem::Requirement
+  none: false
   requirements:
-  - - '>='
+  - - ! '>='
     - !ruby/object:Gem::Version
       version: '0'
+      segments:
+      - 0
+      hash: 4066201097881457752
 requirements: []
 rubyforge_project:
-rubygems_version: 2.0.0
+rubygems_version: 1.8.23
 signing_key:
-specification_version: 4
+specification_version: 3
 summary: CLI to manage data on Treasure Data, the Hadoop-based cloud data warehousing
 test_files:
 - spec/file_reader/filter_spec.rb
checksums.yaml DELETED
@@ -1,7 +0,0 @@
----
-SHA1:
-  metadata.gz: 756988829cba729420d255670f1b7db27c7f3bc3
-  data.tar.gz: ccf55b0be4c0ee2fd934ad822034336ad33a80da
-SHA512:
-  metadata.gz: 87c00a05581a54d35b4db41969a74156e5c522dbb3285dc9c615a4619e33dc6efd715f8c58b72ed82d19a841d80967be641933b93b0ab12c7af0292ca51b5faa
-  data.tar.gz: 735d45002439d770e565521033c9f470e846b93a4a3d904048c50fdb12f7b93c0c792787ea535aae923c1bc28adc2571b7672a0d3c36003dd23e332b4314e525