embulk-output-td 0.3.5 → 0.3.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 063c7abaa3b8821de5ab2e8ccdabaa7b9bcfa301
- data.tar.gz: 60d13492de0d417e94fc80bb5ac1cd65e0b7810f
+ metadata.gz: ecdc89638bd881305f9a2103c0f93949487fd7cf
+ data.tar.gz: fbb66bd29c049357f814baf21c649dc0ba6aab1f
  SHA512:
- metadata.gz: 5e3d7967b0d474e0077c153fc13ccd451b6672aebfa3258398911c897b8bb5d86c034525bca0e015c53c45e8b7df350d0976f92a96d51dcb4e0a66db5548d07a
- data.tar.gz: 7c03578dd33048e89376f185bbf161bc1773b7f660e0c68a6cc0e95ad71d7f68465c979d001eb57168af48c67536f96020aef5db2c128be3870a6e940f52791e
+ metadata.gz: ff210b5c00c02e08392c162f5c8ddeb1bebf7ae3432074ce7a17d8a7890692664799ea667262f00548ac9c9e4cc775191138a7f6ec0cd9d835279b615d1594f8
+ data.tar.gz: 2146190b46c8eb9ec89337ff4c03aab16097e9d82bc2c55f36271d0501eef986f8a1d23bcea1dc2db8d7222d90a00501356f4e4f8a9864643a16dd78d264599c
data/CHANGELOG.md CHANGED
@@ -1,3 +1,8 @@
+ ## 0.3.6 - 2016-07-11
+
+ * [new feature] Http proxy config from system properties [#47](https://github.com/treasure-data/embulk-output-td/pull/47)
+ * [maintenance] Remove redundant logging during run stage [#48](https://github.com/treasure-data/embulk-output-td/pull/48)
+
  ## 0.3.5 - 2016-06-29

  * [new feature] Enable user/password for `http_proxy` option [#46](https://github.com/treasure-data/embulk-output-td/pull/46)
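For context on [#47]: the TdOutputPlugin.java changes further below make the plugin read the standard JVM proxy system properties (`http.proxyHost`, `http.proxyPort`, `http.proxyUser`, `http.proxyPassword`, and their `https.*` counterparts) and prefer them over the plugin's own `http_proxy` option. A minimal illustrative sketch of the properties involved follows; the host and credential values are hypothetical, the class is not part of the gem, and in practice the properties would be passed to the Embulk JVM as `-D` options (or via a mechanism such as `JAVA_TOOL_OPTIONS`) rather than set programmatically.

```java
// Sketch only: these standard JVM networking properties are what the new
// newProxyConfig() in TdOutputPlugin.java consults before falling back to
// the plugin's http_proxy option. All values below are hypothetical.
public class ProxySystemPropertiesExample
{
    public static void main(String[] args)
    {
        System.setProperty("http.proxyHost", "proxy.example.com");
        System.setProperty("http.proxyPort", "8080");        // omitted -> the plugin assumes 80
        System.setProperty("http.proxyUser", "proxy_user");   // optional
        System.setProperty("http.proxyPassword", "secret");   // optional

        // If https.proxyHost were also set, the plugin would prefer the https.*
        // group and assume port 443 when https.proxyPort is missing.
        System.out.println(System.getProperty("http.proxyHost")
                + ":" + System.getProperty("http.proxyPort"));
    }
}
```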
data/build.gradle CHANGED
@@ -16,7 +16,7 @@ configurations {
  provided
  }

- version = "0.3.5"
+ version = "0.3.6"

  compileJava.options.encoding = 'UTF-8' // source encoding
  sourceCompatibility = 1.7
data/embulk-output-td.gemspec CHANGED
@@ -1,7 +1,7 @@

  Gem::Specification.new do |spec|
  spec.name = "embulk-output-td"
- spec.version = "0.3.5"
+ spec.version = "0.3.6"
  spec.authors = ["Muga Nishizawa"]
  spec.summary = %[TreasureData output plugin for Embulk]
  spec.description = %[TreasureData output plugin is an Embulk plugin that loads records to TreasureData read by any input plugins. Search the input plugins by 'embulk-output' keyword.]
data/src/main/java/org/embulk/output/td/RecordWriter.java CHANGED
@@ -58,7 +58,7 @@ public class RecordWriter

  public static void validateSchema(Logger log, TdOutputPlugin.PluginTask task, Schema schema)
  {
- new FieldWriterSet(log, task, schema);
+ new FieldWriterSet(log, task, schema, false);
  }

  @VisibleForTesting
data/src/main/java/org/embulk/output/td/TdOutputPlugin.java CHANGED
@@ -8,6 +8,7 @@ import java.util.ArrayList;
  import java.util.Map;
  import java.util.HashMap;
  import java.nio.charset.StandardCharsets;
+ import java.util.Properties;
  import java.util.regex.Pattern;
  import java.util.zip.GZIPInputStream;

@@ -17,6 +18,8 @@ import javax.validation.constraints.Max;
  import com.google.common.annotations.VisibleForTesting;
  import com.google.common.base.Function;
  import com.google.common.base.Optional;
+ import com.google.common.base.Preconditions;
+ import com.google.common.base.Predicates;
  import com.google.common.base.Throwables;
  import com.fasterxml.jackson.annotation.JsonCreator;
  import com.fasterxml.jackson.annotation.JsonValue;
@@ -57,6 +60,9 @@ import org.msgpack.core.MessageUnpacker;
  import org.msgpack.value.Value;
  import org.slf4j.Logger;

+ import static com.google.common.base.Optional.fromNullable;
+ import static java.lang.Integer.parseInt;
+
  public class TdOutputPlugin
  implements OutputPlugin
  {
@@ -436,14 +442,42 @@ public class TdOutputPlugin
  builder.setApiKey(task.getApiKey());
  builder.setEndpoint(task.getEndpoint());
  builder.setUseSSL(task.getUseSsl());
- if (task.getHttpProxy().isPresent()) {
- HttpProxyTask proxyTask = task.getHttpProxy().get();
- builder.setProxy(new ProxyConfig(proxyTask.getHost(), proxyTask.getPort(), proxyTask.getUseSsl(),
- proxyTask.getUser(), proxyTask.getPassword()));
+
+ Optional<ProxyConfig> proxyConfig = newProxyConfig(task.getHttpProxy());
+ if (proxyConfig.isPresent()) {
+ builder.setProxy(proxyConfig.get());
  }
+
  return builder.build();
  }

+ @VisibleForTesting
+ Optional<ProxyConfig> newProxyConfig(Optional<HttpProxyTask> task)
+ {
+ // This plugin searches http proxy settings and configures them to TDClient. The order of proxy setting searching is:
+ // 1. System properties
+ // 2. http_proxy config option provided by this plugin
+
+ Properties props = System.getProperties();
+ if (props.containsKey("http.proxyHost") || props.containsKey("https.proxyHost")) {
+ boolean useSsl = props.containsKey("https.proxyHost");
+ String proto = !useSsl ? "http" : "https";
+ String host = props.getProperty(proto + ".proxyHost");
+ int port = parseInt(props.getProperty(proto + ".proxyPort", !useSsl ? "80" : "443"));
+ Optional<String> user = fromNullable(props.getProperty(proto + ".proxyUser"));
+ Optional<String> password = fromNullable(props.getProperty(proto + ".proxyPassword"));
+ return Optional.of(new ProxyConfig(host, port, useSsl, user, password));
+ }
+ else if (task.isPresent()) {
+ HttpProxyTask proxyTask = task.get();
+ return Optional.of(new ProxyConfig(proxyTask.getHost(), proxyTask.getPort(), proxyTask.getUseSsl(),
+ proxyTask.getUser(), proxyTask.getPassword()));
+ }
+ else {
+ return Optional.absent();
+ }
+ }
+
  @VisibleForTesting
  void createTableIfNotExists(TDClient client, String databaseName, String tableName)
  {
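The lookup order implemented above: JVM system properties first (with `https.proxyHost` preferred over `http.proxyHost` and the port defaulting to 443 or 80 when the matching `*.proxyPort` property is absent), then the `http_proxy` config option, otherwise no proxy at all. A small self-contained sketch of that order follows; the `String` result and the class name are illustrative only, since the plugin itself builds a `com.treasuredata.client.ProxyConfig`.

```java
import java.util.Properties;

// Standalone sketch of the search order used by newProxyConfig() above.
// 1. JVM system properties (https.* preferred over http.*, ports default to 443/80)
// 2. the plugin's http_proxy option
// 3. otherwise, no proxy
public class ProxyResolutionSketch
{
    static String resolve(Properties props, String optionHost, int optionPort)
    {
        if (props.containsKey("http.proxyHost") || props.containsKey("https.proxyHost")) {
            boolean useSsl = props.containsKey("https.proxyHost");
            String proto = useSsl ? "https" : "http";
            String host = props.getProperty(proto + ".proxyHost");
            int port = Integer.parseInt(props.getProperty(proto + ".proxyPort", useSsl ? "443" : "80"));
            return host + ":" + port + " (ssl=" + useSsl + ")";
        }
        else if (optionHost != null) {
            return optionHost + ":" + optionPort + " (from http_proxy option)";
        }
        return "no proxy";
    }

    public static void main(String[] args)
    {
        Properties props = new Properties();
        props.setProperty("http.proxyHost", "property_host");
        // The system property wins over the option host, and the port falls back to 80
        // because http.proxyPort is not set, which is the behavior the new unit test
        // in TestTdOutputPlugin.java (further below) checks against the real method.
        System.out.println(resolve(props, "option_host", 8080)); // property_host:80 (ssl=false)
    }
}
```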
@@ -795,7 +829,7 @@ public class TdOutputPlugin

  RecordWriter closeLater = null;
  try {
- FieldWriterSet fieldWriters = new FieldWriterSet(log, task, schema);
+ FieldWriterSet fieldWriters = new FieldWriterSet(log, task, schema, true);
  closeLater = new RecordWriter(task, taskIndex, newTDClient(task), fieldWriters);
  RecordWriter recordWriter = closeLater;
  recordWriter.open(schema);
data/src/main/java/org/embulk/output/td/writer/FieldWriterSet.java CHANGED
@@ -41,7 +41,7 @@ public class FieldWriterSet
  private final IFieldWriter[] fieldWriters;
  private final Optional<TimeValueGenerator> staticTimeValue;

- public FieldWriterSet(Logger log, TdOutputPlugin.PluginTask task, Schema schema)
+ public FieldWriterSet(Logger log, TdOutputPlugin.PluginTask task, Schema schema, boolean runStage)
  {
  Optional<String> userDefinedPrimaryKeySourceColumnName = task.getTimeColumn();
  ConvertTimestampType convertTimestampType = task.getConvertTimestampType();
@@ -79,14 +79,18 @@
  if (userDefinedPrimaryKeySourceColumnName.isPresent()) {
  columnName = newColumnUniqueName(columnName, schema);
  mode = ColumnWriterMode.SIMPLE_VALUE;
- log.warn("time_column '{}' is set but 'time' column also exists. The existent 'time' column is renamed to {}",
- userDefinedPrimaryKeySourceColumnName.get(), columnName);
+ if (!runStage) {
+ log.warn("time_column '{}' is set but 'time' column also exists. The existent 'time' column is renamed to {}",
+ userDefinedPrimaryKeySourceColumnName.get(), columnName);
+ }
  }
  else if (timeValueConfig.isPresent()) {
  columnName = newColumnUniqueName(columnName, schema);
  mode = ColumnWriterMode.SIMPLE_VALUE;
- log.warn("time_value is set but 'time' column also exists. The existent 'time' column is renamed to {}",
- columnName);
+ if (!runStage) {
+ log.warn("time_value is set but 'time' column also exists. The existent 'time' column is renamed to {}",
+ columnName);
+ }
  }
  else {
  mode = ColumnWriterMode.PRIMARY_KEY;
@@ -101,10 +105,14 @@

  switch (mode) {
  case PRIMARY_KEY:
- log.info("Using {}:{} column as the data partitioning key", columnName, columnType);
+ if (!runStage) {
+ log.info("Using {}:{} column as the data partitioning key", columnName, columnType);
+ }
  if (columnType instanceof LongType) {
  if (task.getUnixTimestampUnit() != TdOutputPlugin.UnixTimestampUnit.SEC) {
- log.warn("time column is converted from {} to seconds", task.getUnixTimestampUnit());
+ if (!runStage) {
+ log.warn("time column is converted from {} to seconds", task.getUnixTimestampUnit());
+ }
  }
  writer = new UnixTimestampLongFieldWriter(columnName, task.getUnixTimestampUnit().getFractionUnit());
  foundPrimaryKey = true;
@@ -160,14 +168,18 @@

  IFieldWriter writer;
  if (columnType instanceof LongType) {
- log.info("Duplicating {}:{} column (unix timestamp {}) to 'time' column as seconds for the data partitioning",
- columnName, columnType, task.getUnixTimestampUnit());
+ if (!runStage) {
+ log.info("Duplicating {}:{} column (unix timestamp {}) to 'time' column as seconds for the data partitioning",
+ columnName, columnType, task.getUnixTimestampUnit());
+ }
  IFieldWriter fw = new LongFieldWriter(columnName);
  writer = new UnixTimestampFieldDuplicator(fw, "time", task.getUnixTimestampUnit().getFractionUnit());
  }
  else if (columnType instanceof TimestampType) {
- log.info("Duplicating {}:{} column to 'time' column as seconds for the data partitioning",
- columnName, columnType);
+ if (!runStage) {
+ log.info("Duplicating {}:{} column to 'time' column as seconds for the data partitioning",
+ columnName, columnType);
+ }
  IFieldWriter fw = newSimpleTimestampFieldWriter(columnName, columnType, convertTimestampType, timestampFormatters[duplicatePrimaryKeySourceIndex]);
  writer = new TimestampFieldLongDuplicator(fw, "time");
  }
@@ -191,7 +203,9 @@
  }

  long uploadTime = System.currentTimeMillis() / 1000;
- log.info("'time' column is generated and is set to a unix time {}", uploadTime);
+ if (!runStage) {
+ log.info("'time' column is generated and is set to a unix time {}", uploadTime);
+ }
  TimeValueConfig newConfig = Exec.newConfigSource().set("mode", "fixed_time").set("value", uploadTime).loadConfig(TimeValueConfig.class);
  task.setTimeValue(Optional.of(newConfig));
  staticTimeValue = Optional.of(TimeValueGenerator.newGenerator(newConfig));
data/src/test/java/org/embulk/output/td/TestTdOutputPlugin.java CHANGED
@@ -1,8 +1,10 @@
  package org.embulk.output.td;

+ import com.google.common.base.Optional;
  import com.google.common.collect.ImmutableList;
  import com.google.common.collect.ImmutableMap;
  import com.google.common.collect.Lists;
+ import com.treasuredata.client.ProxyConfig;
  import com.treasuredata.client.TDClient;
  import com.treasuredata.client.TDClientHttpConflictException;
  import com.treasuredata.client.TDClientHttpNotFoundException;
@@ -18,6 +20,7 @@ import org.embulk.config.ConfigException;
  import org.embulk.config.ConfigSource;
  import org.embulk.config.TaskSource;
  import org.embulk.output.td.TdOutputPlugin.PluginTask;
+ import org.embulk.output.td.TdOutputPlugin.HttpProxyTask;
  import org.embulk.output.td.TdOutputPlugin.TimestampColumnOption;
  import org.embulk.output.td.TdOutputPlugin.UnixTimestampUnit;
  import org.embulk.output.td.writer.FieldWriterSet;
@@ -390,6 +393,30 @@ public class TestTdOutputPlugin
  }
  }

+ @Test
+ public void newProxyConfig()
+ {
+ // confirm if proxy system properties override proxy setting by http_proxy config option.
+
+ HttpProxyTask proxyTask = Exec.newConfigSource()
+ .set("host", "option_host")
+ .set("port", 8080)
+ .loadConfig(HttpProxyTask.class);
+
+ String originalProxyHost = System.getProperty("http.proxyHost");
+ try {
+ System.setProperty("http.proxyHost", "property_host");
+ Optional<ProxyConfig> proxyConfig = plugin.newProxyConfig(Optional.of(proxyTask));
+ assertEquals("property_host", proxyConfig.get().getHost());
+ assertEquals(80, proxyConfig.get().getPort());
+ }
+ finally {
+ if (originalProxyHost != null) {
+ System.setProperty("http.proxyHost", originalProxyHost);
+ }
+ }
+ }
+
  @Test
  public void completeBulkImportSession()
  {
@@ -533,7 +560,7 @@ public class TestTdOutputPlugin

  public static FieldWriterSet fieldWriters(Logger log, PluginTask task, Schema schema)
  {
- return spy(new FieldWriterSet(log, task, schema));
+ return spy(new FieldWriterSet(log, task, schema, false));
  }

  public static RecordWriter recordWriter(PluginTask task, TDClient client, FieldWriterSet fieldWriters)
data/src/test/java/org/embulk/output/td/TestTimeValueGenerator.java CHANGED
@@ -41,12 +41,12 @@ public class TestTimeValueGenerator
  // incremental_time
  { // {from: 0, to: 0} # default incremental_time
  schema = schema("_c0", Types.STRING, "_c1", Types.LONG);
- new FieldWriterSet(log, pluginTask(config.set("time_value", ImmutableMap.of("from", 0L, "to", 0L))), schema);
+ new FieldWriterSet(log, pluginTask(config.set("time_value", ImmutableMap.of("from", 0L, "to", 0L))), schema, false);
  }
  { // {from: 0} # default incremental_time
  schema = schema("_c0", Types.STRING, "_c1", Types.LONG);
  try {
- new FieldWriterSet(log, pluginTask(config.set("time_value", ImmutableMap.of("from", 0L))), schema);
+ new FieldWriterSet(log, pluginTask(config.set("time_value", ImmutableMap.of("from", 0L))), schema, false);
  fail();
  }
  catch (Throwable t) {
@@ -56,7 +56,7 @@ public class TestTimeValueGenerator
  { // {to: 0} # default incremental_time
  schema = schema("_c0", Types.STRING, "_c1", Types.LONG);
  try {
- new FieldWriterSet(log, pluginTask(config.set("time_value", ImmutableMap.of("to", 0L))), schema);
+ new FieldWriterSet(log, pluginTask(config.set("time_value", ImmutableMap.of("to", 0L))), schema, false);
  fail();
  }
  catch (Throwable t) {
@@ -65,12 +65,12 @@ public class TestTimeValueGenerator
  }
  { // {from: 0, to: 0, mode: incremental_time}
  schema = schema("_c0", Types.STRING, "_c1", Types.LONG);
- new FieldWriterSet(log, pluginTask(config.set("time_value", ImmutableMap.of("from", 0L, "to", 0L, "mode", "incremental_time"))), schema);
+ new FieldWriterSet(log, pluginTask(config.set("time_value", ImmutableMap.of("from", 0L, "to", 0L, "mode", "incremental_time"))), schema, false);
  }
  { // {from: 0, mode: incremental_time}
  schema = schema("_c0", Types.STRING, "_c1", Types.LONG);
  try {
- new FieldWriterSet(log, pluginTask(config.set("time_value", ImmutableMap.of("from", 0L, "mode", "incremental_time"))), schema);
+ new FieldWriterSet(log, pluginTask(config.set("time_value", ImmutableMap.of("from", 0L, "mode", "incremental_time"))), schema, false);
  fail();
  }
  catch (Throwable t) {
@@ -80,7 +80,7 @@ public class TestTimeValueGenerator
  { // {to: 0, mode: incremental_time}
  schema = schema("_c0", Types.STRING, "_c1", Types.LONG);
  try {
- new FieldWriterSet(log, pluginTask(config.set("time_value", ImmutableMap.of("to", 0L, "mode", "incremental_time"))), schema);
+ new FieldWriterSet(log, pluginTask(config.set("time_value", ImmutableMap.of("to", 0L, "mode", "incremental_time"))), schema, false);
  fail();
  }
  catch (Throwable t) {
@@ -90,7 +90,7 @@ public class TestTimeValueGenerator
  { // {mode: incremental_time}
  schema = schema("_c0", Types.STRING, "_c1", Types.LONG);
  try {
- new FieldWriterSet(log, pluginTask(config.set("time_value", ImmutableMap.of("mode", "incremental_time"))), schema);
+ new FieldWriterSet(log, pluginTask(config.set("time_value", ImmutableMap.of("mode", "incremental_time"))), schema, false);
  fail();
  }
  catch (Throwable t) {
@@ -101,12 +101,12 @@ public class TestTimeValueGenerator
  // fixed_time
  { // {value: 0, mode: fixed_time}
  schema = schema("_c0", Types.STRING, "_c1", Types.LONG);
- new FieldWriterSet(log, pluginTask(config.set("time_value", ImmutableMap.of("value", 0L, "mode", "fixed_time"))), schema);
+ new FieldWriterSet(log, pluginTask(config.set("time_value", ImmutableMap.of("value", 0L, "mode", "fixed_time"))), schema, false);
  }
  { // {mode: fixed_time}
  schema = schema("_c0", Types.STRING, "_c1", Types.LONG);
  try {
- new FieldWriterSet(log, pluginTask(config.set("time_value", ImmutableMap.of("mode", "fixed_time"))), schema);
+ new FieldWriterSet(log, pluginTask(config.set("time_value", ImmutableMap.of("mode", "fixed_time"))), schema, false);
  }
  catch (Throwable t) {
  assertTrue(t instanceof ConfigException);
@@ -115,7 +115,7 @@ public class TestTimeValueGenerator
  { // {value: 0}
  schema = schema("_c0", Types.STRING, "_c1", Types.LONG);
  try {
- new FieldWriterSet(log, pluginTask(config.set("time_value", ImmutableMap.of("value", 0L))), schema);
+ new FieldWriterSet(log, pluginTask(config.set("time_value", ImmutableMap.of("value", 0L))), schema, false);
  }
  catch (Throwable t) {
  assertTrue(t instanceof ConfigException);
data/src/test/java/org/embulk/output/td/writer/TestFieldWriterSet.java CHANGED
@@ -40,7 +40,7 @@ public class TestFieldWriterSet
  { // if schema doesn't have appropriate time column, it throws ConfigError.
  schema = schema("_c0", Types.STRING, "time", Types.STRING); // not long or timestamp
  try {
- new FieldWriterSet(log, pluginTask(config), schema);
+ new FieldWriterSet(log, pluginTask(config), schema, false);
  fail();
  }
  catch (Throwable t) {
@@ -51,7 +51,7 @@ public class TestFieldWriterSet
  { // if schema doesn't have a column specified as time_column column, it throws ConfigError
  schema = schema("_c0", Types.STRING, "_c1", Types.STRING);
  try {
- new FieldWriterSet(log, pluginTask(config.deepCopy().set("time_column", "_c2")), schema);
+ new FieldWriterSet(log, pluginTask(config.deepCopy().set("time_column", "_c2")), schema, false);
  fail();
  }
  catch (Throwable t) {
@@ -62,7 +62,7 @@ public class TestFieldWriterSet
  { // if time_column column is not appropriate column type, it throws ConfigError.
  schema = schema("_c0", Types.STRING, "_c1", Types.STRING);
  try {
- new FieldWriterSet(log, pluginTask(config.deepCopy().set("time_column", "_c1")), schema);
+ new FieldWriterSet(log, pluginTask(config.deepCopy().set("time_column", "_c1")), schema, false);
  fail();
  }
  catch (Throwable t) {
@@ -73,7 +73,7 @@ public class TestFieldWriterSet
  { // if both of time_column and time_value are specified, it throws ConfigError.
  schema = schema("_c0", Types.STRING, "_c1", Types.LONG);
  try {
- new FieldWriterSet(log, pluginTask(config.deepCopy().set("time_column", "_c1").set("time_value", ImmutableMap.of("from", 0L, "to", 0L))), schema);
+ new FieldWriterSet(log, pluginTask(config.deepCopy().set("time_column", "_c1").set("time_value", ImmutableMap.of("from", 0L, "to", 0L))), schema, false);
  fail();
  }
  catch (Throwable t) {
@@ -87,14 +87,14 @@ public class TestFieldWriterSet
  {
  { // time column (timestamp type) exists
  Schema schema = schema("time", Types.TIMESTAMP, "_c0", Types.TIMESTAMP);
- FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config), schema);
+ FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config), schema, false);

  assertTrue(writers.getFieldWriter(0) instanceof TimestampLongFieldWriter);
  }

  { // time column (long type) exists
  Schema schema = schema("time", Types.LONG, "_c0", Types.TIMESTAMP);
- FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config), schema);
+ FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config), schema, false);

  assertTrue(writers.getFieldWriter(0) instanceof UnixTimestampLongFieldWriter);

@@ -106,21 +106,21 @@ public class TestFieldWriterSet
  {
  { // time_column option (timestamp type)
  Schema schema = schema("_c0", Types.TIMESTAMP, "_c1", Types.STRING);
- FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config.deepCopy().set("time_column", "_c0")), schema);
+ FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config.deepCopy().set("time_column", "_c0")), schema, false);

  assertTrue(writers.getFieldWriter(0) instanceof TimestampFieldLongDuplicator);
  }

  { // time_column option (long type)
  Schema schema = schema("_c0", Types.LONG, "_c1", Types.STRING);
- FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config.deepCopy().set("time_column", "_c0")), schema);
+ FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config.deepCopy().set("time_column", "_c0")), schema, false);

  assertTrue(writers.getFieldWriter(0) instanceof UnixTimestampFieldDuplicator);
  }

  { // time_column option (typestamp type) if time column exists
  Schema schema = schema("_c0", Types.TIMESTAMP, "time", Types.TIMESTAMP);
- FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config.deepCopy().set("time_column", "_c0")), schema);
+ FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config.deepCopy().set("time_column", "_c0")), schema, false);

  assertTrue(writers.getFieldWriter(0) instanceof TimestampFieldLongDuplicator); // c0
  assertTrue(writers.getFieldWriter(1) instanceof TimestampStringFieldWriter); // renamed column
@@ -128,7 +128,7 @@ public class TestFieldWriterSet

  { // time_column option (long type) if time column exists
  Schema schema = schema("_c0", Types.LONG, "time", Types.TIMESTAMP);
- FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config.deepCopy().set("time_column", "_c0")), schema);
+ FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config.deepCopy().set("time_column", "_c0")), schema, false);

  assertTrue(writers.getFieldWriter(0) instanceof UnixTimestampFieldDuplicator); // c0
  assertTrue(writers.getFieldWriter(1) instanceof TimestampStringFieldWriter); // renamed column
@@ -140,7 +140,7 @@ public class TestFieldWriterSet
  {
  { // if not specify default_timestamp_type_convert_to, use string by default
  Schema schema = schema("_c0", Types.TIMESTAMP, "time", Types.TIMESTAMP);
- FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config.deepCopy()), schema);
+ FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config.deepCopy()), schema, false);

  assertTrue(writers.getFieldWriter(0) instanceof TimestampStringFieldWriter); // c0
  assertTrue(writers.getFieldWriter(1) instanceof TimestampLongFieldWriter); // time
@@ -148,7 +148,7 @@ public class TestFieldWriterSet

  { // and use time_column option
  Schema schema = schema("_c0", Types.TIMESTAMP, "time", Types.TIMESTAMP);
- FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config.deepCopy().set("time_column", "_c0")), schema);
+ FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config.deepCopy().set("time_column", "_c0")), schema, false);

  assertTrue(writers.getFieldWriter(0) instanceof TimestampFieldLongDuplicator); // c0
  assertTrue(writers.getFieldWriter(1) instanceof TimestampStringFieldWriter); // time renamed
@@ -156,7 +156,7 @@ public class TestFieldWriterSet

  { // if default_timestamp_type_convert_to is string, use string
  Schema schema = schema("_c0", Types.TIMESTAMP, "time", Types.TIMESTAMP);
- FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config.deepCopy().set("default_timestamp_type_convert_to", "string")), schema);
+ FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config.deepCopy().set("default_timestamp_type_convert_to", "string")), schema, false);

  assertTrue(writers.getFieldWriter(0) instanceof TimestampStringFieldWriter); // c0
  assertTrue(writers.getFieldWriter(1) instanceof TimestampLongFieldWriter); // time
@@ -164,7 +164,7 @@ public class TestFieldWriterSet

  { // and use time_column option
  Schema schema = schema("_c0", Types.TIMESTAMP, "time", Types.TIMESTAMP);
- FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config.deepCopy().set("default_timestamp_type_convert_to", "string").set("time_column", "_c0")), schema);
+ FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config.deepCopy().set("default_timestamp_type_convert_to", "string").set("time_column", "_c0")), schema, false);

  assertTrue(writers.getFieldWriter(0) instanceof TimestampFieldLongDuplicator); // c0
  assertTrue(writers.getFieldWriter(1) instanceof TimestampStringFieldWriter); // time renamed
@@ -172,7 +172,7 @@ public class TestFieldWriterSet

  { // if default_timestamp_type_conver_to is sec, use long
  Schema schema = schema("_c0", Types.TIMESTAMP, "time", Types.TIMESTAMP);
- FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config.deepCopy().set("default_timestamp_type_convert_to", "sec")), schema);
+ FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config.deepCopy().set("default_timestamp_type_convert_to", "sec")), schema, false);

  assertTrue(writers.getFieldWriter(0) instanceof TimestampLongFieldWriter); // c0
  assertTrue(writers.getFieldWriter(1) instanceof TimestampLongFieldWriter); // time
@@ -180,7 +180,7 @@ public class TestFieldWriterSet

  { // and use time_column option
  Schema schema = schema("_c0", Types.TIMESTAMP, "time", Types.TIMESTAMP);
- FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config.deepCopy().set("default_timestamp_type_convert_to", "sec").set("time_column", "_c0")), schema);
+ FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config.deepCopy().set("default_timestamp_type_convert_to", "sec").set("time_column", "_c0")), schema, false);

  assertTrue(writers.getFieldWriter(0) instanceof TimestampFieldLongDuplicator); // c0
  assertTrue(writers.getFieldWriter(1) instanceof TimestampLongFieldWriter); // time renamed
@@ -192,7 +192,7 @@ public class TestFieldWriterSet
  throws Exception
  {
  Schema schema = schema("_c0", Types.TIMESTAMP, "_c1", Types.LONG);
- FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config), schema);
+ FieldWriterSet writers = new FieldWriterSet(log, pluginTask(config), schema, false);

  assertTrue(writers.getFieldWriter(0) instanceof TimestampStringFieldWriter); // c0
  assertTrue(writers.getFieldWriter(1) instanceof LongFieldWriter); // c1
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: embulk-output-td
  version: !ruby/object:Gem::Version
- version: 0.3.5
+ version: 0.3.6
  platform: ruby
  authors:
  - Muga Nishizawa
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2016-06-29 00:00:00.000000000 Z
+ date: 2016-07-11 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
@@ -82,7 +82,7 @@ files:
  - src/test/java/org/embulk/output/td/TestTdOutputPlugin.java
  - src/test/java/org/embulk/output/td/TestTimeValueGenerator.java
  - src/test/java/org/embulk/output/td/writer/TestFieldWriterSet.java
- - classpath/embulk-output-td-0.3.5.jar
+ - classpath/embulk-output-td-0.3.6.jar
  - classpath/hamcrest-core-1.1.jar
  - classpath/jackson-annotations-2.6.2.jar
  - classpath/jackson-core-2.6.2.jar