embulk-output-orc 0.3.2 → 0.3.3

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 61ec735e56403db3cc61f33efd862b6c4987a674
- data.tar.gz: a6cd8b2039a7b7cf5da2cfcf3595ae6fa2d860f3
+ metadata.gz: f90685be3f76457be27d9ee129b56276a66dd42b
+ data.tar.gz: ac33ac212a7bb6352aef8eb0caf310f5afd25bbe
  SHA512:
- metadata.gz: 477bd1ae6e064d01d65ef1286bca1bc981e2408fd350b76ce41ceb73b4a0816d8aae8fe3bf60a14fd0b607dfacd34fabb36557a9c005c190999a904fa04c968a
- data.tar.gz: 86496906b4a0aa7388a313dcfe16a5c519894f2cd8c760a37bf45a67075fd302a55570b36be204b6b607eceb8decb940fd8fdcf5cfe9f07ea6bd9aa2cd33c2da
+ metadata.gz: aecde246d738967d2a91906560d9735ea11981e8f7349da57d7418a3ab08a7796fb5573f1e9b72b54ff6eee1a08f4ade83194024bfcf67ff6113ff6f570fb7b6
+ data.tar.gz: 34b3eb0dc0c01388edf4310f5f55d00b0eab65b95b2def0a888867df8f79e0087c66d70e099d2af6327e57be03a80a75a6135cf281a58f5c4a0b8e22ed7caa1a
data/README.md CHANGED
@@ -13,15 +13,16 @@
  ## Configuration
 
  - **path_prefix**: A prefix of output path. (string, required)
-   - support: `file`, `s3n` and `s3a`.
+   - support: `file`, `s3`, `s3n` and `s3a`.
  - **file_ext**: An extension of output file. (string, default: `.orc`)
  - **sequence_format**: (string, default: `.%03d`)
- - **buffer_size**: Set the ORC buffer size (integer, default: `262144`)
- - **strip_size**: Set the ORC strip size (integer, default: `67108864`)
- - **block_size**: Set the ORC block size (integer, default: `268435456`)
+ - **buffer_size**: Set the ORC buffer size (integer, default: `262144(256KB)` )
+ - **strip_size**: Set the ORC strip size (integer, default: `67108864(64MB)` )
+ - **block_size**: Set the ORC block size (integer, default: `268435456(256MB)`)
  - **compression_kind**: description (string, default: `'ZLIB'`)
-   - `NONE`, `ZLIB`, `SNAPPY`
- - **overwrite**: (LocalFileSystem only) Overwrite if output files already exist. (boolean, default: `false`)
+   - `NONE`, `ZLIB`, `SNAPPY`, `LZO`, `LZ4`
+ - **overwrite**: Overwrite if output files already exist. (boolean, default: `false`)
+   - Support: `LocalFileSystem`, `S3(s3, s3a, s3n)`
 
  - **default_from_timezone** Time zone of timestamp columns. This can be overwritten for each column using column_options (DateTimeZone, default: `UTC`)
 
  - **auth_method**: name of mechanism to authenticate requests (basic, env, instance, profile, properties, anonymous, or session. default: basic)
@@ -42,6 +43,11 @@ out:
 
  ## ChangeLog
 
+ ### ver 0.3.3
+
+ - bugfix
+   - Bump `orc` library to `1.4.4`
+
  ### ver 0.3.2
 
  - Update `orc` libraries to `1.4.3`
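
The options documented in the README hunk above combine into a single `out:` section of an Embulk config. A minimal sketch with illustrative values (the bucket in `path_prefix` is a placeholder, not part of this release):

```yaml
out:
  type: orc
  path_prefix: s3a://example-bucket/embulk/output   # file, s3, s3n and s3a prefixes are supported
  file_ext: .orc
  sequence_format: ".%03d"
  buffer_size: 262144          # 256KB (default)
  strip_size: 67108864         # 64MB (default)
  block_size: 268435456        # 256MB (default)
  compression_kind: ZLIB       # NONE, ZLIB, SNAPPY, LZO, LZ4
  overwrite: true              # LocalFileSystem and S3 (s3, s3a, s3n)
  default_from_timezone: UTC
  auth_method: env             # basic, env, instance, profile, properties, anonymous, or session
```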
build.gradle CHANGED
@@ -1,6 +1,6 @@
  plugins {
      id "com.jfrog.bintray" version "1.1"
-     id "com.github.jruby-gradle.base" version "0.1.5"
+     id "com.github.jruby-gradle.base" version "1.5.0"
      id "java"
      id "checkstyle"
      id "org.sonarqube" version "2.5"
@@ -18,7 +18,7 @@ configurations {
      runtime.exclude group: "org.slf4j", module: "slf4j-log4j12"
  }
 
- version = "0.3.2"
+ version = "0.3.3"
 
  sourceCompatibility = 1.8
  targetCompatibility = 1.8
@@ -27,15 +27,19 @@ dependencies {
      compile "org.embulk:embulk-core:0.8.34"
      provided "org.embulk:embulk-core:0.8.34"
 
-     compile "org.apache.orc:orc:1.4.3"
-     compile "org.apache.orc:orc-core:1.4.3"
-     compile "org.apache.hadoop:hadoop-hdfs:2.6.4"
+     compile "org.apache.orc:orc:1.4.4"
+     compile "org.apache.orc:orc-core:1.4.4"
+     compile "org.apache.hadoop:hadoop-hdfs:2.7.5"
 
      compile 'org.embulk.input.s3:embulk-util-aws-credentials:0.2.8'
      compile "com.amazonaws:aws-java-sdk-s3:1.10.33"
-     compile "org.apache.hadoop:hadoop-aws:2.7.3"
+     compile "org.apache.hadoop:hadoop-aws:2.7.5"
+     compile 'com.google.guava:guava:24.1-jre'
 
-     testCompile "junit:junit:4.+"
+     testCompile 'org.jmockit:jmockit:1.38'
+     // testCompile "junit:junit:4.+"
+     testCompile 'org.hamcrest:hamcrest-core:1.3'
+     testCompile 'org.testng:testng:6.14.2'
      testCompile "org.embulk:embulk-core:0.8.34:tests"
      testCompile "org.embulk:embulk-standards:0.8.34"
  }
@@ -65,14 +69,16 @@ task checkstyle(type: Checkstyle) {
  }
 
  task gem(type: JRubyExec, dependsOn: ["gemspec", "classpath"]) {
-     jrubyArgs "-rrubygems/gem_runner", "-eGem::GemRunner.new.run(ARGV)", "build"
-     script "${project.name}.gemspec"
+     jrubyArgs "-S"
+     script "gem"
+     scriptArgs "build", "${project.name}.gemspec"
      doLast { ant.move(file: "${project.name}-${project.version}.gem", todir: "pkg") }
  }
 
  task gemPush(type: JRubyExec, dependsOn: ["gem"]) {
-     jrubyArgs "-rrubygems/gem_runner", "-eGem::GemRunner.new.run(ARGV)", "push"
-     script "pkg/${project.name}-${project.version}.gem"
+     jrubyArgs "-S"
+     script "gem"
+     scriptArgs "push", "pkg/${project.name}-${project.version}.gem"
  }
 
  task "package"(dependsOn: ["gemspec", "classpath"]) {
gradle/wrapper/gradle-wrapper.properties CHANGED
@@ -1,5 +1,6 @@
+ #Wed Jan 09 23:08:09 JST 2019
  distributionBase=GRADLE_USER_HOME
  distributionPath=wrapper/dists
  zipStoreBase=GRADLE_USER_HOME
  zipStorePath=wrapper/dists
- distributionUrl=https\://services.gradle.org/distributions/gradle-4.2.1-bin.zip
+ distributionUrl=https\://services.gradle.org/distributions/gradle-4.10-all.zip
src/main/java/org/embulk/output/orc/OrcColumnVisitor.java CHANGED
@@ -30,7 +30,8 @@ public class OrcColumnVisitor
      public void booleanColumn(Column column)
      {
          if (reader.isNull(column)) {
-             ((LongColumnVector) batch.cols[column.getIndex()]).vector[i] = 0;
+             batch.cols[column.getIndex()].noNulls = false;
+             batch.cols[column.getIndex()].isNull[i] = true;
          }
          else {
              if (reader.getBoolean(column)) {
@@ -45,20 +46,38 @@ public class OrcColumnVisitor
      @Override
      public void longColumn(Column column)
      {
-         ((LongColumnVector) batch.cols[column.getIndex()]).vector[i] = reader.getLong(column);
+         if (reader.isNull(column)) {
+             batch.cols[column.getIndex()].noNulls = false;
+             batch.cols[column.getIndex()].isNull[i] = true;
+         }
+         else {
+             ((LongColumnVector) batch.cols[column.getIndex()]).vector[i] = reader.getLong(column);
+         }
      }
 
      @Override
      public void doubleColumn(Column column)
      {
-         ((DoubleColumnVector) batch.cols[column.getIndex()]).vector[i] = reader.getDouble(column);
+         if (reader.isNull(column)) {
+             batch.cols[column.getIndex()].noNulls = false;
+             batch.cols[column.getIndex()].isNull[i] = true;
+         }
+         else {
+             ((DoubleColumnVector) batch.cols[column.getIndex()]).vector[i] = reader.getDouble(column);
+         }
      }
 
      @Override
      public void stringColumn(Column column)
      {
-         ((BytesColumnVector) batch.cols[column.getIndex()]).setVal(i,
-                 reader.getString(column).getBytes(StandardCharsets.UTF_8));
+         if (!reader.isNull(column)) {
+             ((BytesColumnVector) batch.cols[column.getIndex()])
+                     .setVal(i, reader.getString(column).getBytes(StandardCharsets.UTF_8));
+         }
+         else {
+             batch.cols[column.getIndex()].noNulls = false;
+             batch.cols[column.getIndex()].isNull[i] = true;
+         }
      }
 
      @Override
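
The null handling introduced above follows the standard ORC `ColumnVector` convention: clear `noNulls` and flag the row in `isNull` instead of writing a placeholder value, so the row reads back as NULL rather than as `0` or an empty string. A minimal standalone sketch of that pattern (assuming orc-core 1.4.x with its bundled hive-storage-api; the class name, schema, and values are illustrative only):

```java
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.orc.TypeDescription;

public class NullFlagSketch
{
    public static void main(String[] args)
    {
        TypeDescription schema = TypeDescription.fromString("struct<score:bigint>");
        VectorizedRowBatch batch = schema.createRowBatch();
        LongColumnVector score = (LongColumnVector) batch.cols[0];

        // row 0: a real value
        int row = batch.size++;
        score.vector[row] = 42L;

        // row 1: a null -- mark the vector as containing nulls and flag this row,
        // instead of writing 0 into vector[row]
        row = batch.size++;
        score.noNulls = false;
        score.isNull[row] = true;
    }
}
```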
src/main/java/org/embulk/output/orc/OrcOutputPlugin.java CHANGED
@@ -1,5 +1,6 @@
  package org.embulk.output.orc;
 
+ import com.amazonaws.auth.AWSCredentials;
  import com.google.common.base.Throwables;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.LocalFileSystem;
@@ -60,7 +61,6 @@ public class OrcOutputPlugin
      public void cleanup(TaskSource taskSource,
              Schema schema, int taskCount,
              List<TaskReport> successTaskReports)
-
      {
      }
 
@@ -70,7 +70,8 @@ public class OrcOutputPlugin
          PluginTask task = taskSource.loadTask(PluginTask.class);
 
          if (task.getOverwrite()) {
-             OrcOutputPluginHelper.removeOldFile(buildPath(task, taskIndex));
+             AWSCredentials credentials = AwsCredentials.getAWSCredentialsProvider(task).getCredentials();
+             OrcOutputPluginHelper.removeOldFile(buildPath(task, taskIndex), task);
          }
 
          final PageReader reader = new PageReader(schema);
@@ -137,6 +138,7 @@ public class OrcOutputPlugin
          }
          if (task.getEndpoint().isPresent()) {
              conf.set("fs.s3a.endpoint", task.getEndpoint().get());
+             conf.set("fs.s3n.endpoint", task.getEndpoint().get());
          }
          return conf;
      }
@@ -158,9 +160,11 @@ public class OrcOutputPlugin
              OrcFile.WriterOptions writerOptions = createWriterOptions(task, conf);
              // see: https://stackoverflow.com/questions/9256733/how-to-connect-hive-in-ireport
              // see: https://community.hortonworks.com/content/kbentry/73458/connecting-dbvisualizer-and-datagrip-to-hive-with.html
-             writer = OrcFile.createWriter(new Path(buildPath(task, processorIndex)),
+             writer = OrcFile.createWriter(
+                     new Path(buildPath(task, processorIndex)),
                      writerOptions.setSchema(oschema)
-                             .version(OrcFile.Version.V_0_12));
+                             .version(OrcFile.Version.V_0_12)
+             );
          }
          catch (IOException e) {
              Throwables.propagate(e);
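
For context on the reorganized `OrcFile.createWriter` call and the endpoint settings above, this is roughly how the orc-core 1.4.x writer API is used end to end against an S3A path. A sketch under stated assumptions (the bucket, endpoint, schema, and class name are placeholders, not taken from this release):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.orc.CompressionKind;
import org.apache.orc.OrcFile;
import org.apache.orc.TypeDescription;
import org.apache.orc.Writer;

import java.io.IOException;

public class OrcWriterSketch
{
    public static void main(String[] args) throws IOException
    {
        TypeDescription schema = TypeDescription.fromString("struct<myid:bigint,named:string>");

        Configuration conf = new Configuration();
        // endpoint override, analogous to createConfiguration() above
        conf.set("fs.s3a.endpoint", "s3.ap-northeast-1.amazonaws.com");

        Writer writer = OrcFile.createWriter(
                new Path("s3a://example-bucket/output.000.orc"),
                OrcFile.writerOptions(conf)
                        .setSchema(schema)
                        .compress(CompressionKind.ZLIB)
                        .version(OrcFile.Version.V_0_12));

        VectorizedRowBatch batch = schema.createRowBatch();
        // ... fill the batch via its ColumnVectors, then:
        writer.addRowBatch(batch);
        writer.close();
    }
}
```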
src/main/java/org/embulk/output/orc/OrcOutputPluginHelper.java CHANGED
@@ -1,11 +1,19 @@
  package org.embulk.output.orc;
 
+ import com.amazonaws.auth.profile.ProfileCredentialsProvider;
+ import com.amazonaws.services.s3.AmazonS3;
+ import com.amazonaws.services.s3.AmazonS3Client;
+ import com.amazonaws.services.s3.model.DeleteObjectRequest;
+ import com.google.common.base.Joiner;
+ import com.google.common.base.Splitter;
  import com.google.common.base.Throwables;
 
  import java.io.IOException;
  import java.nio.file.Files;
  import java.nio.file.Path;
  import java.nio.file.Paths;
+ import java.util.Arrays;
+ import java.util.List;
 
  class OrcOutputPluginHelper
  {
@@ -14,15 +22,90 @@ class OrcOutputPluginHelper
          throw new UnsupportedOperationException();
      }
 
-     static void removeOldFile(String fpath)
+     static void removeOldFile(String fpath, PluginTask task)
      {
-         Path path = Paths.get(fpath);
-         // TODO: Check local file. not HDFS or S3.
-         try {
-             Files.deleteIfExists(path);
+         // NOTE: Delete a file if local-filesystem, not HDFS or S3.
+         String schema = getSchema(fpath);
+         if (isDeleteTarget(schema)) {
+             switch (schema) {
+                 case "file":
+                     try {
+                         Files.deleteIfExists(Paths.get(fpath));
+                     }
+                     catch (IOException e) {
+                         Throwables.propagate(e);
+                     }
+                     break;
+                 case "s3":
+                 case "s3n":
+                 case "s3a":
+                     AmazonS3URILikeObject s3Url = parseS3Url(fpath);
+                     AmazonS3 s3client = new AmazonS3Client(new ProfileCredentialsProvider());
+                     if (task.getEndpoint().isPresent()) {
+                         s3client.setEndpoint(task.getEndpoint().get());
+                     }
+                     s3client.deleteObject(new DeleteObjectRequest(s3Url.getBucket(), s3Url.getKey()));
+                 default:
+                     // TODO: Unsupported
+             }
          }
-         catch (IOException e) {
-             Throwables.propagate(e);
+     }
+
+     public static boolean isDeleteTarget(String schema)
+     {
+         switch (schema) {
+             case "file":
+                 return true;
+             case "s3":
+             case "s3a":
+             case "s3n":
+                 return true;
+             default:
+                 return false;
+         }
+     }
+
+     static String getSchema(String fpath)
+     {
+         String schema = Splitter.on("://")
+                 .splitToList(fpath).get(0);
+         if (schema.equals("s3a") || schema.equals("s3n") || schema.equals("s3")) {
+             return schema;
+         }
+         else {
+             Path path = Paths.get(fpath);
+             return path.getFileSystem().provider().getScheme();
+         }
+     }
+
+     static AmazonS3URILikeObject parseS3Url(String s3url)
+     {
+         List<String> parts = Arrays.asList(
+                 s3url.split("(://|/)"));
+         String bucket = parts.get(1);
+         String key = Joiner.on("/").join(parts.subList(2, parts.size()));
+         return new AmazonS3URILikeObject(bucket, key);
+     }
+
+     static class AmazonS3URILikeObject
+     {
+         String bucket;
+         String key;
+
+         public AmazonS3URILikeObject(String bucket, String key)
+         {
+             this.bucket = bucket;
+             this.key = key;
+         }
+
+         public String getBucket()
+         {
+             return bucket;
+         }
+
+         public String getKey()
+         {
+             return key;
          }
      }
  }
src/test/java/org/embulk/output/orc/OrcOutputPluginHelperTest.java ADDED
@@ -0,0 +1,71 @@
+ package org.embulk.output.orc;
+
+ import org.testng.annotations.DataProvider;
+ import org.testng.annotations.Test;
+
+ import static org.hamcrest.MatcherAssert.assertThat;
+ import static org.hamcrest.core.Is.is;
+
+ public class OrcOutputPluginHelperTest
+ {
+     @DataProvider(name = "url-provider")
+     public Object[][] dataProvider()
+     {
+         return new Object[][] {
+                 {"file://tmp/output.orc", "file"},
+                 {"/tmp/output.000.orc", "file"},
+                 {"s3n://embulk-test/output.0001.orc", "s3n"},
+                 {"s3a://embulk-test/output.0001.orc", "s3a"},
+                 {"s3://embulk-test/output.0001.orc", "s3"},
+         };
+     }
+
+     @Test(dataProvider = "url-provider")
+     public void getFPathTest(String file, String expect)
+     {
+         String schema = OrcOutputPluginHelper.getSchema(file);
+         assertThat(schema, is(expect));
+     }
+
+     @DataProvider(name = "schema-provider")
+     public Object[][] schemaProvider()
+     {
+         return new Object[][] {
+                 {"file", true},
+                 {"s3", true},
+                 {"s3n", true},
+                 {"s3a", true},
+                 {"hdfs", false},
+         };
+     }
+
+     @Test(dataProvider = "schema-provider")
+     public void isDeleteTargetTest(String schema, boolean expect)
+     {
+         boolean result = OrcOutputPluginHelper.isDeleteTarget(schema);
+         assertThat(result, is(expect));
+     }
+
+     @DataProvider(name = "parserTest-provider")
+     public Object[][] parserTestProvider()
+     {
+         String baseurl = "demo-bucket/test/output.000.orc";
+         String bucket = "demo-bucket";
+         String keyname = "test/output.000.orc";
+
+         return new Object[][] {
+                 {"s3://" + baseurl, bucket, keyname},
+                 {"s3a://" + baseurl, bucket, keyname},
+                 {"s3n://" + baseurl, bucket, keyname},
+         };
+     }
+
+     @Test(dataProvider = "parserTest-provider")
+     public void parseS3UrlTest(String url, String bucket, String key)
+     {
+         OrcOutputPluginHelper.AmazonS3URILikeObject parts =
+                 OrcOutputPluginHelper.parseS3Url(url);
+         assertThat(parts.getBucket(), is(bucket));
+         assertThat(parts.getKey(), is(key));
+     }
+ }
src/test/resources/example-null.yml ADDED
@@ -0,0 +1,25 @@
+ ---
+ in:
+   type: randomj
+   rows: 8
+   threads: 1
+   # default_timezone: Asia/Tokyo
+   primary_key: myid
+   schema:
+     - {name: myid, type: long}
+     - {name: named, type: string, null_rate: 10000}
+     - {name: x_flag, type: boolean, null_rate: 10000}
+     - {name: pit_rate, type: double, null_rate: 10000}
+     - {name: score, type: long, null_rate: 10000}
+     - {name: time, type: timestamp, format: '%Y-%m-%d %H:%M:%S', null_rate: 10000}
+     - {name: purchase, type: timestamp, format: '%Y/%m/%d', null_rate: 10000}
+
+ exec:
+   max_threads: 2 # run at most 8 tasks concurrently
+   min_output_tasks: 1 # disable page scattering
+
+ out:
+   type: orc
+   overwrite: true
+   path_prefix: "/tmp/output"
+   compression_kind: ZLIB
src/test/resources/example.yml ADDED
@@ -0,0 +1,25 @@
+ ---
+ in:
+   type: randomj
+   rows: 1024
+   threads: 1
+   # default_timezone: Asia/Tokyo
+   primary_key: myid
+   schema:
+     - {name: myid, type: long}
+     - {name: named, type: string, null_rate: 1000}
+     - {name: x_flag, type: boolean, null_rate: 1000}
+     - {name: pit_rate, type: double, null_rate: 1000}
+     - {name: score, type: long, null_rate: 1000}
+     - {name: time, type: timestamp, format: '%Y-%m-%d %H:%M:%S'}
+     - {name: purchase, type: timestamp, format: '%Y/%m/%d'}
+
+ exec:
+   max_threads: 2 # run at most 8 tasks concurrently
+   min_output_tasks: 1 # disable page scattering
+
+ out:
+   type: orc
+   overwrite: true
+   path_prefix: "/tmp/output"
+   compression_kind: ZLIB
metadata CHANGED
@@ -1,19 +1,19 @@
  --- !ruby/object:Gem::Specification
  name: embulk-output-orc
  version: !ruby/object:Gem::Version
- version: 0.3.2
+ version: 0.3.3
  platform: ruby
  authors:
  - yuokada
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2018-02-25 00:00:00.000000000 Z
+ date: 2019-01-10 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - ~>
+ - - "~>"
  - !ruby/object:Gem::Version
  version: '1.0'
  name: bundler
@@ -21,13 +21,13 @@ dependencies:
  type: :development
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - ~>
+ - - "~>"
  - !ruby/object:Gem::Version
  version: '1.0'
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '10.0'
  name: rake
@@ -35,7 +35,7 @@ dependencies:
  type: :development
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '10.0'
  description: Dumps records to Orc format file.
@@ -45,27 +45,14 @@ executables: []
  extensions: []
  extra_rdoc_files: []
  files:
- - .gitignore
- - .travis.yml
+ - ".gitignore"
+ - ".travis.yml"
  - LICENSE.txt
  - README.md
  - build.gradle
- - config/checkstyle/checkstyle.xml
- - config/checkstyle/default.xml
- - example/example.yml
- - gradle/wrapper/gradle-wrapper.jar
- - gradle/wrapper/gradle-wrapper.properties
- - gradlew
- - gradlew.bat
- - lib/embulk/output/orc.rb
- - src/main/java/org/embulk/output/orc/OrcColumnVisitor.java
- - src/main/java/org/embulk/output/orc/OrcOutputPlugin.java
- - src/main/java/org/embulk/output/orc/OrcOutputPluginHelper.java
- - src/main/java/org/embulk/output/orc/PluginTask.java
- - src/main/java/org/embulk/output/orc/TimestampColumnOption.java
- - src/test/java/org/embulk/output/orc/TestOrcOutputPlugin.java
  - classpath/activation-1.1.jar
  - classpath/aircompressor-0.8.jar
+ - classpath/animal-sniffer-annotations-1.14.jar
  - classpath/apacheds-i18n-2.0.0-M15.jar
  - classpath/apacheds-kerberos-codec-2.0.0-M15.jar
  - classpath/api-asn1-api-1.0.0-M20.jar
@@ -76,6 +63,7 @@ files:
  - classpath/aws-java-sdk-core-1.10.33.jar
  - classpath/aws-java-sdk-kms-1.10.33.jar
  - classpath/aws-java-sdk-s3-1.10.33.jar
+ - classpath/checker-compat-qual-2.0.0.jar
  - classpath/commons-beanutils-1.7.0.jar
  - classpath/commons-cli-1.2.jar
  - classpath/commons-codec-1.6.jar
@@ -84,7 +72,6 @@ files:
  - classpath/commons-configuration-1.6.jar
  - classpath/commons-daemon-1.0.13.jar
  - classpath/commons-digester-1.8.jar
- - classpath/commons-el-1.0.jar
  - classpath/commons-httpclient-3.1.jar
  - classpath/commons-io-2.4.jar
  - classpath/commons-lang-2.6.jar
@@ -94,24 +81,25 @@ files:
  - classpath/curator-client-2.7.1.jar
  - classpath/curator-framework-2.7.1.jar
  - classpath/curator-recipes-2.7.1.jar
- - classpath/embulk-output-orc-0.3.2.jar
+ - classpath/embulk-output-orc-0.3.3.jar
  - classpath/embulk-util-aws-credentials-0.2.8.jar
+ - classpath/error_prone_annotations-2.1.3.jar
  - classpath/gson-2.2.4.jar
- - classpath/hadoop-annotations-2.7.3.jar
- - classpath/hadoop-auth-2.7.3.jar
- - classpath/hadoop-aws-2.7.3.jar
- - classpath/hadoop-common-2.7.3.jar
- - classpath/hadoop-hdfs-2.6.4.jar
+ - classpath/guava-24.1-jre.jar
+ - classpath/hadoop-annotations-2.7.5.jar
+ - classpath/hadoop-auth-2.7.5.jar
+ - classpath/hadoop-aws-2.7.5.jar
+ - classpath/hadoop-common-2.7.5.jar
+ - classpath/hadoop-hdfs-2.7.5.jar
  - classpath/hive-storage-api-2.2.1.jar
- - classpath/htrace-core-3.0.4.jar
  - classpath/htrace-core-3.1.0-incubating.jar
  - classpath/httpclient-4.3.6.jar
  - classpath/httpcore-4.3.3.jar
+ - classpath/j2objc-annotations-1.1.jar
  - classpath/jackson-core-asl-1.9.13.jar
  - classpath/jackson-jaxrs-1.8.3.jar
  - classpath/jackson-mapper-asl-1.9.13.jar
  - classpath/jackson-xc-1.8.3.jar
- - classpath/jasper-runtime-5.5.23.jar
  - classpath/java-xmlbuilder-0.4.jar
  - classpath/jaxb-api-2.2.2.jar
  - classpath/jaxb-impl-2.2.3-1.jar
@@ -122,17 +110,20 @@ files:
  - classpath/jets3t-0.9.0.jar
  - classpath/jettison-1.1.jar
  - classpath/jetty-6.1.26.jar
+ - classpath/jetty-sslengine-6.1.26.jar
  - classpath/jetty-util-6.1.26.jar
  - classpath/jline-0.9.94.jar
- - classpath/joda-time-2.9.9.jar
- - classpath/jsch-0.1.42.jar
+ - classpath/jsch-0.1.54.jar
  - classpath/jsp-api-2.1.jar
  - classpath/jsr305-3.0.0.jar
+ - classpath/leveldbjni-all-1.8.jar
  - classpath/log4j-1.2.17.jar
  - classpath/netty-3.7.0.Final.jar
- - classpath/orc-core-1.4.3.jar
+ - classpath/netty-all-4.0.23.Final.jar
+ - classpath/orc-core-1.4.4.jar
  - classpath/paranamer-2.3.jar
  - classpath/protobuf-java-2.5.0.jar
+ - classpath/servlet-api-2.5-20081211.jar
  - classpath/servlet-api-2.5.jar
  - classpath/snappy-java-1.0.4.1.jar
  - classpath/stax-api-1.0-2.jar
@@ -141,6 +132,22 @@ files:
  - classpath/xmlenc-0.52.jar
  - classpath/xz-1.0.jar
  - classpath/zookeeper-3.4.6.jar
+ - config/checkstyle/checkstyle.xml
+ - config/checkstyle/default.xml
+ - example/example.yml
+ - gradle/wrapper/gradle-wrapper.jar
+ - gradle/wrapper/gradle-wrapper.properties
+ - gradlew
+ - gradlew.bat
+ - lib/embulk/output/orc.rb
+ - src/main/java/org/embulk/output/orc/OrcColumnVisitor.java
+ - src/main/java/org/embulk/output/orc/OrcOutputPlugin.java
+ - src/main/java/org/embulk/output/orc/OrcOutputPluginHelper.java
+ - src/main/java/org/embulk/output/orc/PluginTask.java
+ - src/main/java/org/embulk/output/orc/TimestampColumnOption.java
+ - src/test/java/org/embulk/output/orc/OrcOutputPluginHelperTest.java
+ - src/test/resources/example-null.yml
+ - src/test/resources/example.yml
  homepage: https://github.com/yuokada/embulk-output-orc
  licenses:
  - MIT
@@ -151,17 +158,17 @@ require_paths:
  - lib
  required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
- - - '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
- - - '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.1.9
+ rubygems_version: 2.6.8
  signing_key:
  specification_version: 4
  summary: Orc output plugin for Embulk
src/test/java/org/embulk/output/orc/TestOrcOutputPlugin.java DELETED
@@ -1,5 +0,0 @@
- package org.embulk.output.orc;
-
- public class TestOrcOutputPlugin
- {
- }