embulk-output-parquet 0.1.0
- checksums.yaml +7 -0
- data/.gitignore +7 -0
- data/LICENSE.txt +21 -0
- data/README.md +33 -0
- data/build.gradle +64 -0
- data/classpath/activation-1.1.jar +0 -0
- data/classpath/apacheds-i18n-2.0.0-M15.jar +0 -0
- data/classpath/apacheds-kerberos-codec-2.0.0-M15.jar +0 -0
- data/classpath/api-asn1-api-1.0.0-M20.jar +0 -0
- data/classpath/api-util-1.0.0-M20.jar +0 -0
- data/classpath/avro-1.7.4.jar +0 -0
- data/classpath/commons-beanutils-1.7.0.jar +0 -0
- data/classpath/commons-cli-1.2.jar +0 -0
- data/classpath/commons-codec-1.6.jar +0 -0
- data/classpath/commons-collections-3.2.1.jar +0 -0
- data/classpath/commons-compress-1.4.1.jar +0 -0
- data/classpath/commons-configuration-1.6.jar +0 -0
- data/classpath/commons-digester-1.8.jar +0 -0
- data/classpath/commons-httpclient-3.1.jar +0 -0
- data/classpath/commons-io-2.4.jar +0 -0
- data/classpath/commons-lang-2.6.jar +0 -0
- data/classpath/commons-logging-1.1.3.jar +0 -0
- data/classpath/commons-math3-3.1.1.jar +0 -0
- data/classpath/commons-net-3.1.jar +0 -0
- data/classpath/curator-client-2.6.0.jar +0 -0
- data/classpath/curator-framework-2.6.0.jar +0 -0
- data/classpath/curator-recipes-2.6.0.jar +0 -0
- data/classpath/embulk-output-parquet-0.1.0.jar +0 -0
- data/classpath/gson-2.2.4.jar +0 -0
- data/classpath/hadoop-annotations-2.6.0.jar +0 -0
- data/classpath/hadoop-auth-2.6.0.jar +0 -0
- data/classpath/hadoop-client-2.6.0.jar +0 -0
- data/classpath/hadoop-common-2.6.0.jar +0 -0
- data/classpath/hadoop-hdfs-2.6.0.jar +0 -0
- data/classpath/hadoop-mapreduce-client-app-2.6.0.jar +0 -0
- data/classpath/hadoop-mapreduce-client-common-2.6.0.jar +0 -0
- data/classpath/hadoop-mapreduce-client-core-2.6.0.jar +0 -0
- data/classpath/hadoop-mapreduce-client-jobclient-2.6.0.jar +0 -0
- data/classpath/hadoop-mapreduce-client-shuffle-2.6.0.jar +0 -0
- data/classpath/hadoop-yarn-api-2.6.0.jar +0 -0
- data/classpath/hadoop-yarn-client-2.6.0.jar +0 -0
- data/classpath/hadoop-yarn-common-2.6.0.jar +0 -0
- data/classpath/hadoop-yarn-server-common-2.6.0.jar +0 -0
- data/classpath/hadoop-yarn-server-nodemanager-2.6.0.jar +0 -0
- data/classpath/htrace-core-3.0.4.jar +0 -0
- data/classpath/httpclient-4.2.5.jar +0 -0
- data/classpath/httpcore-4.2.4.jar +0 -0
- data/classpath/jackson-core-asl-1.9.13.jar +0 -0
- data/classpath/jackson-jaxrs-1.9.13.jar +0 -0
- data/classpath/jackson-mapper-asl-1.9.13.jar +0 -0
- data/classpath/jackson-xc-1.9.13.jar +0 -0
- data/classpath/jaxb-api-2.2.2.jar +0 -0
- data/classpath/jaxb-impl-2.2.3-1.jar +0 -0
- data/classpath/jersey-client-1.9.jar +0 -0
- data/classpath/jersey-core-1.9.jar +0 -0
- data/classpath/jersey-guice-1.9.jar +0 -0
- data/classpath/jersey-json-1.9.jar +0 -0
- data/classpath/jersey-server-1.9.jar +0 -0
- data/classpath/jettison-1.1.jar +0 -0
- data/classpath/jetty-util-6.1.26.jar +0 -0
- data/classpath/jline-0.9.94.jar +0 -0
- data/classpath/jsr305-1.3.9.jar +0 -0
- data/classpath/leveldbjni-all-1.8.jar +0 -0
- data/classpath/netty-3.7.0.Final.jar +0 -0
- data/classpath/paranamer-2.3.jar +0 -0
- data/classpath/parquet-column-1.5.0.jar +0 -0
- data/classpath/parquet-common-1.5.0.jar +0 -0
- data/classpath/parquet-encoding-1.5.0.jar +0 -0
- data/classpath/parquet-format-2.1.0.jar +0 -0
- data/classpath/parquet-generator-1.5.0.jar +0 -0
- data/classpath/parquet-hadoop-1.5.0.jar +0 -0
- data/classpath/parquet-jackson-1.5.0.jar +0 -0
- data/classpath/protobuf-java-2.5.0.jar +0 -0
- data/classpath/servlet-api-2.5.jar +0 -0
- data/classpath/snappy-java-1.1.1.6.jar +0 -0
- data/classpath/stax-api-1.0-2.jar +0 -0
- data/classpath/xercesImpl-2.9.1.jar +0 -0
- data/classpath/xml-apis-1.3.04.jar +0 -0
- data/classpath/xmlenc-0.52.jar +0 -0
- data/classpath/xz-1.0.jar +0 -0
- data/classpath/zookeeper-3.4.6.jar +0 -0
- data/gradle/wrapper/gradle-wrapper.jar +0 -0
- data/gradle/wrapper/gradle-wrapper.properties +6 -0
- data/gradlew +164 -0
- data/gradlew.bat +90 -0
- data/lib/embulk/output/parquet.rb +3 -0
- data/src/main/java/org/embulk/output/EmbulkWriteSupport.java +154 -0
- data/src/main/java/org/embulk/output/ParquetOutputPlugin.java +199 -0
- data/src/test/java/org/embulk/output/TestParquetOutputPlugin.java +5 -0
- metadata +160 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA1:
+  metadata.gz: 66f4589bf4371ed6fba6f3f52271587eb76cc457
+  data.tar.gz: 6d5bd8a010ec341b9a5d0b37a3e0c5a7e15770ae
+SHA512:
+  metadata.gz: abc657f97ba0791f170c40f4c7d7f0248fa4147b1c90803f4b1cdc37f43b60905eeed60523b178e3a0cce9d7cad9f082d2a98af0de090bc7a6b308c95f3de972
+  data.tar.gz: b279faf534596db8366b89a92440242218718369e04a67960e34bbd8b5b18b47d40bfb31f3aa04812bc607b5be5146ee76934bec43f1f176f15f563d0ca290e9
data/.gitignore
ADDED
data/LICENSE.txt
ADDED
@@ -0,0 +1,21 @@
+
+MIT License
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md
ADDED
@@ -0,0 +1,33 @@
+# Parquet output plugin for Embulk
+
+
+## Overview
+
+* **Plugin type**: output
+* **Load all or nothing**: no
+* **Resume supported**: no
+* **Cleanup supported**: no
+
+## Configuration
+
+- **path_prefix**: A prefix of the output path. This is a Hadoop Path URI, so you can also include a `scheme` and `authority` in this parameter. (string, required)
+- **file_ext**: An extension appended to the output path. (string, default: .parquet)
+- **sequence_format**: A format string for the task sequence number, inserted between **path_prefix** and **file_ext**. (string, default: .%03d)
+- **block_size**: The block size of the Parquet file. (int, default: 134217728 (128MB))
+- **page_size**: The page size of the Parquet file. (int, default: 1048576 (1MB))
+- **compression_codec**: The compression codec. Available codecs: UNCOMPRESSED, SNAPPY, GZIP. (string, default: UNCOMPRESSED)
+- **timezone**: The time zone used when formatting timestamp columns. (string, default: UTC)
+
+## Example
+
+```yaml
+out:
+  type: parquet
+  path_prefix: file:///data/output
+```
+
+## Build
+
+```
+$ ./gradlew gem
+```
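For reference, a fuller configuration sketch exercising every option documented in the README above. The `hdfs://` authority, the SNAPPY codec, and the `Asia/Tokyo` time zone are illustrative values chosen for this sketch, not defaults shipped with the plugin:

```yaml
out:
  type: parquet
  # path_prefix is a Hadoop Path URI, so it may carry a scheme and authority
  path_prefix: hdfs://namenode:8020/user/embulk/output
  file_ext: .parquet
  sequence_format: .%03d        # task index, inserted between prefix and extension
  block_size: 134217728         # 128MB row groups
  page_size: 1048576            # 1MB pages
  compression_codec: SNAPPY     # UNCOMPRESSED, SNAPPY, or GZIP
  timezone: Asia/Tokyo          # used when formatting timestamp columns
```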
data/build.gradle
ADDED
@@ -0,0 +1,64 @@
+plugins {
+    id "com.jfrog.bintray" version "1.1"
+    id "com.github.jruby-gradle.base" version "0.1.5"
+    id "java"
+}
+import com.github.jrubygradle.JRubyExec
+repositories {
+    mavenCentral()
+    jcenter()
+    maven {
+        url "http://maven.twttr.com/"
+    }
+}
+configurations {
+    provided
+}
+
+version = "0.1.0"
+
+dependencies {
+    compile "org.embulk:embulk-core:0.4.2"
+    provided "org.embulk:embulk-core:0.4.2"
+
+    compile "com.twitter:parquet-hadoop:1.5.0"
+    compile "org.apache.hadoop:hadoop-client:2.6.0"
+    compile "org.xerial.snappy:snappy-java:1.1.1.6"
+
+    testCompile "junit:junit:4.+"
+}
+
+task classpath(type: Copy, dependsOn: ["jar"]) {
+    doFirst { file("classpath").deleteDir() }
+    from (configurations.runtime - configurations.provided + files(jar.archivePath))
+    into "classpath"
+}
+clean { delete 'classpath' }
+
+task gem(type: JRubyExec, dependsOn: ["build", "gemspec", "classpath"]) {
+    jrubyArgs "-rrubygems/gem_runner", "-eGem::GemRunner.new.run(ARGV)", "build"
+    script "build/gemspec"
+    doLast { ant.move(file: "${project.name}-${project.version}.gem", todir: "pkg") }
+}
+
+task gemspec << { file("build/gemspec").write($/
+Gem::Specification.new do |spec|
+  spec.name          = "${project.name}"
+  spec.version       = "${project.version}"
+  spec.authors       = ["OKUNO Akihiro"]
+  spec.summary       = %[Parquet output plugin for Embulk]
+  spec.description   = %[Parquet output plugin is an Embulk plugin that loads records to Parquet read by any input plugins. Search the input plugins by "embulk-input" keyword.]
+  spec.email         = ["choplin.choplin@gmail.com"]
+  spec.licenses      = ["MIT"]
+  spec.homepage      = "https://github.com/choplin/embulk-output-parquet"
+
+  spec.files         = `git ls-files`.split("\n") + Dir["classpath/*.jar"]
+  spec.test_files    = spec.files.grep(%r"^(test|spec)/")
+  spec.require_paths = ["lib"]
+
+  #spec.add_dependency 'YOUR_GEM_DEPENDENCY', ['~> YOUR_GEM_DEPENDENCY_VERSION']
+  spec.add_development_dependency 'bundler', ['~> 1.0']
+  spec.add_development_dependency 'rake', ['>= 10.0']
+end
+/$)
+}
data/classpath/*.jar, data/gradle/wrapper/gradle-wrapper.jar
Binary files (diff not shown)
data/gradlew
ADDED
@@ -0,0 +1,164 @@
+#!/usr/bin/env bash
+
+##############################################################################
+##
+##  Gradle start up script for UN*X
+##
+##############################################################################
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS=""
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn ( ) {
+    echo "$*"
+}
+
+die ( ) {
+    echo
+    echo "$*"
+    echo
+    exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+case "`uname`" in
+  CYGWIN* )
+    cygwin=true
+    ;;
+  Darwin* )
+    darwin=true
+    ;;
+  MINGW* )
+    msys=true
+    ;;
+esac
+
+# For Cygwin, ensure paths are in UNIX format before anything is touched.
+if $cygwin ; then
+    [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
+fi
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+    ls=`ls -ld "$PRG"`
+    link=`expr "$ls" : '.*-> \(.*\)$'`
+    if expr "$link" : '/.*' > /dev/null; then
+        PRG="$link"
+    else
+        PRG=`dirname "$PRG"`"/$link"
+    fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >&-
+APP_HOME="`pwd -P`"
+cd "$SAVED" >&-
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+        # IBM's JDK on AIX uses strange locations for the executables
+        JAVACMD="$JAVA_HOME/jre/sh/java"
+    else
+        JAVACMD="$JAVA_HOME/bin/java"
+    fi
+    if [ ! -x "$JAVACMD" ] ; then
+        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+    fi
+else
+    JAVACMD="java"
+    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
+    MAX_FD_LIMIT=`ulimit -H -n`
+    if [ $? -eq 0 ] ; then
+        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+            MAX_FD="$MAX_FD_LIMIT"
+        fi
+        ulimit -n $MAX_FD
+        if [ $? -ne 0 ] ; then
+            warn "Could not set maximum file descriptor limit: $MAX_FD"
+        fi
+    else
+        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+    fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin ; then
+    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+
+    # We build the pattern for arguments to be converted via cygpath
+    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+    SEP=""
+    for dir in $ROOTDIRSRAW ; do
+        ROOTDIRS="$ROOTDIRS$SEP$dir"
+        SEP="|"
+    done
+    OURCYGPATTERN="(^($ROOTDIRS))"
+    # Add a user-defined pattern to the cygpath arguments
+    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+    fi
+    # Now convert the arguments - kludge to limit ourselves to /bin/sh
+    i=0
+    for arg in "$@" ; do
+        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+        CHECK2=`echo "$arg"|egrep -c "^-"`   ### Determine if an option
+
+        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then   ### Added a condition
+            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+        else
+            eval `echo args$i`="\"$arg\""
+        fi
+        i=$((i+1))
+    done
+    case $i in
+        (0) set -- ;;
+        (1) set -- "$args0" ;;
+        (2) set -- "$args0" "$args1" ;;
+        (3) set -- "$args0" "$args1" "$args2" ;;
+        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+    esac
+fi
+
+# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
+function splitJvmOpts() {
+    JVM_OPTS=("$@")
+}
+eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
+JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
+
+exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
data/gradlew.bat
ADDED
@@ -0,0 +1,90 @@
+@if "%DEBUG%" == "" @echo off
+@rem ##########################################################################
+@rem
+@rem  Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS=
+
+set DIRNAME=%~dp0
+if "%DIRNAME%" == "" set DIRNAME=.
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if "%ERRORLEVEL%" == "0" goto init
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto init
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:init
+@rem Get command-line arguments, handling Windowz variants
+
+if not "%OS%" == "Windows_NT" goto win9xME_args
+if "%@eval[2+2]" == "4" goto 4NT_args
+
+:win9xME_args
+@rem Slurp the command line arguments.
+set CMD_LINE_ARGS=
+set _SKIP=2
+
+:win9xME_args_slurp
+if "x%~1" == "x" goto execute
+
+set CMD_LINE_ARGS=%*
+goto execute
+
+:4NT_args
+@rem Get arguments from the 4NT Shell from JP Software
+set CMD_LINE_ARGS=%$
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
+
+:end
+@rem End local scope for the variables with windows NT shell
+if "%ERRORLEVEL%"=="0" goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
data/src/main/java/org/embulk/output/EmbulkWriteSupport.java
ADDED
@@ -0,0 +1,154 @@
+package org.embulk.output;
+
+import org.apache.hadoop.conf.Configuration;
+import org.embulk.spi.Column;
+import org.embulk.spi.ColumnVisitor;
+import org.embulk.spi.PageReader;
+import org.embulk.spi.Schema;
+import org.embulk.spi.time.Timestamp;
+import org.embulk.spi.time.TimestampFormatter;
+import parquet.hadoop.api.WriteSupport;
+import parquet.io.api.Binary;
+import parquet.io.api.RecordConsumer;
+import parquet.schema.MessageType;
+import parquet.schema.PrimitiveType;
+import parquet.schema.PrimitiveType.PrimitiveTypeName;
+import parquet.schema.Type;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class EmbulkWriteSupport extends WriteSupport<PageReader> {
+    final Schema schema;
+    RecordConsumer consumer;
+    WriteContext writeContext;
+    Map<Integer, TimestampFormatter> timestampFormatters;
+
+    public EmbulkWriteSupport(Schema schema, Map<Integer, TimestampFormatter> timestampFormatters) {
+        this.schema = schema;
+        this.timestampFormatters = timestampFormatters;
+    }
+
+    @Override
+    public WriteContext init(Configuration configuration) {
+        if (writeContext == null) {
+            init();
+        }
+        return writeContext;
+    }
+
+    @Override
+    public void prepareForWrite(RecordConsumer recordConsumer) {
+        this.consumer = recordConsumer;
+    }
+
+    @Override
+    public void write(PageReader record) {
+        final ColumnVisitor visitor = new ParquetColumnVisitor(record, consumer);
+        consumer.startMessage();
+        for (Column c : schema.getColumns()) {
+            if (!record.isNull(c)) {
+                consumer.startField(c.getName(), c.getIndex());
+                c.visit(visitor);
+                consumer.endField(c.getName(), c.getIndex());
+            }
+        }
+        consumer.endMessage();
+    }
+
+    private void init() {
+        MessageType messageType = convertSchema(schema);
+        Map<String, String> metadata = new HashMap<>();
+        writeContext = new WriteContext(messageType, metadata);
+    }
+
+    private MessageType convertSchema(Schema schema) {
+        SchemaConvertColumnVisitor visitor = new SchemaConvertColumnVisitor();
+        schema.visitColumns(visitor);
+        String messageName = "embulk";
+        return new MessageType(messageName, visitor.getConvertedFields());
+    }
+
+    class ParquetColumnVisitor implements ColumnVisitor {
+        final PageReader record;
+        final RecordConsumer consumer;
+
+        public ParquetColumnVisitor(PageReader record, RecordConsumer consumer) {
+            this.record = record;
+            this.consumer = consumer;
+        }
+
+        @Override
+        public void booleanColumn(Column column) {
+            if (!record.isNull(column)) {
+                consumer.addBoolean(record.getBoolean(column));
+            }
+        }
+
+        @Override
+        public void longColumn(Column column) {
+            if (!record.isNull(column)) {
+                consumer.addLong(record.getLong(column));
+            }
+        }
+
+        @Override
+        public void doubleColumn(Column column) {
+            if (!record.isNull(column)) {
+                consumer.addDouble(record.getDouble(column));
+            }
+        }
+
+        @Override
+        public void stringColumn(Column column) {
+            if (!record.isNull(column)) {
+                consumer.addBinary(Binary.fromString(record.getString(column)));
+            }
+        }
+
+        @Override
+        public void timestampColumn(Column column) {
+            if (!record.isNull(column)) {
+                Timestamp t = record.getTimestamp(column);
+                String formatted = timestampFormatters.get(column.getIndex()).format(t);
+                consumer.addBinary(Binary.fromString(formatted));
+            }
+        }
+    }
+
+    class SchemaConvertColumnVisitor implements ColumnVisitor {
+        List<Type> fields = new ArrayList<>();
+
+        @Override
+        public void booleanColumn(Column column) {
+            fields.add(new PrimitiveType(Type.Repetition.OPTIONAL, PrimitiveTypeName.BOOLEAN, column.getName()));
+        }
+
+        @Override
+        public void longColumn(Column column) {
+            fields.add(new PrimitiveType(Type.Repetition.OPTIONAL, PrimitiveTypeName.INT64, column.getName()));
+        }
+
+        @Override
+        public void doubleColumn(Column column) {
+            fields.add(new PrimitiveType(Type.Repetition.OPTIONAL, PrimitiveTypeName.DOUBLE, column.getName()));
+        }
+
+        @Override
+        public void stringColumn(Column column) {
+            fields.add(new PrimitiveType(Type.Repetition.OPTIONAL, PrimitiveTypeName.BINARY, column.getName()));
+        }
+
+        @Override
+        public void timestampColumn(Column column) {
+            // formatted as string
+            fields.add(new PrimitiveType(Type.Repetition.OPTIONAL, PrimitiveTypeName.BINARY, column.getName()));
+        }
+
+        public List<Type> getConvertedFields() {
+            return fields;
+        }
+    }
+}
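To make the schema conversion in SchemaConvertColumnVisitor above concrete: every Embulk column becomes an OPTIONAL Parquet primitive, and timestamp columns are stored as formatted strings rather than a native timestamp type. As a hypothetical illustration, an Embulk schema with columns id:long, name:string, and created_at:timestamp would yield roughly this Parquet message type:

```
message embulk {
  optional int64 id;
  optional binary name;
  optional binary created_at;  // timestamp, formatted as a string
}
```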
data/src/main/java/org/embulk/output/ParquetOutputPlugin.java
ADDED
@@ -0,0 +1,199 @@
+package org.embulk.output;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import com.google.common.base.Throwables;
+import com.google.common.collect.ImmutableBiMap;
+import com.google.common.collect.ImmutableMap;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.embulk.config.CommitReport;
+import org.embulk.config.Config;
+import org.embulk.config.ConfigDefault;
+import org.embulk.config.ConfigDiff;
+import org.embulk.config.ConfigSource;
+import org.embulk.config.Task;
+import org.embulk.config.TaskSource;
+import org.embulk.spi.Column;
+import org.embulk.spi.Exec;
+import org.embulk.spi.OutputPlugin;
+import org.embulk.spi.Page;
+import org.embulk.spi.PageReader;
+import org.embulk.spi.Schema;
+import org.embulk.spi.TransactionalPageOutput;
+import org.embulk.spi.time.TimestampFormatter;
+import org.embulk.spi.type.TimestampType;
+import parquet.hadoop.ParquetWriter;
+import parquet.hadoop.api.WriteSupport;
+import parquet.hadoop.metadata.CompressionCodecName;
+
+@SuppressWarnings("unused")
+public class ParquetOutputPlugin
+        implements OutputPlugin
+{
+    public interface PluginTask
+            extends Task, TimestampFormatter.FormatterTask
+    {
+        @Config("path_prefix")
+        public String getPathPrefix();
+
+        @Config("file_ext")
+        @ConfigDefault("\".parquet\"")
+        public String getFileNameExtension();
+
+        @Config("sequence_format")
+        @ConfigDefault("\".%03d\"")
+        public String getSequenceFormat();
+
+        @Config("block_size")
+        @ConfigDefault("134217728") // 128M
+        public int getBlockSize();
+
+        @Config("page_size")
+        @ConfigDefault("1048576") // 1M
+        public int getPageSize();
+
+        @Config("compression_codec")
+        @ConfigDefault("\"UNCOMPRESSED\"")
+        public String getCompressionCodec();
+    }
+
+    public ConfigDiff transaction(ConfigSource config,
+            Schema schema, int processorCount,
+            OutputPlugin.Control control)
+    {
+        PluginTask task = config.loadConfig(PluginTask.class);
+
+        //TODO
+
+        control.run(task.dump());
+        return Exec.newConfigDiff();
+    }
+
+    public ConfigDiff resume(TaskSource taskSource,
+            Schema schema, int processorCount,
+            OutputPlugin.Control control)
+    {
+        throw new UnsupportedOperationException("parquet output plugin does not support resuming");
+    }
+
+    public void cleanup(TaskSource taskSource,
+            Schema schema, int processorCount,
+            List<CommitReport> successCommitReports)
+    {
+        //TODO
+    }
+
+    public TransactionalPageOutput open(TaskSource taskSource, final Schema schema, int processorIndex)
+    {
+        PluginTask task = taskSource.loadTask(PluginTask.class);
+
+        final String pathPrefix = task.getPathPrefix();
+        final String pathSuffix = task.getFileNameExtension();
+        final String sequenceFormat = task.getSequenceFormat();
+        final CompressionCodecName codec = CompressionCodecName.valueOf(task.getCompressionCodec());
+        final int blockSize = task.getBlockSize();
+        final int pageSize = task.getPageSize();
+
+        final String path = pathPrefix + String.format(sequenceFormat, processorIndex) + pathSuffix;
+
+        final PageReader reader = new PageReader(schema);
+
+        final Map<Integer, TimestampFormatter> timestampFormatters = newTimestampFormatters(task, schema);
+        final EmbulkWriteSupport writeSupport = new EmbulkWriteSupport(schema, timestampFormatters);
+        ParquetWriter<PageReader> writer = createParquetWriter(new Path(path), writeSupport, codec, blockSize, pageSize);
+
+        return new ParquetTransactionalPageOutput(reader, writer);
+    }
+
+    private Map<Integer, TimestampFormatter> newTimestampFormatters(
+            TimestampFormatter.FormatterTask task, Schema schema)
+    {
+        ImmutableMap.Builder<Integer, TimestampFormatter> builder = new ImmutableBiMap.Builder<>();
+        for (Column column : schema.getColumns()) {
+            if (column.getType() instanceof TimestampType) {
+                TimestampType tt = (TimestampType) column.getType();
+                builder.put(column.getIndex(), new TimestampFormatter(tt.getFormat(), task));
+            }
+        }
+        return builder.build();
+    }
+
+    private <T> ParquetWriter<T> createParquetWriter(Path path, WriteSupport<T> writeSupport, CompressionCodecName codec, int blockSize, int pageSize) {
+        ParquetWriter<T> writer = null;
+
+        Configuration conf = new Configuration();
+        conf.set("fs.hdfs.impl", DistributedFileSystem.class.getName());
+        conf.set("fs.file.impl", LocalFileSystem.class.getName());
+        conf.setClassLoader(this.getClass().getClassLoader());
+
+        try {
+            writer = new ParquetWriter<>(
+                    path,
+                    writeSupport,
+                    codec,
+                    blockSize,
+                    pageSize,
+                    pageSize,
+                    ParquetWriter.DEFAULT_IS_DICTIONARY_ENABLED,
+                    ParquetWriter.DEFAULT_IS_VALIDATING_ENABLED,
+                    ParquetWriter.DEFAULT_WRITER_VERSION,
+                    conf);
+        } catch (IOException e) {
+            Throwables.propagate(e);
+        }
+        return writer;
+    }
+
+    class ParquetTransactionalPageOutput implements TransactionalPageOutput {
+        private PageReader reader;
+        private ParquetWriter<PageReader> writer;
+
+        public ParquetTransactionalPageOutput(PageReader reader, ParquetWriter<PageReader> writer) {
+            this.reader = reader;
+            this.writer = writer;
+        }
+
+        @Override
+        public void add(Page page) {
+            try {
+                reader.setPage(page);
+                while (reader.nextRecord()) {
+                    writer.write(reader);
+                }
+            } catch(IOException e) {
+                Throwables.propagate(e);
+            }
+        }
+
+        @Override
+        public void finish() {
+            try {
+                writer.close();
+                writer = null;
+            } catch (IOException e) {
+                Throwables.propagate(e);
+            }
+        }
+
+        @Override
+        public void close() {
+            //TODO
+        }
+
+        @Override
+        public void abort() {
+            //TODO
+        }
+
+        @Override
+        public CommitReport commit() {
+            return Exec.newCommitReport();
+            //TODO
+        }
+    }
+}
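A note on the naming scheme in open() above: each Embulk task composes its own output path as path_prefix + String.format(sequence_format, processorIndex) + file_ext, so concurrent tasks never collide on a file. A worked example under the README's sample config (path_prefix of file:///data/output, all other options at their defaults):

```
"file:///data/output" + format(".%03d", 0) + ".parquet"
  => file:///data/output.000.parquet   (task 0)
  => file:///data/output.001.parquet   (task 1)
```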
metadata
ADDED
@@ -0,0 +1,160 @@
+--- !ruby/object:Gem::Specification
+name: embulk-output-parquet
+version: !ruby/object:Gem::Version
+  version: 0.1.0
+platform: ruby
+authors:
+- OKUNO Akihiro
+autorequire:
+bindir: bin
+cert_chain: []
+date: 2015-02-17 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: '1.0'
+  name: bundler
+  prerelease: false
+  type: :development
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: '1.0'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '10.0'
+  name: rake
+  prerelease: false
+  type: :development
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '10.0'
+description: Parquet output plugin is an Embulk plugin that loads records to Parquet read by any input plugins. Search the input plugins by "embulk-input" keyword.
+email:
+- choplin.choplin@gmail.com
+executables: []
+extensions: []
+extra_rdoc_files: []
+files:
+- .gitignore
+- LICENSE.txt
+- README.md
+- build.gradle
+- gradle/wrapper/gradle-wrapper.jar
+- gradle/wrapper/gradle-wrapper.properties
+- gradlew
+- gradlew.bat
+- lib/embulk/output/parquet.rb
+- src/main/java/org/embulk/output/EmbulkWriteSupport.java
+- src/main/java/org/embulk/output/ParquetOutputPlugin.java
+- src/test/java/org/embulk/output/TestParquetOutputPlugin.java
+- classpath/activation-1.1.jar
+- classpath/apacheds-i18n-2.0.0-M15.jar
+- classpath/apacheds-kerberos-codec-2.0.0-M15.jar
+- classpath/api-asn1-api-1.0.0-M20.jar
+- classpath/api-util-1.0.0-M20.jar
+- classpath/avro-1.7.4.jar
+- classpath/commons-beanutils-1.7.0.jar
+- classpath/commons-cli-1.2.jar
+- classpath/commons-codec-1.6.jar
+- classpath/commons-collections-3.2.1.jar
+- classpath/commons-compress-1.4.1.jar
+- classpath/commons-configuration-1.6.jar
+- classpath/commons-digester-1.8.jar
+- classpath/commons-httpclient-3.1.jar
+- classpath/commons-io-2.4.jar
+- classpath/commons-lang-2.6.jar
+- classpath/commons-logging-1.1.3.jar
+- classpath/commons-math3-3.1.1.jar
+- classpath/commons-net-3.1.jar
+- classpath/curator-client-2.6.0.jar
+- classpath/curator-framework-2.6.0.jar
+- classpath/curator-recipes-2.6.0.jar
+- classpath/embulk-output-parquet-0.1.0.jar
+- classpath/gson-2.2.4.jar
+- classpath/hadoop-annotations-2.6.0.jar
+- classpath/hadoop-auth-2.6.0.jar
+- classpath/hadoop-client-2.6.0.jar
+- classpath/hadoop-common-2.6.0.jar
+- classpath/hadoop-hdfs-2.6.0.jar
+- classpath/hadoop-mapreduce-client-app-2.6.0.jar
+- classpath/hadoop-mapreduce-client-common-2.6.0.jar
+- classpath/hadoop-mapreduce-client-core-2.6.0.jar
+- classpath/hadoop-mapreduce-client-jobclient-2.6.0.jar
+- classpath/hadoop-mapreduce-client-shuffle-2.6.0.jar
+- classpath/hadoop-yarn-api-2.6.0.jar
+- classpath/hadoop-yarn-client-2.6.0.jar
+- classpath/hadoop-yarn-common-2.6.0.jar
+- classpath/hadoop-yarn-server-common-2.6.0.jar
+- classpath/hadoop-yarn-server-nodemanager-2.6.0.jar
+- classpath/htrace-core-3.0.4.jar
+- classpath/httpclient-4.2.5.jar
+- classpath/httpcore-4.2.4.jar
+- classpath/jackson-core-asl-1.9.13.jar
+- classpath/jackson-jaxrs-1.9.13.jar
+- classpath/jackson-mapper-asl-1.9.13.jar
+- classpath/jackson-xc-1.9.13.jar
+- classpath/jaxb-api-2.2.2.jar
+- classpath/jaxb-impl-2.2.3-1.jar
+- classpath/jersey-client-1.9.jar
+- classpath/jersey-core-1.9.jar
+- classpath/jersey-guice-1.9.jar
+- classpath/jersey-json-1.9.jar
+- classpath/jersey-server-1.9.jar
+- classpath/jettison-1.1.jar
+- classpath/jetty-util-6.1.26.jar
+- classpath/jline-0.9.94.jar
+- classpath/jsr305-1.3.9.jar
+- classpath/leveldbjni-all-1.8.jar
+- classpath/netty-3.7.0.Final.jar
+- classpath/paranamer-2.3.jar
+- classpath/parquet-column-1.5.0.jar
+- classpath/parquet-common-1.5.0.jar
+- classpath/parquet-encoding-1.5.0.jar
+- classpath/parquet-format-2.1.0.jar
+- classpath/parquet-generator-1.5.0.jar
+- classpath/parquet-hadoop-1.5.0.jar
+- classpath/parquet-jackson-1.5.0.jar
+- classpath/protobuf-java-2.5.0.jar
+- classpath/servlet-api-2.5.jar
+- classpath/snappy-java-1.1.1.6.jar
+- classpath/stax-api-1.0-2.jar
+- classpath/xercesImpl-2.9.1.jar
+- classpath/xml-apis-1.3.04.jar
+- classpath/xmlenc-0.52.jar
+- classpath/xz-1.0.jar
+- classpath/zookeeper-3.4.6.jar
+homepage: https://github.com/choplin/embulk-output-parquet
+licenses:
+- MIT
+metadata: {}
+post_install_message:
+rdoc_options: []
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+requirements: []
+rubyforge_project:
+rubygems_version: 2.1.9
+signing_key:
+specification_version: 4
+summary: Parquet output plugin for Embulk
+test_files: []