embulk-output-hdfs 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +8 -0
- data/LICENSE.txt +21 -0
- data/README.md +52 -0
- data/build.gradle +75 -0
- data/classpath/activation-1.1.jar +0 -0
- data/classpath/apacheds-i18n-2.0.0-M15.jar +0 -0
- data/classpath/apacheds-kerberos-codec-2.0.0-M15.jar +0 -0
- data/classpath/api-asn1-api-1.0.0-M20.jar +0 -0
- data/classpath/api-util-1.0.0-M20.jar +0 -0
- data/classpath/asm-3.1.jar +0 -0
- data/classpath/avro-1.7.4.jar +0 -0
- data/classpath/commons-beanutils-1.7.0.jar +0 -0
- data/classpath/commons-cli-1.2.jar +0 -0
- data/classpath/commons-codec-1.6.jar +0 -0
- data/classpath/commons-collections-3.2.1.jar +0 -0
- data/classpath/commons-compress-1.4.1.jar +0 -0
- data/classpath/commons-configuration-1.6.jar +0 -0
- data/classpath/commons-digester-1.8.jar +0 -0
- data/classpath/commons-httpclient-3.1.jar +0 -0
- data/classpath/commons-io-2.4.jar +0 -0
- data/classpath/commons-lang-2.6.jar +0 -0
- data/classpath/commons-logging-1.1.3.jar +0 -0
- data/classpath/commons-math3-3.1.1.jar +0 -0
- data/classpath/commons-net-3.1.jar +0 -0
- data/classpath/curator-client-2.6.0.jar +0 -0
- data/classpath/curator-framework-2.6.0.jar +0 -0
- data/classpath/curator-recipes-2.6.0.jar +0 -0
- data/classpath/embulk-output-hdfs-0.1.0.jar +0 -0
- data/classpath/gson-2.2.4.jar +0 -0
- data/classpath/hadoop-annotations-2.6.0.jar +0 -0
- data/classpath/hadoop-auth-2.6.0.jar +0 -0
- data/classpath/hadoop-client-2.6.0.jar +0 -0
- data/classpath/hadoop-common-2.6.0.jar +0 -0
- data/classpath/hadoop-hdfs-2.6.0.jar +0 -0
- data/classpath/hadoop-mapreduce-client-app-2.6.0.jar +0 -0
- data/classpath/hadoop-mapreduce-client-common-2.6.0.jar +0 -0
- data/classpath/hadoop-mapreduce-client-core-2.6.0.jar +0 -0
- data/classpath/hadoop-mapreduce-client-jobclient-2.6.0.jar +0 -0
- data/classpath/hadoop-mapreduce-client-shuffle-2.6.0.jar +0 -0
- data/classpath/hadoop-yarn-api-2.6.0.jar +0 -0
- data/classpath/hadoop-yarn-client-2.6.0.jar +0 -0
- data/classpath/hadoop-yarn-common-2.6.0.jar +0 -0
- data/classpath/hadoop-yarn-server-common-2.6.0.jar +0 -0
- data/classpath/hadoop-yarn-server-nodemanager-2.6.0.jar +0 -0
- data/classpath/htrace-core-3.0.4.jar +0 -0
- data/classpath/httpclient-4.2.5.jar +0 -0
- data/classpath/httpcore-4.2.4.jar +0 -0
- data/classpath/jackson-core-asl-1.9.13.jar +0 -0
- data/classpath/jackson-jaxrs-1.9.13.jar +0 -0
- data/classpath/jackson-mapper-asl-1.9.13.jar +0 -0
- data/classpath/jackson-xc-1.9.13.jar +0 -0
- data/classpath/jaxb-api-2.2.2.jar +0 -0
- data/classpath/jaxb-impl-2.2.3-1.jar +0 -0
- data/classpath/jersey-client-1.9.jar +0 -0
- data/classpath/jersey-core-1.9.jar +0 -0
- data/classpath/jersey-guice-1.9.jar +0 -0
- data/classpath/jersey-json-1.9.jar +0 -0
- data/classpath/jersey-server-1.9.jar +0 -0
- data/classpath/jettison-1.1.jar +0 -0
- data/classpath/jetty-util-6.1.26.jar +0 -0
- data/classpath/jline-0.9.94.jar +0 -0
- data/classpath/jsr305-1.3.9.jar +0 -0
- data/classpath/leveldbjni-all-1.8.jar +0 -0
- data/classpath/log4j-1.2.17.jar +0 -0
- data/classpath/netty-3.7.0.Final.jar +0 -0
- data/classpath/paranamer-2.3.jar +0 -0
- data/classpath/protobuf-java-2.5.0.jar +0 -0
- data/classpath/servlet-api-2.5.jar +0 -0
- data/classpath/slf4j-log4j12-1.7.5.jar +0 -0
- data/classpath/snappy-java-1.0.4.1.jar +0 -0
- data/classpath/stax-api-1.0-2.jar +0 -0
- data/classpath/xercesImpl-2.9.1.jar +0 -0
- data/classpath/xml-apis-1.3.04.jar +0 -0
- data/classpath/xmlenc-0.52.jar +0 -0
- data/classpath/xz-1.0.jar +0 -0
- data/classpath/zookeeper-3.4.6.jar +0 -0
- data/gradle/wrapper/gradle-wrapper.jar +0 -0
- data/gradle/wrapper/gradle-wrapper.properties +6 -0
- data/gradlew +164 -0
- data/gradlew.bat +90 -0
- data/lib/embulk/output/hdfs.rb +3 -0
- data/src/main/java/org/embulk/output/HdfsOutputPlugin.java +219 -0
- data/src/test/java/org/embulk/output/TestHdfsOutputPlugin.java +5 -0
- metadata +155 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA1:
+  metadata.gz: aa7594a275e09ecc5309ecb88afc60d151aba6ee
+  data.tar.gz: f6210bca06c35840e871b51f0224cd1b5a5b8f80
+SHA512:
+  metadata.gz: be65b007a25cc8723a237c5eb35066ed34e59bd1b2d3924cf4a70e2d2023b3a25e6c321db1b7984271c4d704a3f0499c55bc06f7d3c4c5fc86e89486777835e9
+  data.tar.gz: cc8a9638f102baec69b007707103de38e5c7a43aac168ffcbea78862cde325535cde4b943a7abe85b65d7f00a8643bfd9f255748ded4fbd1292b409d60618b3c
data/.gitignore
ADDED
data/LICENSE.txt
ADDED
@@ -0,0 +1,21 @@
+
+MIT License
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md
ADDED
@@ -0,0 +1,52 @@
+# Hdfs output plugin for Embulk
+
+A file output plugin for Embulk that writes files to HDFS.
+
+## Overview
+
+* **Plugin type**: file output
+* **Load all or nothing**: no
+* **Resume supported**: no
+* **Cleanup supported**: no
+
+## Configuration
+
+- **config_files** list of paths to Hadoop's configuration files (array of strings, default: `[]`)
+- **config** overwrites configuration parameters (hash, default: `{}`)
+- **output_path** the path where output files are finally stored (string, default: `"/tmp/embulk.output.hdfs_output.%Y%m%d_%s"`)
+- **working_path** the path where files are temporarily stored (string, default: `"/tmp/embulk.working.hdfs_output.%Y%m%d_%s"`)
+
+## Example
+
+```yaml
+out:
+  type: hdfs
+  config_files:
+    - /etc/hadoop/conf/core-site.xml
+    - /etc/hadoop/conf/hdfs-site.xml
+    - /etc/hadoop/conf/mapred-site.xml
+    - /etc/hadoop/conf/yarn-site.xml
+  config:
+    fs.defaultFS: 'hdfs://hdp-nn1:8020'
+    dfs.replication: 1
+    mapreduce.client.submit.file.replication: 1
+    fs.hdfs.impl: 'org.apache.hadoop.hdfs.DistributedFileSystem'
+    fs.file.impl: 'org.apache.hadoop.fs.LocalFileSystem'
+  formatter:
+    type: csv
+    encoding: UTF-8
+```
+
+
+## Build
+
+```
+$ ./gradlew gem
+```
+
+## Development
+
+```
+$ ./gradlew classpath
+$ bundle exec embulk run -I lib example.yml
+```
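Editor's note: the `output_path` and `working_path` defaults above embed Ruby strftime tokens (`%Y%m%d_%s`). The plugin itself expands them at run time by evaluating `Time.now.strftime` through an embedded JRuby `ScriptingContainer` (see `HdfsOutputPlugin#strftime` in the source further below). The sketch that follows is only an illustration of what the default pattern resolves to; it uses `java.time` as a stand-in for the JRuby call, and the printed timestamp is an assumed example value.

```java
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;

// Illustration only: the plugin expands the pattern with JRuby's Time.now.strftime,
// not with java.time. This sketch just shows what "%Y%m%d_%s" turns into.
public class OutputPathExpansionSketch
{
    public static void main(String[] args)
    {
        ZonedDateTime now = ZonedDateTime.now();
        String date = now.format(DateTimeFormatter.ofPattern("yyyyMMdd")); // %Y%m%d
        long epochSeconds = now.toEpochSecond();                           // %s
        String outputPath = "/tmp/embulk.output.hdfs_output." + date + "_" + epochSeconds;
        // e.g. /tmp/embulk.output.hdfs_output.20150707_1436227200 (example timestamp)
        System.out.println(outputPath);
    }
}
```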
data/build.gradle
ADDED
@@ -0,0 +1,75 @@
+plugins {
+    id "com.jfrog.bintray" version "1.1"
+    id "com.github.jruby-gradle.base" version "0.1.5"
+    id "java"
+}
+import com.github.jrubygradle.JRubyExec
+repositories {
+    mavenCentral()
+    jcenter()
+}
+configurations {
+    provided
+}
+
+version = "0.1.0"
+
+dependencies {
+    compile "org.embulk:embulk-core:0.6.16"
+    provided "org.embulk:embulk-core:0.6.16"
+    // compile "YOUR_JAR_DEPENDENCY_GROUP:YOUR_JAR_DEPENDENCY_MODULE:YOUR_JAR_DEPENDENCY_VERSION"
+    compile 'org.apache.hadoop:hadoop-client:2.6.0'
+    compile 'com.google.guava:guava:14.0'
+    testCompile "junit:junit:4.+"
+}
+
+task classpath(type: Copy, dependsOn: ["jar"]) {
+    doFirst { file("classpath").deleteDir() }
+    from (configurations.runtime - configurations.provided + files(jar.archivePath))
+    into "classpath"
+}
+clean { delete "classpath" }
+
+task gem(type: JRubyExec, dependsOn: ["gemspec", "classpath"]) {
+    jrubyArgs "-rrubygems/gem_runner", "-eGem::GemRunner.new.run(ARGV)", "build"
+    script "${project.name}.gemspec"
+    doLast { ant.move(file: "${project.name}-${project.version}.gem", todir: "pkg") }
+}
+
+task gemPush(type: JRubyExec, dependsOn: ["gem"]) {
+    jrubyArgs "-rrubygems/gem_runner", "-eGem::GemRunner.new.run(ARGV)", "push"
+    script "pkg/${project.name}-${project.version}.gem"
+}
+
+task "package"(dependsOn: ["gemspec", "classpath"]) << {
+    println "> Build succeeded."
+    println "> You can run embulk with '-L ${file(".").absolutePath}' argument."
+}
+
+task gemspec {
+    ext.gemspecFile = file("${project.name}.gemspec")
+    inputs.file "build.gradle"
+    outputs.file gemspecFile
+    doLast { gemspecFile.write($/
+Gem::Specification.new do |spec|
+  spec.name          = "${project.name}"
+  spec.version       = "${project.version}"
+  spec.authors       = ["takahiro.nakayama"]
+  spec.summary       = %[Hdfs output plugin for Embulk]
+  spec.description   = %[Dumps records to Hdfs.]
+  spec.email         = ["civitaspo@gmail.com"]
+  spec.licenses      = ["MIT"]
+  spec.homepage      = "https://github.com/civitaspo/embulk-output-hdfs"
+
+  spec.files         = `git ls-files`.split("\n") + Dir["classpath/*.jar"]
+  spec.test_files    = spec.files.grep(%r"^(test|spec)/")
+  spec.require_paths = ["lib"]
+
+  #spec.add_dependency 'YOUR_GEM_DEPENDENCY', ['~> YOUR_GEM_DEPENDENCY_VERSION']
+  spec.add_development_dependency 'bundler', ['~> 1.0']
+  spec.add_development_dependency 'rake', ['>= 10.0']
+end
+/$)
+    }
+}
+clean { delete "${project.name}.gemspec" }
(Binary files — the bundled data/classpath/*.jar Hadoop and Embulk dependencies and data/gradle/wrapper/gradle-wrapper.jar listed above — have no textual diff to show.)
data/gradlew
ADDED
@@ -0,0 +1,164 @@
+#!/usr/bin/env bash
+
+##############################################################################
+##
+##  Gradle start up script for UN*X
+##
+##############################################################################
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS=""
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn ( ) {
+    echo "$*"
+}
+
+die ( ) {
+    echo
+    echo "$*"
+    echo
+    exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+case "`uname`" in
+  CYGWIN* )
+    cygwin=true
+    ;;
+  Darwin* )
+    darwin=true
+    ;;
+  MINGW* )
+    msys=true
+    ;;
+esac
+
+# For Cygwin, ensure paths are in UNIX format before anything is touched.
+if $cygwin ; then
+    [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
+fi
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+    ls=`ls -ld "$PRG"`
+    link=`expr "$ls" : '.*-> \(.*\)$'`
+    if expr "$link" : '/.*' > /dev/null; then
+        PRG="$link"
+    else
+        PRG=`dirname "$PRG"`"/$link"
+    fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >&-
+APP_HOME="`pwd -P`"
+cd "$SAVED" >&-
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+        # IBM's JDK on AIX uses strange locations for the executables
+        JAVACMD="$JAVA_HOME/jre/sh/java"
+    else
+        JAVACMD="$JAVA_HOME/bin/java"
+    fi
+    if [ ! -x "$JAVACMD" ] ; then
+        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+    fi
+else
+    JAVACMD="java"
+    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
+    MAX_FD_LIMIT=`ulimit -H -n`
+    if [ $? -eq 0 ] ; then
+        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+            MAX_FD="$MAX_FD_LIMIT"
+        fi
+        ulimit -n $MAX_FD
+        if [ $? -ne 0 ] ; then
+            warn "Could not set maximum file descriptor limit: $MAX_FD"
+        fi
+    else
+        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+    fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin ; then
+    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+
+    # We build the pattern for arguments to be converted via cygpath
+    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+    SEP=""
+    for dir in $ROOTDIRSRAW ; do
+        ROOTDIRS="$ROOTDIRS$SEP$dir"
+        SEP="|"
+    done
+    OURCYGPATTERN="(^($ROOTDIRS))"
+    # Add a user-defined pattern to the cygpath arguments
+    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+    fi
+    # Now convert the arguments - kludge to limit ourselves to /bin/sh
+    i=0
+    for arg in "$@" ; do
+        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+        CHECK2=`echo "$arg"|egrep -c "^-"`    ### Determine if an option
+
+        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then    ### Added a condition
+            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+        else
+            eval `echo args$i`="\"$arg\""
+        fi
+        i=$((i+1))
+    done
+    case $i in
+        (0) set -- ;;
+        (1) set -- "$args0" ;;
+        (2) set -- "$args0" "$args1" ;;
+        (3) set -- "$args0" "$args1" "$args2" ;;
+        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+    esac
+fi
+
+# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
+function splitJvmOpts() {
+    JVM_OPTS=("$@")
+}
+eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
+JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
+
+exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
data/gradlew.bat
ADDED
@@ -0,0 +1,90 @@
+@if "%DEBUG%" == "" @echo off
+@rem ##########################################################################
+@rem
+@rem  Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS=
+
+set DIRNAME=%~dp0
+if "%DIRNAME%" == "" set DIRNAME=.
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if "%ERRORLEVEL%" == "0" goto init
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto init
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:init
+@rem Get command-line arguments, handling Windowz variants
+
+if not "%OS%" == "Windows_NT" goto win9xME_args
+if "%@eval[2+2]" == "4" goto 4NT_args
+
+:win9xME_args
+@rem Slurp the command line arguments.
+set CMD_LINE_ARGS=
+set _SKIP=2
+
+:win9xME_args_slurp
+if "x%~1" == "x" goto execute
+
+set CMD_LINE_ARGS=%*
+goto execute
+
+:4NT_args
+@rem Get arguments from the 4NT Shell from JP Software
+set CMD_LINE_ARGS=%$
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
+
+:end
+@rem End local scope for the variables with windows NT shell
+if "%ERRORLEVEL%"=="0" goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
data/src/main/java/org/embulk/output/HdfsOutputPlugin.java
ADDED
@@ -0,0 +1,219 @@
+package org.embulk.output;
+
+import com.google.common.base.Throwables;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.embulk.config.*;
+import org.embulk.spi.Buffer;
+import org.embulk.spi.Exec;
+import org.embulk.spi.FileOutputPlugin;
+import org.embulk.spi.TransactionalFileOutput;
+import org.jruby.embed.ScriptingContainer;
+import org.slf4j.Logger;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.List;
+import java.util.Map;
+
+public class HdfsOutputPlugin implements FileOutputPlugin
+{
+    private static final Logger logger = Exec.getLogger(HdfsOutputPlugin.class);
+
+    public interface PluginTask extends Task
+    {
+        @Config("config_files")
+        @ConfigDefault("[]")
+        public List<String> getConfigFiles();
+
+        @Config("config")
+        @ConfigDefault("{}")
+        public Map<String, String> getConfig();
+
+        @Config("sequence_format")
+        @ConfigDefault("\"%03d.%02d\"")
+        public String getSequenceFormat();
+
+        @Config("output_path")
+        @ConfigDefault("\"/tmp/embulk.output.hdfs_output.%Y%m%d_%s\"")
+        public String getOutputPath();
+
+        @Config("working_path")
+        @ConfigDefault("\"/tmp/embulk.working.hdfs_output.%Y%m%d_%s\"")
+        public String getWorkingPath();
+
+    }
+
+    @Override
+    public ConfigDiff transaction(ConfigSource config,
+                                  int taskCount,
+                                  FileOutputPlugin.Control control)
+    {
+        PluginTask task = config.loadConfig(PluginTask.class);
+        return resume(task.dump(), taskCount, control);
+    }
+
+    @Override
+    public ConfigDiff resume(TaskSource taskSource,
+                             int taskCount,
+                             FileOutputPlugin.Control control)
+    {
+        control.run(taskSource);
+        return Exec.newConfigDiff();
+    }
+
+
+    @Override
+    public void cleanup(TaskSource taskSource,
+                        int taskCount,
+                        List<CommitReport> successCommitReports)
+    {
+    }
+
+    @Override
+    public TransactionalFileOutput open(TaskSource taskSource, final int taskIndex)
+    {
+        PluginTask task = taskSource.loadTask(PluginTask.class);
+
+        Configuration configuration = getHdfsConfiguration(task);
+        FileSystem fs = getFs(configuration);
+        String workingPath = strftime(task.getWorkingPath());
+        String outputPath = strftime(task.getOutputPath());
+        return new TransactionalHdfsFileOutput(task, fs, workingPath, outputPath, taskIndex);
+    }
+
+    private Configuration getHdfsConfiguration(final PluginTask task)
+    {
+        Configuration configuration = new Configuration();
+
+        List configFiles = task.getConfigFiles();
+        for (Object configFile : configFiles) {
+            configuration.addResource(configFile.toString());
+        }
+
+        for (Map.Entry<String, String> entry: task.getConfig().entrySet()) {
+            configuration.set(entry.getKey(), entry.getValue());
+        }
+
+        return configuration;
+    }
+
+    private FileSystem getFs(final Configuration configuration) {
+        try {
+            FileSystem fs = FileSystem.get(configuration);
+            return fs;
+        }
+        catch (IOException e) {
+            logger.error(e.getMessage());
+            throw Throwables.propagate(e);
+        }
+    }
+
+    private String strftime(final String path)
+    {
+        // strftime
+        ScriptingContainer jruby = new ScriptingContainer();
+        Object result = jruby.runScriptlet("Time.now.strftime('" + path + "')");
+        return result.toString();
+    }
+
+    static class TransactionalHdfsFileOutput implements TransactionalFileOutput
+    {
+        private final int taskIndex;
+        private final FileSystem fs;
+        private final String workingPath;
+        private final String outputPath;
+        private final String sequenceFormat;
+
+        private int fileIndex = 0;
+        private int callCount = 0;
+        private Path currentPath = null;
+        private OutputStream currentStream = null;
+
+        public TransactionalHdfsFileOutput(PluginTask task, FileSystem fs, String workingPath, String outputPath, int taskIndex)
+        {
+            this.taskIndex = taskIndex;
+            this.fs = fs;
+            this.workingPath = workingPath;
+            this.outputPath = outputPath;
+            this.sequenceFormat = task.getSequenceFormat();
+        }
+
+        public void nextFile() {
+            closeCurrentStream();
+            currentPath = new Path(workingPath + '/' + String.format(sequenceFormat, taskIndex, fileIndex));
+            try {
+                if (fs.exists(currentPath)) {
+                    throw new IllegalAccessException(currentPath.toString() + "already exists.");
+                }
+                currentStream = fs.create(currentPath);
+                logger.info("Uploading '{}'", currentPath.toString());
+            }
+            catch (IOException | IllegalAccessException e) {
+                logger.error(e.getMessage());
+                throw Throwables.propagate(e);
+            }
+            fileIndex++;
+        }
+
+        @Override
+        public void add(Buffer buffer) {
+            if (currentStream == null) {
+                throw new IllegalStateException("nextFile() must be called before poll()");
+            }
+            try {
+                logger.debug("#add called {} times for taskIndex {}", callCount, taskIndex);
+                currentStream.write(buffer.array(), buffer.offset(), buffer.limit());
+                callCount++;
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            } finally {
+                buffer.release();
+            }
+        }
+
+        @Override
+        public void finish() {
+            closeCurrentStream();
+        }
+
+        @Override
+        public void close() {
+            closeCurrentStream();
+        }
+
+        @Override
+        public void abort() {
+        }
+
+        @Override
+        public CommitReport commit() {
+            try {
+                fs.rename(new Path(workingPath), new Path(outputPath));
+                logger.info("rename {} => {}", workingPath, outputPath);
+            } catch (IOException e) {
+                logger.error(e.getMessage());
+                throw Throwables.propagate(e);
+            }
+
+            CommitReport report = Exec.newCommitReport();
+            report.set("files", currentPath);
+            return report;
+        }
+
+        private void closeCurrentStream() {
+            try {
+                if (currentStream != null) {
+                    currentStream.close();
+                    currentStream = null;
+                }
+
+                callCount = 0;
+            } catch (IOException e) {
+                logger.error(e.getMessage());
+                throw Throwables.propagate(e);
+            }
+        }
+    }
+}
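Editor's note: within a transaction, each `TransactionalHdfsFileOutput` writes to `working_path + "/" + String.format(sequence_format, taskIndex, fileIndex)`, and `commit()` then renames the whole working path to `output_path`. The sketch below is a minimal, hypothetical illustration of the file names produced under the default `sequence_format` of `"%03d.%02d"`; the working-path value is an assumed, already-expanded example, not output from the plugin.

```java
// Minimal sketch of how TransactionalHdfsFileOutput composes per-task file names.
// The working path shown is an assumed example of an expanded default value.
public class SequenceFormatSketch
{
    public static void main(String[] args)
    {
        String workingPath = "/tmp/embulk.working.hdfs_output.20150707_1436227200";
        String sequenceFormat = "%03d.%02d"; // plugin default
        int taskIndex = 1;                   // index of the Embulk output task
        int fileIndex = 0;                   // incremented by each nextFile() call

        String path = workingPath + '/' + String.format(sequenceFormat, taskIndex, fileIndex);
        System.out.println(path); // /tmp/embulk.working.hdfs_output.20150707_1436227200/001.00
    }
}
```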
metadata
ADDED
@@ -0,0 +1,155 @@
+--- !ruby/object:Gem::Specification
+name: embulk-output-hdfs
+version: !ruby/object:Gem::Version
+  version: 0.1.0
+platform: ruby
+authors:
+- takahiro.nakayama
+autorequire:
+bindir: bin
+cert_chain: []
+date: 2015-07-07 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: '1.0'
+  name: bundler
+  prerelease: false
+  type: :development
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: '1.0'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '10.0'
+  name: rake
+  prerelease: false
+  type: :development
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '10.0'
+description: Dumps records to Hdfs.
+email:
+- civitaspo@gmail.com
+executables: []
+extensions: []
+extra_rdoc_files: []
+files:
+- .gitignore
+- LICENSE.txt
+- README.md
+- build.gradle
+- gradle/wrapper/gradle-wrapper.jar
+- gradle/wrapper/gradle-wrapper.properties
+- gradlew
+- gradlew.bat
+- lib/embulk/output/hdfs.rb
+- src/main/java/org/embulk/output/HdfsOutputPlugin.java
+- src/test/java/org/embulk/output/TestHdfsOutputPlugin.java
+- classpath/activation-1.1.jar
+- classpath/apacheds-i18n-2.0.0-M15.jar
+- classpath/apacheds-kerberos-codec-2.0.0-M15.jar
+- classpath/api-asn1-api-1.0.0-M20.jar
+- classpath/api-util-1.0.0-M20.jar
+- classpath/asm-3.1.jar
+- classpath/avro-1.7.4.jar
+- classpath/commons-beanutils-1.7.0.jar
+- classpath/commons-cli-1.2.jar
+- classpath/commons-codec-1.6.jar
+- classpath/commons-collections-3.2.1.jar
+- classpath/commons-compress-1.4.1.jar
+- classpath/commons-configuration-1.6.jar
+- classpath/commons-digester-1.8.jar
+- classpath/commons-httpclient-3.1.jar
+- classpath/commons-io-2.4.jar
+- classpath/commons-lang-2.6.jar
+- classpath/commons-logging-1.1.3.jar
+- classpath/commons-math3-3.1.1.jar
+- classpath/commons-net-3.1.jar
+- classpath/curator-client-2.6.0.jar
+- classpath/curator-framework-2.6.0.jar
+- classpath/curator-recipes-2.6.0.jar
+- classpath/embulk-output-hdfs-0.1.0.jar
+- classpath/gson-2.2.4.jar
+- classpath/hadoop-annotations-2.6.0.jar
+- classpath/hadoop-auth-2.6.0.jar
+- classpath/hadoop-client-2.6.0.jar
+- classpath/hadoop-common-2.6.0.jar
+- classpath/hadoop-hdfs-2.6.0.jar
+- classpath/hadoop-mapreduce-client-app-2.6.0.jar
+- classpath/hadoop-mapreduce-client-common-2.6.0.jar
+- classpath/hadoop-mapreduce-client-core-2.6.0.jar
+- classpath/hadoop-mapreduce-client-jobclient-2.6.0.jar
+- classpath/hadoop-mapreduce-client-shuffle-2.6.0.jar
+- classpath/hadoop-yarn-api-2.6.0.jar
+- classpath/hadoop-yarn-client-2.6.0.jar
+- classpath/hadoop-yarn-common-2.6.0.jar
+- classpath/hadoop-yarn-server-common-2.6.0.jar
+- classpath/hadoop-yarn-server-nodemanager-2.6.0.jar
+- classpath/htrace-core-3.0.4.jar
+- classpath/httpclient-4.2.5.jar
+- classpath/httpcore-4.2.4.jar
+- classpath/jackson-core-asl-1.9.13.jar
+- classpath/jackson-jaxrs-1.9.13.jar
+- classpath/jackson-mapper-asl-1.9.13.jar
+- classpath/jackson-xc-1.9.13.jar
+- classpath/jaxb-api-2.2.2.jar
+- classpath/jaxb-impl-2.2.3-1.jar
+- classpath/jersey-client-1.9.jar
+- classpath/jersey-core-1.9.jar
+- classpath/jersey-guice-1.9.jar
+- classpath/jersey-json-1.9.jar
+- classpath/jersey-server-1.9.jar
+- classpath/jettison-1.1.jar
+- classpath/jetty-util-6.1.26.jar
+- classpath/jline-0.9.94.jar
+- classpath/jsr305-1.3.9.jar
+- classpath/leveldbjni-all-1.8.jar
+- classpath/log4j-1.2.17.jar
+- classpath/netty-3.7.0.Final.jar
+- classpath/paranamer-2.3.jar
+- classpath/protobuf-java-2.5.0.jar
+- classpath/servlet-api-2.5.jar
+- classpath/slf4j-log4j12-1.7.5.jar
+- classpath/snappy-java-1.0.4.1.jar
+- classpath/stax-api-1.0-2.jar
+- classpath/xercesImpl-2.9.1.jar
+- classpath/xml-apis-1.3.04.jar
+- classpath/xmlenc-0.52.jar
+- classpath/xz-1.0.jar
+- classpath/zookeeper-3.4.6.jar
+homepage: https://github.com/civitaspo/embulk-output-hdfs
+licenses:
+- MIT
+metadata: {}
+post_install_message:
+rdoc_options: []
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+requirements: []
+rubyforge_project:
+rubygems_version: 2.1.9
+signing_key:
+specification_version: 4
+summary: Hdfs output plugin for Embulk
+test_files: []