embulk-input-hdfs 0.0.1
- checksums.yaml +7 -0
- data/.gitignore +9 -0
- data/LICENSE.txt +21 -0
- data/README.md +64 -0
- data/build.gradle +75 -0
- data/classpath/activation-1.1.jar +0 -0
- data/classpath/apacheds-i18n-2.0.0-M15.jar +0 -0
- data/classpath/apacheds-kerberos-codec-2.0.0-M15.jar +0 -0
- data/classpath/api-asn1-api-1.0.0-M20.jar +0 -0
- data/classpath/api-util-1.0.0-M20.jar +0 -0
- data/classpath/asm-3.1.jar +0 -0
- data/classpath/avro-1.7.4.jar +0 -0
- data/classpath/commons-beanutils-1.7.0.jar +0 -0
- data/classpath/commons-cli-1.2.jar +0 -0
- data/classpath/commons-codec-1.6.jar +0 -0
- data/classpath/commons-collections-3.2.1.jar +0 -0
- data/classpath/commons-compress-1.4.1.jar +0 -0
- data/classpath/commons-configuration-1.6.jar +0 -0
- data/classpath/commons-digester-1.8.jar +0 -0
- data/classpath/commons-httpclient-3.1.jar +0 -0
- data/classpath/commons-io-2.4.jar +0 -0
- data/classpath/commons-lang-2.6.jar +0 -0
- data/classpath/commons-logging-1.1.3.jar +0 -0
- data/classpath/commons-math3-3.1.1.jar +0 -0
- data/classpath/commons-net-3.1.jar +0 -0
- data/classpath/curator-client-2.6.0.jar +0 -0
- data/classpath/curator-framework-2.6.0.jar +0 -0
- data/classpath/curator-recipes-2.6.0.jar +0 -0
- data/classpath/embulk-input-hdfs-0.0.1.jar +0 -0
- data/classpath/gson-2.2.4.jar +0 -0
- data/classpath/hadoop-annotations-2.6.0.jar +0 -0
- data/classpath/hadoop-auth-2.6.0.jar +0 -0
- data/classpath/hadoop-client-2.6.0.jar +0 -0
- data/classpath/hadoop-common-2.6.0.jar +0 -0
- data/classpath/hadoop-hdfs-2.6.0.jar +0 -0
- data/classpath/hadoop-mapreduce-client-app-2.6.0.jar +0 -0
- data/classpath/hadoop-mapreduce-client-common-2.6.0.jar +0 -0
- data/classpath/hadoop-mapreduce-client-core-2.6.0.jar +0 -0
- data/classpath/hadoop-mapreduce-client-jobclient-2.6.0.jar +0 -0
- data/classpath/hadoop-mapreduce-client-shuffle-2.6.0.jar +0 -0
- data/classpath/hadoop-yarn-api-2.6.0.jar +0 -0
- data/classpath/hadoop-yarn-client-2.6.0.jar +0 -0
- data/classpath/hadoop-yarn-common-2.6.0.jar +0 -0
- data/classpath/hadoop-yarn-server-common-2.6.0.jar +0 -0
- data/classpath/hadoop-yarn-server-nodemanager-2.6.0.jar +0 -0
- data/classpath/htrace-core-3.0.4.jar +0 -0
- data/classpath/httpclient-4.2.5.jar +0 -0
- data/classpath/httpcore-4.2.4.jar +0 -0
- data/classpath/jackson-core-asl-1.9.13.jar +0 -0
- data/classpath/jackson-jaxrs-1.9.13.jar +0 -0
- data/classpath/jackson-mapper-asl-1.9.13.jar +0 -0
- data/classpath/jackson-xc-1.9.13.jar +0 -0
- data/classpath/jaxb-api-2.2.2.jar +0 -0
- data/classpath/jaxb-impl-2.2.3-1.jar +0 -0
- data/classpath/jersey-client-1.9.jar +0 -0
- data/classpath/jersey-core-1.9.jar +0 -0
- data/classpath/jersey-guice-1.9.jar +0 -0
- data/classpath/jersey-json-1.9.jar +0 -0
- data/classpath/jersey-server-1.9.jar +0 -0
- data/classpath/jettison-1.1.jar +0 -0
- data/classpath/jetty-util-6.1.26.jar +0 -0
- data/classpath/jline-0.9.94.jar +0 -0
- data/classpath/jsr305-1.3.9.jar +0 -0
- data/classpath/leveldbjni-all-1.8.jar +0 -0
- data/classpath/log4j-1.2.17.jar +0 -0
- data/classpath/netty-3.7.0.Final.jar +0 -0
- data/classpath/paranamer-2.3.jar +0 -0
- data/classpath/protobuf-java-2.5.0.jar +0 -0
- data/classpath/servlet-api-2.5.jar +0 -0
- data/classpath/slf4j-log4j12-1.7.5.jar +0 -0
- data/classpath/snappy-java-1.0.4.1.jar +0 -0
- data/classpath/stax-api-1.0-2.jar +0 -0
- data/classpath/xercesImpl-2.9.1.jar +0 -0
- data/classpath/xml-apis-1.3.04.jar +0 -0
- data/classpath/xmlenc-0.52.jar +0 -0
- data/classpath/xz-1.0.jar +0 -0
- data/classpath/zookeeper-3.4.6.jar +0 -0
- data/gradle/wrapper/gradle-wrapper.jar +0 -0
- data/gradle/wrapper/gradle-wrapper.properties +6 -0
- data/gradlew +164 -0
- data/gradlew.bat +90 -0
- data/lib/embulk/input/hdfs.rb +3 -0
- data/src/main/java/org/embulk/input/HdfsFileInputPlugin.java +231 -0
- data/src/test/java/org/embulk/input/TestHdfsFileInputPlugin.java +5 -0
- metadata +155 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA1:
+  metadata.gz: 6f94621b0b1287754465fd73ec83a94d1963d517
+  data.tar.gz: 63a0dfa45079b4c8e4ed028ec040b71bdb24813d
+SHA512:
+  metadata.gz: d41f8938dc3cfcf4076d49bd3450b7fd71146b32068c3f0b50d291d1b8a3bdbc02848266b5523a0e3a7e107fa8aaf45ff6dd43b2157fefda75170644ed37648a
+  data.tar.gz: c2cc0f02085dc2634b89d513eebdee9616f9285ba1f8ff9b3d63ebbe737d74a162a34a132d3d6935457359ec5b7e972f8540e483fec1d11827549bedb818fc29
data/.gitignore
ADDED
data/LICENSE.txt
ADDED
@@ -0,0 +1,21 @@
+
+MIT License
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md
ADDED
@@ -0,0 +1,64 @@
+# Hdfs file input plugin for Embulk
+
+Read files on Hdfs.
+
+## Overview
+
+* **Plugin type**: file input
+* **Resume supported**: not yet
+* **Cleanup supported**: no
+
+## Configuration
+
+- **config_files** list of paths to Hadoop's configuration files (array of strings, default: `[]`)
+- **config** overwrites configuration parameters (hash, default: `{}`)
+- **input_path** file path on HDFS; glob patterns and strftime date formats such as `%Y%m%d/%s` can be used (string, required)
+- **rewind_seconds** when a date format is used in `input_path`, it is resolved against the current time minus this many seconds (integer, default: `0`; a resolution sketch follows this README)
+
+## Example
+
+```yaml
+in:
+  type: hdfs
+  config_files:
+    - /opt/analytics/etc/hadoop/conf/core-site.xml
+    - /opt/analytics/etc/hadoop/conf/hdfs-site.xml
+  config:
+    fs.defaultFS: 'hdfs://hdp-nn1:8020'
+    dfs.replication: 1
+    fs.hdfs.impl: 'org.apache.hadoop.hdfs.DistributedFileSystem'
+    fs.file.impl: 'org.apache.hadoop.fs.LocalFileSystem'
+  input_path: /user/embulk/test/%Y-%m-%d/*
+  rewind_seconds: 86400
+  decoders:
+  - {type: gzip}
+  parser:
+    charset: UTF-8
+    newline: CRLF
+    type: csv
+    delimiter: "\t"
+    quote: ''
+    escape: ''
+    trim_if_not_quoted: true
+    skip_header_lines: 0
+    allow_extra_columns: true
+    allow_optional_columns: true
+    columns:
+    - {name: c0, type: string}
+    - {name: c1, type: string}
+    - {name: c2, type: string}
+    - {name: c3, type: long}
+```
+
+## Build
+
+```
+$ ./gradlew gem
+```
+
+## Development
+
+```
+$ ./gradlew classpath
+$ bundle exec embulk run -I lib example.yml
+```
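The `input_path`/`rewind_seconds` resolution promised above is performed by the plugin's `strftime` helper (see HdfsFileInputPlugin.java later in this diff), which evaluates the format string in embedded JRuby. Here is a minimal standalone sketch, assuming a JRuby runtime (e.g. `org.jruby:jruby-complete`) is on the classpath; the class name and the date in the comment are illustrative:

```java
import org.jruby.embed.ScriptingContainer;

public class StrftimeSketch
{
    public static void main(String[] args)
    {
        String raw = "/user/embulk/test/%Y-%m-%d/*"; // input_path from the example
        int rewindSeconds = 86400;                   // rewind_seconds from the example

        // Same scriptlet the plugin builds: strftime over (Time.now - rewind_seconds).
        ScriptingContainer jruby = new ScriptingContainer();
        Object resolved = jruby.runScriptlet(
                String.format("(Time.now - %s).strftime('%s')", String.valueOf(rewindSeconds), raw));

        // Run on 2015-07-17, this prints "/user/embulk/test/2015-07-16/*".
        System.out.println(resolved.toString());
    }
}
```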
data/build.gradle
ADDED
@@ -0,0 +1,75 @@
+plugins {
+    id "com.jfrog.bintray" version "1.1"
+    id "com.github.jruby-gradle.base" version "0.1.5"
+    id "java"
+}
+import com.github.jrubygradle.JRubyExec
+repositories {
+    mavenCentral()
+    jcenter()
+}
+configurations {
+    provided
+}
+
+version = "0.0.1"
+
+dependencies {
+    compile  "org.embulk:embulk-core:0.6.16"
+    provided "org.embulk:embulk-core:0.6.16"
+    // compile "YOUR_JAR_DEPENDENCY_GROUP:YOUR_JAR_DEPENDENCY_MODULE:YOUR_JAR_DEPENDENCY_VERSION"
+    compile 'org.apache.hadoop:hadoop-client:2.6.0'
+    compile 'com.google.guava:guava:14.0'
+    testCompile "junit:junit:4.+"
+}
+
+task classpath(type: Copy, dependsOn: ["jar"]) {
+    doFirst { file("classpath").deleteDir() }
+    from (configurations.runtime - configurations.provided + files(jar.archivePath))
+    into "classpath"
+}
+clean { delete "classpath" }
+
+task gem(type: JRubyExec, dependsOn: ["gemspec", "classpath"]) {
+    jrubyArgs "-rrubygems/gem_runner", "-eGem::GemRunner.new.run(ARGV)", "build"
+    script "${project.name}.gemspec"
+    doLast { ant.move(file: "${project.name}-${project.version}.gem", todir: "pkg") }
+}
+
+task gemPush(type: JRubyExec, dependsOn: ["gem"]) {
+    jrubyArgs "-rrubygems/gem_runner", "-eGem::GemRunner.new.run(ARGV)", "push"
+    script "pkg/${project.name}-${project.version}.gem"
+}
+
+task "package"(dependsOn: ["gemspec", "classpath"]) << {
+    println "> Build succeeded."
+    println "> You can run embulk with '-L ${file(".").absolutePath}' argument."
+}
+
+task gemspec {
+    ext.gemspecFile = file("${project.name}.gemspec")
+    inputs.file "build.gradle"
+    outputs.file gemspecFile
+    doLast { gemspecFile.write($/
+Gem::Specification.new do |spec|
+    spec.name          = "${project.name}"
+    spec.version       = "${project.version}"
+    spec.authors       = ["takahiro.nakayama"]
+    spec.summary       = %[Hdfs file input plugin for Embulk]
+    spec.description   = %[Reads files stored on Hdfs.]
+    spec.email         = ["civitaspo@gmail.com"]
+    spec.licenses      = ["MIT"]
+    spec.homepage      = "https://github.com/civitaspo/embulk-input-hdfs"
+
+    spec.files         = `git ls-files`.split("\n") + Dir["classpath/*.jar"]
+    spec.test_files    = spec.files.grep(%r"^(test|spec)/")
+    spec.require_paths = ["lib"]
+
+    #spec.add_dependency 'YOUR_GEM_DEPENDENCY', ['~> YOUR_GEM_DEPENDENCY_VERSION']
+    spec.add_development_dependency 'bundler', ['~> 1.0']
+    spec.add_development_dependency 'rake', ['>= 10.0']
+end
+/$)
+    }
+}
+clean { delete "${project.name}.gemspec" }
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
Binary file
|
data/gradlew
ADDED
@@ -0,0 +1,164 @@
+#!/usr/bin/env bash
+
+##############################################################################
+##
+##  Gradle start up script for UN*X
+##
+##############################################################################
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS=""
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn ( ) {
+    echo "$*"
+}
+
+die ( ) {
+    echo
+    echo "$*"
+    echo
+    exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+case "`uname`" in
+  CYGWIN* )
+    cygwin=true
+    ;;
+  Darwin* )
+    darwin=true
+    ;;
+  MINGW* )
+    msys=true
+    ;;
+esac
+
+# For Cygwin, ensure paths are in UNIX format before anything is touched.
+if $cygwin ; then
+    [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
+fi
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+    ls=`ls -ld "$PRG"`
+    link=`expr "$ls" : '.*-> \(.*\)$'`
+    if expr "$link" : '/.*' > /dev/null; then
+        PRG="$link"
+    else
+        PRG=`dirname "$PRG"`"/$link"
+    fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >&-
+APP_HOME="`pwd -P`"
+cd "$SAVED" >&-
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+        # IBM's JDK on AIX uses strange locations for the executables
+        JAVACMD="$JAVA_HOME/jre/sh/java"
+    else
+        JAVACMD="$JAVA_HOME/bin/java"
+    fi
+    if [ ! -x "$JAVACMD" ] ; then
+        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+    fi
+else
+    JAVACMD="java"
+    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
+    MAX_FD_LIMIT=`ulimit -H -n`
+    if [ $? -eq 0 ] ; then
+        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+            MAX_FD="$MAX_FD_LIMIT"
+        fi
+        ulimit -n $MAX_FD
+        if [ $? -ne 0 ] ; then
+            warn "Could not set maximum file descriptor limit: $MAX_FD"
+        fi
+    else
+        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+    fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin ; then
+    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+
+    # We build the pattern for arguments to be converted via cygpath
+    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+    SEP=""
+    for dir in $ROOTDIRSRAW ; do
+        ROOTDIRS="$ROOTDIRS$SEP$dir"
+        SEP="|"
+    done
+    OURCYGPATTERN="(^($ROOTDIRS))"
+    # Add a user-defined pattern to the cygpath arguments
+    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+    fi
+    # Now convert the arguments - kludge to limit ourselves to /bin/sh
+    i=0
+    for arg in "$@" ; do
+        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+        CHECK2=`echo "$arg"|egrep -c "^-"`                                 ### Determine if an option
+
+        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition
+            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+        else
+            eval `echo args$i`="\"$arg\""
+        fi
+        i=$((i+1))
+    done
+    case $i in
+        (0) set -- ;;
+        (1) set -- "$args0" ;;
+        (2) set -- "$args0" "$args1" ;;
+        (3) set -- "$args0" "$args1" "$args2" ;;
+        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+    esac
+fi
+
+# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
+function splitJvmOpts() {
+    JVM_OPTS=("$@")
+}
+eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
+JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
+
+exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
data/gradlew.bat
ADDED
@@ -0,0 +1,90 @@
+@if "%DEBUG%" == "" @echo off
+@rem ##########################################################################
+@rem
+@rem  Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS=
+
+set DIRNAME=%~dp0
+if "%DIRNAME%" == "" set DIRNAME=.
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if "%ERRORLEVEL%" == "0" goto init
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto init
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:init
+@rem Get command-line arguments, handling Windowz variants
+
+if not "%OS%" == "Windows_NT" goto win9xME_args
+if "%@eval[2+2]" == "4" goto 4NT_args
+
+:win9xME_args
+@rem Slurp the command line arguments.
+set CMD_LINE_ARGS=
+set _SKIP=2
+
+:win9xME_args_slurp
+if "x%~1" == "x" goto execute
+
+set CMD_LINE_ARGS=%*
+goto execute
+
+:4NT_args
+@rem Get arguments from the 4NT Shell from JP Software
+set CMD_LINE_ARGS=%$
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
+
+:end
+@rem End local scope for the variables with windows NT shell
+if "%ERRORLEVEL%"=="0" goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+if  not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
data/src/main/java/org/embulk/input/HdfsFileInputPlugin.java
ADDED
@@ -0,0 +1,231 @@
+package org.embulk.input;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.embulk.config.*;
+import org.embulk.spi.BufferAllocator;
+import org.embulk.spi.Exec;
+import org.embulk.spi.FileInputPlugin;
+import org.embulk.spi.TransactionalFileInput;
+import org.embulk.spi.util.InputStreamFileInput;
+import org.jruby.embed.ScriptingContainer;
+import org.slf4j.Logger;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+public class HdfsFileInputPlugin implements FileInputPlugin
+{
+    private static final Logger logger = Exec.getLogger(HdfsFileInputPlugin.class);
+
+    public interface PluginTask extends Task
+    {
+        @Config("config_files")
+        @ConfigDefault("[]")
+        public List<String> getConfigFiles();
+
+        @Config("config")
+        @ConfigDefault("{}")
+        public Map<String, String> getConfig();
+
+        @Config("input_path")
+        public String getInputPath();
+
+        @Config("rewind_seconds")
+        @ConfigDefault("0")
+        public int getRewindSeconds();
+
+        public List<String> getTargetFiles();
+        public void setTargetFiles(List<String> targetFiles);
+
+        @ConfigInject
+        public BufferAllocator getBufferAllocator();
+    }
+
+    @Override
+    public ConfigDiff transaction(ConfigSource config, FileInputPlugin.Control control)
+    {
+        PluginTask task = config.loadConfig(PluginTask.class);
+
+        // prepare
+        Configuration configuration = getHdfsConfiguration(task);
+        FileSystem fs = getFs(configuration);
+        Path inputPath = new Path(strftime(task.getInputPath(), task.getRewindSeconds()));
+
+        // listing
+        List<String> targetFiles;
+        try {
+            targetFiles = globRecursive(fs, inputPath);
+        } catch (IOException e) {
+            logger.error(e.getMessage());
+            throw new RuntimeException(e);
+        }
+        logger.info("Loading target files: {}", targetFiles);
+        task.setTargetFiles(targetFiles);
+
+        // number of processors is same with number of targets
+        int taskCount = targetFiles.size();
+
+        return resume(task.dump(), taskCount, control);
+    }
+
+    @Override
+    public ConfigDiff resume(TaskSource taskSource,
+                             int taskCount,
+                             FileInputPlugin.Control control)
+    {
+        control.run(taskSource, taskCount);
+        return Exec.newConfigDiff();
+    }
+
+    @Override
+    public void cleanup(TaskSource taskSource,
+                        int taskCount,
+                        List<CommitReport> successCommitReports)
+    {
+    }
+
+    @Override
+    public TransactionalFileInput open(TaskSource taskSource, int taskIndex)
+    {
+        PluginTask task = taskSource.loadTask(PluginTask.class);
+
+        // prepare
+        Configuration configuration = getHdfsConfiguration(task);
+        FileSystem fs = getFs(configuration);
+
+        return new HdfsFileInput(task, fs, taskIndex);
+    }
+
+    private Configuration getHdfsConfiguration(final PluginTask task)
+    {
+        Configuration configuration = new Configuration();
+
+        for (Object configFile : task.getConfigFiles()) {
+            configuration.addResource(configFile.toString());
+        }
+        configuration.reloadConfiguration();
+
+        for (Map.Entry<String, String> entry: task.getConfig().entrySet()) {
+            configuration.set(entry.getKey(), entry.getValue());
+        }
+
+        return configuration;
+    }
+
+    private FileSystem getFs(final Configuration configuration)
+    {
+        try {
+            FileSystem fs = FileSystem.get(configuration);
+            return fs;
+        }
+        catch (IOException e) {
+            logger.error(e.getMessage());
+            throw new RuntimeException(e);
+        }
+    }
+
+    private String strftime(final String raw, final int rewind_seconds)
+    {
+        ScriptingContainer jruby = new ScriptingContainer();
+        Object resolved = jruby.runScriptlet(
+                String.format("(Time.now - %s).strftime('%s')", String.valueOf(rewind_seconds), raw));
+        return resolved.toString();
+    }
+
+    private List<String> globRecursive(final FileSystem fs, final Path hdfsPath) throws IOException
+    {
+        List<String> container = new ArrayList<String>();
+        for (FileStatus entry : fs.globStatus(hdfsPath)) {
+            if (entry.isDirectory()) {
+                container.addAll(listRecursive(fs, entry));
+            }
+            else {
+                container.add(entry.getPath().toString());
+            }
+        }
+        return container;
+    }
+
+    private List<String> listRecursive(final FileSystem fs, FileStatus status) throws IOException {
+        List<String> container = new ArrayList<String>();
+        if (status.isDirectory()) {
+            for (FileStatus entry : fs.listStatus(status.getPath())) {
+                container.addAll(listRecursive(fs, entry));
+            }
+        }
+        else {
+            container.add(status.getPath().toString());
+        }
+        return container;
+    }
+
+
+
+    // private List<String> listUniquify(List<String> stringList)
+    // {
+    //     Set<String> set = new HashSet<String>();
+    //     set.addAll(stringList);
+    //     List<String> uniqueStringList = new ArrayList<String>();
+    //     uniqueStringList.addAll(set);
+    //     return uniqueStringList;
+    // }
+
+    public static class HdfsFileInput extends InputStreamFileInput implements TransactionalFileInput
+    {
+        private static class HdfsFileProvider implements InputStreamFileInput.Provider
+        {
+            private final FileSystem fs;
+            private final Path hdfsPath;
+            private boolean opened = false;
+
+            public HdfsFileProvider(PluginTask task, FileSystem fs, int taskIndex)
+            {
+                this.fs = fs;
+                this.hdfsPath = new Path(task.getTargetFiles().get(taskIndex));
+            }
+
+            @Override
+            public InputStream openNext() throws IOException
+            {
+                if (opened) {
+                    return null;
+                }
+
+                opened = true;
+                return fs.open(hdfsPath);
+            }
+
+            @Override
+            public void close()
+            {
+            }
+        }
+
+        public HdfsFileInput(PluginTask task, FileSystem fs, int taskIndex)
+        {
+            super(task.getBufferAllocator(), new HdfsFileProvider(task, fs, taskIndex));
+        }
+
+        @Override
+        public void close()
+        {
+        }
+
+        @Override
+        public void abort()
+        {
+        }
+
+        @Override
+        public CommitReport commit()
+        {
+            return Exec.newCommitReport();
+        }
+    }
+}
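For reference, the listing step in `transaction` can be exercised outside Embulk. A minimal sketch assuming the namenode address from the README example; `GlobExample` and the concrete path are illustrative (note that `globStatus` returns `null` when nothing matches, which the guard below accounts for):

```java
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class GlobExample
{
    // Same shape as the plugin's listRecursive: directories expand to their files.
    static List<String> listRecursive(FileSystem fs, FileStatus status) throws IOException
    {
        List<String> container = new ArrayList<String>();
        if (status.isDirectory()) {
            for (FileStatus entry : fs.listStatus(status.getPath())) {
                container.addAll(listRecursive(fs, entry));
            }
        } else {
            container.add(status.getPath().toString());
        }
        return container;
    }

    public static void main(String[] args) throws IOException
    {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://hdp-nn1:8020"); // namenode from the README example
        FileSystem fs = FileSystem.get(conf);

        // Expand the (already strftime-resolved) glob, then descend into directories.
        FileStatus[] matched = fs.globStatus(new Path("/user/embulk/test/2015-07-16/*"));
        List<String> targets = new ArrayList<String>();
        if (matched != null) { // globStatus returns null when nothing matches
            for (FileStatus entry : matched) {
                targets.addAll(listRecursive(fs, entry));
            }
        }
        // The plugin creates one Embulk task per listed file.
        System.out.println(targets);
    }
}
```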
metadata
ADDED
@@ -0,0 +1,155 @@
+--- !ruby/object:Gem::Specification
+name: embulk-input-hdfs
+version: !ruby/object:Gem::Version
+  version: 0.0.1
+platform: ruby
+authors:
+- takahiro.nakayama
+autorequire:
+bindir: bin
+cert_chain: []
+date: 2015-07-17 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: '1.0'
+  name: bundler
+  prerelease: false
+  type: :development
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: '1.0'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '10.0'
+  name: rake
+  prerelease: false
+  type: :development
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '10.0'
+description: Reads files stored on Hdfs.
+email:
+- civitaspo@gmail.com
+executables: []
+extensions: []
+extra_rdoc_files: []
+files:
+- .gitignore
+- LICENSE.txt
+- README.md
+- build.gradle
+- gradle/wrapper/gradle-wrapper.jar
+- gradle/wrapper/gradle-wrapper.properties
+- gradlew
+- gradlew.bat
+- lib/embulk/input/hdfs.rb
+- src/main/java/org/embulk/input/HdfsFileInputPlugin.java
+- src/test/java/org/embulk/input/TestHdfsFileInputPlugin.java
+- classpath/activation-1.1.jar
+- classpath/apacheds-i18n-2.0.0-M15.jar
+- classpath/apacheds-kerberos-codec-2.0.0-M15.jar
+- classpath/api-asn1-api-1.0.0-M20.jar
+- classpath/api-util-1.0.0-M20.jar
+- classpath/asm-3.1.jar
+- classpath/avro-1.7.4.jar
+- classpath/commons-beanutils-1.7.0.jar
+- classpath/commons-cli-1.2.jar
+- classpath/commons-codec-1.6.jar
+- classpath/commons-collections-3.2.1.jar
+- classpath/commons-compress-1.4.1.jar
+- classpath/commons-configuration-1.6.jar
+- classpath/commons-digester-1.8.jar
+- classpath/commons-httpclient-3.1.jar
+- classpath/commons-io-2.4.jar
+- classpath/commons-lang-2.6.jar
+- classpath/commons-logging-1.1.3.jar
+- classpath/commons-math3-3.1.1.jar
+- classpath/commons-net-3.1.jar
+- classpath/curator-client-2.6.0.jar
+- classpath/curator-framework-2.6.0.jar
+- classpath/curator-recipes-2.6.0.jar
+- classpath/embulk-input-hdfs-0.0.1.jar
+- classpath/gson-2.2.4.jar
+- classpath/hadoop-annotations-2.6.0.jar
+- classpath/hadoop-auth-2.6.0.jar
+- classpath/hadoop-client-2.6.0.jar
+- classpath/hadoop-common-2.6.0.jar
+- classpath/hadoop-hdfs-2.6.0.jar
+- classpath/hadoop-mapreduce-client-app-2.6.0.jar
+- classpath/hadoop-mapreduce-client-common-2.6.0.jar
+- classpath/hadoop-mapreduce-client-core-2.6.0.jar
+- classpath/hadoop-mapreduce-client-jobclient-2.6.0.jar
+- classpath/hadoop-mapreduce-client-shuffle-2.6.0.jar
+- classpath/hadoop-yarn-api-2.6.0.jar
+- classpath/hadoop-yarn-client-2.6.0.jar
+- classpath/hadoop-yarn-common-2.6.0.jar
+- classpath/hadoop-yarn-server-common-2.6.0.jar
+- classpath/hadoop-yarn-server-nodemanager-2.6.0.jar
+- classpath/htrace-core-3.0.4.jar
+- classpath/httpclient-4.2.5.jar
+- classpath/httpcore-4.2.4.jar
+- classpath/jackson-core-asl-1.9.13.jar
+- classpath/jackson-jaxrs-1.9.13.jar
+- classpath/jackson-mapper-asl-1.9.13.jar
+- classpath/jackson-xc-1.9.13.jar
+- classpath/jaxb-api-2.2.2.jar
+- classpath/jaxb-impl-2.2.3-1.jar
+- classpath/jersey-client-1.9.jar
+- classpath/jersey-core-1.9.jar
+- classpath/jersey-guice-1.9.jar
+- classpath/jersey-json-1.9.jar
+- classpath/jersey-server-1.9.jar
+- classpath/jettison-1.1.jar
+- classpath/jetty-util-6.1.26.jar
+- classpath/jline-0.9.94.jar
+- classpath/jsr305-1.3.9.jar
+- classpath/leveldbjni-all-1.8.jar
+- classpath/log4j-1.2.17.jar
+- classpath/netty-3.7.0.Final.jar
+- classpath/paranamer-2.3.jar
+- classpath/protobuf-java-2.5.0.jar
+- classpath/servlet-api-2.5.jar
+- classpath/slf4j-log4j12-1.7.5.jar
+- classpath/snappy-java-1.0.4.1.jar
+- classpath/stax-api-1.0-2.jar
+- classpath/xercesImpl-2.9.1.jar
+- classpath/xml-apis-1.3.04.jar
+- classpath/xmlenc-0.52.jar
+- classpath/xz-1.0.jar
+- classpath/zookeeper-3.4.6.jar
+homepage: https://github.com/civitaspo/embulk-input-hdfs
+licenses:
+- MIT
+metadata: {}
+post_install_message:
+rdoc_options: []
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+requirements: []
+rubyforge_project:
+rubygems_version: 2.1.9
+signing_key:
+specification_version: 4
+summary: Hdfs file input plugin for Embulk
+test_files: []