embulk-output-kafka 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +12 -0
- data/LICENSE.txt +21 -0
- data/README.md +110 -0
- data/build.gradle +113 -0
- data/classpath/audience-annotations-0.5.0.jar +0 -0
- data/classpath/avro-1.9.0.jar +0 -0
- data/classpath/common-config-5.3.0.jar +0 -0
- data/classpath/common-utils-5.3.0.jar +0 -0
- data/classpath/commons-compress-1.18.jar +0 -0
- data/classpath/embulk-output-kafka-0.1.0.jar +0 -0
- data/classpath/jackson-annotations-2.9.0.jar +0 -0
- data/classpath/jackson-core-2.9.9.jar +0 -0
- data/classpath/jackson-databind-2.9.9.jar +0 -0
- data/classpath/jline-0.9.94.jar +0 -0
- data/classpath/jsr305-3.0.2.jar +0 -0
- data/classpath/kafka-avro-serializer-5.3.0.jar +0 -0
- data/classpath/kafka-clients-5.3.0-ccs.jar +0 -0
- data/classpath/kafka-schema-registry-client-5.3.0.jar +0 -0
- data/classpath/lz4-java-1.6.0.jar +0 -0
- data/classpath/netty-3.10.6.Final.jar +0 -0
- data/classpath/slf4j-api-1.7.26.jar +0 -0
- data/classpath/snappy-java-1.1.7.3.jar +0 -0
- data/classpath/spotbugs-annotations-3.1.9.jar +0 -0
- data/classpath/zkclient-0.10.jar +0 -0
- data/classpath/zookeeper-3.4.14.jar +0 -0
- data/classpath/zstd-jni-1.4.0-1.jar +0 -0
- data/config/checkstyle/checkstyle.xml +128 -0
- data/config/checkstyle/default.xml +108 -0
- data/gradle/wrapper/gradle-wrapper.jar +0 -0
- data/gradle/wrapper/gradle-wrapper.properties +5 -0
- data/gradlew +172 -0
- data/gradlew.bat +84 -0
- data/lib/embulk/output/kafka.rb +3 -0
- data/src/main/java/org/embulk/output/kafka/AvroFormatColumnVisitor.java +189 -0
- data/src/main/java/org/embulk/output/kafka/JsonFormatColumnVisitor.java +103 -0
- data/src/main/java/org/embulk/output/kafka/KafkaJsonSerializer.java +23 -0
- data/src/main/java/org/embulk/output/kafka/KafkaOutputColumnVisitor.java +53 -0
- data/src/main/java/org/embulk/output/kafka/KafkaOutputPlugin.java +323 -0
- data/src/main/java/org/embulk/output/kafka/RecordProducerFactory.java +105 -0
- data/src/test/java/org/embulk/output/kafka/TestKafkaOutputPlugin.java +5 -0
- data/src/test/resources/SimpleRecord.avsc +9 -0
- data/src/test/resources/config_complex.yml +26 -0
- data/src/test/resources/config_complex_avro.yml +43 -0
- data/src/test/resources/config_simple.yml +22 -0
- data/src/test/resources/config_simple_avro.yml +32 -0
- data/src/test/resources/config_simple_avro_avsc_file.yml +25 -0
- data/src/test/resources/config_with_key_column.yml +23 -0
- data/src/test/resources/in1.csv +4 -0
- data/src/test/resources/in_complex.csv +5 -0
- metadata +121 -0
data/gradlew
ADDED
@@ -0,0 +1,172 @@
|
|
1
|
+
#!/usr/bin/env sh
# NOTE(review): standard Gradle wrapper start-up script generated by the
# Gradle `wrapper` task. Do not hand-edit; regenerate via `gradle wrapper`.

##############################################################################
##
##  Gradle start up script for UN*X
##
##############################################################################

# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
    ls=`ls -ld "$PRG"`
    link=`expr "$ls" : '.*-> \(.*\)$'`
    if expr "$link" : '/.*' > /dev/null; then
        PRG="$link"
    else
        PRG=`dirname "$PRG"`"/$link"
    fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

warn () {
    echo "$*"
}

die () {
    echo
    echo "$*"
    echo
    exit 1
}

# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
  CYGWIN* )
    cygwin=true
    ;;
  Darwin* )
    darwin=true
    ;;
  MINGW* )
    msys=true
    ;;
  NONSTOP* )
    nonstop=true
    ;;
esac

CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar

# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
        # IBM's JDK on AIX uses strange locations for the executables
        JAVACMD="$JAVA_HOME/jre/sh/java"
    else
        JAVACMD="$JAVA_HOME/bin/java"
    fi
    if [ ! -x "$JAVACMD" ] ; then
        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
    fi
else
    JAVACMD="java"
    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
    MAX_FD_LIMIT=`ulimit -H -n`
    if [ $? -eq 0 ] ; then
        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
            MAX_FD="$MAX_FD_LIMIT"
        fi
        ulimit -n $MAX_FD
        if [ $? -ne 0 ] ; then
            warn "Could not set maximum file descriptor limit: $MAX_FD"
        fi
    else
        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
    fi
fi

# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
    JAVACMD=`cygpath --unix "$JAVACMD"`

    # We build the pattern for arguments to be converted via cygpath
    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
    SEP=""
    for dir in $ROOTDIRSRAW ; do
        ROOTDIRS="$ROOTDIRS$SEP$dir"
        SEP="|"
    done
    OURCYGPATTERN="(^($ROOTDIRS))"
    # Add a user-defined pattern to the cygpath arguments
    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
    fi
    # Now convert the arguments - kludge to limit ourselves to /bin/sh
    i=0
    for arg in "$@" ; do
        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
        CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option

        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
        else
            eval `echo args$i`="\"$arg\""
        fi
        i=$((i+1))
    done
    case $i in
        (0) set -- ;;
        (1) set -- "$args0" ;;
        (2) set -- "$args0" "$args1" ;;
        (3) set -- "$args0" "$args1" "$args2" ;;
        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
    esac
fi

# Escape application args
save () {
    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
    echo " "
}
APP_ARGS=$(save "$@")

# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"

# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
  cd "$(dirname "$0")"
fi

exec "$JAVACMD" "$@"
|
data/gradlew.bat
ADDED
@@ -0,0 +1,84 @@
|
|
1
|
+
@if "%DEBUG%" == "" @echo off
@rem NOTE(review): standard Gradle wrapper start-up script generated by the
@rem Gradle `wrapper` task. Do not hand-edit; regenerate via `gradle wrapper`.
@rem ##########################################################################
@rem
@rem  Gradle startup script for Windows
@rem
@rem ##########################################################################

@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal

set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto init

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:init
@rem Get command-line arguments, handling Windows variants

if not "%OS%" == "Windows_NT" goto win9xME_args

:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2

:win9xME_args_slurp
if "x%~1" == "x" goto execute

set CMD_LINE_ARGS=%*

:execute
@rem Setup the command line

set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar

@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%

:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd

:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1

:mainEnd
if "%OS%"=="Windows_NT" endlocal

:omega
|
@@ -0,0 +1,189 @@
|
|
1
|
+
package org.embulk.output.kafka;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.embulk.spi.Column;
import org.embulk.spi.PageReader;
import org.msgpack.value.Value;
import org.msgpack.value.ValueFactory;

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;

/**
 * Column visitor that copies the current Embulk record (read through the shared
 * {@link PageReader}) into an Avro {@link GenericRecord}.
 *
 * <p>Null input values are written as Avro nulls. The caller assigns a fresh
 * {@code genericRecord} before visiting each record and reads the populated
 * record back afterwards.
 */
public class AvroFormatColumnVisitor extends KafkaOutputColumnVisitor
{
    private final Schema avroSchema;

    // Intentionally public and mutable: the plugin swaps in a new record per
    // row and reads the populated record back out after the visit.
    public GenericRecord genericRecord;

    public AvroFormatColumnVisitor(KafkaOutputPlugin.PluginTask task, PageReader pageReader, Schema avroSchema, GenericRecord genericRecord)
    {
        super(task, pageReader);
        this.avroSchema = avroSchema;
        this.genericRecord = genericRecord;
    }

    @Override
    public void booleanColumn(Column column)
    {
        if (pageReader.isNull(column)) {
            genericRecord.put(column.getName(), null);
            return;
        }

        genericRecord.put(column.getName(), pageReader.getBoolean(column));
    }

    @Override
    public void longColumn(Column column)
    {
        if (pageReader.isNull(column)) {
            genericRecord.put(column.getName(), null);
            return;
        }

        genericRecord.put(column.getName(), pageReader.getLong(column));
        super.longColumn(column);  // also captures the record key if this is the key column
    }

    @Override
    public void doubleColumn(Column column)
    {
        if (pageReader.isNull(column)) {
            genericRecord.put(column.getName(), null);
            return;
        }

        genericRecord.put(column.getName(), pageReader.getDouble(column));
        super.doubleColumn(column);  // also captures the record key if this is the key column
    }

    @Override
    public void stringColumn(Column column)
    {
        if (pageReader.isNull(column)) {
            genericRecord.put(column.getName(), null);
            return;
        }

        genericRecord.put(column.getName(), pageReader.getString(column));
        super.stringColumn(column);  // also captures key and per-record topic name
    }

    @Override
    public void timestampColumn(Column column)
    {
        if (pageReader.isNull(column)) {
            genericRecord.put(column.getName(), null);
            return;
        }

        // Timestamps are emitted as epoch milliseconds (Avro long).
        genericRecord.put(column.getName(), pageReader.getTimestamp(column).getInstant().toEpochMilli());
    }

    @Override
    public void jsonColumn(Column column)
    {
        if (pageReader.isNull(column)) {
            genericRecord.put(column.getName(), null);
            return;
        }

        Value value = pageReader.getJson(column);
        try {
            Object avroValue = convertMsgPackValueToAvroValue(avroSchema.getField(column.getName()).schema(), value);
            genericRecord.put(column.getName(), avroValue);
        }
        catch (RuntimeException ex) {
            // Best effort: a schema-mismatched json column is reported and the
            // field is left unset rather than failing the whole transaction.
            // NOTE(review): consider logging via SLF4J instead of stderr.
            ex.printStackTrace();
        }
    }

    /**
     * Recursively converts a msgpack value into the Java representation Avro
     * expects for {@code avroSchema}. For UNION schemas each branch is tried
     * in order and the first convertible one wins.
     *
     * @throws RuntimeException if the value does not match the schema, or the
     *         schema uses an unsupported type (BYTES/FIXED)
     */
    private Object convertMsgPackValueToAvroValue(org.apache.avro.Schema avroSchema, Value value)
    {
        switch (avroSchema.getType()) {
            case ARRAY:
                if (value.isArrayValue()) {
                    // Nil elements are dropped (convert yields null only for NULL schema).
                    return value.asArrayValue().list().stream()
                            .map(item -> convertMsgPackValueToAvroValue(avroSchema.getElementType(), item))
                            .filter(Objects::nonNull)
                            .collect(Collectors.toList());
                }
                throw schemaMismatch(avroSchema, value);
            case MAP:
                if (value.isMapValue()) {
                    Map<String, Object> map = new HashMap<>();
                    for (Map.Entry<Value, Value> entry : value.asMapValue().entrySet()) {
                        if (!entry.getValue().isNilValue()) {
                            map.put(entry.getKey().asStringValue().toString(), convertMsgPackValueToAvroValue(avroSchema.getValueType(), entry.getValue()));
                        }
                    }
                    return map;
                }
                throw schemaMismatch(avroSchema, value);
            case RECORD:
                if (value.isMapValue()) {
                    GenericRecord record = new GenericData.Record(avroSchema);
                    Map<Value, Value> valueMap = value.asMapValue().map();
                    // Only fields declared in the Avro schema are copied; extras are ignored.
                    for (org.apache.avro.Schema.Field field : avroSchema.getFields()) {
                        Optional.ofNullable(valueMap.get(ValueFactory.newString(field.name()))).ifPresent(v ->
                                record.put(field.name(), convertMsgPackValueToAvroValue(field.schema(), v)));
                    }
                    return record;
                }
                throw schemaMismatch(avroSchema, value);
            case LONG:
                if (value.isIntegerValue()) {
                    return value.asIntegerValue().toLong();
                }
                throw schemaMismatch(avroSchema, value);
            case INT:
                if (value.isIntegerValue()) {
                    return value.asIntegerValue().toInt();
                }
                throw schemaMismatch(avroSchema, value);
            case FLOAT:
                if (value.isFloatValue()) {
                    return value.asFloatValue().toFloat();
                }
                throw schemaMismatch(avroSchema, value);
            case DOUBLE:
                if (value.isFloatValue()) {
                    return value.asFloatValue().toDouble();
                }
                throw schemaMismatch(avroSchema, value);
            case BOOLEAN:
                if (value.isBooleanValue()) {
                    return value.asBooleanValue().getBoolean();
                }
                throw schemaMismatch(avroSchema, value);
            case STRING:
            case ENUM:
                if (value.isStringValue()) {
                    return value.asStringValue().toString();
                }
                throw schemaMismatch(avroSchema, value);
            case NULL:
                if (value.isNilValue()) {
                    return null;
                }
                throw schemaMismatch(avroSchema, value);
            case UNION:
                for (org.apache.avro.Schema innerSchema : avroSchema.getTypes()) {
                    try {
                        return convertMsgPackValueToAvroValue(innerSchema, value);
                    }
                    catch (RuntimeException ignored) {
                        // this branch did not match; try the next union branch
                    }
                }
                throw schemaMismatch(avroSchema, value);
            case BYTES:
            case FIXED:
            default:
                throw new RuntimeException(String.format("Unsupported avro type %s", avroSchema.getType().getName()));
        }
    }

    /** Builds the schema-mismatch exception thrown by every failed conversion branch. */
    private static RuntimeException schemaMismatch(org.apache.avro.Schema schema, Value value)
    {
        return new RuntimeException(String.format("Schema mismatch: avro: %s, msgpack: %s", schema.getType().getName(), value.getValueType().name()));
    }
}
|
@@ -0,0 +1,103 @@
|
|
1
|
+
package org.embulk.output.kafka;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.embulk.spi.Column;
import org.embulk.spi.PageReader;
import org.msgpack.value.Value;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.time.format.DateTimeFormatter;

/**
 * Column visitor that copies the current Embulk record (read through the shared
 * {@link PageReader}) into a Jackson {@link ObjectNode}.
 *
 * <p>Null input values are written as JSON nulls; timestamps are rendered with
 * ISO-8601 instant format.
 */
public class JsonFormatColumnVisitor extends KafkaOutputColumnVisitor
{
    private final ObjectMapper objectMapper;

    // Intentionally public and mutable: the plugin swaps in a fresh node per
    // row and serializes it after the visit.
    public ObjectNode jsonNode;

    // DateTimeFormatter is immutable and thread-safe; cache a single instance.
    private static final DateTimeFormatter timestampFormatter = DateTimeFormatter.ISO_INSTANT;

    public JsonFormatColumnVisitor(KafkaOutputPlugin.PluginTask task, PageReader pageReader, ObjectMapper objectMapper)
    {
        super(task, pageReader);
        this.objectMapper = objectMapper;
        this.jsonNode = objectMapper.createObjectNode();
    }

    @Override
    public void booleanColumn(Column column)
    {
        if (pageReader.isNull(column)) {
            jsonNode.putNull(column.getName());
            return;
        }

        jsonNode.put(column.getName(), pageReader.getBoolean(column));
    }

    @Override
    public void longColumn(Column column)
    {
        if (pageReader.isNull(column)) {
            jsonNode.putNull(column.getName());
            return;
        }

        jsonNode.put(column.getName(), pageReader.getLong(column));
        super.longColumn(column);  // also captures the record key if this is the key column
    }

    @Override
    public void doubleColumn(Column column)
    {
        if (pageReader.isNull(column)) {
            jsonNode.putNull(column.getName());
            return;
        }

        jsonNode.put(column.getName(), pageReader.getDouble(column));
        super.doubleColumn(column);  // also captures the record key if this is the key column
    }

    @Override
    public void stringColumn(Column column)
    {
        if (pageReader.isNull(column)) {
            jsonNode.putNull(column.getName());
            return;
        }

        jsonNode.put(column.getName(), pageReader.getString(column));
        super.stringColumn(column);  // also captures key and per-record topic name
    }

    @Override
    public void timestampColumn(Column column)
    {
        if (pageReader.isNull(column)) {
            jsonNode.putNull(column.getName());
            return;
        }

        jsonNode.put(column.getName(), timestampFormatter.format(pageReader.getTimestamp(column).getInstant()));
    }

    @Override
    public void jsonColumn(Column column)
    {
        if (pageReader.isNull(column)) {
            jsonNode.putNull(column.getName());
            return;
        }

        Value value = pageReader.getJson(column);
        JsonNode json;
        try {
            json = objectMapper.readTree(value.toJson());
        }
        catch (IOException e) {
            // Previously swallowed, silently dropping the column from the
            // output. value.toJson() should always produce parseable JSON, so
            // reaching here indicates a real bug — surface it with its cause.
            throw new UncheckedIOException("Failed to re-parse json column: " + column.getName(), e);
        }
        jsonNode.set(column.getName(), json);
    }
}
|
@@ -0,0 +1,23 @@
|
|
1
|
+
package org.embulk.output.kafka;
|
2
|
+
|
3
|
+
import com.fasterxml.jackson.core.JsonProcessingException;
|
4
|
+
import com.fasterxml.jackson.databind.ObjectMapper;
|
5
|
+
import com.fasterxml.jackson.databind.node.ObjectNode;
|
6
|
+
import org.apache.kafka.common.serialization.Serializer;
|
7
|
+
|
8
|
+
public class KafkaJsonSerializer implements Serializer<ObjectNode>
|
9
|
+
{
|
10
|
+
private static ObjectMapper objectMapper = new ObjectMapper();
|
11
|
+
|
12
|
+
@Override
|
13
|
+
public byte[] serialize(String topic, ObjectNode data)
|
14
|
+
{
|
15
|
+
try {
|
16
|
+
return objectMapper.writeValueAsBytes(data);
|
17
|
+
}
|
18
|
+
catch (JsonProcessingException e) {
|
19
|
+
e.printStackTrace();
|
20
|
+
throw new RuntimeException(e);
|
21
|
+
}
|
22
|
+
}
|
23
|
+
}
|
@@ -0,0 +1,53 @@
|
|
1
|
+
package org.embulk.output.kafka;
|
2
|
+
|
3
|
+
import org.embulk.spi.Column;
|
4
|
+
import org.embulk.spi.ColumnVisitor;
|
5
|
+
import org.embulk.spi.PageReader;
|
6
|
+
|
7
|
+
public abstract class KafkaOutputColumnVisitor implements ColumnVisitor
|
8
|
+
{
|
9
|
+
KafkaOutputPlugin.PluginTask task;
|
10
|
+
PageReader pageReader;
|
11
|
+
|
12
|
+
public Object recordKey = null;
|
13
|
+
public String topicName = null;
|
14
|
+
|
15
|
+
public KafkaOutputColumnVisitor(KafkaOutputPlugin.PluginTask task, PageReader pageReader)
|
16
|
+
{
|
17
|
+
this.task = task;
|
18
|
+
this.pageReader = pageReader;
|
19
|
+
}
|
20
|
+
|
21
|
+
void setRecordKey(Column column, Object value)
|
22
|
+
{
|
23
|
+
if (task.getKeyColumnName().isPresent() && task.getKeyColumnName().get().equals(column.getName())) {
|
24
|
+
recordKey = value;
|
25
|
+
}
|
26
|
+
}
|
27
|
+
|
28
|
+
void setTopicName(Column column, String value)
|
29
|
+
{
|
30
|
+
if (task.getTopicColumn().isPresent() && task.getTopicColumn().get().equals(column.getName())) {
|
31
|
+
topicName = value;
|
32
|
+
}
|
33
|
+
}
|
34
|
+
|
35
|
+
@Override
|
36
|
+
public void longColumn(Column column)
|
37
|
+
{
|
38
|
+
setRecordKey(column, pageReader.getLong(column));
|
39
|
+
}
|
40
|
+
|
41
|
+
@Override
|
42
|
+
public void doubleColumn(Column column)
|
43
|
+
{
|
44
|
+
setRecordKey(column, pageReader.getDouble(column));
|
45
|
+
}
|
46
|
+
|
47
|
+
@Override
|
48
|
+
public void stringColumn(Column column)
|
49
|
+
{
|
50
|
+
setRecordKey(column, pageReader.getString(column));
|
51
|
+
setTopicName(column, pageReader.getString(column));
|
52
|
+
}
|
53
|
+
}
|