embulk-output-analytics_cloud 0.1.0

checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: e56deed75f747698154a00fd6c09d78e10308d53
+   data.tar.gz: b9e83121ce91a01e7e4cf0d17fc31e104e5aa058
+ SHA512:
+   metadata.gz: 535210cd11f90c91510855c49b2118b28dacada5e9a01a2d341f53d54e95236e8508d46541862ee8d3342e1c1d80a0d0f372b6dd0830c9d8d39dd2edae9958af
+   data.tar.gz: a54652f0de1cd6f433f9664f73eed1c02cbc966864f98306c81b3ff62b0efb49574e529df0cc7cc6858e020b126861d3cdb9453f0ed0cd34abc8542a525dad0d
data/.gitignore ADDED
@@ -0,0 +1,9 @@
+ *~
+ /pkg/
+ /tmp/
+ *.gemspec
+ .gradle/
+ /classpath/
+ build/
+ .idea
+ *.iml
data/LICENSE.txt ADDED
@@ -0,0 +1,21 @@
+
+ MIT License
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,39 @@
+ # Analytics Cloud output plugin for Embulk
+
+ Embulk output plugin that loads records into Salesforce Analytics Cloud.
+
+ ## Overview
+
+ * **Plugin type**: output
+ * **Load all or nothing**: no
+ * **Resume supported**: no
+ * **Cleanup supported**: no
+
+ ## Configuration
+
+ - **username**: Analytics Cloud username (string, required)
+ - **password**: Analytics Cloud password (string, required)
+ - **login_endpoint**: login endpoint URL (string, default: `https://login.salesforce.com`)
+ - **edgemart_alias**: edgemart (dataset) alias (string, required)
+ - **operation**: load operation set on the InsightsExternalData record (string, default: `"Append"`)
+ - **metadata_json**: MetadataJson describing the dataset (string, default: `null`)
+ - **auto_metadata**: automatically generate MetadataJson from the input schema when `metadata_json` is not set (string, default: `"true"`)
+ - **batch_size**: number of records per InsightsExternalDataPart record (integer, default: `3000`)
+ - **version**: API version (string, default: `"34.0"`)
+
+ ## Example
+
+ ```yaml
+ out:
+   type: analytics_cloud
+   username: hoge
+   password: fuga
+   edgemart_alias: foobar
+ ```
+
+
+ ## Build
+
+ ```
+ $ ./gradlew gem   # -t to watch file changes and rebuild continuously
+ ```
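As an illustration of the optional settings listed in the README above, here is a minimal sketch of a fuller `out:` section. The credential values are placeholders; the remaining values simply restate the `@ConfigDefault` values declared in `AnalyticsCloudOutputPlugin.PluginTask` further down this page.

```yaml
out:
  type: analytics_cloud
  username: hoge                                # placeholder credentials
  password: fuga
  login_endpoint: https://login.salesforce.com  # default
  edgemart_alias: foobar                        # target edgemart (dataset) alias
  operation: Append                             # default
  auto_metadata: "true"                         # default: generate MetadataJson from the input schema
  batch_size: 3000                              # default: records per InsightsExternalDataPart record
  version: "34.0"                               # default: Salesforce API version
```

If `metadata_json` is set instead, the plugin uploads that string as-is and skips the auto-generated metadata (see `createInsightsExternalData` in the Java source below).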
data/build.gradle ADDED
@@ -0,0 +1,75 @@
+ plugins {
+     id "com.jfrog.bintray" version "1.1"
+     id "com.github.jruby-gradle.base" version "0.1.5"
+     id "java"
+ }
+ import com.github.jrubygradle.JRubyExec
+ repositories {
+     mavenCentral()
+     jcenter()
+ }
+ configurations {
+     provided
+ }
+
+ version = "0.1.0"
+
+ dependencies {
+     compile "org.embulk:embulk-core:0.7.4"
+     provided "org.embulk:embulk-core:0.7.4"
+     // compile "YOUR_JAR_DEPENDENCY_GROUP:YOUR_JAR_DEPENDENCY_MODULE:YOUR_JAR_DEPENDENCY_VERSION"
+     compile group: 'net.sf.supercsv', name: 'super-csv', version: '2.4.0'
+     compile files('lib/force-wsc-34.0.jar', 'lib/partner.jar')
+     testCompile "junit:junit:4.+"
+ }
+
+ task classpath(type: Copy, dependsOn: ["jar"]) {
+     doFirst { file("classpath").deleteDir() }
+     from (configurations.runtime - configurations.provided + files(jar.archivePath))
+     into "classpath"
+ }
+ clean { delete "classpath" }
+
+ task gem(type: JRubyExec, dependsOn: ["gemspec", "classpath"]) {
+     jrubyArgs "-rrubygems/gem_runner", "-eGem::GemRunner.new.run(ARGV)", "build"
+     script "${project.name}.gemspec"
+     doLast { ant.move(file: "${project.name}-${project.version}.gem", todir: "pkg") }
+ }
+
+ task gemPush(type: JRubyExec, dependsOn: ["gem"]) {
+     jrubyArgs "-rrubygems/gem_runner", "-eGem::GemRunner.new.run(ARGV)", "push"
+     script "pkg/${project.name}-${project.version}.gem"
+ }
+
+ task "package"(dependsOn: ["gemspec", "classpath"]) << {
+     println "> Build succeeded."
+     println "> You can run embulk with '-L ${file(".").absolutePath}' argument."
+ }
+
+ task gemspec {
+     ext.gemspecFile = file("${project.name}.gemspec")
+     inputs.file "build.gradle"
+     outputs.file gemspecFile
+     doLast { gemspecFile.write($/
+ Gem::Specification.new do |spec|
+     spec.name = "${project.name}"
+     spec.version = "${project.version}"
+     spec.authors = ["Makoto Tajitsu"]
+     spec.summary = %[Embulk output plugin to load into Analytics Cloud]
+     spec.description = %[Dumps records to Analytics Cloud.]
+     spec.email = ["makoto_tajitsu@hotmail.co.jp"]
+     spec.licenses = ["MIT"]
+     spec.homepage = "https://github.com/tzmfreedom/embulk-output-analytics_cloud"
+
+     spec.files = `git ls-files`.split("\n") + Dir["classpath/*.jar"]
+     spec.test_files = spec.files.grep(%r"^(test|spec)/")
+     spec.require_paths = ["lib"]
+
+     #spec.add_dependency 'YOUR_GEM_DEPENDENCY', ['~> YOUR_GEM_DEPENDENCY_VERSION']
+     spec.add_development_dependency 'bundler', ['~> 1.0']
+     spec.add_development_dependency 'rake', ['>= 10.0']
+ end
+ /$)
+     }
+ }
+ clean { delete "${project.name}.gemspec" }
data/gradle/wrapper/gradle-wrapper.jar ADDED
Binary file
data/gradle/wrapper/gradle-wrapper.properties ADDED
@@ -0,0 +1,6 @@
+ #Tue Aug 11 00:26:20 PDT 2015
+ distributionBase=GRADLE_USER_HOME
+ distributionPath=wrapper/dists
+ zipStoreBase=GRADLE_USER_HOME
+ zipStorePath=wrapper/dists
+ distributionUrl=https\://services.gradle.org/distributions/gradle-2.6-bin.zip
data/gradlew ADDED
@@ -0,0 +1,164 @@
+ #!/usr/bin/env bash
+
+ ##############################################################################
+ ##
+ ##  Gradle start up script for UN*X
+ ##
+ ##############################################################################
+
+ # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+ DEFAULT_JVM_OPTS=""
+
+ APP_NAME="Gradle"
+ APP_BASE_NAME=`basename "$0"`
+
+ # Use the maximum available, or set MAX_FD != -1 to use that value.
+ MAX_FD="maximum"
+
+ warn ( ) {
+     echo "$*"
+ }
+
+ die ( ) {
+     echo
+     echo "$*"
+     echo
+     exit 1
+ }
+
+ # OS specific support (must be 'true' or 'false').
+ cygwin=false
+ msys=false
+ darwin=false
+ case "`uname`" in
+   CYGWIN* )
+     cygwin=true
+     ;;
+   Darwin* )
+     darwin=true
+     ;;
+   MINGW* )
+     msys=true
+     ;;
+ esac
+
+ # For Cygwin, ensure paths are in UNIX format before anything is touched.
+ if $cygwin ; then
+     [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
+ fi
+
+ # Attempt to set APP_HOME
+ # Resolve links: $0 may be a link
+ PRG="$0"
+ # Need this for relative symlinks.
+ while [ -h "$PRG" ] ; do
+     ls=`ls -ld "$PRG"`
+     link=`expr "$ls" : '.*-> \(.*\)$'`
+     if expr "$link" : '/.*' > /dev/null; then
+         PRG="$link"
+     else
+         PRG=`dirname "$PRG"`"/$link"
+     fi
+ done
+ SAVED="`pwd`"
+ cd "`dirname \"$PRG\"`/" >&-
+ APP_HOME="`pwd -P`"
+ cd "$SAVED" >&-
+
+ CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+ # Determine the Java command to use to start the JVM.
+ if [ -n "$JAVA_HOME" ] ; then
+     if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+         # IBM's JDK on AIX uses strange locations for the executables
+         JAVACMD="$JAVA_HOME/jre/sh/java"
+     else
+         JAVACMD="$JAVA_HOME/bin/java"
+     fi
+     if [ ! -x "$JAVACMD" ] ; then
+         die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+ Please set the JAVA_HOME variable in your environment to match the
+ location of your Java installation."
+     fi
+ else
+     JAVACMD="java"
+     which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+ Please set the JAVA_HOME variable in your environment to match the
+ location of your Java installation."
+ fi
+
+ # Increase the maximum file descriptors if we can.
+ if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
+     MAX_FD_LIMIT=`ulimit -H -n`
+     if [ $? -eq 0 ] ; then
+         if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+             MAX_FD="$MAX_FD_LIMIT"
+         fi
+         ulimit -n $MAX_FD
+         if [ $? -ne 0 ] ; then
+             warn "Could not set maximum file descriptor limit: $MAX_FD"
+         fi
+     else
+         warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+     fi
+ fi
+
+ # For Darwin, add options to specify how the application appears in the dock
+ if $darwin; then
+     GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+ fi
+
+ # For Cygwin, switch paths to Windows format before running java
+ if $cygwin ; then
+     APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+     CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+
+     # We build the pattern for arguments to be converted via cygpath
+     ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+     SEP=""
+     for dir in $ROOTDIRSRAW ; do
+         ROOTDIRS="$ROOTDIRS$SEP$dir"
+         SEP="|"
+     done
+     OURCYGPATTERN="(^($ROOTDIRS))"
+     # Add a user-defined pattern to the cygpath arguments
+     if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+         OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+     fi
+     # Now convert the arguments - kludge to limit ourselves to /bin/sh
+     i=0
+     for arg in "$@" ; do
+         CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+         CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
+
+         if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
+             eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+         else
+             eval `echo args$i`="\"$arg\""
+         fi
+         i=$((i+1))
+     done
+     case $i in
+         (0) set -- ;;
+         (1) set -- "$args0" ;;
+         (2) set -- "$args0" "$args1" ;;
+         (3) set -- "$args0" "$args1" "$args2" ;;
+         (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+         (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+         (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+         (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+         (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+         (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+     esac
+ fi
+
+ # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
+ function splitJvmOpts() {
+     JVM_OPTS=("$@")
+ }
+ eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
+ JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
+
+ exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
data/gradlew.bat ADDED
@@ -0,0 +1,90 @@
+ @if "%DEBUG%" == "" @echo off
+ @rem ##########################################################################
+ @rem
+ @rem  Gradle startup script for Windows
+ @rem
+ @rem ##########################################################################
+
+ @rem Set local scope for the variables with windows NT shell
+ if "%OS%"=="Windows_NT" setlocal
+
+ @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+ set DEFAULT_JVM_OPTS=
+
+ set DIRNAME=%~dp0
+ if "%DIRNAME%" == "" set DIRNAME=.
+ set APP_BASE_NAME=%~n0
+ set APP_HOME=%DIRNAME%
+
+ @rem Find java.exe
+ if defined JAVA_HOME goto findJavaFromJavaHome
+
+ set JAVA_EXE=java.exe
+ %JAVA_EXE% -version >NUL 2>&1
+ if "%ERRORLEVEL%" == "0" goto init
+
+ echo.
+ echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+ echo.
+ echo Please set the JAVA_HOME variable in your environment to match the
+ echo location of your Java installation.
+
+ goto fail
+
+ :findJavaFromJavaHome
+ set JAVA_HOME=%JAVA_HOME:"=%
+ set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+ if exist "%JAVA_EXE%" goto init
+
+ echo.
+ echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+ echo.
+ echo Please set the JAVA_HOME variable in your environment to match the
+ echo location of your Java installation.
+
+ goto fail
+
+ :init
+ @rem Get command-line arguments, handling Windowz variants
+
+ if not "%OS%" == "Windows_NT" goto win9xME_args
+ if "%@eval[2+2]" == "4" goto 4NT_args
+
+ :win9xME_args
+ @rem Slurp the command line arguments.
+ set CMD_LINE_ARGS=
+ set _SKIP=2
+
+ :win9xME_args_slurp
+ if "x%~1" == "x" goto execute
+
+ set CMD_LINE_ARGS=%*
+ goto execute
+
+ :4NT_args
+ @rem Get arguments from the 4NT Shell from JP Software
+ set CMD_LINE_ARGS=%$
+
+ :execute
+ @rem Setup the command line
+
+ set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+ @rem Execute Gradle
+ "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
+
+ :end
+ @rem End local scope for the variables with windows NT shell
+ if "%ERRORLEVEL%"=="0" goto mainEnd
+
+ :fail
+ rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+ rem the _cmd.exe /c_ return code!
+ if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+ exit /b 1
+
+ :mainEnd
+ if "%OS%"=="Windows_NT" endlocal
+
+ :omega
data/lib/embulk/output/analytics_cloud.rb ADDED
@@ -0,0 +1,3 @@
+ Embulk::JavaPlugin.register_output(
+   "analytics_cloud", "org.embulk.output.analytics_cloud.AnalyticsCloudOutputPlugin",
+   File.expand_path('../../../../classpath', __FILE__))
data/lib/force-wsc-34.0.jar ADDED
Binary file
data/lib/partner.jar ADDED
Binary file
data/src/main/java/org/embulk/output/analytics_cloud/AnalyticsCloudOutputPlugin.java ADDED
@@ -0,0 +1,404 @@
+ package org.embulk.output.analytics_cloud;
+
+ import com.fasterxml.jackson.core.JsonProcessingException;
+ import com.fasterxml.jackson.databind.ObjectMapper;
+
+ import java.io.StringWriter;
+ import java.util.*;
+
+ import com.sforce.soap.partner.Connector;
+ import com.sforce.soap.partner.GetUserInfoResult;
+ import com.sforce.soap.partner.PartnerConnection;
+ import com.sforce.soap.partner.SaveResult;
+ import com.sforce.soap.partner.fault.ApiFault;
+ import com.sforce.soap.partner.sobject.SObject;
+ import com.sforce.ws.ConnectionException;
+ import com.sforce.ws.ConnectorConfig;
+ import com.google.common.base.Optional;
+ import java.io.IOException;
+
+ import org.embulk.config.TaskReport;
+ import org.embulk.config.Config;
+ import org.embulk.config.ConfigDefault;
+ import org.embulk.config.ConfigDiff;
+ import org.embulk.config.ConfigException;
+ import org.embulk.config.ConfigSource;
+ import org.embulk.config.Task;
+ import org.embulk.config.TaskSource;
+ import org.embulk.spi.Column;
+ import org.embulk.spi.ColumnVisitor;
+ import org.embulk.spi.Exec;
+ import org.embulk.spi.OutputPlugin;
+ import org.embulk.spi.Page;
+ import org.embulk.spi.PageReader;
+ import org.embulk.spi.Schema;
+ import org.embulk.spi.TransactionalPageOutput;
+ import org.embulk.spi.time.Timestamp;
+ import org.joda.time.DateTime;
+ import org.slf4j.Logger;
+ import org.supercsv.io.CsvListWriter;
+ import org.supercsv.io.ICsvListWriter;
+ import org.supercsv.prefs.CsvPreference;
+
+ public class AnalyticsCloudOutputPlugin
+         implements OutputPlugin
+ {
+     protected static Logger logger;
+     private static PartnerConnection client = null;
+     private static Integer batchSize;
+     private static String insightsExternalDataId;
+     public static Integer partNumber = 1;
+
+     public static HashMap<String, String> DATATYPE_MAP = new HashMap<String, String>(){
+         {
+             put("string", "Text");
+             put("long", "Numeric");
+             put("boolean", "Text");
+             put("timestamp", "Date");
+             put("double", "Numeric");
+             put("json", "Text");
+         }
+     };
+
+     public interface PluginTask
+             extends Task
+     {
+         @Config("username")
+         public String getUsername();
+
+         @Config("password")
+         public String getPassword();
+
+         @Config("login_endpoint")
+         @ConfigDefault("\"https://login.salesforce.com\"")
+         public Optional<String> getLoginEndpoint();
+
+         @Config("edgemart_alias")
+         public String getEdgemartAlias();
+
+         @Config("operation")
+         @ConfigDefault("\"Append\"")
+         public Optional<String> getOperation();
+
+         @Config("metadata_json")
+         @ConfigDefault("null")
+         public Optional<String> getMetadataJson();
+
+         @Config("auto_metadata")
+         @ConfigDefault("true")
+         public Optional<String> getAutoMetadata();
+
+         @Config("batch_size")
+         @ConfigDefault("3000")
+         public Integer getBatchSize();
+
+         @Config("version")
+         @ConfigDefault("34.0")
+         public Optional<String> getVersion();
+     }
+
+     @Override
+     public ConfigDiff transaction(ConfigSource config,
+             Schema schema, int taskCount,
+             OutputPlugin.Control control)
+     {
+         PluginTask task = config.loadConfig(PluginTask.class);
+         logger = Exec.getLogger(getClass());
+
+         batchSize = task.getBatchSize();
+         final String username = task.getUsername();
+         final String password = task.getPassword();
+         final String loginEndpoint = task.getLoginEndpoint().get();
+         try {
+             if (client == null) {
+                 ConnectorConfig connectorConfig = new ConnectorConfig();
+                 connectorConfig.setUsername(username);
+                 connectorConfig.setPassword(password);
+                 connectorConfig.setAuthEndpoint(loginEndpoint + "/services/Soap/u/" +task.getVersion().get() + "/");
+
+                 client = Connector.newConnection(connectorConfig);
+                 GetUserInfoResult userInfo = client.getUserInfo();
+                 logger.info("login successful with {}", userInfo.getUserName());
+                 insightsExternalDataId = this.createInsightsExternalData(task, schema);
+             }
+         } catch(ConnectionException ex) {
+             logger.error("Login error. Please check your credentials.");
+             throw new RuntimeException(ex);
+         } catch(Exception ex) {
+             throw new RuntimeException(ex);
+         }
+
+         control.run(task.dump());
+         return Exec.newConfigDiff();
+     }
+
+     @Override
+     public ConfigDiff resume(TaskSource taskSource,
+             Schema schema, int taskCount,
+             OutputPlugin.Control control)
+     {
+         throw new UnsupportedOperationException("analytics_cloud output plugin does not support resuming");
+     }
+
+     @Override
+     public void cleanup(TaskSource taskSource,
+             Schema schema, int taskCount,
+             List<TaskReport> successTaskReports)
+     {
+         if (insightsExternalDataId != null) {
+             final SObject insightsExdata = new SObject();
+             insightsExdata.setType("InsightsExternalData");
+             insightsExdata.setId(insightsExternalDataId);
+             insightsExdata.addField("Action", "Process");
+             try {
+                 SaveResult[] srs = client.update(new SObject[]{insightsExdata});
+                 if (srs[0].getSuccess()) {
+                     logger.info("Import is processing.");
+                 } else {
+                     logger.error(srs[0].getErrors()[0].getMessage());
+                 }
+             } catch (ConnectionException ex) {
+                 logger.error(ex.getMessage());
+             }
+         }
+         logger.info("logout");
+         try {
+             if (client != null) {
+                 client.logout();
+             }
+         } catch (ConnectionException ex) {}
+     }
+
+     @Override
+     public TransactionalPageOutput open(TaskSource taskSource, Schema schema, int taskIndex)
+     {
+         PageReader reader = new PageReader(schema);
+         return new AnalyticsCloudPageOutput(reader, client);
+     }
+
+     public class AnalyticsCloudPageOutput
+             implements TransactionalPageOutput
+     {
+         private final PageReader pageReader;
+         private final PartnerConnection client;
+         private ArrayList<ArrayList<String>> records;
+
+         public AnalyticsCloudPageOutput(final PageReader pageReader,
+                 PartnerConnection client)
+         {
+             this.pageReader = pageReader;
+             this.client = client;
+             this.records = new ArrayList<>();
+         }
+
+         @Override
+         public void add(Page page)
+         {
+             try {
+                 synchronized (AnalyticsCloudOutputPlugin.partNumber) {
+                     if (AnalyticsCloudOutputPlugin.partNumber == 0) {
+                         AnalyticsCloudOutputPlugin.partNumber++;
+                         ArrayList<String> header = new ArrayList<>();
+                         for (Column column : this.pageReader.getSchema().getColumns()) {
+                             header.add(column.getName());
+                         }
+                         this.records.add(header);
+                     }
+                 }
+
+                 pageReader.setPage(page);
+                 while (pageReader.nextRecord()) {
+                     ArrayList<String> record = new ArrayList<>();
+
+                     pageReader.getSchema().visitColumns(new ColumnVisitor() {
+                         @Override
+                         public void doubleColumn(Column column) {
+                             record.add(String.valueOf(pageReader.getDouble(column)));
+                         }
+                         @Override
+                         public void timestampColumn(Column column) {
+                             DateTime dt = new DateTime(pageReader.getTimestamp(column).getEpochSecond()*1000);
+                             record.add(dt.toString("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"));
+                         }
+                         @Override
+                         public void stringColumn(Column column) {
+                             record.add(pageReader.getString(column));
+                         }
+                         @Override
+                         public void longColumn(Column column) {
+                             record.add(String.valueOf(pageReader.getLong(column)));
+                         }
+                         @Override
+                         public void booleanColumn(Column column) {
+                             record.add(String.valueOf(pageReader.getBoolean(column)));
+                         }
+
+                     });
+                     this.records.add(record);
+
+                     if (this.records.size() == batchSize) {
+                         this.action();
+                     }
+                 }
+
+             } catch (ConfigException ex) {
+                 logger.error("Configuration Error: {}", ex.getMessage());
+             } catch (ApiFault ex) {
+                 logger.error("API Error: {}", ex.getExceptionMessage());
+             } catch (ConnectionException ex) {
+                 logger.error("Connection Error: {}", ex.getMessage());
+             }
+         }
+
+         @Override
+         public void finish()
+         {
+             try {
+                 if (!this.records.isEmpty()) {
+                     this.action();
+                 }
+             } catch (ConnectionException e) {
+                 e.printStackTrace();
+             }
+
+         }
+
+         @Override
+         public void close()
+         {
+
+         }
+
+         @Override
+         public void abort()
+         {
+         }
+
+         @Override
+         public TaskReport commit()
+         {
+             return Exec.newTaskReport();
+         }
+
+         private void action() throws ConnectionException{
+             final SObject insightsExdataPart = new SObject();
+             insightsExdataPart.setType("InsightsExternalDataPart");
+             insightsExdataPart.addField("InsightsExternalDataId", AnalyticsCloudOutputPlugin.insightsExternalDataId);
+             insightsExdataPart.addField("DataFile", this.createCsvBody(this.records));
+             synchronized (AnalyticsCloudOutputPlugin.partNumber) {
+                 insightsExdataPart.addField("PartNumber", AnalyticsCloudOutputPlugin.partNumber);
+                 AnalyticsCloudOutputPlugin.partNumber++;
+             }
+
+             SaveResult[] srs = this.client.create(new SObject[]{insightsExdataPart});
+             if (srs[0].getSuccess()) {
+                 logger.info("InsightsExternalDataPart whose part number is {} is created.", insightsExdataPart.getField("PartNumber"));
+             } else {
+                 String errorMessage = srs[0].getErrors()[0].getMessage();
+                 logger.error(errorMessage);
+                 throw new RuntimeException(errorMessage);
+             }
+             this.records = new ArrayList<>();
+         }
+
+
+
+         private byte[] createCsvBody(ArrayList<ArrayList<String>> records) {
+             StringWriter stringWriter = new StringWriter();
+             ICsvListWriter writer = new CsvListWriter(stringWriter, CsvPreference.EXCEL_PREFERENCE);
+             try {
+                 for (ArrayList<String> record : records) {
+                     writer.write(record);
+                 }
+                 writer.close();
+             } catch (IOException e) {
+                 e.printStackTrace();
+             }
+             return stringWriter.toString().getBytes();
+         }
+
+
+     }
+
+     private String createInsightsExternalData(PluginTask task, Schema schema) {
+         final SObject insightsExdata = new SObject();
+         insightsExdata.setType("InsightsExternalData");
+         insightsExdata.addField("EdgemartAlias", task.getEdgemartAlias());
+         insightsExdata.addField("Action", "None");
+         insightsExdata.addField("Operation", task.getOperation().get());
+         if (task.getMetadataJson().isPresent()) {
+             insightsExdata.addField("MetadataJson", task.getMetadataJson().get().getBytes());
+         } else if (task.getAutoMetadata().get().toLowerCase().equals("true")){
+             insightsExdata.addField("MetadataJson", this.createMetadataJSON(task, schema).getBytes());
+         }
+
+         try {
+             SaveResult[] srs = this.client.create(new SObject[]{ insightsExdata });
+             if (srs[0].getSuccess()) {
+                 return srs[0].getId();
+             }
+             String errorMessage = srs[0].getErrors()[0].getMessage();
+             logger.error(errorMessage);
+             throw new RuntimeException(errorMessage);
+         } catch (ConnectionException ex) {
+             logger.debug(ex.getMessage() + ":" + ex.getStackTrace());
+             throw new RuntimeException(ex);
+         }
+     }
+
+     private String createMetadataJSON(PluginTask task, Schema schema) {
+         HashMap<String, Object> metadata = new HashMap<>();
+         metadata.put("fileFormat", new HashMap<String, Object>(){
+             {
+                 put("charsetName", "UTF-8");
+                 put("fieldsEnclosedBy", "\"");
+                 put("fieldsDelimitedBy", ",");
+                 put("numberOfLinesToIgnore", 1);
+             }
+         });
+
+         ArrayList<HashMap<String, Object>> fields = new ArrayList<>();
+         for (Column column : schema.getColumns()) {
+             fields.add(new HashMap<String, Object>(){
+                 {
+                     put("name", column.getName());
+                     put("label", column.getName());
+                     put("fullyQualifiedName", column.getName());
+                     put("description", "");
+                     put("isSystemField", false);
+                     put("type", DATATYPE_MAP.get(column.getType().toString()));
+
+                     if (column.getType().getJavaType().equals(Timestamp.class)) {
+                         put("format", "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
+                     }
+                     if (column.getType().getJavaType().equals(long.class)) {
+                         put("scale", 6);
+                         put("precision", 18);
+                         put("defaultValue", 0);
+                     }
+                 }
+             });
+         }
+
+         ArrayList<HashMap<String, Object>> objects = new ArrayList<>();
+         objects.add(new HashMap<String, Object>() {
+             {
+                 put("connector", "EmbulkOutputPluginConnector");
+                 put("description", "");
+                 put("fullyQualifiedName", task.getEdgemartAlias());
+                 put("label", task.getEdgemartAlias());
+                 put("name", task.getEdgemartAlias());
+                 put("fields", fields);
+             }
+         });
+         metadata.put("objects", objects);
+
+         ObjectMapper mapper = new ObjectMapper();
+         try {
+             return mapper.writeValueAsString(metadata);
+         } catch (JsonProcessingException ex) {
+             logger.error(ex.getMessage());
+         }
+         return null;
+     }
+ }
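To make the MetadataJson handling above concrete, here is a minimal sketch of a config that supplies `metadata_json` explicitly. The payload mirrors the structure that `createMetadataJSON` assembles for a hypothetical schema with a long column `id` and a timestamp column `created_at`; the credentials and the `foobar` alias are placeholders, and the plugin's own generated JSON may order keys differently because it is built with HashMaps.

```yaml
out:
  type: analytics_cloud
  username: hoge        # placeholder credentials
  password: fuga
  edgemart_alias: foobar
  # Hand-written MetadataJson, passed through verbatim to InsightsExternalData.
  metadata_json: |
    {
      "fileFormat": {
        "charsetName": "UTF-8",
        "fieldsEnclosedBy": "\"",
        "fieldsDelimitedBy": ",",
        "numberOfLinesToIgnore": 1
      },
      "objects": [{
        "connector": "EmbulkOutputPluginConnector",
        "description": "",
        "fullyQualifiedName": "foobar",
        "label": "foobar",
        "name": "foobar",
        "fields": [
          {"name": "id", "label": "id", "fullyQualifiedName": "id",
           "description": "", "isSystemField": false, "type": "Numeric",
           "scale": 6, "precision": 18, "defaultValue": 0},
          {"name": "created_at", "label": "created_at", "fullyQualifiedName": "created_at",
           "description": "", "isSystemField": false, "type": "Date",
           "format": "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"}
        ]
      }]
    }
```

When `metadata_json` is present, `auto_metadata` is ignored: `createInsightsExternalData` checks `getMetadataJson().isPresent()` before falling back to the auto-generated metadata.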
data/src/test/java/org/embulk/output/analytics_cloud/TestAnalyticsCloudOutputPlugin.java ADDED
@@ -0,0 +1,6 @@
+ package org.embulk.output.analytics_cloud;
+
+
+ public class TestAnalyticsCloudOutputPlugin
+ {
+ }
metadata ADDED
@@ -0,0 +1,89 @@
+ --- !ruby/object:Gem::Specification
+ name: embulk-output-analytics_cloud
+ version: !ruby/object:Gem::Version
+   version: 0.1.0
+ platform: ruby
+ authors:
+ - Makoto Tajitsu
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2016-06-11 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '1.0'
+   name: bundler
+   prerelease: false
+   type: :development
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '1.0'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: '10.0'
+   name: rake
+   prerelease: false
+   type: :development
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: '10.0'
+ description: Dumps records to Analytics Cloud.
+ email:
+ - makoto_tajitsu@hotmail.co.jp
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - .gitignore
+ - LICENSE.txt
+ - README.md
+ - build.gradle
+ - gradle/wrapper/gradle-wrapper.jar
+ - gradle/wrapper/gradle-wrapper.properties
+ - gradlew
+ - gradlew.bat
+ - lib/embulk/output/analytics_cloud.rb
+ - lib/force-wsc-34.0.jar
+ - lib/partner.jar
+ - src/main/java/org/embulk/output/analytics_cloud/AnalyticsCloudOutputPlugin.java
+ - src/test/java/org/embulk/output/analytics_cloud/TestAnalyticsCloudOutputPlugin.java
+ - classpath/embulk-output-analytics_cloud-0.1.0.jar
+ - classpath/force-wsc-34.0.jar
+ - classpath/partner.jar
+ - classpath/super-csv-2.4.0.jar
+ homepage: https://github.com/tzmfreedom/embulk-output-analytics_cloud
+ licenses:
+ - MIT
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.1.9
+ signing_key:
+ specification_version: 4
+ summary: Embulk output plugin to load into Analytics Cloud
+ test_files: []