embulk-input-jdbc 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml.gz ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA1:
3
+ metadata.gz: 266e103762b852c24503fb2a83616a78bd07a235
4
+ data.tar.gz: 01314fad5353c44cdf63c3cf59c5ae1f029c7c2f
5
+ SHA512:
6
+ metadata.gz: 3004de01d140d8920f8be786fc2e58f14bf3b9ddff09f1a9e59b5ebe4c8f4a5db582af57431193e9427b664de3c9fe660d78cea1de2fea66c8579ddd0c882c59
7
+ data.tar.gz: 30b3caa6cc8ac3a25f520d2386bb3134be88678d3a3287197fae630acaee6163e8ab84618e4d56341bd03428537122bec231b58d4c019e46022da1e20e2851be
build.gradle ADDED
@@ -0,0 +1,2 @@
1
+ dependencies {
2
+ }
src/main/java/org/embulk/input/JdbcInputPlugin.java ADDED
@@ -0,0 +1,70 @@
1
+ package org.embulk.input;
2
+
3
+ import java.util.Properties;
4
+ import java.sql.Connection;
5
+ import java.sql.Driver;
6
+ import java.sql.SQLException;
7
+ import com.google.common.base.Throwables;
8
+ import org.embulk.config.Config;
9
+ import org.embulk.input.jdbc.AbstractJdbcInputPlugin;
10
+ import org.embulk.input.jdbc.JdbcInputConnection;
11
+
12
+ public class JdbcInputPlugin
13
+ extends AbstractJdbcInputPlugin
14
+ {
15
+ public interface GenericPluginTask extends PluginTask
16
+ {
17
+ @Config("driver_name")
18
+ public String getDriverName();
19
+
20
+ @Config("driver_class")
21
+ public String getDriverClass();
22
+ }
23
+
24
+ @Override
25
+ protected Class<? extends PluginTask> getTaskClass()
26
+ {
27
+ return GenericPluginTask.class;
28
+ }
29
+
30
+ @Override
31
+ protected JdbcInputConnection newConnection(PluginTask task) throws SQLException
32
+ {
33
+ GenericPluginTask g = (GenericPluginTask) task;
34
+
35
+ String url;
36
+ if (g.getPort().isPresent()) {
37
+ url = String.format("jdbc:%s://%s:%d/%s",
38
+ g.getDriverName(), g.getHost(), g.getPort().get(), g.getDatabase());
39
+ } else {
40
+ url = String.format("jdbc:%s://%s/%s",
41
+ g.getDriverName(), g.getHost(), g.getDatabase());
42
+ }
43
+
44
+ Properties props = new Properties();
45
+ props.setProperty("user", g.getUser());
46
+ props.setProperty("password", g.getPassword());
47
+
48
+ props.putAll(g.getOptions());
49
+
50
+ Driver driver;
51
+ try {
52
+ // TODO check Class.forName(driverClass) is a Driver before newInstance
53
+ // for security
54
+ driver = (Driver) Class.forName(g.getDriverClass()).newInstance();
55
+ } catch (Exception ex) {
56
+ throw Throwables.propagate(ex);
57
+ }
58
+
59
+ Connection con = driver.connect(url, props);
60
+ try {
61
+ JdbcInputConnection c = new JdbcInputConnection(con, g.getSchema().orNull());
62
+ con = null;
63
+ return c;
64
+ } finally {
65
+ if (con != null) {
66
+ con.close();
67
+ }
68
+ }
69
+ }
70
+ }
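For illustration, these are the JDBC URLs that newConnection() above assembles for a hypothetical configuration (driver_name: postgresql, host: localhost, port: 5432, database: mydb). Only the format strings come from the plugin; the values are made up.

    // Hypothetical values; only the "jdbc:%s://%s:%d/%s" pattern is taken from newConnection().
    String withPort = String.format("jdbc:%s://%s:%d/%s",
            "postgresql", "localhost", 5432, "mydb");
    // -> "jdbc:postgresql://localhost:5432/mydb"

    // When no port is configured, the port segment is omitted:
    String withoutPort = String.format("jdbc:%s://%s/%s",
            "postgresql", "localhost", "mydb");
    // -> "jdbc:postgresql://localhost/mydb"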
src/main/java/org/embulk/input/jdbc/AbstractJdbcInputPlugin.java ADDED
@@ -0,0 +1,315 @@
1
+ package org.embulk.input.jdbc;
2
+
3
+ import java.util.List;
4
+ import java.util.Properties;
5
+ import java.sql.ResultSet;
6
+ import java.sql.SQLException;
7
+ import com.google.common.base.Optional;
8
+ import com.google.common.base.Throwables;
9
+ import com.google.common.collect.ImmutableList;
10
+ import org.embulk.config.CommitReport;
11
+ import org.embulk.config.Config;
12
+ import org.embulk.config.ConfigDefault;
13
+ import org.embulk.config.ConfigDiff;
14
+ import org.embulk.config.ConfigInject;
15
+ import org.embulk.config.ConfigSource;
16
+ import org.embulk.config.Task;
17
+ import org.embulk.config.TaskSource;
18
+ import org.embulk.spi.BufferAllocator;
19
+ import org.embulk.spi.Column;
20
+ import org.embulk.spi.PageBuilder;
21
+ import org.embulk.spi.InputPlugin;
22
+ import org.embulk.spi.PageOutput;
23
+ import org.embulk.spi.Schema;
24
+ import org.embulk.spi.Exec;
25
+ import org.embulk.input.jdbc.getter.ColumnGetter;
26
+ import org.embulk.input.jdbc.getter.ColumnGetterFactory;
27
+ import org.embulk.input.jdbc.JdbcInputConnection.BatchSelect;
28
+
29
+ public abstract class AbstractJdbcInputPlugin
30
+ implements InputPlugin
31
+ {
32
+ public interface PluginTask extends Task
33
+ {
34
+ @Config("host")
35
+ public String getHost();
36
+
37
+ @Config("port")
38
+ @ConfigDefault("null")
39
+ public Optional<Integer> getPort();
40
+
41
+ @Config("user")
42
+ public String getUser();
43
+
44
+ @Config("password")
45
+ @ConfigDefault("\"\"")
46
+ public String getPassword();
47
+
48
+ @Config("options")
49
+ @ConfigDefault("{}")
50
+ public Properties getOptions();
51
+
52
+ @Config("database")
53
+ public String getDatabase();
54
+
55
+ @Config("schema")
56
+ @ConfigDefault("null")
57
+ public Optional<String> getSchema();
58
+
59
+ @Config("table")
60
+ public String getTable();
61
+
62
+ @Config("select")
63
+ @ConfigDefault("null")
64
+ public Optional<String> getSelect();
65
+
66
+ @Config("where")
67
+ @ConfigDefault("null")
68
+ public Optional<String> getWhere();
69
+
70
+ @Config("order_by")
71
+ @ConfigDefault("null")
72
+ public Optional<String> getOrderBy();
73
+
74
+ //// TODO See below.
75
+ //@Config("last_value")
76
+ //@ConfigDefault("null")
77
+ //public Optional<String> getLastValue();
78
+
79
+ // TODO limit_value is necessary to make sure repeated bulk load transactions
80
+ // don't load the same record twice or miss records when the column
81
+ // specified by the order_by parameter is not unique.
82
+ // For example, if the order_by column is "timestamp created_at"
83
+ // column whose precision is second, the table can include multiple
84
+ // records with the same created_at time. At the first bulk load
85
+ // transaction, it loads a record with created_at=2015-01-02 00:00:02.
86
+ // Then next transaction will use WHERE created_at > '2015-01-02 00:00:02'.
87
+ // However, if another record with created_at=2014-01-01 23:59:59 is
88
+ // inserted between the 2 transactions, the new record will be skipped.
89
+ // To prevent this scenario, we want to specify
90
+ // limit_value=2015-01-02 00:00:00 (exclusive). This way, as long as
91
+ // a transaction runs after 2015-01-02 00:00:00 + some minutes, we don't
92
+ // skip records. Ideally, to automate the scheduling, we want to set
93
+ // limit_value="today".
94
+ //
95
+ //@Config("limit_value")
96
+ //@ConfigDefault("null")
97
+ //public Optional<String> getLimitValue();
98
+
99
+ //// TODO probably limit_rows is unnecessary as long as this
100
+ // supports parallel execution (partition_by option) and resuming.
101
+ //@Config("limit_rows")
102
+ //@ConfigDefault("null")
103
+ //public Optional<Integer> getLimitRows();
104
+
105
+ @Config("fetch_rows")
106
+ @ConfigDefault("10000")
107
+ // TODO set minimum number
108
+ public int getFetchRows();
109
+
110
+ // TODO parallel execution using "partition_by" config
111
+
112
+ public JdbcSchema getQuerySchema();
113
+ public void setQuerySchema(JdbcSchema schema);
114
+
115
+ @ConfigInject
116
+ public BufferAllocator getBufferAllocator();
117
+ }
118
+
119
+ // for subclasses to add @Config
120
+ protected Class<? extends PluginTask> getTaskClass()
121
+ {
122
+ return PluginTask.class;
123
+ }
124
+
125
+ protected abstract JdbcInputConnection newConnection(PluginTask task) throws SQLException;
126
+
127
+ protected ColumnGetterFactory newColumnGetterFactory(PluginTask task) throws SQLException
128
+ {
129
+ return new ColumnGetterFactory();
130
+ }
131
+
132
+ @Override
133
+ public ConfigDiff transaction(ConfigSource config,
134
+ InputPlugin.Control control)
135
+ {
136
+ PluginTask task = config.loadConfig(getTaskClass());
137
+
138
+ //if (task.getLastValue().isPresent() && !task.getOrderBy().isPresent()) {
139
+ // throw new ConfigException("order_by parameter must be set if last_value parameter is set");
140
+ //}
141
+
142
+ Schema schema;
143
+ try (JdbcInputConnection con = newConnection(task)) {
144
+ schema = setupTask(con, task);
145
+ } catch (SQLException ex) {
146
+ throw Throwables.propagate(ex);
147
+ }
148
+ System.out.println("schema: "+schema);
149
+
150
+ return buildNextConfigDiff(task, control.run(task.dump(), schema, 1));
151
+ }
152
+
153
+ private Schema setupTask(JdbcInputConnection con, PluginTask task) throws SQLException
154
+ {
155
+ // build the SELECT query and get the schema of its result
156
+ JdbcSchema querySchema = con.getSchemaOfQuery(task.getTable(), task.getSelect(), task.getWhere(), task.getOrderBy());
157
+ task.setQuerySchema(querySchema);
158
+
159
+ ColumnGetterFactory factory = newColumnGetterFactory(task);
160
+ ImmutableList.Builder<Column> columns = ImmutableList.builder();
161
+ for (int i = 0; i < querySchema.getCount(); i++) {
162
+ columns.add(new Column(i,
163
+ querySchema.getColumnName(i),
164
+ factory.newColumnGetter(querySchema.getColumn(i)).getToType()));
165
+ }
166
+ return new Schema(columns.build());
167
+ }
168
+
169
+ @Override
170
+ public ConfigDiff resume(TaskSource taskSource,
171
+ Schema schema, int processorCount,
172
+ InputPlugin.Control control)
173
+ {
174
+ PluginTask task = taskSource.loadTask(getTaskClass());
175
+
176
+ // TODO when parallel execution is implemented and enabled, (maybe) order_by
177
+ // is necessary to resume. transaction() gets the range of the order_by
178
+ // column and sets it in the WHERE condition to make the operation deterministic
179
+
180
+ return buildNextConfigDiff(task, control.run(taskSource, schema, processorCount));
181
+ }
182
+
183
+ protected ConfigDiff buildNextConfigDiff(PluginTask task, List<CommitReport> reports)
184
+ {
185
+ ConfigDiff next = Exec.newConfigDiff();
186
+ // TODO
187
+ //if (task.getOrderBy().isPresent()) {
188
+ // // TODO when parallel execution is implemented, calculate the max last_value
189
+ // // from the all commit reports.
190
+ // next.set("last_value", reports.get(0).get(JsonNode.class, "last_value"));
191
+ //}
192
+ return next;
193
+ }
194
+
195
+ @Override
196
+ public void cleanup(TaskSource taskSource,
197
+ Schema schema, int processorCount,
198
+ List<CommitReport> successCommitReports)
199
+ {
200
+ // do nothing
201
+ }
202
+
203
+ @Override
204
+ public CommitReport run(TaskSource taskSource,
205
+ Schema schema, int processorIndex,
206
+ PageOutput output)
207
+ {
208
+ PluginTask task = taskSource.loadTask(getTaskClass());
209
+
210
+ JdbcSchema querySchema = task.getQuerySchema();
211
+ BufferAllocator allocator = task.getBufferAllocator();
212
+ PageBuilder pageBuilder = new PageBuilder(allocator, schema, output);
213
+
214
+ try {
215
+ List<ColumnGetter> getters = newColumnGetters(task, querySchema);
216
+
217
+ try (JdbcInputConnection con = newConnection(task)) {
218
+ try (BatchSelect cursor = con.newSelectCursor(
219
+ task.getTable(), task.getSelect(), task.getWhere(),
220
+ task.getOrderBy(), task.getFetchRows())) {
221
+ while (true) {
222
+ // TODO run fetch() in another thread asynchronously
223
+ // TODO retry fetch() if it failed (maybe order_by is required and unique_column(s) option is also required)
224
+ System.out.println("fetch....");
225
+ boolean cont = fetch(cursor, getters, pageBuilder);
226
+ if (!cont) {
227
+ break;
228
+ }
229
+ }
230
+ }
231
+ }
232
+
233
+ } catch (SQLException ex) {
234
+ throw Throwables.propagate(ex);
235
+ }
236
+ pageBuilder.finish();
237
+
238
+ CommitReport report = Exec.newCommitReport();
239
+ // TODO
240
+ //if (orderByColumn != null) {
241
+ // report.set("last_value", lastValue);
242
+ //}
243
+ return report;
244
+ }
245
+
246
+ private List<ColumnGetter> newColumnGetters(PluginTask task, JdbcSchema querySchema) throws SQLException
247
+ {
248
+ ColumnGetterFactory factory = newColumnGetterFactory(task);
249
+ ImmutableList.Builder<ColumnGetter> getters = ImmutableList.builder();
250
+ for (JdbcColumn c : querySchema.getColumns()) {
251
+ getters.add(factory.newColumnGetter(c));
252
+ }
253
+ return getters.build();
254
+ }
255
+
256
+ private boolean fetch(BatchSelect cursor,
257
+ List<ColumnGetter> getters, PageBuilder pageBuilder) throws SQLException
258
+ {
259
+ ResultSet result = cursor.fetch();
260
+ if (result == null || !result.next()) {
261
+ return false;
262
+ }
263
+
264
+ System.out.println("res: "+result);
265
+
266
+ List<Column> columns = pageBuilder.getSchema().getColumns();
267
+ do {
268
+ System.out.println("record.");
269
+ for (int i=0; i < getters.size(); i++) {
270
+ int index = i + 1; // JDBC column index begins from 1
271
+ System.out.println("getters "+i+" "+getters.get(i));
272
+ getters.get(i).getAndSet(result, index, pageBuilder, columns.get(i));
273
+ }
274
+ pageBuilder.addRecord();
275
+ } while (result.next());
276
+ return true;
277
+ }
278
+
279
+ //// TODO move to embulk.spi.util?
280
+ //private static class ListPageOutput
281
+ //{
282
+ // public ImmutableList.Builder<Page> pages;
283
+ //
284
+ // public ListPageOutput()
285
+ // {
286
+ // reset();
287
+ // }
288
+ //
289
+ // @Override
290
+ // public void add(Page page)
291
+ // {
292
+ // pages.add(page);
293
+ // }
294
+ //
295
+ // @Override
296
+ // public void finish()
297
+ // {
298
+ // }
299
+ //
300
+ // @Override
301
+ // public void close()
302
+ // {
303
+ // }
304
+ //
305
+ // public List<Page> getPages()
306
+ // {
307
+ // return pages.build();
308
+ // }
309
+ //
310
+ // public void reset()
311
+ // {
312
+ // pages = ImmutableList.builder();
313
+ // }
314
+ //}
315
+ }
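To make the limit_value discussion in the comments above concrete, the sketch below spells out the WHERE conditions two consecutive runs would use. The column name and timestamps are hypothetical, and last_value/limit_value themselves are still TODO in this version.

    // Run 1 finishes having loaded a row with created_at = 2015-01-02 00:00:02, so a
    // hypothetical incremental run 2 driven by last_value would filter with:
    String nextWhere = "created_at > '2015-01-02 00:00:02'";

    // A row committed late with created_at = 2015-01-02 00:00:01 would then never be loaded.
    // Bounding run 1 with an exclusive limit_value closes that window:
    String run1Where = "created_at < '2015-01-02 00:00:00'";
    // Rows at or after the bound are left for run 2, so nothing is silently skipped as long
    // as run 1 starts a safe margin after the bound.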
src/main/java/org/embulk/input/jdbc/JdbcColumn.java ADDED
@@ -0,0 +1,41 @@
1
+ package org.embulk.input.jdbc;
2
+
3
+ import com.fasterxml.jackson.annotation.JsonCreator;
4
+ import com.fasterxml.jackson.annotation.JsonProperty;
5
+ import com.fasterxml.jackson.annotation.JsonIgnore;
6
+
7
+ public class JdbcColumn
8
+ {
9
+ private String name;
10
+ private String typeName;
11
+ private int sqlType;
12
+
13
+ @JsonCreator
14
+ public JdbcColumn(
15
+ @JsonProperty("name") String name,
16
+ @JsonProperty("typeName") String typeName,
17
+ @JsonProperty("sqlType") int sqlType)
18
+ {
19
+ this.name = name;
20
+ this.typeName = typeName;
21
+ this.sqlType = sqlType;
22
+ }
23
+
24
+ @JsonProperty("name")
25
+ public String getName()
26
+ {
27
+ return name;
28
+ }
29
+
30
+ @JsonProperty("typeName")
31
+ public String getTypeName()
32
+ {
33
+ return typeName;
34
+ }
35
+
36
+ @JsonProperty("sqlType")
37
+ public int getSqlType()
38
+ {
39
+ return sqlType;
40
+ }
41
+ }
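As a quick illustration of the Jackson annotations above, a JdbcColumn round-trips through a small JSON object. This is a sketch, not code from the gem; the column values are hypothetical.

    static JdbcColumn roundTrip() throws java.io.IOException
    {
        // com.fasterxml.jackson.databind.ObjectMapper
        ObjectMapper mapper = new ObjectMapper();
        String json = mapper.writeValueAsString(new JdbcColumn("id", "int4", java.sql.Types.INTEGER));
        // json: {"name":"id","typeName":"int4","sqlType":4}   (java.sql.Types.INTEGER == 4)
        return mapper.readValue(json, JdbcColumn.class);
    }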
src/main/java/org/embulk/input/jdbc/JdbcInputConnection.java ADDED
@@ -0,0 +1,171 @@
1
+ package org.embulk.input.jdbc;
2
+
3
+ import java.sql.Connection;
4
+ import java.sql.DatabaseMetaData;
5
+ import java.sql.PreparedStatement;
6
+ import java.sql.ResultSet;
7
+ import java.sql.ResultSetMetaData;
8
+ import java.sql.Statement;
9
+ import java.sql.SQLException;
10
+ import com.google.common.base.Optional;
11
+ import com.google.common.collect.ImmutableList;
12
+ import org.slf4j.Logger;
13
+ import org.embulk.spi.Exec;
14
+
15
+ public class JdbcInputConnection
16
+ implements AutoCloseable
17
+ {
18
+ private final Logger logger = Exec.getLogger(JdbcInputConnection.class);
19
+ protected final Connection connection;
20
+ protected final String schemaName;
21
+ protected final DatabaseMetaData databaseMetaData;
22
+ protected String identifierQuoteString;
23
+
24
+ public JdbcInputConnection(Connection connection, String schemaName)
25
+ throws SQLException
26
+ {
27
+ this.connection = connection;
28
+ this.schemaName = schemaName;
29
+ this.databaseMetaData = connection.getMetaData();
30
+ this.identifierQuoteString = databaseMetaData.getIdentifierQuoteString();
31
+ if (schemaName != null) {
32
+ setSearchPath(schemaName);
33
+ }
34
+ connection.setAutoCommit(false);
35
+ }
36
+
37
+ protected void setSearchPath(String schema) throws SQLException
38
+ {
39
+ String sql = "SET search_path TO " + quoteIdentifierString(schema);
40
+ executeUpdate(sql);
41
+ }
42
+
43
+ protected String buildSelectQuery(String tableName,
44
+ Optional<String> selectColumnList, Optional<String> whereCondition,
45
+ Optional<String> orderByColumn)
46
+ {
47
+ StringBuilder sb = new StringBuilder();
48
+
49
+ sb.append("SELECT ");
50
+ sb.append(selectColumnList.or("*"));
51
+ sb.append(" FROM ").append(quoteIdentifierString(tableName));
52
+ if (whereCondition.isPresent()) {
53
+ sb.append(" WHERE ").append(whereCondition.get());
54
+ }
55
+ if (orderByColumn.isPresent()) {
56
+ sb.append("ORDER BY ").append(quoteIdentifierString(orderByColumn.get())).append(" ASC");
57
+ }
58
+
59
+ return sb.toString();
60
+ }
61
+
62
+ public JdbcSchema getSchemaOfQuery(String tableName,
63
+ Optional<String> selectColumnList, Optional<String> whereCondition,
64
+ Optional<String> orderByColumn) throws SQLException
65
+ {
66
+ String query = buildSelectQuery(tableName, selectColumnList, whereCondition,
67
+ orderByColumn);
68
+ PreparedStatement stmt = connection.prepareStatement(query);
69
+ try {
70
+ return getSchemaOfResultMetadata(stmt.getMetaData());
71
+ } finally {
72
+ stmt.close();
73
+ }
74
+ }
75
+
76
+ protected JdbcSchema getSchemaOfResultMetadata(ResultSetMetaData metadata) throws SQLException
77
+ {
78
+ ImmutableList.Builder<JdbcColumn> columns = ImmutableList.builder();
79
+ for (int i=0; i < metadata.getColumnCount(); i++) {
80
+ int index = i + 1; // JDBC column index begins from 1
81
+ String name = metadata.getColumnName(index);
82
+ String typeName = metadata.getColumnTypeName(index);
83
+ int sqlType = metadata.getColumnType(index);
84
+ //String scale = metadata.getScale(index)
85
+ //String precision = metadata.getPrecision(index)
86
+ columns.add(new JdbcColumn(name, typeName, sqlType));
87
+ }
88
+ return new JdbcSchema(columns.build());
89
+ }
90
+
91
+ public BatchSelect newSelectCursor(String tableName,
92
+ Optional<String> selectColumnList, Optional<String> whereCondition,
93
+ Optional<String> orderByColumn, int fetchRows) throws SQLException
94
+ {
95
+ String select = buildSelectQuery(tableName, selectColumnList, whereCondition, orderByColumn);
96
+ return newBatchSelect(select, fetchRows);
97
+ }
98
+
99
+ protected BatchSelect newBatchSelect(String select, int fetchRows) throws SQLException
100
+ {
101
+ logger.info("SQL: " + select);
102
+ PreparedStatement stmt = connection.prepareStatement(select);
103
+ stmt.setFetchSize(fetchRows);
104
+ return new SingleSelect(stmt);
105
+ }
106
+
107
+ public interface BatchSelect
108
+ extends AutoCloseable
109
+ {
110
+ public ResultSet fetch() throws SQLException;
111
+
112
+ @Override
113
+ public void close() throws SQLException;
114
+ }
115
+
116
+ public class SingleSelect
117
+ implements BatchSelect
118
+ {
119
+ private final PreparedStatement fetchStatement;
120
+ private boolean fetched = false;
121
+
122
+ public SingleSelect(PreparedStatement fetchStatement) throws SQLException
123
+ {
124
+ this.fetchStatement = fetchStatement;
125
+ }
126
+
127
+ public ResultSet fetch() throws SQLException
128
+ {
129
+ if (fetched) {
130
+ return null;
131
+ }
132
+
133
+ long startTime = System.currentTimeMillis();
134
+
135
+ ResultSet rs = fetchStatement.executeQuery();
136
+
137
+ double seconds = (System.currentTimeMillis() - startTime) / 1000.0;
138
+ logger.info(String.format("> %.2f seconds", seconds));
139
+ fetched = true;
140
+ return rs;
141
+ }
142
+
143
+ public void close() throws SQLException
144
+ {
145
+ // TODO close?
146
+ }
147
+ }
148
+
149
+ @Override
150
+ public void close() throws SQLException
151
+ {
152
+ connection.close();
153
+ }
154
+
155
+ protected void executeUpdate(String sql) throws SQLException
156
+ {
157
+ logger.info("SQL: " + sql);
158
+ Statement stmt = connection.createStatement();
159
+ try {
160
+ stmt.executeUpdate(sql);
161
+ } finally {
162
+ stmt.close();
163
+ }
164
+ }
165
+
166
+ // TODO share code with embulk-output-jdbc
167
+ protected String quoteIdentifierString(String str)
168
+ {
169
+ return identifierQuoteString + str + identifierQuoteString;
170
+ }
171
+ }
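For reference, buildSelectQuery() above produces statements of the following shape. Table, condition, and column names are hypothetical; the quote character comes from the driver's identifierQuoteString (a double quote for most databases).

    // tableName = "events", selectColumnList absent, whereCondition = "id > 100", orderByColumn = "id"
    //   SELECT * FROM "events" WHERE id > 100 ORDER BY "id" ASC
    // With only the table name set it degenerates to:
    //   SELECT * FROM "events"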
src/main/java/org/embulk/input/jdbc/JdbcSchema.java ADDED
@@ -0,0 +1,37 @@
1
+ package org.embulk.input.jdbc;
2
+
3
+ import java.util.List;
4
+ import com.fasterxml.jackson.annotation.JsonCreator;
5
+ import com.fasterxml.jackson.annotation.JsonValue;
6
+
7
+ public class JdbcSchema
8
+ {
9
+ private List<JdbcColumn> columns;
10
+
11
+ @JsonCreator
12
+ public JdbcSchema(List<JdbcColumn> columns)
13
+ {
14
+ this.columns = columns;
15
+ }
16
+
17
+ @JsonValue
18
+ public List<JdbcColumn> getColumns()
19
+ {
20
+ return columns;
21
+ }
22
+
23
+ public int getCount()
24
+ {
25
+ return columns.size();
26
+ }
27
+
28
+ public JdbcColumn getColumn(int i)
29
+ {
30
+ return columns.get(i);
31
+ }
32
+
33
+ public String getColumnName(int i)
34
+ {
35
+ return columns.get(i).getName();
36
+ }
37
+ }
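Because of the @JsonValue/@JsonCreator pair above, a JdbcSchema serializes as a bare JSON array of its columns, for example (hypothetical values):

    // [{"name":"id","typeName":"int4","sqlType":4},
    //  {"name":"name","typeName":"varchar","sqlType":12}]
    // which is presumably the shape stored by PluginTask.setQuerySchema() and read back by
    // getQuerySchema() when the task is serialized.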
src/main/java/org/embulk/input/jdbc/getter/ColumnGetter.java ADDED
@@ -0,0 +1,15 @@
1
+ package org.embulk.input.jdbc.getter;
2
+
3
+ import java.sql.ResultSet;
4
+ import java.sql.SQLException;
5
+ import org.embulk.spi.Column;
6
+ import org.embulk.spi.PageBuilder;
7
+ import org.embulk.spi.type.Type;
8
+
9
+ public interface ColumnGetter
10
+ {
11
+ public void getAndSet(ResultSet from, int fromIndex,
12
+ PageBuilder to, Column toColumn) throws SQLException;
13
+
14
+ public Type getToType();
15
+ }
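A hypothetical implementation of this interface, sketched to show the intended usage pattern; it follows the wasNull() convention of the getters in ColumnGetters.java below and could, for instance, back the NUMERIC/DECIMAL TODO in ColumnGetterFactory.java. It is not part of this release.

    // Assumes the same imports as ColumnGetters.java (ResultSet, SQLException, Column,
    // PageBuilder, Type, Types).
    public class BigDecimalColumnGetter implements ColumnGetter
    {
        @Override
        public void getAndSet(ResultSet from, int fromIndex,
                PageBuilder to, Column toColumn) throws SQLException
        {
            java.math.BigDecimal v = from.getBigDecimal(fromIndex);
            if (from.wasNull()) {
                to.setNull(toColumn);
            } else {
                to.setDouble(toColumn, v.doubleValue());  // lossy double mapping; illustration only
            }
        }

        @Override
        public Type getToType()
        {
            return Types.DOUBLE;
        }
    }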
src/main/java/org/embulk/input/jdbc/getter/ColumnGetterFactory.java ADDED
@@ -0,0 +1,99 @@
1
+ package org.embulk.input.jdbc.getter;
2
+
3
+ import java.sql.Types;
4
+ import org.embulk.input.jdbc.JdbcColumn;
5
+ import org.embulk.input.jdbc.getter.ColumnGetters.BooleanColumnGetter;
6
+ import org.embulk.input.jdbc.getter.ColumnGetters.LongColumnGetter;
7
+ import org.embulk.input.jdbc.getter.ColumnGetters.DoubleColumnGetter;
8
+ import org.embulk.input.jdbc.getter.ColumnGetters.StringColumnGetter;
9
+ import org.embulk.input.jdbc.getter.ColumnGetters.DateColumnGetter;
10
+ import org.embulk.input.jdbc.getter.ColumnGetters.TimeColumnGetter;
11
+ import org.embulk.input.jdbc.getter.ColumnGetters.TimestampColumnGetter;
12
+
13
+ public class ColumnGetterFactory
14
+ {
15
+ public ColumnGetter newColumnGetter(JdbcColumn column)
16
+ {
17
+ switch(column.getSqlType()) {
18
+ // getLong
19
+ case Types.TINYINT:
20
+ case Types.SMALLINT:
21
+ case Types.INTEGER:
22
+ case Types.BIGINT:
23
+ return new LongColumnGetter();
24
+
25
+ // setDouble
26
+ case Types.DOUBLE:
27
+ case Types.FLOAT:
28
+ case Types.REAL:
29
+ return new DoubleColumnGetter();
30
+
31
+ // setBool
32
+ case Types.BOOLEAN:
33
+ case Types.BIT: // JDBC BIT is boolean, unlike SQL-92
34
+ return new BooleanColumnGetter();
35
+
36
+ // setString, Clob
37
+ case Types.CHAR:
38
+ case Types.VARCHAR:
39
+ case Types.LONGVARCHAR:
40
+ case Types.CLOB:
41
+ case Types.NCHAR:
42
+ case Types.NVARCHAR:
43
+ case Types.LONGNVARCHAR:
44
+ return new StringColumnGetter();
45
+
46
+ // TODO
47
+ //// setBytes Blob
48
+ //case Types.BINARY:
49
+ //case Types.VARBINARY:
50
+ //case Types.LONGVARBINARY:
51
+ //case Types.BLOB:
52
+ // return new BytesColumnGetter();
53
+
54
+ // getDate
55
+ case Types.DATE:
56
+ return new DateColumnGetter(); // TODO
57
+
58
+ // getTime
59
+ case Types.TIME:
60
+ return new TimeColumnGetter(); // TODO
61
+
62
+ // getTimestamp
63
+ case Types.TIMESTAMP:
64
+ return new TimestampColumnGetter();
65
+
66
+ // TODO
67
+ //// Null
68
+ //case Types.NULL:
69
+ // return new NullColumnGetter();
70
+
71
+ // TODO
72
+ //// BigDecimal
73
+ //case Types.NUMERIC:
74
+ //case Types.DECIMAL:
75
+ // return new BigDecimalColumnGetter();
76
+
77
+ // others
78
+ case Types.ARRAY: // array
79
+ case Types.STRUCT: // map
80
+ case Types.REF:
81
+ case Types.DATALINK:
82
+ case Types.SQLXML: // XML
83
+ case Types.ROWID:
84
+ case Types.DISTINCT:
85
+ case Types.JAVA_OBJECT:
86
+ case Types.OTHER:
87
+ default:
88
+ throw unsupportedOperationException(column);
89
+ }
90
+ }
91
+
92
+ private static UnsupportedOperationException unsupportedOperationException(JdbcColumn column)
93
+ {
94
+ throw new UnsupportedOperationException(
95
+ String.format("Unsupported type %s (sqlType=%d)",
96
+ column.getTypeName(), column.getSqlType()));
97
+ }
98
+ }
99
+
src/main/java/org/embulk/input/jdbc/getter/ColumnGetters.java ADDED
@@ -0,0 +1,175 @@
1
+ package org.embulk.input.jdbc.getter;
2
+
3
+ import java.sql.ResultSet;
4
+ import java.sql.SQLException;
5
+ import org.embulk.spi.Column;
6
+ import org.embulk.spi.PageBuilder;
7
+ import org.embulk.spi.Column;
8
+ import org.embulk.spi.time.Timestamp;
9
+ import org.embulk.spi.type.Type;
10
+ import org.embulk.spi.type.Types;
11
+
12
+ public class ColumnGetters
13
+ {
14
+ private ColumnGetters() { }
15
+
16
+ public static class BooleanColumnGetter
17
+ implements ColumnGetter
18
+ {
19
+ @Override
20
+ public void getAndSet(ResultSet from, int fromIndex,
21
+ PageBuilder to, Column toColumn) throws SQLException
22
+ {
23
+ boolean v = from.getBoolean(fromIndex);
24
+ if (from.wasNull()) {
25
+ to.setNull(toColumn);
26
+ } else {
27
+ to.setBoolean(toColumn, v);
28
+ }
29
+ }
30
+
31
+ @Override
32
+ public Type getToType()
33
+ {
34
+ return Types.BOOLEAN;
35
+ }
36
+ }
37
+
38
+ public static class LongColumnGetter
39
+ implements ColumnGetter
40
+ {
41
+ @Override
42
+ public void getAndSet(ResultSet from, int fromIndex,
43
+ PageBuilder to, Column toColumn) throws SQLException
44
+ {
45
+ long v = from.getLong(fromIndex);
46
+ if (from.wasNull()) {
47
+ to.setNull(toColumn);
48
+ } else {
49
+ to.setLong(toColumn, v);
50
+ }
51
+ }
52
+
53
+ @Override
54
+ public Type getToType()
55
+ {
56
+ return Types.LONG;
57
+ }
58
+ }
59
+
60
+ public static class DoubleColumnGetter
61
+ implements ColumnGetter
62
+ {
63
+ @Override
64
+ public void getAndSet(ResultSet from, int fromIndex,
65
+ PageBuilder to, Column toColumn) throws SQLException
66
+ {
67
+ double v = from.getDouble(fromIndex);
68
+ if (from.wasNull()) {
69
+ to.setNull(toColumn);
70
+ } else {
71
+ to.setDouble(toColumn, v);
72
+ }
73
+ }
74
+
75
+ @Override
76
+ public Type getToType()
77
+ {
78
+ return Types.DOUBLE;
79
+ }
80
+ }
81
+
82
+ public static class StringColumnGetter
83
+ implements ColumnGetter
84
+ {
85
+ @Override
86
+ public void getAndSet(ResultSet from, int fromIndex,
87
+ PageBuilder to, Column toColumn) throws SQLException
88
+ {
89
+ System.out.println("string column from "+fromIndex+" to "+toColumn);
90
+ String v = from.getString(fromIndex);
91
+ if (from.wasNull()) {
92
+ to.setNull(toColumn);
93
+ System.out.println("> was null");
94
+ } else {
95
+ to.setString(toColumn, v);
96
+ System.out.println("> "+v);
97
+ }
98
+ }
99
+
100
+ @Override
101
+ public Type getToType()
102
+ {
103
+ return Types.STRING;
104
+ }
105
+ }
106
+
107
+ public static class DateColumnGetter
108
+ implements ColumnGetter
109
+ {
110
+ @Override
111
+ public void getAndSet(ResultSet from, int fromIndex,
112
+ PageBuilder to, Column toColumn) throws SQLException
113
+ {
114
+ java.sql.Date v = from.getDate(fromIndex);
115
+ if (from.wasNull()) {
116
+ to.setNull(toColumn);
117
+ } else {
118
+ Timestamp t = Timestamp.ofEpochMilli(v.getTime());
119
+ to.setTimestamp(toColumn, t);
120
+ }
121
+ }
122
+
123
+ @Override
124
+ public Type getToType()
125
+ {
126
+ return Types.TIMESTAMP.withFormat("%Y-%m-%d");
127
+ }
128
+ }
129
+
130
+ public static class TimeColumnGetter
131
+ implements ColumnGetter
132
+ {
133
+ @Override
134
+ public void getAndSet(ResultSet from, int fromIndex,
135
+ PageBuilder to, Column toColumn) throws SQLException
136
+ {
137
+ java.sql.Time v = from.getTime(fromIndex);
138
+ if (from.wasNull()) {
139
+ to.setNull(toColumn);
140
+ } else {
141
+ Timestamp t = Timestamp.ofEpochMilli(v.getTime());
142
+ to.setTimestamp(toColumn, t);
143
+ }
144
+ }
145
+
146
+ @Override
147
+ public Type getToType()
148
+ {
149
+ return Types.TIMESTAMP.withFormat("%H:%M:%S");
150
+ }
151
+ }
152
+
153
+ public static class TimestampColumnGetter
154
+ implements ColumnGetter
155
+ {
156
+ @Override
157
+ public void getAndSet(ResultSet from, int fromIndex,
158
+ PageBuilder to, Column toColumn) throws SQLException
159
+ {
160
+ java.sql.Timestamp v = from.getTimestamp(fromIndex);
161
+ if (from.wasNull()) {
162
+ to.setNull(toColumn);
163
+ } else {
164
+ Timestamp t = Timestamp.ofEpochSecond(v.getTime() / 1000, v.getNanos());
165
+ to.setTimestamp(toColumn, t);
166
+ }
167
+ }
168
+
169
+ @Override
170
+ public Type getToType()
171
+ {
172
+ return Types.TIMESTAMP.withFormat("%Y-%m-%d %H:%M:%S");
173
+ }
174
+ }
175
+ }
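A worked example of the TimestampColumnGetter conversion above, for a hypothetical instant:

    // For a java.sql.Timestamp representing 2015-01-02 00:00:02.5 UTC:
    //   v.getTime()  == 1420156802500   // milliseconds since the epoch, fraction included
    //   v.getNanos() ==  500000000      // fractional second in nanoseconds
    // so v.getTime() / 1000 == 1420156802 and the resulting Embulk value is
    //   Timestamp.ofEpochSecond(1420156802, 500000000)
    // The integer division drops the millisecond fraction and getNanos() restores it exactly.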
metadata ADDED
@@ -0,0 +1,54 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: embulk-input-jdbc
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.1.0
5
+ platform: ruby
6
+ authors:
7
+ - FURUHASHI Sadayuki
8
+ autorequire:
9
+ bindir: bin
10
+ cert_chain: []
11
+ date: 2015-02-16 00:00:00.000000000 Z
12
+ dependencies: []
13
+ description: JDBC input plugin is an Embulk plugin that loads records from JDBC so that any output plugins can receive the records. Search the output plugins by "embulk-output" keyword.
14
+ email:
15
+ - frsyuki@users.sourceforge.jp
16
+ executables: []
17
+ extensions: []
18
+ extra_rdoc_files: []
19
+ files:
20
+ - build.gradle
21
+ - src/main/java/org/embulk/input/JdbcInputPlugin.java
22
+ - src/main/java/org/embulk/input/jdbc/AbstractJdbcInputPlugin.java
23
+ - src/main/java/org/embulk/input/jdbc/JdbcColumn.java
24
+ - src/main/java/org/embulk/input/jdbc/JdbcInputConnection.java
25
+ - src/main/java/org/embulk/input/jdbc/JdbcSchema.java
26
+ - src/main/java/org/embulk/input/jdbc/getter/ColumnGetter.java
27
+ - src/main/java/org/embulk/input/jdbc/getter/ColumnGetterFactory.java
28
+ - src/main/java/org/embulk/input/jdbc/getter/ColumnGetters.java
29
+ - classpath/embulk-input-jdbc-0.1.0.jar
30
+ homepage: https://github.com/embulk/embulk-input-jdbc
31
+ licenses:
32
+ - Apache 2.0
33
+ metadata: {}
34
+ post_install_message:
35
+ rdoc_options: []
36
+ require_paths:
37
+ - lib
38
+ required_ruby_version: !ruby/object:Gem::Requirement
39
+ requirements:
40
+ - - '>='
41
+ - !ruby/object:Gem::Version
42
+ version: '0'
43
+ required_rubygems_version: !ruby/object:Gem::Requirement
44
+ requirements:
45
+ - - '>='
46
+ - !ruby/object:Gem::Version
47
+ version: '0'
48
+ requirements: []
49
+ rubyforge_project:
50
+ rubygems_version: 2.1.9
51
+ signing_key:
52
+ specification_version: 4
53
+ summary: JDBC input plugin for Embulk
54
+ test_files: []