embulk-input-jdbc 0.7.2 → 0.7.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/classpath/embulk-input-jdbc-0.7.3.jar +0 -0
- data/src/main/java/org/embulk/input/JdbcInputPlugin.java +1 -1
- data/src/main/java/org/embulk/input/jdbc/AbstractJdbcInputPlugin.java +202 -29
- data/src/main/java/org/embulk/input/jdbc/JdbcColumn.java +0 -1
- data/src/main/java/org/embulk/input/jdbc/JdbcInputConnection.java +143 -56
- data/src/main/java/org/embulk/input/jdbc/JdbcLiteral.java +38 -0
- data/src/main/java/org/embulk/input/jdbc/JdbcSchema.java +18 -0
- data/src/main/java/org/embulk/input/jdbc/getter/AbstractColumnGetter.java +25 -2
- data/src/main/java/org/embulk/input/jdbc/getter/ColumnGetter.java +7 -0
- data/src/main/java/org/embulk/input/jdbc/getter/ColumnGetterFactory.java +7 -3
- data/src/main/java/org/embulk/input/jdbc/getter/LongColumnGetter.java +14 -0
- data/src/main/java/org/embulk/input/jdbc/getter/StringColumnGetter.java +14 -0
- metadata +7 -6
- data/classpath/embulk-input-jdbc-0.7.2.jar +0 -0
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: bab76f99601161e312ae588c0f5d1c6ed7a5ca7c
+  data.tar.gz: 3394307ce36d292c46de24aafa14a79859cd6b13
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ae4abde050d7ff724733096a16e0461a5d5b31d0877e0ab76ebe4b763c00507b9bca116ad19f7b27e9ce3870814b843589133f4aca0c2a530680594cc81261b4
+  data.tar.gz: a7b72823dd55164287445e38a2d3d5b2c18b76c638d1c8d8c38aa0d6cd5af987a55f818c43b8a3627c06653a3bbec6503855740a005fb97e6dafc647035cc2ca

data/classpath/embulk-input-jdbc-0.7.3.jar
Binary file
data/src/main/java/org/embulk/input/jdbc/AbstractJdbcInputPlugin.java
CHANGED
@@ -1,5 +1,6 @@
 package org.embulk.input.jdbc;
 
+import java.nio.file.Path;
 import java.util.List;
 import java.util.Map;
 import java.nio.file.Paths;
@@ -8,10 +9,12 @@ import java.sql.SQLException;
 
 import org.slf4j.Logger;
 
+import com.fasterxml.jackson.databind.JsonNode;
 import com.google.common.base.Optional;
 import com.google.common.base.Supplier;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
 
 import org.embulk.config.Config;
 import org.embulk.config.ConfigException;
@@ -25,6 +28,7 @@ import org.embulk.config.TaskSource;
 import org.embulk.plugin.PluginClassLoader;
 import org.embulk.spi.BufferAllocator;
 import org.embulk.spi.Column;
+import org.embulk.spi.DataException;
 import org.embulk.spi.PageBuilder;
 import org.embulk.spi.InputPlugin;
 import org.embulk.spi.PageOutput;
@@ -33,8 +37,11 @@ import org.embulk.spi.Exec;
 import org.embulk.input.jdbc.getter.ColumnGetter;
 import org.embulk.input.jdbc.getter.ColumnGetterFactory;
 import org.embulk.input.jdbc.JdbcInputConnection.BatchSelect;
+import org.embulk.input.jdbc.JdbcInputConnection.PreparedQuery;
 import org.joda.time.DateTimeZone;
 
+import static java.util.Locale.ENGLISH;
+
 public abstract class AbstractJdbcInputPlugin
         implements InputPlugin
 {
@@ -49,6 +56,7 @@ public abstract class AbstractJdbcInputPlugin
         @Config("table")
         @ConfigDefault("null")
         public Optional<String> getTable();
+        public void setTable(Optional<String> normalizedTableName);
 
         @Config("query")
         @ConfigDefault("null")
@@ -66,10 +74,18 @@ public abstract class AbstractJdbcInputPlugin
         @ConfigDefault("null")
         public Optional<String> getOrderBy();
 
-
-
-
-
+        @Config("incremental")
+        @ConfigDefault("false")
+        public boolean getIncremental();
+
+        @Config("incremental_columns")
+        @ConfigDefault("[]")
+        public List<String> getIncrementalColumns();
+        public void setIncrementalColumns(List<String> indexes);
+
+        @Config("last_record")
+        @ConfigDefault("null")
+        public Optional<List<JsonNode>> getLastRecord();
 
         // TODO limit_value is necessary to make sure repeated bulk load transactions
         // don't a same record twice or miss records when the column
@@ -128,9 +144,15 @@ public abstract class AbstractJdbcInputPlugin
         @ConfigDefault("null")
         public Optional<String> getAfterSelect();
 
+        public PreparedQuery getBuiltQuery();
+        public void setBuiltQuery(PreparedQuery query);
+
         public JdbcSchema getQuerySchema();
         public void setQuerySchema(JdbcSchema schema);
 
+        public List<Integer> getIncrementalColumnIndexes();
+        public void setIncrementalColumnIndexes(List<Integer> indexes);
+
         @ConfigInject
         public BufferAllocator getBufferAllocator();
     }
@@ -149,12 +171,20 @@ public abstract class AbstractJdbcInputPlugin
     {
         PluginTask task = config.loadConfig(getTaskClass());
 
-
-
-
+        if (task.getIncremental()) {
+            if (task.getOrderBy().isPresent()) {
+                throw new ConfigException("order_by option must not be set if incremental is true");
+            }
+        }
+        else {
+            if (!task.getIncrementalColumns().isEmpty()) {
+                throw new ConfigException("'incremental: true' must be set if incremental_columns is set");
+            }
+        }
 
         Schema schema;
         try (JdbcInputConnection con = newConnection(task)) {
+            // TODO incremental_columns is not set => get primary key
             schema = setupTask(con, task);
         } catch (SQLException ex) {
             throw Throwables.propagate(ex);
@@ -165,11 +195,60 @@ public abstract class AbstractJdbcInputPlugin
 
     private Schema setupTask(JdbcInputConnection con, PluginTask task) throws SQLException
     {
+        if (task.getTable().isPresent()) {
+            String actualTableName = normalizeTableNameCase(con, task.getTable().get());
+            task.setTable(Optional.of(actualTableName));
+        }
+
         // build SELECT query and gets schema of its result
         String query = getQuery(task, con);
-
+
         JdbcSchema querySchema = con.getSchemaOfQuery(query);
         task.setQuerySchema(querySchema);
+        // query schema should not change after incremental query
+
+        PreparedQuery preparedQuery;
+        if (task.getIncremental()) {
+            // build incremental query
+
+            List<String> incrementalColumns = task.getIncrementalColumns();
+            if (incrementalColumns.isEmpty()) {
+                // incremental_columns is not set
+                if (!task.getTable().isPresent()) {
+                    throw new ConfigException("incremental_columns option must be set if incremental is true and custom query option is set");
+                }
+                // get primary keys from the target table to use them as incremental_columns
+                List<String> primaryKeys = con.getPrimaryKeys(task.getTable().get());
+                if (primaryKeys.isEmpty()) {
+                    throw new ConfigException(String.format(ENGLISH,
+                                "Primary key is not available at the table '%s'. incremental_columns option must be set",
+                                task.getTable().get()));
+                }
+                logger.info("Using primary keys as incremental_columns: {}", primaryKeys);
+                task.setIncrementalColumns(primaryKeys);
+                incrementalColumns = primaryKeys;
+            }
+
+            List<Integer> incrementalColumnIndexes = findIncrementalColumnIndexes(querySchema, incrementalColumns);
+            task.setIncrementalColumnIndexes(incrementalColumnIndexes);
+
+            if (task.getLastRecord().isPresent()) {
+                List<JsonNode> lastRecord = task.getLastRecord().get();
+                if (lastRecord.size() != incrementalColumnIndexes.size()) {
+                    throw new ConfigException("Number of values set at last_record must be same with number of columns set at incremental_columns");
+                }
+                preparedQuery = con.buildIncrementalQuery(query, querySchema, incrementalColumnIndexes, lastRecord);
+            }
+            else {
+                preparedQuery = con.buildIncrementalQuery(query, querySchema, incrementalColumnIndexes, null);
+            }
+        }
+        else {
+            task.setIncrementalColumnIndexes(ImmutableList.<Integer>of());
+            preparedQuery = new PreparedQuery(query, ImmutableList.<JdbcLiteral>of());
+        }
+
+        task.setBuiltQuery(preparedQuery);
 
         // validate column_options
         newColumnGetters(task, querySchema, null);
@@ -186,19 +265,63 @@ public abstract class AbstractJdbcInputPlugin
         return new Schema(columns.build());
     }
 
+    private String normalizeTableNameCase(JdbcInputConnection con, String tableName)
+        throws SQLException
+    {
+        if (con.tableExists(tableName)) {
+            return tableName;
+        } else {
+            String upperTableName = tableName.toUpperCase();
+            String lowerTableName = tableName.toLowerCase();
+            boolean upperExists = con.tableExists(upperTableName);
+            boolean lowerExists = con.tableExists(upperTableName);
+            if (upperExists && lowerExists) {
+                throw new ConfigException(String.format("Cannot specify table '%s' because both '%s' and '%s' exist.",
+                            tableName, upperTableName, lowerTableName));
+            } else if (upperExists) {
+                return upperTableName;
+            } else if (lowerExists) {
+                return lowerTableName;
+            } else {
+                // fallback to the given table name. this may throw error later at getSchemaOfQuery
+                return tableName;
+            }
+        }
+    }
+
+    private List<Integer> findIncrementalColumnIndexes(JdbcSchema schema, List<String> incrementalColumns)
+        throws SQLException
+    {
+        ImmutableList.Builder<Integer> builder = ImmutableList.builder();
+        for (String name : incrementalColumns) {
+            Optional<Integer> index = schema.findColumn(name);
+            if (index.isPresent()) {
+                builder.add(index.get());
+            }
+            else {
+                throw new ConfigException(String.format(ENGLISH,
+                        "Column name '%s' is in incremental_columns option does not exist",
+                        name));
+            }
+        }
+        return builder.build();
+    }
+
     private String getQuery(PluginTask task, JdbcInputConnection con) throws SQLException
     {
         if (task.getQuery().isPresent()) {
             if (task.getTable().isPresent() || task.getSelect().isPresent() ||
                     task.getWhere().isPresent() || task.getOrderBy().isPresent()) {
                 throw new ConfigException("'table', 'select', 'where' and 'order_by' parameters are unnecessary if 'query' parameter is set.");
+            } else if (!task.getIncrementalColumns().isEmpty() || task.getLastRecord().isPresent()) {
+                throw new ConfigException("'incremental_columns' and 'last_record' parameters are not supported if 'query' parameter is set.");
             }
             return task.getQuery().get();
         } else if (task.getTable().isPresent()) {
             return con.buildSelectQuery(task.getTable().get(), task.getSelect(),
                     task.getWhere(), task.getOrderBy());
         } else {
-            throw new ConfigException("'table'
+            throw new ConfigException("'table' or 'query' parameter is required");
         }
     }
 
@@ -224,12 +347,11 @@ public abstract class AbstractJdbcInputPlugin
     protected ConfigDiff buildNextConfigDiff(PluginTask task, List<TaskReport> reports)
     {
         ConfigDiff next = Exec.newConfigDiff();
-
-
-
-
-
-        //}
+        if (reports.size() > 0 && reports.get(0).has("last_record")) {
+            next.set("last_record", reports.get(0).get(JsonNode.class, "last_record"));
+        } else if (task.getLastRecord().isPresent()) {
+            next.set("last_record", task.getLastRecord().get());
+        }
         return next;
     }
@@ -241,6 +363,42 @@ public abstract class AbstractJdbcInputPlugin
         // do nothing
     }
 
+    private static class LastRecordStore
+    {
+        private final List<Integer> columnIndexes;
+        private final JsonNode[] lastValues;
+        private final List<String> columnNames;
+
+        public LastRecordStore(List<Integer> columnIndexes, List<String> columnNames)
+        {
+            this.columnIndexes = columnIndexes;
+            this.lastValues = new JsonNode[columnIndexes.size()];
+            this.columnNames = columnNames;
+        }
+
+        public void accept(List<ColumnGetter> getters)
+            throws SQLException
+        {
+            for (int i = 0; i < columnIndexes.size(); i++) {
+                lastValues[i] = getters.get(columnIndexes.get(i)).encodeToJson();
+            }
+        }
+
+        public List<JsonNode> getList()
+        {
+            ImmutableList.Builder<JsonNode> builder = ImmutableList.builder();
+            for (int i = 0; i < lastValues.length; i++) {
+                if (lastValues[i] == null || lastValues[i].isNull()) {
+                    throw new DataException(String.format(ENGLISH,
+                            "incremental_columns can't include null values but the last row is null at column '%s'",
+                            columnNames.get(i)));
+                }
+                builder.add(lastValues[i]);
+            }
+            return builder.build();
+        }
+    }
+
     @Override
     public TaskReport run(TaskSource taskSource,
             Schema schema, int taskIndex,
@@ -248,22 +406,32 @@ public abstract class AbstractJdbcInputPlugin
     {
         PluginTask task = taskSource.loadTask(getTaskClass());
 
+        PreparedQuery builtQuery = task.getBuiltQuery();
         JdbcSchema querySchema = task.getQuerySchema();
         BufferAllocator allocator = task.getBufferAllocator();
         PageBuilder pageBuilder = new PageBuilder(allocator, schema, output);
 
+        long totalRows = 0;
+
+        LastRecordStore lastRecordStore = null;
+
         try (JdbcInputConnection con = newConnection(task)) {
-
-
+            List<ColumnGetter> getters = newColumnGetters(task, querySchema, pageBuilder);
+            try (BatchSelect cursor = con.newSelectCursor(builtQuery, getters, task.getFetchRows(), task.getSocketTimeout())) {
                 while (true) {
-
-
-                    boolean cont = fetch(cursor, getters, pageBuilder);
-                    if (!cont) {
+                    long rows = fetch(cursor, getters, pageBuilder);
+                    if (rows <= 0L) {
                         break;
                     }
+                    totalRows += rows;
                 }
             }
+
+            if (task.getIncremental() && totalRows > 0) {
+                lastRecordStore = new LastRecordStore(task.getIncrementalColumnIndexes(), task.getIncrementalColumns());
+                lastRecordStore.accept(getters);
+            }
+
             pageBuilder.finish();
 
             // after_select runs after pageBuilder.finish because pageBuilder.finish may fail.
@@ -284,10 +452,10 @@ public abstract class AbstractJdbcInputPlugin
         }
 
         TaskReport report = Exec.newTaskReport();
-
-
-
-
+        if (lastRecordStore != null) {
+            report.set("last_record", lastRecordStore.getList());
+        }
+
         return report;
     }
 
@@ -339,12 +507,12 @@ public abstract class AbstractJdbcInputPlugin
         });
     }
 
-    private
+    private long fetch(BatchSelect cursor,
             List<ColumnGetter> getters, PageBuilder pageBuilder) throws SQLException
     {
         ResultSet result = cursor.fetch();
         if (result == null || !result.next()) {
-            return
+            return 0;
         }
 
         List<Column> columns = pageBuilder.getSchema().getColumns();
@@ -362,7 +530,8 @@ public abstract class AbstractJdbcInputPlugin
                 reportRows *= 2;
             }
         } while (result.next());
-
+
+        return rows;
     }
 
     //// TODO move to embulk.spi.util?
@@ -402,10 +571,14 @@ public abstract class AbstractJdbcInputPlugin
         // }
         //}
 
-    protected void
+    protected void addDriverJarToClasspath(String glob)
     {
         // TODO match glob
         PluginClassLoader loader = (PluginClassLoader) getClass().getClassLoader();
+        Path path = Paths.get(glob);
+        if (!path.toFile().exists()) {
+            throw new ConfigException("The specified driver jar doesn't exist: " + glob);
+        }
         loader.addPath(Paths.get(glob));
     }
 }
data/src/main/java/org/embulk/input/jdbc/JdbcInputConnection.java
CHANGED
@@ -11,12 +11,19 @@ import java.util.Set;
 
 import org.embulk.config.ConfigException;
 import org.embulk.spi.Exec;
+import org.embulk.input.jdbc.getter.ColumnGetter;
 import org.slf4j.Logger;
 
+import java.util.List;
+import java.util.ArrayList;
+import static java.util.Locale.ENGLISH;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.JsonNode;
 import com.google.common.base.Optional;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.ImmutableSet.Builder;
 
 public class JdbcInputConnection
         implements AutoCloseable
@@ -57,6 +64,20 @@ public class JdbcInputConnection
         }
     }
 
+    public List<String> getPrimaryKeys(String tableName) throws SQLException
+    {
+        ResultSet rs = databaseMetaData.getPrimaryKeys(null, schemaName, tableName);
+        ImmutableList.Builder<String> builder = ImmutableList.builder();
+        try {
+            while(rs.next()) {
+                builder.add(rs.getString("COLUMN_NAME"));
+            }
+        } finally {
+            rs.close();
+        }
+        return builder.build();
+    }
+
     protected JdbcSchema getSchemaOfResultMetadata(ResultSetMetaData metadata) throws SQLException
     {
         ImmutableList.Builder<JdbcColumn> columns = ImmutableList.builder();
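The new getPrimaryKeys() helper above is what lets `incremental: true` fall back to the target table's primary key when `incremental_columns` is not set. As a hedged illustration of the underlying JDBC metadata call only (this is not code from the package), the following standalone sketch lists the primary-key columns of a table; the connection URL, the credentials, and the `sales` table name are hypothetical placeholders.

```java
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

public class PrimaryKeyLookupExample
{
    public static void main(String[] args) throws SQLException
    {
        // Hypothetical connection; any JDBC driver on the classpath works the same way.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:postgresql://localhost:5432/mydb", "user", "pass")) {
            DatabaseMetaData meta = conn.getMetaData();
            List<String> primaryKeys = new ArrayList<>();
            // getPrimaryKeys(catalog, schema, table) returns one row per key column;
            // COLUMN_NAME holds the column name, KEY_SEQ its position in the key.
            try (ResultSet rs = meta.getPrimaryKeys(null, null, "sales")) {
                while (rs.next()) {
                    primaryKeys.add(rs.getString("COLUMN_NAME"));
                }
            }
            System.out.println("Primary key columns of sales: " + primaryKeys);
        }
    }
}
```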
@@ -72,19 +93,70 @@ public class JdbcInputConnection
         return new JdbcSchema(columns.build());
     }
 
-    public
+    public static class PreparedQuery
     {
-
+        private final String query;
+        private final List<JdbcLiteral> parameters;
+
+        @JsonCreator
+        public PreparedQuery(
+                @JsonProperty("query") String query,
+                @JsonProperty("parameters") List<JdbcLiteral> parameters)
+        {
+            this.query = query;
+            this.parameters = parameters;
+        }
+
+        @JsonProperty("query")
+        public String getQuery()
+        {
+            return query;
+        }
+
+        @JsonProperty("parameters")
+        public List<JdbcLiteral> getParameters()
+        {
+            return parameters;
+        }
     }
 
-
+    public BatchSelect newSelectCursor(PreparedQuery preparedQuery,
+            List<ColumnGetter> getters,
+            int fetchRows, int queryTimeout) throws SQLException
     {
+        return newBatchSelect(preparedQuery, getters, fetchRows, queryTimeout);
+    }
+
+    protected BatchSelect newBatchSelect(PreparedQuery preparedQuery,
+            List<ColumnGetter> getters,
+            int fetchRows, int queryTimeout) throws SQLException
+    {
+        String query = preparedQuery.getQuery();
+        List<JdbcLiteral> params = preparedQuery.getParameters();
+
         PreparedStatement stmt = connection.prepareStatement(query);
         stmt.setFetchSize(fetchRows);
         stmt.setQueryTimeout(queryTimeout);
+        logger.info("SQL: " + query);
+        if (!params.isEmpty()) {
+            logger.info("Parameters: {}", params);
+            prepareParameters(stmt, getters, params);
+        }
         return new SingleSelect(stmt);
     }
 
+    protected void prepareParameters(PreparedStatement stmt, List<ColumnGetter> getters,
+            List<JdbcLiteral> parameters)
+        throws SQLException
+    {
+        for (int i = 0; i < parameters.size(); i++) {
+            JdbcLiteral literal = parameters.get(i);
+            ColumnGetter getter = getters.get(literal.getColumnIndex());
+            int index = i + 1; // JDBC column index begins from 1
+            getter.decodeFromJsonTo(stmt, index, literal.getValue());
+        }
+    }
+
     public interface BatchSelect
             extends AutoCloseable
     {
@@ -156,72 +228,87 @@ public class JdbcInputConnection
     }
 
     public String buildSelectQuery(String tableName,
-            Optional<String>
-            Optional<String>
-    {
-        String actualTableName;
-        if (tableExists(tableName)) {
-            actualTableName = tableName;
-        } else {
-            String upperTableName = tableName.toUpperCase();
-            String lowerTableName = tableName.toLowerCase();
-            if (tableExists(upperTableName)) {
-                if (tableExists(lowerTableName)) {
-                    throw new ConfigException(String.format("Cannot specify table '%s' because both '%s' and '%s' exist.",
-                            tableName, upperTableName, lowerTableName));
-                } else {
-                    actualTableName = upperTableName;
-                }
-            } else {
-                if (tableExists(lowerTableName)) {
-                    actualTableName = lowerTableName;
-                } else {
-                    actualTableName = tableName;
-                }
-            }
-        }
-
+            Optional<String> selectExpression, Optional<String> whereCondition,
+            Optional<String> orderByExpression) throws SQLException
+    {
         StringBuilder sb = new StringBuilder();
 
         sb.append("SELECT ");
-        sb.append(
-        sb.append(" FROM ").append(buildTableName(
+        sb.append(selectExpression.or("*"));
+        sb.append(" FROM ").append(buildTableName(tableName));
 
         if (whereCondition.isPresent()) {
             sb.append(" WHERE ").append(whereCondition.get());
         }
 
-        if (
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        if (orderByExpression.isPresent()) {
+            sb.append(" ORDER BY ").append(orderByExpression.get());
+        }
+
+        return sb.toString();
+    }
+
+    public PreparedQuery buildIncrementalQuery(String rawQuery, JdbcSchema querySchema,
+            List<Integer> incrementalColumnIndexes, List<JsonNode> incrementalValues) throws SQLException
+    {
+        StringBuilder sb = new StringBuilder();
+        ImmutableList.Builder<JdbcLiteral> parameters = ImmutableList.builder();
+
+        sb.append("SELECT * FROM (");
+        sb.append(truncateStatementDelimiter(rawQuery));
+        sb.append(") embulk_incremental_");
+        if (incrementalValues != null) {
+            sb.append(" WHERE ");
+
+            List<String> leftColumnNames = new ArrayList<>();
+            List<JdbcLiteral> rightLiterals = new ArrayList<>();
+            for (int n = 0; n < incrementalColumnIndexes.size(); n++) {
+                int columnIndex = incrementalColumnIndexes.get(n);
+                JsonNode value = incrementalValues.get(n);
+                leftColumnNames.add(querySchema.getColumnName(columnIndex));
+                rightLiterals.add(new JdbcLiteral(columnIndex, value));
+            }
+
+            for (int n = 0; n < leftColumnNames.size(); n++) {
+                if (n > 0) {
+                    sb.append(" OR ");
                 }
+                sb.append("(");
+
+                for (int i = 0; i < n; i++) {
+                    sb.append(quoteIdentifierString(leftColumnNames.get(i)));
+                    sb.append(" = ?");
+                    parameters.add(rightLiterals.get(i));
+                    sb.append(" AND ");
+                }
+                sb.append(quoteIdentifierString(leftColumnNames.get(n)));
+                sb.append(" > ?");
+                parameters.add(rightLiterals.get(n));
+
+                sb.append(")");
             }
+        }
+        sb.append(" ORDER BY ");
 
-
+        boolean first = true;
+        for (int i : incrementalColumnIndexes) {
+            if (first) {
+                first = false;
+            } else {
+                sb.append(", ");
+            }
+            sb.append(quoteIdentifierString(querySchema.getColumnName(i)));
         }
 
-        return sb.toString();
+        return new PreparedQuery(sb.toString(), parameters.build());
+    }
+
+    protected String truncateStatementDelimiter(String rawQuery) throws SQLException
+    {
+        return rawQuery.replaceAll(";\\s*$", "");
    }
 
-
+    public boolean tableExists(String tableName) throws SQLException
     {
         try (ResultSet rs = connection.getMetaData().getTables(null, schemaName, tableName, null)) {
             return rs.next();
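To make the predicate that buildIncrementalQuery() generates easier to picture, here is a hedged, self-contained sketch (not code from the package) that rebuilds the same nested-loop WHERE clause for two hypothetical incremental columns, created_at and id, and prints the resulting placeholder pattern. The real method additionally quotes the identifiers, wraps the original query as a subquery aliased embulk_incremental_, records a JdbcLiteral parameter for each ?, and appends an ORDER BY over the same columns.

```java
import java.util.Arrays;
import java.util.List;

public class IncrementalWherePreview
{
    public static void main(String[] args)
    {
        // Hypothetical incremental columns; the plugin takes them from
        // incremental_columns or from the table's primary key.
        List<String> columns = Arrays.asList("created_at", "id");

        StringBuilder sb = new StringBuilder();
        sb.append("WHERE ");
        for (int n = 0; n < columns.size(); n++) {
            if (n > 0) {
                sb.append(" OR ");
            }
            sb.append("(");
            // Columns before the n-th one must equal the last-seen values...
            for (int i = 0; i < n; i++) {
                sb.append(columns.get(i)).append(" = ?").append(" AND ");
            }
            // ...and the n-th one must be strictly greater, i.e. a lexicographic comparison.
            sb.append(columns.get(n)).append(" > ?");
            sb.append(")");
        }
        // Prints: WHERE (created_at > ?) OR (created_at = ? AND id > ?)
        System.out.println(sb);
    }
}
```

Because the comparison is strict on the last column, the row recorded in last_record itself is skipped on the next run, which is also why the plugin raises a DataException when an incremental column contains null in the final row.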
@@ -230,7 +317,7 @@ public class JdbcInputConnection
 
     private Set<String> getColumnNames(String tableName) throws SQLException
     {
-        Builder<String> columnNamesBuilder = ImmutableSet.builder();
+        ImmutableSet.Builder<String> columnNamesBuilder = ImmutableSet.builder();
         try (ResultSet rs = connection.getMetaData().getColumns(null, schemaName, tableName, null)) {
             while (rs.next()) {
                 columnNamesBuilder.add(rs.getString("COLUMN_NAME"));
data/src/main/java/org/embulk/input/jdbc/JdbcLiteral.java
ADDED
@@ -0,0 +1,38 @@
+package org.embulk.input.jdbc;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.JsonNode;
+
+public class JdbcLiteral
+{
+    private final int columnIndex;
+    private final JsonNode value;
+
+    @JsonCreator
+    public JdbcLiteral(
+            @JsonProperty("columnIndex") int columnIndex,
+            @JsonProperty("value") JsonNode value)
+    {
+        this.columnIndex = columnIndex;
+        this.value = value;
+    }
+
+    @JsonProperty("columnIndex")
+    public int getColumnIndex()
+    {
+        return columnIndex;
+    }
+
+    @JsonProperty("value")
+    public JsonNode getValue()
+    {
+        return value;
+    }
+
+    @Override
+    public String toString()
+    {
+        return value.toString();
+    }
+}
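JdbcLiteral only carries a column index and a Jackson JsonNode, and both it and PreparedQuery are annotated with @JsonCreator/@JsonProperty, presumably so that the built query and its parameters can be serialized into the task between transaction() and run(). The following hedged sketch uses a local stand-in class (Literal, with made-up values) rather than the plugin's own class, just to show that Jackson round trip with a plain ObjectMapper.

```java
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;

public class JdbcLiteralJsonExample
{
    // Local stand-in with the same Jackson annotations as the new JdbcLiteral class,
    // so the example compiles on its own.
    public static class Literal
    {
        private final int columnIndex;
        private final JsonNode value;

        @JsonCreator
        public Literal(
                @JsonProperty("columnIndex") int columnIndex,
                @JsonProperty("value") JsonNode value)
        {
            this.columnIndex = columnIndex;
            this.value = value;
        }

        @JsonProperty("columnIndex")
        public int getColumnIndex() { return columnIndex; }

        @JsonProperty("value")
        public JsonNode getValue() { return value; }
    }

    public static void main(String[] args) throws Exception
    {
        ObjectMapper mapper = new ObjectMapper();
        Literal literal = new Literal(0, JsonNodeFactory.instance.numberNode(4200L));

        // Serialize to JSON and read it back again.
        String json = mapper.writeValueAsString(literal);   // {"columnIndex":0,"value":4200}
        Literal restored = mapper.readValue(json, Literal.class);
        System.out.println(json + " -> column " + restored.getColumnIndex()
                + ", value " + restored.getValue().asLong());
    }
}
```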
data/src/main/java/org/embulk/input/jdbc/JdbcSchema.java
CHANGED
@@ -1,6 +1,7 @@
 package org.embulk.input.jdbc;
 
 import java.util.List;
+import com.google.common.base.Optional;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonValue;
 
@@ -34,4 +35,21 @@ public class JdbcSchema
     {
         return columns.get(i).getName();
     }
+
+    public Optional<Integer> findColumn(String caseInsensitiveName)
+    {
+        // find by case sensitive first
+        for (int i = 0; i < columns.size(); i++) {
+            if (getColumn(i).getName().equals(caseInsensitiveName)) {
+                return Optional.of(i);
+            }
+        }
+        // find by case insensitive
+        for (int i = 0; i < columns.size(); i++) {
+            if (getColumn(i).getName().equalsIgnoreCase(caseInsensitiveName)) {
+                return Optional.of(i);
+            }
+        }
+        return Optional.absent();
+    }
 }
data/src/main/java/org/embulk/input/jdbc/getter/AbstractColumnGetter.java
CHANGED
@@ -1,14 +1,21 @@
 package org.embulk.input.jdbc.getter;
 
 import java.sql.ResultSet;
+import java.sql.PreparedStatement;
 import java.sql.SQLException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.JsonNodeFactory;
 import org.embulk.spi.Column;
 import org.embulk.spi.ColumnVisitor;
 import org.embulk.spi.PageBuilder;
 import org.embulk.spi.type.Type;
+import org.embulk.spi.DataException;
+import static java.util.Locale.ENGLISH;
 
 public abstract class AbstractColumnGetter implements ColumnGetter, ColumnVisitor
 {
+    protected static final JsonNodeFactory jsonNodeFactory = JsonNodeFactory.instance;
+
     protected final PageBuilder to;
     private final Type toType;
 
@@ -20,8 +27,8 @@ public abstract class AbstractColumnGetter implements ColumnGetter, ColumnVisitor
 
     @Override
     public void getAndSet(ResultSet from, int fromIndex,
-            Column toColumn) throws SQLException
-
+            Column toColumn) throws SQLException
+    {
         fetch(from, fromIndex);
         if (from.wasNull()) {
             to.setNull(toColumn);
@@ -79,4 +86,20 @@ public abstract class AbstractColumnGetter implements ColumnGetter, ColumnVisitor
 
     protected abstract Type getDefaultToType();
 
+    @Override
+    public JsonNode encodeToJson()
+    {
+        throw new DataException(String.format(ENGLISH,
+                "Column type '%s' set at incremental_columns option is not supported",
+                getToType()));
+    }
+
+    @Override
+    public void decodeFromJsonTo(PreparedStatement toStatement, int toIndex, JsonNode fromValue)
+        throws SQLException
+    {
+        throw new DataException(String.format(ENGLISH,
+                "Converting last_record value %s to column index %d is not supported",
+                fromValue.toString(), toIndex));
+    }
 }
data/src/main/java/org/embulk/input/jdbc/getter/ColumnGetter.java
CHANGED
@@ -2,6 +2,8 @@ package org.embulk.input.jdbc.getter;
 
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.sql.PreparedStatement;
+import com.fasterxml.jackson.databind.JsonNode;
 import org.embulk.spi.Column;
 import org.embulk.spi.type.Type;
 
@@ -11,4 +13,9 @@ public interface ColumnGetter
             Column toColumn) throws SQLException;
 
     public Type getToType();
+
+    public JsonNode encodeToJson();
+
+    public void decodeFromJsonTo(PreparedStatement toStatement, int toIndex, JsonNode fromValue)
+        throws SQLException;
 }
data/src/main/java/org/embulk/input/jdbc/getter/ColumnGetterFactory.java
CHANGED
@@ -14,6 +14,8 @@ import org.embulk.spi.type.TimestampType;
 import org.embulk.spi.type.Type;
 import org.joda.time.DateTimeZone;
 
+import static java.util.Locale.ENGLISH;
+
 public class ColumnGetterFactory
 {
     protected final PageBuilder to;
@@ -58,7 +60,8 @@ public class ColumnGetterFactory
         case "decimal":
             return new BigDecimalColumnGetter(to, toType);
         default:
-            throw new ConfigException(String.format(
+            throw new ConfigException(String.format(ENGLISH,
+                    "Unknown value_type '%s' for column '%s'", option.getValueType(), column.getName()));
         }
     }
 
@@ -185,7 +188,8 @@ public class ColumnGetterFactory
     private static UnsupportedOperationException unsupportedOperationException(JdbcColumn column)
     {
         throw new UnsupportedOperationException(
-                String.format(
-
+                String.format(ENGLISH,
+                        "Unsupported type %s (sqlType=%d) of '%s' column. Please add '%s: {type: string}' to 'column_options: {...}' option to convert the values to strings, or exclude the column from 'select:' option",
+                        column.getTypeName(), column.getSqlType(), column.getName(), column.getName()));
     }
 }
data/src/main/java/org/embulk/input/jdbc/getter/LongColumnGetter.java
CHANGED
@@ -1,7 +1,9 @@
 package org.embulk.input.jdbc.getter;
 
 import java.sql.ResultSet;
+import java.sql.PreparedStatement;
 import java.sql.SQLException;
+import com.fasterxml.jackson.databind.JsonNode;
 import org.embulk.spi.Column;
 import org.embulk.spi.PageBuilder;
 import org.embulk.spi.type.Type;
@@ -53,4 +55,16 @@ public class LongColumnGetter
         to.setString(column, Long.toString(value));
     }
 
+    @Override
+    public JsonNode encodeToJson()
+    {
+        return jsonNodeFactory.numberNode(value);
+    }
+
+    @Override
+    public void decodeFromJsonTo(PreparedStatement toStatement, int toIndex, JsonNode fromValue)
+        throws SQLException
+    {
+        toStatement.setLong(toIndex, fromValue.asLong());
+    }
 }
data/src/main/java/org/embulk/input/jdbc/getter/StringColumnGetter.java
CHANGED
@@ -1,7 +1,9 @@
 package org.embulk.input.jdbc.getter;
 
 import java.sql.ResultSet;
+import java.sql.PreparedStatement;
 import java.sql.SQLException;
+import com.fasterxml.jackson.databind.JsonNode;
 import org.embulk.spi.Column;
 import org.embulk.spi.PageBuilder;
 import org.embulk.spi.json.JsonParseException;
@@ -79,4 +81,16 @@ public class StringColumnGetter
         to.setString(column, value);
     }
 
+    @Override
+    public JsonNode encodeToJson()
+    {
+        return jsonNodeFactory.textNode(value);
+    }
+
+    @Override
+    public void decodeFromJsonTo(PreparedStatement toStatement, int toIndex, JsonNode fromValue)
+        throws SQLException
+    {
+        toStatement.setString(toIndex, fromValue.asText());
+    }
 }
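The getter changes above are what make last_record work: a supported getter can encode the value it saw in the final row into a Jackson JsonNode (encodeToJson) and later bind such a value back into a query placeholder (decodeFromJsonTo). The sketch below is not plugin code; it only uses Jackson's JsonNodeFactory, with made-up column values, to show what the encoded values and the resulting last_record array look like and which accessors the decode step relies on.

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;

public class LastRecordRoundTrip
{
    public static void main(String[] args)
    {
        JsonNodeFactory factory = JsonNodeFactory.instance;

        // Encoding: roughly what LongColumnGetter/StringColumnGetter.encodeToJson()
        // produce for the final row of a run (column values here are invented).
        JsonNode lastId = factory.numberNode(4200L);          // e.g. an "id" column
        JsonNode lastName = factory.textNode("device-0042");  // e.g. a "name" column

        // last_record is reported as an array in incremental_columns order,
        // so the next run can resume from it.
        ArrayNode lastRecord = factory.arrayNode();
        lastRecord.add(lastId);
        lastRecord.add(lastName);
        System.out.println("last_record: " + lastRecord);     // last_record: [4200,"device-0042"]

        // Decoding: decodeFromJsonTo() turns stored values back into PreparedStatement
        // parameters, essentially stmt.setLong(i, node.asLong()) or stmt.setString(i, node.asText()).
        System.out.println(lastId.asLong() + " / " + lastName.asText());
    }
}
```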
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: embulk-input-jdbc
 version: !ruby/object:Gem::Version
-  version: 0.7.
+  version: 0.7.3
 platform: ruby
 authors:
 - Sadayuki Furuhashi
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2016-
+date: 2016-08-26 00:00:00.000000000 Z
 dependencies: []
 description: Selects records from a table.
 email:
@@ -18,12 +18,14 @@ extensions: []
 extra_rdoc_files: []
 files:
 - build.gradle
+- classpath/embulk-input-jdbc-0.7.3.jar
 - lib/embulk/input/jdbc.rb
 - src/main/java/org/embulk/input/JdbcInputPlugin.java
 - src/main/java/org/embulk/input/jdbc/AbstractJdbcInputPlugin.java
 - src/main/java/org/embulk/input/jdbc/JdbcColumn.java
 - src/main/java/org/embulk/input/jdbc/JdbcColumnOption.java
 - src/main/java/org/embulk/input/jdbc/JdbcInputConnection.java
+- src/main/java/org/embulk/input/jdbc/JdbcLiteral.java
 - src/main/java/org/embulk/input/jdbc/JdbcSchema.java
 - src/main/java/org/embulk/input/jdbc/ToString.java
 - src/main/java/org/embulk/input/jdbc/ToStringMap.java
@@ -42,7 +44,6 @@ files:
 - src/main/java/org/embulk/input/jdbc/getter/TimeColumnGetter.java
 - src/main/java/org/embulk/input/jdbc/getter/TimestampColumnGetter.java
 - src/test/java/org/embulk/input/EmbulkPluginTester.java
-- classpath/embulk-input-jdbc-0.7.2.jar
 homepage: https://github.com/embulk/embulk-input-jdbc
 licenses:
 - Apache 2.0
@@ -53,17 +54,17 @@ require_paths:
 - lib
 required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
-  - -
+  - - ">="
     - !ruby/object:Gem::Version
       version: '0'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - -
+  - - ">="
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.
+rubygems_version: 2.4.8
 signing_key:
 specification_version: 4
 summary: JDBC input plugin for Embulk

data/classpath/embulk-input-jdbc-0.7.2.jar
Binary file