embulk-input-postgresql 0.8.0 → 0.8.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. checksums.yaml +4 -4
  2. data/README.md +8 -0
  3. data/build.gradle +1 -2
  4. data/classpath/embulk-input-jdbc-0.8.1.jar +0 -0
  5. data/classpath/{embulk-input-postgresql-0.8.0.jar → embulk-input-postgresql-0.8.1.jar} +0 -0
  6. data/src/main/java/org/embulk/input/postgresql/getter/{HstoreColumnGetter.java → HstoreToJsonColumnGetter.java} +5 -30
  7. data/src/main/java/org/embulk/input/postgresql/getter/PostgreSQLColumnGetterFactory.java +28 -7
  8. data/src/test/java/org/embulk/input/postgresql/HstoreTest.java +75 -0
  9. data/src/test/java/org/embulk/input/postgresql/IncrementalTest.java +143 -0
  10. data/src/test/java/org/embulk/input/postgresql/PostgreSQLTests.java +39 -0
  11. data/src/test/resources/org/embulk/input/postgresql/test/expect/hstore/as_json.yml +3 -0
  12. data/src/test/resources/org/embulk/input/postgresql/test/expect/hstore/as_string.yml +1 -0
  13. data/src/test/resources/org/embulk/input/postgresql/test/expect/hstore/expected_json.csv +1 -0
  14. data/src/test/resources/org/embulk/input/postgresql/test/expect/hstore/expected_string.csv +1 -0
  15. data/src/test/resources/org/embulk/input/postgresql/test/expect/hstore/setup.sql +11 -0
  16. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/int/config_1.yml +3 -0
  17. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/int/config_2.yml +4 -0
  18. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/int/expected_1.csv +4 -0
  19. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/int/expected_1.diff +3 -0
  20. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/int/expected_2.csv +2 -0
  21. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/int/expected_2.diff +3 -0
  22. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/int/insert_more.sql +7 -0
  23. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/int/setup.sql +13 -0
  24. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/config_1.yml +4 -0
  25. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/config_2.yml +4 -0
  26. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/expected_1.csv +7 -0
  27. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/expected_1.diff +3 -0
  28. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/expected_2.csv +3 -0
  29. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/expected_2.diff +3 -0
  30. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/insert_more.sql +9 -0
  31. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/setup.sql +16 -0
  32. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/config_1.yml +4 -0
  33. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/config_2.yml +4 -0
  34. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/expected_1.csv +7 -0
  35. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/expected_1.diff +3 -0
  36. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/expected_2.csv +3 -0
  37. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/expected_2.diff +3 -0
  38. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/insert_more.sql +9 -0
  39. data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/setup.sql +16 -0
  40. metadata +37 -8
  41. data/classpath/embulk-input-jdbc-0.8.0.jar +0 -0
  42. data/src/test/java/org/embulk/input/postgresql/PostgreSQLInputPluginTest.java +0 -102
  43. data/src/test/resources/yml/input_hstore.yml +0 -14
  44. data/src/test/resources/yml/input_hstore2.yml +0 -16
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 2c06aab77369550b3a8fe22a06ed19f08813b552
- data.tar.gz: fe540f426d5a51eab8768330faf2964fa0323410
+ metadata.gz: b02b6267f621dc419cbc4b72486ce4c69ec31dbf
+ data.tar.gz: 6257da06c3568e91fa9ffd6d7f4012300f5d4eed
  SHA512:
- metadata.gz: 563d09b98cd013a35089223a1f88dbf413be9dd5d894c3cd567420e0f67bd5d390eab565cdc1ad2b4dcf7c70cde6f8c1461b19a35cfa6562ddadff88a1458fb4
- data.tar.gz: 5166f3f915d6159c7a576fbf0db217ad5ee153b72eaa051feb6f9c2b2c7452f38ef0cce6c1cbb83ec2313716abe0e30eef66f38ac10c91937d9150f7896a0cd8
+ metadata.gz: 2d0a03f7ab55bd8a90de593d31525a210fa2fddd1271bc422eca6664234e353d9c376e6c8b0a6b3fda0b318a5ba3b5cf9b5f01e7deabbee85b11a87c14806dea
+ data.tar.gz: 3892fa790c926a25dbf4fd11c7e78e1e62113d3ce53e241ebc252385449b6bdb3bcc7f622f4371b8678c231ce4947ec0acecaf31d3646298a8827c31fcc83b65
data/README.md CHANGED
@@ -160,3 +160,11 @@ in:
  ```
  $ ./gradlew gem
  ```
+
+ Running tests:
+
+ ```
+ $ cp ci/travis_postgresql.yml ci/postgresql.yml # edit this file if necessary
+ $ EMBULK_INPUT_POSTGRESQL_TEST_CONFIG=`pwd`/ci/postgresql.yml ./gradlew :embulk-input-postgresql:check --info
+ ```
+
data/build.gradle CHANGED
@@ -3,6 +3,5 @@ dependencies {

  compile 'org.postgresql:postgresql:9.4-1205-jdbc41'

- testCompile 'org.embulk:embulk-standards:0.8.8'
- testCompile project(':embulk-input-jdbc').sourceSets.test.output
+ testCompile 'org.embulk:embulk-standards:0.8.15'
  }
data/src/main/java/org/embulk/input/postgresql/getter/{HstoreColumnGetter.java → HstoreToJsonColumnGetter.java} RENAMED
@@ -2,62 +2,37 @@ package org.embulk.input.postgresql.getter;

  import com.fasterxml.jackson.core.JsonProcessingException;
  import com.fasterxml.jackson.databind.ObjectMapper;
- import org.embulk.input.jdbc.getter.AbstractColumnGetter;
+ import org.embulk.input.jdbc.getter.JsonColumnGetter;
  import org.embulk.spi.Column;
  import org.embulk.spi.PageBuilder;
  import org.embulk.spi.json.JsonParseException;
- import org.embulk.spi.json.JsonParser;
  import org.embulk.spi.type.Type;
- import org.embulk.spi.type.Types;
  import org.msgpack.value.Value;
  import org.postgresql.util.HStoreConverter;

- import java.sql.ResultSet;
- import java.sql.SQLException;
  import java.util.Map;

- public class HstoreColumnGetter
- extends AbstractColumnGetter
+ public class HstoreToJsonColumnGetter
+ extends JsonColumnGetter
  {
- private final JsonParser parser = new JsonParser();
  private final ObjectMapper mapper = new ObjectMapper();

- private String value;
-
- public HstoreColumnGetter(PageBuilder to, Type toType)
+ public HstoreToJsonColumnGetter(PageBuilder to, Type toType)
  {
  super(to, toType);
  }

- @Override
- protected void fetch(ResultSet from, int fromIndex) throws SQLException
- {
- value = from.getString(fromIndex);
- }
-
- @Override
- protected Type getDefaultToType()
- {
- return Types.STRING;
- }
-
  @Override
  public void jsonColumn(Column column)
  {
  Value v;
  try {
  Map map = HStoreConverter.fromString(value);
- v = parser.parse(mapper.writeValueAsString(map));
+ v = jsonParser.parse(mapper.writeValueAsString(map));
  } catch (JsonProcessingException | JsonParseException e) {
  super.jsonColumn(column);
  return;
  }
  to.setJson(column, v);
  }
-
- @Override
- public void stringColumn(Column column)
- {
- to.setString(column, value);
- }
  }
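
For readers skimming the rename above: the getter still turns PostgreSQL's hstore text form into an Embulk JSON value, it just inherits the parsing plumbing (jsonParser) from JsonColumnGetter now. A minimal sketch of that conversion step, assuming jackson-databind and the PostgreSQL JDBC driver on the classpath (the same classes the getter imports); the class name below is hypothetical:

```
import java.util.Map;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.postgresql.util.HStoreConverter;

public class HstoreToJsonSketch
{
    public static void main(String[] args) throws Exception
    {
        // hstore text form, as ResultSet.getString() returns it for an hstore column
        String hstore = "\"a\" => \"b\"";

        // HStoreConverter parses the hstore literal into a Map ...
        Map<?, ?> map = HStoreConverter.fromString(hstore);

        // ... and Jackson serializes that Map to JSON text; the real getter then
        // hands this string to JsonColumnGetter's jsonParser to build the Value.
        String json = new ObjectMapper().writeValueAsString(map);
        System.out.println(json); // prints {"a":"b"}
    }
}
```
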
data/src/main/java/org/embulk/input/postgresql/getter/PostgreSQLColumnGetterFactory.java CHANGED
@@ -1,10 +1,15 @@
  package org.embulk.input.postgresql.getter;

+ import org.embulk.input.jdbc.AbstractJdbcInputPlugin.PluginTask;
  import org.embulk.input.jdbc.JdbcColumn;
  import org.embulk.input.jdbc.JdbcColumnOption;
+ import org.embulk.input.jdbc.JdbcInputConnection;
  import org.embulk.input.jdbc.getter.ColumnGetter;
  import org.embulk.input.jdbc.getter.ColumnGetterFactory;
+ import org.embulk.input.jdbc.getter.TimestampWithTimeZoneIncrementalHandler;
+ import org.embulk.input.jdbc.getter.TimestampWithoutTimeZoneIncrementalHandler;
  import org.embulk.spi.PageBuilder;
+ import org.embulk.spi.type.Types;
  import org.joda.time.DateTimeZone;

  public class PostgreSQLColumnGetterFactory extends ColumnGetterFactory
@@ -15,21 +20,37 @@ public class PostgreSQLColumnGetterFactory extends ColumnGetterFactory
  }

  @Override
- public ColumnGetter newColumnGetter(JdbcColumn column, JdbcColumnOption option)
+ public ColumnGetter newColumnGetter(JdbcInputConnection con, PluginTask task, JdbcColumn column, JdbcColumnOption option)
  {
- if (column.getTypeName().equals("hstore")) {
- return new HstoreColumnGetter(to, getToType(option));
- } else {
- return super.newColumnGetter(column, option);
+ if (column.getTypeName().equals("hstore") && getToType(option) == Types.JSON) {
+ // converting hstore to json needs a special handling
+ return new HstoreToJsonColumnGetter(to, Types.JSON);
+ }
+
+ ColumnGetter getter = super.newColumnGetter(con, task, column, option);
+
+ // incremental loading wrapper
+ switch (column.getTypeName()) {
+ case "timestamptz":
+ return new TimestampWithTimeZoneIncrementalHandler(getter);
+ case "timestamp":
+ return new TimestampWithoutTimeZoneIncrementalHandler(getter);
+ default:
+ return getter;
  }
  }

  @Override
  protected String sqlTypeToValueType(JdbcColumn column, int sqlType)
  {
- if (column.getTypeName().equals("json") || column.getTypeName().equals("jsonb")) {
+ switch(column.getTypeName()) {
+ case "json":
+ case "jsonb":
  return "json";
- } else {
+ case "hstore":
+ // hstore is converted to string by default
+ return "string";
+ default:
  return super.sqlTypeToValueType(column, sqlType);
  }
  }
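
The switch in newColumnGetter above wraps the base getter in an incremental handler for the two timestamp types. As a rough, hypothetical illustration of that decorator idea only (simplified names and interfaces, not the actual embulk-input-jdbc API): the wrapper delegates normal column handling and additionally remembers the last value it saw, which is conceptually what feeds the last_record entries visible in the incremental test configs later in this diff.

```
public class IncrementalHandlerSketch
{
    // Hypothetical, simplified stand-in for a column getter.
    interface SimpleColumnGetter
    {
        Object fetch();
    }

    // Decorator: delegates fetching to the wrapped getter and tracks
    // the last value so an incremental run can report it afterwards.
    static class IncrementalHandler implements SimpleColumnGetter
    {
        private final SimpleColumnGetter delegate;
        private Object lastValue;

        IncrementalHandler(SimpleColumnGetter delegate)
        {
            this.delegate = delegate;
        }

        @Override
        public Object fetch()
        {
            lastValue = delegate.fetch(); // normal handling stays in the wrapped getter
            return lastValue;
        }

        Object lastRecordValue()
        {
            return lastValue; // the incremental-specific extra behavior
        }
    }

    public static void main(String[] args)
    {
        IncrementalHandler handler = new IncrementalHandler(() -> "2016-11-02 04:00:05.333003");
        handler.fetch();
        System.out.println(handler.lastRecordValue()); // prints the tracked value
    }
}
```
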
data/src/test/java/org/embulk/input/postgresql/HstoreTest.java ADDED
@@ -0,0 +1,75 @@
+ package org.embulk.input.postgresql;
+
+ import java.nio.file.Path;
+
+ import org.embulk.config.ConfigDiff;
+ import org.embulk.config.ConfigSource;
+ import org.embulk.input.PostgreSQLInputPlugin;
+ import org.embulk.spi.InputPlugin;
+ import org.embulk.test.TestingEmbulk.RunResult;
+ import org.embulk.test.EmbulkTests;
+ import org.embulk.test.TestingEmbulk;
+ import org.junit.Before;
+ import org.junit.Rule;
+ import org.junit.Test;
+
+ import static org.embulk.input.postgresql.PostgreSQLTests.execute;
+ import static org.embulk.test.EmbulkTests.readSortedFile;
+ import static org.hamcrest.Matchers.is;
+ import static org.junit.Assert.assertThat;
+
+ public class HstoreTest
+ {
+ private static final String BASIC_RESOURCE_PATH = "org/embulk/input/postgresql/test/expect/hstore/";
+
+ private static ConfigSource loadYamlResource(TestingEmbulk embulk, String fileName)
+ {
+ return embulk.loadYamlResource(BASIC_RESOURCE_PATH + fileName);
+ }
+
+ private static String readResource(String fileName)
+ {
+ return EmbulkTests.readResource(BASIC_RESOURCE_PATH + fileName);
+ }
+
+ @Rule
+ public TestingEmbulk embulk = TestingEmbulk.builder()
+ .registerPlugin(InputPlugin.class, "postgresql", PostgreSQLInputPlugin.class)
+ .build();
+
+ private ConfigSource baseConfig;
+
+ @Before
+ public void setup()
+ {
+ baseConfig = PostgreSQLTests.baseConfig();
+ }
+
+ @Test
+ public void loadAsStringByDefault() throws Exception
+ {
+ execute(readResource("setup.sql"));
+
+ Path out1 = embulk.createTempFile("csv");
+ RunResult result1 = embulk.runInput(
+ baseConfig.merge(loadYamlResource(embulk, "as_string.yml")),
+ out1);
+ assertThat(
+ readSortedFile(out1),
+ is(readResource("expected_string.csv")));
+ }
+
+ @Test
+ public void loadAsJson() throws Exception
+ {
+ execute(readResource("setup.sql"));
+
+ Path out1 = embulk.createTempFile("csv");
+ RunResult result1 = embulk.runInput(
+ baseConfig.merge(loadYamlResource(embulk, "as_json.yml")),
+ out1);
+ assertThat(
+ readSortedFile(out1),
+ is(readResource("expected_json.csv")));
+ }
+ }
data/src/test/java/org/embulk/input/postgresql/IncrementalTest.java ADDED
@@ -0,0 +1,143 @@
+ package org.embulk.input.postgresql;
+
+ import java.nio.file.Path;
+
+ import org.embulk.config.ConfigDiff;
+ import org.embulk.config.ConfigSource;
+ import org.embulk.input.PostgreSQLInputPlugin;
+ import org.embulk.spi.InputPlugin;
+ import org.embulk.test.TestingEmbulk.RunResult;
+ import org.embulk.test.EmbulkTests;
+ import org.embulk.test.TestingEmbulk;
+ import org.junit.Before;
+ import org.junit.Rule;
+ import org.junit.Test;
+
+ import static org.embulk.input.postgresql.PostgreSQLTests.execute;
+ import static org.embulk.test.EmbulkTests.readSortedFile;
+ import static org.hamcrest.Matchers.is;
+ import static org.junit.Assert.assertThat;
+
+ public class IncrementalTest
+ {
+ private static final String BASIC_RESOURCE_PATH = "org/embulk/input/postgresql/test/expect/incremental/";
+
+ private static ConfigSource loadYamlResource(TestingEmbulk embulk, String fileName)
+ {
+ return embulk.loadYamlResource(BASIC_RESOURCE_PATH + fileName);
+ }
+
+ private static String readResource(String fileName)
+ {
+ return EmbulkTests.readResource(BASIC_RESOURCE_PATH + fileName);
+ }
+
+ @Rule
+ public TestingEmbulk embulk = TestingEmbulk.builder()
+ .registerPlugin(InputPlugin.class, "postgresql", PostgreSQLInputPlugin.class)
+ .build();
+
+ private ConfigSource baseConfig;
+
+ @Before
+ public void setup()
+ {
+ baseConfig = PostgreSQLTests.baseConfig();
+ }
+
+ @Test
+ public void simpleInt() throws Exception
+ {
+ // setup first rows
+ execute(readResource("int/setup.sql"));
+
+ Path out1 = embulk.createTempFile("csv");
+ RunResult result1 = embulk.runInput(
+ baseConfig.merge(loadYamlResource(embulk, "int/config_1.yml")),
+ out1);
+ assertThat(
+ readSortedFile(out1),
+ is(readResource("int/expected_1.csv")));
+ assertThat(
+ result1.getConfigDiff(),
+ is((ConfigDiff) loadYamlResource(embulk, "int/expected_1.diff")));
+
+ // insert more rows
+ execute(readResource("int/insert_more.sql"));
+
+ Path out2 = embulk.createTempFile("csv");
+ RunResult result2 = embulk.runInput(
+ baseConfig.merge(loadYamlResource(embulk, "int/config_2.yml")),
+ out2);
+ assertThat(
+ readSortedFile(out2),
+ is(readResource("int/expected_2.csv")));
+ assertThat(
+ result2.getConfigDiff(),
+ is((ConfigDiff) loadYamlResource(embulk, "int/expected_2.diff")));
+ }
+
+ @Test
+ public void simpleTimestampWithoutTimeZone() throws Exception
+ {
+ // setup first rows
+ execute(readResource("timestamp/setup.sql"));
+
+ Path out1 = embulk.createTempFile("csv");
+ RunResult result1 = embulk.runInput(
+ baseConfig.merge(loadYamlResource(embulk, "timestamp/config_1.yml")),
+ out1);
+ assertThat(
+ readSortedFile(out1),
+ is(readResource("timestamp/expected_1.csv")));
+ assertThat(
+ result1.getConfigDiff(),
+ is((ConfigDiff) loadYamlResource(embulk, "timestamp/expected_1.diff")));
+
+ // insert more rows
+ execute(readResource("timestamp/insert_more.sql"));
+
+ Path out2 = embulk.createTempFile("csv");
+ RunResult result2 = embulk.runInput(
+ baseConfig.merge(loadYamlResource(embulk, "timestamp/config_2.yml")),
+ out2);
+ assertThat(
+ readSortedFile(out2),
+ is(readResource("timestamp/expected_2.csv")));
+ assertThat(
+ result2.getConfigDiff(),
+ is((ConfigDiff) loadYamlResource(embulk, "timestamp/expected_2.diff")));
+ }
+
+ @Test
+ public void simpleTimestampWithTimeZone() throws Exception
+ {
+ // setup first rows
+ execute(readResource("timestamptz/setup.sql"));
+
+ Path out1 = embulk.createTempFile("csv");
+ RunResult result1 = embulk.runInput(
+ baseConfig.merge(loadYamlResource(embulk, "timestamptz/config_1.yml")),
+ out1);
+ assertThat(
+ readSortedFile(out1),
+ is(readResource("timestamptz/expected_1.csv")));
+ assertThat(
+ result1.getConfigDiff(),
+ is((ConfigDiff) loadYamlResource(embulk, "timestamptz/expected_1.diff")));
+
+ // insert more rows
+ execute(readResource("timestamptz/insert_more.sql"));
+
+ Path out2 = embulk.createTempFile("csv");
+ RunResult result2 = embulk.runInput(
+ baseConfig.merge(loadYamlResource(embulk, "timestamptz/config_2.yml")),
+ out2);
+ assertThat(
+ readSortedFile(out2),
+ is(readResource("timestamptz/expected_2.csv")));
+ assertThat(
+ result2.getConfigDiff(),
+ is((ConfigDiff) loadYamlResource(embulk, "timestamptz/expected_2.diff")));
+ }
+ }
data/src/test/java/org/embulk/input/postgresql/PostgreSQLTests.java ADDED
@@ -0,0 +1,39 @@
+ package org.embulk.input.postgresql;
+
+ import org.embulk.test.EmbulkTests;
+ import com.google.common.base.Throwables;
+ import com.google.common.io.ByteStreams;
+ import java.io.IOException;
+ import org.embulk.config.ConfigSource;
+ import static java.util.Locale.ENGLISH;
+
+ public class PostgreSQLTests
+ {
+ public static ConfigSource baseConfig()
+ {
+ return EmbulkTests.config("EMBULK_INPUT_POSTGRESQL_TEST_CONFIG");
+ }
+
+ public static void execute(String sql)
+ {
+ ConfigSource config = baseConfig();
+ ProcessBuilder pb = new ProcessBuilder("psql", "-w", "--set", "ON_ERROR_STOP=1", "-c", sql);
+ pb.environment().put("PGUSER", config.get(String.class, "user"));
+ pb.environment().put("PGPASSWORD", config.get(String.class, "password"));
+ pb.environment().put("PGDATABASE", config.get(String.class, "database"));
+ pb.environment().put("PGPORT", config.get(String.class, "port", "5432"));
+ pb.redirectErrorStream(true);
+ int code;
+ try {
+ Process process = pb.start();
+ ByteStreams.copy(process.getInputStream(), System.out);
+ code = process.waitFor();
+ } catch (IOException | InterruptedException ex) {
+ throw Throwables.propagate(ex);
+ }
+ if (code != 0) {
+ throw new RuntimeException(String.format(ENGLISH,
+ "Command finished with non-zero exit code. Exit code is %d.", code));
+ }
+ }
+ }
data/src/test/resources/org/embulk/input/postgresql/test/expect/hstore/as_json.yml ADDED
@@ -0,0 +1,3 @@
+ table: input_hstore
+ column_options:
+ c1: {type: json}
data/src/test/resources/org/embulk/input/postgresql/test/expect/hstore/setup.sql ADDED
@@ -0,0 +1,11 @@
+ drop table if exists input_hstore;
+
+ create extension if not exists hstore;
+
+ create table input_hstore (
+ c1 hstore
+ );
+
+ insert into input_hstore (c1) values
+ ('"a" => "b"')
+ ;
data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/int/config_1.yml ADDED
@@ -0,0 +1,3 @@
+ table: int_load
+ incremental: true
+ incremental_columns: [num]
data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/int/config_2.yml ADDED
@@ -0,0 +1,4 @@
+ table: int_load
+ last_record: [4]
+ incremental: true
+ incremental_columns: [num]
data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/int/insert_more.sql ADDED
@@ -0,0 +1,7 @@
+
+ insert into int_load (num, note) values
+ (0, 'more_skip'),
+ (4, 'more_skip'),
+ (9, 'more_load'),
+ (5, 'more_load');
+
data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/int/setup.sql ADDED
@@ -0,0 +1,13 @@
+ drop table if exists int_load;
+
+ create table int_load (
+ num int not null,
+ note text
+ );
+
+ insert into int_load (num, note) values
+ (3, 'first'),
+ (4, 'first'),
+ (2, 'first'),
+ (1, 'first');
+
data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/config_1.yml ADDED
@@ -0,0 +1,4 @@
+ table: load
+ default_time_zone: +0300
+ incremental: true
+ incremental_columns: [time]
data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/config_2.yml ADDED
@@ -0,0 +1,4 @@
+ table: load
+ last_record: ['2016-11-02T04:00:05.333003']
+ incremental: true
+ incremental_columns: [time]
data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/expected_1.csv ADDED
@@ -0,0 +1,7 @@
+ 2016-11-01 23:00:01.000000 +0000,first
+ 2016-11-02 00:00:02.000000 +0000,first
+ 2016-11-02 01:00:03.000000 +0000,first
+ 2016-11-02 02:00:04.000000 +0000,first
+ 2016-11-02 02:00:05.111001 +0000,first
+ 2016-11-02 02:00:05.222002 +0000,first
+ 2016-11-02 02:00:05.333003 +0000,first
data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/expected_1.diff ADDED
@@ -0,0 +1,3 @@
+ in:
+ last_record: ['2016-11-02T04:00:05.333003']
+ out: {}
data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/expected_2.csv ADDED
@@ -0,0 +1,3 @@
+ 2016-11-02 02:00:05.333004 +0000,more_load
+ 2016-11-02 02:00:06.000000 +0000,more_load
+ 2016-11-02 02:00:06.000000 +0000,more_load
data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/expected_2.diff ADDED
@@ -0,0 +1,3 @@
+ in:
+ last_record: ['2016-11-02T04:00:06.000000']
+ out: {}
data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/insert_more.sql ADDED
@@ -0,0 +1,9 @@
+
+ insert into load (time, note) values
+ ('2016-11-02 04:00:00', 'more_skip'),
+ ('2016-11-02 04:00:05.333000', 'more_skip'),
+ ('2016-11-02 04:00:05.333003', 'more_skip'),
+ ('2016-11-02 04:00:05.333004', 'more_load'),
+ ('2016-11-02 04:00:06', 'more_load'),
+ ('2016-11-02 04:00:06', 'more_load');
+
data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/setup.sql ADDED
@@ -0,0 +1,16 @@
+ drop table if exists load;
+
+ create table load (
+ time timestamp without time zone not null,
+ note text
+ );
+
+ insert into load (time, note) values
+ ('2016-11-02 01:00:01', 'first'),
+ ('2016-11-02 02:00:02', 'first'),
+ ('2016-11-02 03:00:03', 'first'),
+ ('2016-11-02 04:00:04', 'first'),
+ ('2016-11-02 04:00:05.111001', 'first'),
+ ('2016-11-02 04:00:05.222002', 'first'),
+ ('2016-11-02 04:00:05.333003', 'first');
+
data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/config_1.yml ADDED
@@ -0,0 +1,4 @@
+ table: load
+ default_time_zone: +0300
+ incremental: true
+ incremental_columns: [time]
data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/config_2.yml ADDED
@@ -0,0 +1,4 @@
+ table: load
+ last_record: ['2016-11-02T04:00:05.333003Z']
+ incremental: true
+ incremental_columns: [time]
data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/expected_1.csv ADDED
@@ -0,0 +1,7 @@
+ 2016-11-02 01:00:01.000000 +0000,first
+ 2016-11-02 02:00:02.000000 +0000,first
+ 2016-11-02 03:00:03.000000 +0000,first
+ 2016-11-02 04:00:04.000000 +0000,first
+ 2016-11-02 04:00:05.111001 +0000,first
+ 2016-11-02 04:00:05.222002 +0000,first
+ 2016-11-02 04:00:05.333003 +0000,first
data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/expected_1.diff ADDED
@@ -0,0 +1,3 @@
+ in:
+ last_record: ['2016-11-02T04:00:05.333003Z']
+ out: {}
data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/expected_2.csv ADDED
@@ -0,0 +1,3 @@
+ 2016-11-02 04:00:05.333004 +0000,more_load
+ 2016-11-02 04:00:06.000000 +0000,more_load
+ 2016-11-02 04:00:06.000000 +0000,more_load
data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/expected_2.diff ADDED
@@ -0,0 +1,3 @@
+ in:
+ last_record: ['2016-11-02T04:00:06.000000Z']
+ out: {}
data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/insert_more.sql ADDED
@@ -0,0 +1,9 @@
+
+ insert into load (time, note) values
+ ('2016-11-02 04:00:00+0000', 'more_skip'),
+ ('2016-11-02 04:00:05.333000+0000', 'more_skip'),
+ ('2016-11-02 04:00:05.333003+0000', 'more_skip'),
+ ('2016-11-02 04:00:05.333004+0000', 'more_load'),
+ ('2016-11-02 04:00:06+0000', 'more_load'),
+ ('2016-11-02 04:00:06+0000', 'more_load');
+
data/src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/setup.sql ADDED
@@ -0,0 +1,16 @@
+ drop table if exists load;
+
+ create table load (
+ time timestamptz(6) not null,
+ note text
+ );
+
+ insert into load (time, note) values
+ ('2016-11-02 01:00:01+0000', 'first'),
+ ('2016-11-02 02:00:02+0000', 'first'),
+ ('2016-11-02 03:00:03+0000', 'first'),
+ ('2016-11-02 04:00:04+0000', 'first'),
+ ('2016-11-02 04:00:05.111001+0000', 'first'),
+ ('2016-11-02 04:00:05.222002+0000', 'first'),
+ ('2016-11-02 04:00:05.333003+0000', 'first');
+
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: embulk-input-postgresql
  version: !ruby/object:Gem::Version
- version: 0.8.0
+ version: 0.8.1
  platform: ruby
  authors:
  - Sadayuki Furuhashi
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2016-10-13 00:00:00.000000000 Z
+ date: 2017-02-10 00:00:00.000000000 Z
  dependencies: []
  description: Selects records from a table.
  email:
@@ -19,17 +19,46 @@ extra_rdoc_files: []
  files:
  - README.md
  - build.gradle
- - classpath/embulk-input-jdbc-0.8.0.jar
- - classpath/embulk-input-postgresql-0.8.0.jar
+ - classpath/embulk-input-jdbc-0.8.1.jar
+ - classpath/embulk-input-postgresql-0.8.1.jar
  - classpath/postgresql-9.4-1205-jdbc41.jar
  - lib/embulk/input/postgresql.rb
  - src/main/java/org/embulk/input/PostgreSQLInputPlugin.java
  - src/main/java/org/embulk/input/postgresql/PostgreSQLInputConnection.java
- - src/main/java/org/embulk/input/postgresql/getter/HstoreColumnGetter.java
+ - src/main/java/org/embulk/input/postgresql/getter/HstoreToJsonColumnGetter.java
  - src/main/java/org/embulk/input/postgresql/getter/PostgreSQLColumnGetterFactory.java
- - src/test/java/org/embulk/input/postgresql/PostgreSQLInputPluginTest.java
- - src/test/resources/yml/input_hstore.yml
- - src/test/resources/yml/input_hstore2.yml
+ - src/test/java/org/embulk/input/postgresql/HstoreTest.java
+ - src/test/java/org/embulk/input/postgresql/IncrementalTest.java
+ - src/test/java/org/embulk/input/postgresql/PostgreSQLTests.java
+ - src/test/resources/org/embulk/input/postgresql/test/expect/hstore/as_json.yml
+ - src/test/resources/org/embulk/input/postgresql/test/expect/hstore/as_string.yml
+ - src/test/resources/org/embulk/input/postgresql/test/expect/hstore/expected_json.csv
+ - src/test/resources/org/embulk/input/postgresql/test/expect/hstore/expected_string.csv
+ - src/test/resources/org/embulk/input/postgresql/test/expect/hstore/setup.sql
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/int/config_1.yml
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/int/config_2.yml
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/int/expected_1.csv
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/int/expected_1.diff
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/int/expected_2.csv
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/int/expected_2.diff
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/int/insert_more.sql
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/int/setup.sql
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/config_1.yml
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/config_2.yml
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/expected_1.csv
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/expected_1.diff
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/expected_2.csv
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/expected_2.diff
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/insert_more.sql
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamp/setup.sql
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/config_1.yml
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/config_2.yml
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/expected_1.csv
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/expected_1.diff
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/expected_2.csv
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/expected_2.diff
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/insert_more.sql
+ - src/test/resources/org/embulk/input/postgresql/test/expect/incremental/timestamptz/setup.sql
  homepage: https://github.com/embulk/embulk-input-jdbc
  licenses:
  - Apache 2.0
data/src/test/java/org/embulk/input/postgresql/PostgreSQLInputPluginTest.java DELETED
@@ -1,102 +0,0 @@
- package org.embulk.input.postgresql;
-
- import static java.util.Locale.ENGLISH;
- import static org.junit.Assert.assertEquals;
-
- import java.io.BufferedReader;
- import java.io.IOException;
- import java.io.InputStreamReader;
- import java.sql.Connection;
- import java.sql.DriverManager;
- import java.sql.SQLException;
- import java.util.Arrays;
-
- import org.apache.commons.lang3.StringUtils;
- import org.embulk.input.AbstractJdbcInputPluginTest;
- import org.embulk.input.PostgreSQLInputPlugin;
- import org.embulk.spi.InputPlugin;
- import org.junit.Test;
-
- public class PostgreSQLInputPluginTest extends AbstractJdbcInputPluginTest
- {
- @Override
- protected void prepare() throws SQLException
- {
- tester.addPlugin(InputPlugin.class, "postgresql", PostgreSQLInputPlugin.class);
-
- try {
- // Create User and Database
- psql(String.format(ENGLISH, "DROP DATABASE IF EXISTS %s;", getDatabase()));
- psql(String.format(ENGLISH, "DROP USER IF EXISTS %s;", getUser()));
- psql(String.format(ENGLISH, "CREATE USER %s WITH SUPERUSER PASSWORD '%s';", getUser(), getPassword()));
- psql(String.format(ENGLISH, "CREATE DATABASE %s WITH OWNER %s;", getDatabase(), getUser()));
- } catch (IOException e) {
- System.err.println(e);
- System.err.println("Warning: cannot prepare a database for testing embulk-input-postgresql.");
- // 1. install postgresql.
- // 2. add bin directory to path.
- // 3. set environment variable PGPASSWORD or write pgpassword in tests.yml
- return;
- } catch (InterruptedException e) {
- throw new RuntimeException(e);
- }
-
- enabled = true;
-
- // Insert Data
- String sql = "";
- sql += "DROP TABLE IF EXISTS input_hstore;";
- sql += "CREATE EXTENSION IF NOT EXISTS hstore WITH SCHEMA public;";
- sql += "CREATE TABLE input_hstore (c1 hstore);";
- sql += "INSERT INTO input_hstore (c1) VALUES('\"a\" => \"b\"');";
- executeSQL(sql);
- }
-
- @Test
- public void testHstoreAsString() throws Exception
- {
- if (enabled) {
- test("/yml/input_hstore.yml");
- assertEquals(Arrays.asList("c1", "\"\"\"a\"\"=>\"\"b\"\"\""),
- read("postgresql-input000.00.csv"));
- }
- }
-
- @Test
- public void testHstoreAsJson() throws Exception
- {
- if (enabled) {
- test("/yml/input_hstore2.yml");
- assertEquals(Arrays.asList("c1", "\"{\"\"a\"\":\"\"b\"\"}\""),
- read("postgresql-input000.00.csv"));
- }
- }
-
- private void psql(String sql) throws IOException, InterruptedException {
- ProcessBuilder pb = new ProcessBuilder("psql", "-w", "-c", sql);
- String pgPassword = (String)getTestConfig("pgpassword", false);
- if (!StringUtils.isEmpty(pgPassword)) {
- pb.environment().put("PGPASSWORD", pgPassword);
- }
- System.out.println("PSQL: " + pb.command().toString());
- final Process process = pb.start();
- final int code = process.waitFor();
- if (code != 0) {
- try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getErrorStream()))) {
- String line;
- while ((line = reader.readLine()) != null) {
- System.err.println(line);
- }
- }
- throw new IOException(String.format(
- "Command finished with non-zero exit code. Exit code is %d.", code));
- }
- }
-
- @Override
- protected Connection connect() throws SQLException
- {
- return DriverManager.getConnection(String.format(ENGLISH, "jdbc:postgresql://%s:%d/%s", getHost(), getPort(), getDatabase()),
- getUser(), getPassword());
- }
- }
data/src/test/resources/yml/input_hstore.yml DELETED
@@ -1,14 +0,0 @@
- in:
- type: postgresql
- host: #host#
- database: #database#
- user: #user#
- password: #password#
- table: input_hstore
- select: "*"
- out:
- type: file
- path_prefix: postgresql-input
- file_ext: csv
- formatter:
- type: csv
data/src/test/resources/yml/input_hstore2.yml DELETED
@@ -1,16 +0,0 @@
- in:
- type: postgresql
- host: #host#
- database: #database#
- user: #user#
- password: #password#
- table: input_hstore
- select: "*"
- column_options:
- c1: {type: json}
- out:
- type: file
- path_prefix: postgresql-input
- file_ext: csv
- formatter:
- type: csv