embulk-input-mongodb 0.6.1 → 0.7.0
- checksums.yaml +4 -4
- data/.gitignore +2 -0
- data/.travis.yml +13 -20
- data/README.md +62 -37
- data/build.gradle +4 -4
- data/docker-compose.yml +44 -0
- data/src/main/java/org/embulk/input/mongodb/AuthMethod.java +36 -0
- data/src/main/java/org/embulk/input/mongodb/MongodbInputPlugin.java +87 -20
- data/src/main/java/org/embulk/input/mongodb/PluginTask.java +12 -0
- data/src/test/java/org/embulk/input/mongodb/TestMongodbInputPlugin.java +155 -77
- data/src/test/resources/basic.yml +1 -1
- data/src/test/resources/full.yml +1 -1
- data/src/test/resources/id_field_name.yml +1 -1
- metadata +6 -4
checksums.yaml CHANGED

```diff
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ad8c05dd58ee2ce347781dd2d519aed6a9f08d9a
+  data.tar.gz: bd43fe1ba4a5a18d3f577130a4c8a82e3e96184c
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5d0597d3ac72e926e6ceda10e15f07fbaf95c8594f24b7fd9facb65c5beade7843b1aa4e8d0d6cbad3891b4d00f5a2d7e3d4f857ea8ba3533a880a05d600a896
+  data.tar.gz: 44acde315c5bda2b634ec10af13b88121c413ca96b6549ccc1feb3ff6adc6806107d8fb8bc5cc8e336bd19957324f75f174d114db97706afdc833d32a7c8d346
```
data/.travis.yml CHANGED

```diff
@@ -2,17 +2,18 @@ language: java
 
 jdk:
   - oraclejdk8
-
-
+
+services:
+  - docker
 
 env:
   global:
-    - MONGO_DATABASE=
+    - MONGO_DATABASE=mydb
     - MONGO_COLLECTION=my_collection
-    - MONGO_URI=mongodb://localhost:27017/
+    - MONGO_URI=mongodb://localhost:27017/mydb
+    - DOCKER_COMPOSE_VERSION=1.22.0
 
 sudo: required
-dist: precise
 
 before_cache:
   - rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
@@ -22,36 +23,28 @@ cache:
     - $HOME/.gradle/caches/
     - $HOME/.gradle/wrapper/
 
-# Work around fix for buffer overflow error on OpenJDK7
-# ref: https://github.com/travis-ci/travis-ci/issues/5227#issuecomment-165131913
 before_install:
-
-
-
-  - sudo mv /
-  - cat /etc/hosts # optionally check the content *after*
+  - sudo rm /usr/local/bin/docker-compose
+  - curl -L https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}/docker-compose-`uname -s`-`uname -m` > docker-compose
+  - chmod +x docker-compose
+  - sudo mv docker-compose /usr/local/bin
 
 install:
-  - sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10
-  - echo "deb http://repo.mongodb.org/apt/ubuntu "$(lsb_release -sc)"/mongodb-org/3.2 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.2.list
-  - sudo apt-get update
-  - sudo apt-get install -y --force-yes mongodb-org
-  - mongod -version
   - curl --create-dirs -o ~/.embulk/bin/embulk -L "https://dl.bintray.com/embulk/maven/embulk-0.8.8.jar"
   - chmod +x ~/.embulk/bin/embulk
   - export PATH="$HOME/.embulk/bin:$PATH"
   - embulk --version
 
 before_script:
-  - echo "Wait mongodb wakeup"
-  - sleep 15
   - mkdir -p ./tmp
   - date
+  - docker-compose up -d
+  - docker-compose ps
 
 script:
   - ./gradlew check
   - ./gradlew package
-  - mongoimport --host 127.0.0.1 --db $MONGO_DATABASE --collection $MONGO_COLLECTION --type json --drop src/test/resources/my_collection.jsonl
+  - mongoimport --host 127.0.0.1 -u mongo_user -p dbpass --db $MONGO_DATABASE --collection $MONGO_COLLECTION --type json --drop src/test/resources/my_collection.jsonl
   - |
     for target in basic full id_field_name
     do
```
data/README.md CHANGED

````diff
@@ -21,6 +21,8 @@ This plugin only works with embulk >= 0.8.8.
 - **uri**: [MongoDB connection string URI](http://docs.mongodb.org/manual/reference/connection-string/) (e.g. 'mongodb://localhost:27017/mydb') (string, required)
 - use separated URI parameters
 - **hosts**: list of hosts. `hosts` are pairs of host (string, required) and port (integer, optional, default: 27017)
+- **auth_method**: Auth method. One of `scram-sha-1`, `mongodb-cr`, `auto` (string, optional, default: null)
+- **auth_source**: Auth source. The database name where the user is defined (string, optional, default: null)
 - **user**: (string, optional)
 - **password**: (string, optional)
 - **database**: (string, required)
@@ -37,6 +39,7 @@ This plugin only works with embulk >= 0.8.8.
 - **query**: A JSON document used for [querying](https://docs.mongodb.com/manual/tutorial/query-documents/) on the source collection. Documents are loaded from the collection if they match this condition. (string, optional)
 - **projection**: A JSON document used for [projection](https://docs.mongodb.com/manual/reference/operator/projection/positional/) on query results. Fields in a document are used only if they match this condition. (string, optional)
 - **sort**: Ordering of results (string, optional)
+- **aggregation**: Aggregation query (string, optional). See [Aggregation query](#aggregation-query) for more detail.
 - **batch_size**: Limits the number of objects returned in one [batch](http://api.mongodb.com/java/current/com/mongodb/DBCursor.html#batchSize-int-) (integer, optional, default: 10000)
 - **incremental_field**: List of field names (list, optional, can't be used with the sort option)
 - **last_record**: Last loaded record for incremental load (hash, optional)
@@ -45,6 +48,37 @@ This plugin only works with embulk >= 0.8.8.
 
 ## Example
 
+### Authentication
+
+#### Use separated URI parameters
+
+```yaml
+in:
+  type: mongodb
+  hosts:
+  - {host: localhost, port: 27017}
+  user: myuser
+  password: mypassword
+  database: my_database
+  auth_method: scram-sha-1
+  auth_source: auth_db
+  collection: "my_collection"
+```
+
+If you set `auth_method: auto`, the client will negotiate the best mechanism based on the version of the server it is authenticating to.
+
+If the server version is 3.0 or higher, the driver will authenticate using the SCRAM-SHA-1 mechanism.
+
+Otherwise, the driver will authenticate using the MONGODB-CR mechanism.
+
+#### Use URI string
+
+```yaml
+in:
+  type: mongodb
+  uri: mongodb://myuser:mypassword@localhost:27017/my_database?authMechanism=SCRAM-SHA-1&authSource=another_database
+```
+
 ### Exporting all objects
 
 #### Specify with MongoDB connection string URI.
@@ -128,6 +162,20 @@ in:
 $ embulk run /path/to/config.yml -c config-diff.yml
 ```
 
+### Aggregation query
+
+This plugin supports aggregation queries, so you can write complex queries like the one below.
+
+The `aggregation` option can't be used with the `sort`, `limit`, `skip`, or `query` options. Incremental load also doesn't work with aggregation queries.
+
+```yaml
+in:
+  type: mongodb
+  aggregation: { $match: {"int32_field":{"$gte":5 },} }
+```
+
+See also [Aggregation — MongoDB Manual](https://docs.mongodb.com/manual/aggregation/) and [Aggregation Pipeline Stages — MongoDB Manual](https://docs.mongodb.com/manual/reference/operator/aggregation-pipeline/).
+
 ### Advanced usage with filter plugins
 
 ```yaml
@@ -173,44 +221,21 @@ $ ./gradlew gem
 
 ## Test
 
-
-
-```
+First install Docker and Docker Compose, then run `docker-compose up -d`
+so that a MongoDB server is launched locally; you can then run the tests with `./gradlew test`.
 
-
+```sh
+$ docker-compose up -d
+Creating embulk-input-mongodb_server ... done
+Creating mongo-express               ... done
+Creating mongoClientTemp             ... done
 
-
+$ docker-compose ps
+           Name                          Command               State                        Ports
+------------------------------------------------------------------------------------------------------------------------------
+embulk-input-mongodb_server   docker-entrypoint.sh mongod      Up           0.0.0.0:27017->27017/tcp, 0.0.0.0:27018->27018/tcp
+mongo-express                 tini -- /docker-entrypoint ...   Up           0.0.0.0:8081->8081/tcp
+mongoClientTemp               docker-entrypoint.sh mongo ...   Restarting
 
-
-MONGO_URI
-MONGO_COLLECTION
-```
-
-If you're using Mac OS X El Capitan and GUI Applications(IDE), like as follows.
-```xml
-$ vi ~/Library/LaunchAgents/environment.plist
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-  <key>Label</key>
-  <string>my.startup</string>
-  <key>ProgramArguments</key>
-  <array>
-    <string>sh</string>
-    <string>-c</string>
-    <string>
-      launchctl setenv MONGO_URI mongodb://myuser:mypassword@localhost:27017/my_database
-      launchctl setenv MONGO_COLLECTION my_collection
-    </string>
-  </array>
-  <key>RunAtLoad</key>
-  <true/>
-</dict>
-</plist>
-
-$ launchctl load ~/Library/LaunchAgents/environment.plist
-$ launchctl getenv MONGO_URI //try to get value.
-
-Then start your applications.
+$ ./gradlew test # -t to watch change of files and rebuild continuously
 ```
````
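The "Use URI string" example above leaves mechanism selection to the MongoDB Java driver, which parses `authMechanism` and `authSource` out of the connection string. Below is a minimal sketch of that behaviour, assuming the mongo-java-driver 3.x API this plugin depends on; the class name and credentials are illustrative, not part of the gem.

```java
import com.mongodb.MongoClientURI;
import com.mongodb.MongoCredential;

public class UriAuthCheck
{
    public static void main(String[] args)
    {
        // URI copied from the README example; values are illustrative.
        MongoClientURI uri = new MongoClientURI(
                "mongodb://myuser:mypassword@localhost:27017/my_database"
                        + "?authMechanism=SCRAM-SHA-1&authSource=another_database");

        // The driver resolves the credential, including mechanism and auth source.
        MongoCredential credential = uri.getCredentials();
        System.out.println(credential.getMechanism()); // SCRAM-SHA-1
        System.out.println(credential.getSource());    // another_database
    }
}
```

Against the README's URI this should report `SCRAM-SHA-1` and `another_database`, matching what the separated `auth_method`/`auth_source` parameters express.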
data/build.gradle CHANGED

```diff
@@ -17,15 +17,15 @@ configurations {
     provided
 }
 
-version = "0.
+version = "0.7.0"
 
-sourceCompatibility = 1.
-targetCompatibility = 1.
+sourceCompatibility = 1.8
+targetCompatibility = 1.8
 
 dependencies {
     compile  "org.embulk:embulk-core:0.8.8"
     provided "org.embulk:embulk-core:0.8.8"
-    compile "org.mongodb:mongo-java-driver:3.
+    compile "org.mongodb:mongo-java-driver:3.8.1"
 
     testCompile "junit:junit:4.+"
     testCompile "org.embulk:embulk-core:0.8.8:tests"
```
data/docker-compose.yml ADDED

```diff
@@ -0,0 +1,44 @@
+version: '3.1'
+services:
+  mongodb:
+    container_name: embulk-input-mongodb_server
+    image: mongo:3.6
+    restart: always
+    ports:
+      - 27017:27017
+      - 27018:27018
+    environment:
+      MONGO_INITDB_ROOT_USERNAME: admin
+      MONGO_INITDB_ROOT_PASSWORD: tiger
+    volumes:
+      - mongodb-data:/data/db
+      - mongodb-configdb:/data/configdb
+
+  mongo-express:
+    container_name: mongo-express
+    image: mongo-express
+    restart: always
+    ports:
+      - 8081:8081
+    depends_on:
+      - mongodb
+    environment:
+      ME_CONFIG_MONGODB_ADMINUSERNAME: admin
+      ME_CONFIG_MONGODB_ADMINPASSWORD: tiger
+      ME_CONFIG_MONGODB_SERVER: mongodb
+
+  mongoClientTemp:
+    container_name: mongoClientTemp
+    image: mongo:3.6
+    depends_on:
+      - mongodb
+    # Sleep to wait MongoDB wake up on Travis CI
+    command: >
+      /bin/bash -c
+      "sleep 15 &&
+      mongo --host mongodb -u admin -p tiger admin --eval \"db.getSiblingDB('mydb').createUser({user:'mongo_user', pwd:'dbpass', roles:[{role:'readWrite',db:'mydb'}]});\""
+volumes:
+  mongodb-data:
+    driver: local
+  mongodb-configdb:
+    driver: local
```
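The `mongoClientTemp` service exists only to create the `mongo_user`/`dbpass` account in `mydb` once the server is reachable; the tests and the Travis `mongoimport` step then authenticate with that account. A hedged way to confirm the account from the host, using the same driver version the plugin ships with (the class name and the count check are illustrative, not part of the gem):

```java
import com.mongodb.MongoClient;
import com.mongodb.MongoClientURI;

public class LocalMongoCheck
{
    public static void main(String[] args)
    {
        // Same account that mongoClientTemp creates above.
        MongoClient client = new MongoClient(
                new MongoClientURI("mongodb://mongo_user:dbpass@localhost:27017/mydb"));
        try {
            long count = client.getDatabase("mydb")
                    .getCollection("my_collection")
                    .count();
            System.out.println("documents in my_collection: " + count);
        }
        finally {
            client.close();
        }
    }
}
```

An empty stack prints a count of 0; after `mongoimport` or the test fixtures have run, the count reflects the loaded documents.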
data/src/main/java/org/embulk/input/mongodb/AuthMethod.java ADDED

```diff
@@ -0,0 +1,36 @@
+package org.embulk.input.mongodb;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonValue;
+import org.embulk.config.ConfigException;
+
+import java.util.Locale;
+
+public enum AuthMethod
+{
+    AUTO,
+    SCRAM_SHA_1,
+    MONGODB_CR;
+
+    @JsonValue
+    @Override
+    public String toString()
+    {
+        return name().toLowerCase(Locale.ENGLISH);
+    }
+
+    @JsonCreator
+    public static AuthMethod fromString(String value)
+    {
+        switch (value.replace("_", "-")) {
+            case "scram-sha-1":
+                return SCRAM_SHA_1;
+            case "mongodb-cr":
+                return MONGODB_CR;
+            case "auto":
+                return AUTO;
+            default:
+                throw new ConfigException(String.format("Unknown auth_method '%s'. Supported auth_method are scram-sha-1, mongodb-cr, auto", value));
+        }
+    }
+}
```
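The enum round-trips through Jackson, which Embulk uses for task binding: `@JsonValue` serializes the lower-cased constant name, and `@JsonCreator` normalizes `_` to `-` before matching, so both spellings are accepted. A small illustrative check of that mapping outside Embulk, assuming only a plain Jackson `ObjectMapper` on the classpath:

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import org.embulk.input.mongodb.AuthMethod;

public class AuthMethodMappingCheck
{
    public static void main(String[] args) throws Exception
    {
        ObjectMapper mapper = new ObjectMapper();

        // @JsonCreator: "_" is normalized to "-", so both spellings map to SCRAM_SHA_1.
        AuthMethod parsed = mapper.readValue("\"scram-sha-1\"", AuthMethod.class);
        System.out.println(parsed == AuthMethod.SCRAM_SHA_1); // true

        // @JsonValue: serialization goes through toString(), i.e. the lower-cased enum name.
        System.out.println(mapper.writeValueAsString(AuthMethod.MONGODB_CR)); // "mongodb_cr"
    }
}
```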
data/src/main/java/org/embulk/input/mongodb/MongodbInputPlugin.java CHANGED

```diff
@@ -12,6 +12,7 @@ import com.mongodb.ServerAddress;
 import com.mongodb.client.MongoCollection;
 import com.mongodb.client.MongoCursor;
 import com.mongodb.client.MongoDatabase;
+import org.bson.Document;
 import org.bson.codecs.configuration.CodecRegistries;
 import org.bson.codecs.configuration.CodecRegistry;
 import org.bson.conversions.Bson;
@@ -64,6 +65,24 @@ public class MongodbInputPlugin
             throw new ConfigException("both of skip and incremental_load can't be used together");
         }
 
+        if (task.getAggregation().isPresent()) {
+            if (task.getIncrementalField().isPresent()) {
+                throw new ConfigException("both of aggregation and incremental_load can't be used together");
+            }
+            if (!task.getSort().equals("{}")) {
+                throw new ConfigException("both of sort and aggregation can't be used together");
+            }
+            if (task.getLimit().isPresent()) {
+                throw new ConfigException("both of limit and aggregation can't be used together");
+            }
+            if (task.getSkip().isPresent()) {
+                throw new ConfigException("both of skip and aggregation can't be used together");
+            }
+            if (!task.getQuery().equals("{}")) {
+                throw new ConfigException("both of query and aggregation can't be used together");
+            }
+        }
+
         Map<String, String> newCondition = buildIncrementalCondition(task);
         task.setQuery(newCondition.get("query"));
         task.setSort(newCondition.get("sort"));
@@ -71,6 +90,9 @@ public class MongodbInputPlugin
         validateJsonField("projection", task.getProjection());
         validateJsonField("query", task.getQuery());
         validateJsonField("sort", task.getSort());
+        if (task.getAggregation().isPresent()) {
+            validateJsonField("aggrigation", task.getAggregation().get());
+        }
 
         // Connect once to throw ConfigException in earlier stage of excecution
         try {
@@ -146,20 +168,35 @@ public class MongodbInputPlugin
             log.trace("skip: {}", task.getSkip());
         }
 
-
-
-
-
-
-
-
-
-
-
-
+        if (task.getAggregation().isPresent()) {
+            Bson aggregationString = Document.parse(task.getAggregation().get());
+            List<Bson> aggregation = Arrays.asList(aggregationString);
+            try (MongoCursor<Value> cursor = collection
+                    .aggregate(aggregation).iterator()) {
+                while (cursor.hasNext()) {
+                    pageBuilder.setJson(column, cursor.next());
+                    pageBuilder.addRecord();
+                }
+            } catch (MongoException ex) {
+                Throwables.propagate(ex);
+            }
+        }
+        else {
+            try (MongoCursor<Value> cursor = collection
+                    .find(query)
+                    .projection(projection)
+                    .sort(sort)
+                    .batchSize(task.getBatchSize())
+                    .limit(task.getLimit().or(0))
+                    .skip(task.getSkip().or(0))
+                    .iterator()) {
+                while (cursor.hasNext()) {
+                    pageBuilder.setJson(column, cursor.next());
+                    pageBuilder.addRecord();
+                }
+            } catch (MongoException ex) {
+                Throwables.propagate(ex);
             }
-        } catch (MongoException ex) {
-            Throwables.propagate(ex);
         }
 
         pageBuilder.finish();
@@ -259,18 +296,48 @@ public class MongodbInputPlugin
         }
 
         if (task.getUser().isPresent()) {
-
-                    task.getUser().get(),
-                    task.getDatabase().get(),
-                    task.getPassword().get().toCharArray()
-            );
-            return new MongoClient(addresses, Arrays.asList(credential));
+            return new MongoClient(addresses, Arrays.asList(createCredential(task)));
         }
         else {
             return new MongoClient(addresses);
         }
     }
 
+    // @see http://mongodb.github.io/mongo-java-driver/3.0/driver-async/reference/connecting/authenticating/
+    private MongoCredential createCredential(PluginTask task)
+    {
+        MongoCredential credential;
+        String authSource = task.getAuthSource().isPresent() ? task.getAuthSource().get() : task.getDatabase().get();
+        AuthMethod authMethod = task.getAuthMethod().isPresent() ? task.getAuthMethod().get() : AuthMethod.AUTO;
+        switch (authMethod) {
+            case SCRAM_SHA_1:
+                credential = MongoCredential.createScramSha1Credential(
+                        task.getUser().get(),
+                        authSource,
+                        task.getPassword().get().toCharArray());
+                break;
+            case MONGODB_CR:
+                credential = MongoCredential.createMongoCRCredential(
+                        task.getUser().get(),
+                        authSource,
+                        task.getPassword().get().toCharArray());
+                break;
+            case AUTO:
+            default:
+                /* The client will negotiate the best mechanism based on the
+                 * version of the server that the client is authenticating to.
+                 * If the server version is 3.0 or higher, the driver will authenticate using the SCRAM-SHA-1 mechanism.
+                 * Otherwise, the driver will authenticate using the MONGODB_CR mechanism.
+                 */
+                credential = MongoCredential.createCredential(
+                        task.getUser().get(),
+                        authSource,
+                        task.getPassword().get().toCharArray()
+                );
+        }
+        return credential;
+    }
+
     private Map<String, String> buildIncrementalCondition(PluginTask task)
     {
         Map<String, String> result = new HashMap<>();
@@ -331,7 +398,7 @@ public class MongodbInputPlugin
     private void validateJsonField(String name, String jsonString)
     {
         try {
-
+            Document.parse(jsonString);
         }
         catch (JsonParseException ex) {
             throw new ConfigException(String.format("Invalid JSON string was given for '%s' parameter. [%s]", name, jsonString));
```
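The new aggregation path in `run()` reduces to parsing the configured JSON into a single pipeline stage and iterating `collection.aggregate(...)` instead of `find(...)`. Here is a standalone sketch of the same call pattern against the docker-compose database; the class name is hypothetical, and the URI, collection, and field names are taken from the test fixtures.

```java
import com.mongodb.MongoClient;
import com.mongodb.MongoClientURI;
import com.mongodb.client.MongoCollection;
import org.bson.Document;
import org.bson.conversions.Bson;

import java.util.Arrays;
import java.util.List;

public class AggregationSketch
{
    public static void main(String[] args)
    {
        MongoClient client = new MongoClient(
                new MongoClientURI("mongodb://mongo_user:dbpass@localhost:27017/mydb"));
        try {
            MongoCollection<Document> collection =
                    client.getDatabase("mydb").getCollection("my_collection");

            // One pipeline stage parsed from a JSON string, mirroring the plugin's run() path.
            Bson stage = Document.parse("{ $match: { \"int32_field\": { \"$gte\": 5 } } }");
            List<Bson> pipeline = Arrays.asList(stage);

            for (Document doc : collection.aggregate(pipeline)) {
                System.out.println(doc.toJson());
            }
        }
        finally {
            client.close();
        }
    }
}
```

Note that `Document.parse` yields a single stage document here; the plugin wraps the parsed document in a one-element list in the same way, so a multi-stage pipeline would need each stage supplied as its own document.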
data/src/main/java/org/embulk/input/mongodb/PluginTask.java CHANGED

```diff
@@ -25,6 +25,14 @@ public interface PluginTask
     @ConfigDefault("null")
     Optional<List<HostTask>> getHosts();
 
+    @Config("auth_method")
+    @ConfigDefault("null")
+    Optional<AuthMethod> getAuthMethod();
+
+    @Config("auth_source")
+    @ConfigDefault("null")
+    Optional<String> getAuthSource();
+
     @Config("user")
     @ConfigDefault("null")
     Optional<String> getUser();
@@ -53,6 +61,10 @@ public interface PluginTask
     String getQuery();
     void setQuery(String query);
 
+    @Config("aggregation")
+    @ConfigDefault("null")
+    Optional<String> getAggregation();
+
     @Config("sort")
     @ConfigDefault("\"{}\"")
     String getSort();
```
data/src/test/java/org/embulk/input/mongodb/TestMongodbInputPlugin.java CHANGED

```diff
@@ -7,6 +7,7 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
 import com.mongodb.MongoClientURI;
+import com.mongodb.MongoCredential;
 import com.mongodb.client.MongoCollection;
 import com.mongodb.client.MongoDatabase;
 import org.bson.BsonBinary;
@@ -31,7 +32,6 @@ import org.embulk.spi.TestPageBuilderReader.MockPageOutput;
 import org.embulk.spi.type.Types;
 import org.embulk.spi.util.Pages;
 import org.junit.Before;
-import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -53,8 +53,8 @@ import static org.junit.Assert.assertThat;
 
 public class TestMongodbInputPlugin
 {
-    private
-    private
+    private final String mongoUri = "mongodb://mongo_user:dbpass@localhost:27017/mydb";
+    private final String mongoCollection = "my_collection";
 
     @Rule
     public EmbulkTestRuntime runtime = new EmbulkTestRuntime();
@@ -66,20 +66,8 @@ public class TestMongodbInputPlugin
     private MongodbInputPlugin plugin;
     private MockPageOutput output;
 
-    /*
-     * This test case requires environment variables
-     *   MONGO_URI
-     *   MONGO_COLLECTION
-     */
-    @BeforeClass
-    public static void initializeConstant()
-    {
-        MONGO_URI = System.getenv("MONGO_URI");
-        MONGO_COLLECTION = System.getenv("MONGO_COLLECTION");
-    }
-
     @Before
-    public void createResources()
+    public void createResources()
     {
         config = config();
         plugin = new MongodbInputPlugin();
@@ -90,8 +78,8 @@ public class TestMongodbInputPlugin
     public void checkDefaultValues()
     {
         ConfigSource config = Exec.newConfigSource()
-                .set("uri",
-                .set("collection",
+                .set("uri", mongoUri)
+                .set("collection", mongoCollection);
 
         PluginTask task = config.loadConfig(PluginTask.class);
         assertEquals("{}", task.getQuery());
@@ -109,7 +97,7 @@ public class TestMongodbInputPlugin
     {
         ConfigSource config = Exec.newConfigSource()
                 .set("uri", null)
-                .set("collection",
+                .set("collection", mongoCollection);
 
         plugin.transaction(config, new Control());
     }
@@ -119,7 +107,7 @@ public class TestMongodbInputPlugin
     {
         ConfigSource config = Exec.newConfigSource()
                 .set("uri", "mongodb://mongouser:password@non-exists.example.com:23490/test")
-                .set("collection",
+                .set("collection", mongoCollection);
 
         plugin.transaction(config, new Control());
     }
@@ -128,7 +116,7 @@ public class TestMongodbInputPlugin
     public void checkDefaultValuesCollectionIsNull()
     {
         ConfigSource config = Exec.newConfigSource()
-                .set("uri",
+                .set("uri", mongoUri)
                 .set("collection", null);
 
         plugin.transaction(config, new Control());
@@ -138,8 +126,8 @@ public class TestMongodbInputPlugin
     public void checkSortCannotUseWithIncremental()
     {
         ConfigSource config = Exec.newConfigSource()
-                .set("uri",
-                .set("collection",
+                .set("uri", mongoUri)
+                .set("collection", mongoCollection)
                 .set("sort", "{ \"field1\": 1 }")
                 .set("incremental_field", Optional.of(Arrays.asList("account")));
 
@@ -150,8 +138,8 @@ public class TestMongodbInputPlugin
     public void checkSkipCannotUseWithIncremental()
     {
         ConfigSource config = Exec.newConfigSource()
-                .set("uri",
-                .set("collection",
+                .set("uri", mongoUri)
+                .set("collection", mongoCollection)
                 .set("skip", 10)
                 .set("incremental_field", Optional.of(Arrays.asList("account")));
 
@@ -162,8 +150,8 @@ public class TestMongodbInputPlugin
     public void checkInvalidQueryOption()
    {
         ConfigSource config = Exec.newConfigSource()
-                .set("uri",
-                .set("collection",
+                .set("uri", mongoUri)
+                .set("collection", mongoCollection)
                 .set("query", "{\"key\":invalid_value}")
                 .set("last_record", 0)
                 .set("incremental_field", Optional.of(Arrays.asList("account")));
@@ -171,6 +159,63 @@ public class TestMongodbInputPlugin
         plugin.transaction(config, new Control());
     }
 
+    @Test(expected = ConfigException.class)
+    public void checkAggregationWithOtherOption()
+    {
+        ConfigSource config = Exec.newConfigSource()
+                .set("uri", mongoUri)
+                .set("collection", mongoCollection)
+                .set("query", "{\"key\":\"valid_value\"}")
+                .set("aggregation", "{$match: { account: { $gt: 32864}}}")
+                .set("incremental_field", Optional.of(Arrays.asList("account")));
+
+        plugin.transaction(config, new Control());
+    }
+
+    @Test
+    public void testCreateCredentialsSha1() throws Exception
+    {
+        PluginTask task = configForAuth().deepCopy()
+                .set("auth_method", "scram-sha-1")
+                .set("database", "db")
+                .loadConfig(PluginTask.class);
+
+        Method createCredential = MongodbInputPlugin.class.getDeclaredMethod("createCredential", PluginTask.class);
+        createCredential.setAccessible(true);
+        MongoCredential credential = (MongoCredential) createCredential.invoke(plugin, task);
+        assertThat("SCRAM-SHA-1", is(credential.getMechanism()));
+        assertThat("db", is(credential.getSource()));
+    }
+
+    @Test
+    public void testCreateCredentialsSha1WithAuthSource() throws Exception
+    {
+        PluginTask task = configForAuth().deepCopy()
+                .set("auth_method", "scram-sha-1")
+                .set("database", "db")
+                .set("auth_source", "authdb")
+                .loadConfig(PluginTask.class);
+
+        Method createCredential = MongodbInputPlugin.class.getDeclaredMethod("createCredential", PluginTask.class);
+        createCredential.setAccessible(true);
+        MongoCredential credential = (MongoCredential) createCredential.invoke(plugin, task);
+        assertThat("SCRAM-SHA-1", is(credential.getMechanism()));
+        assertThat("authdb", is(credential.getSource()));
+    }
+
+    @Test
+    public void testCreateCredentialsCr() throws Exception
+    {
+        PluginTask task = configForAuth().deepCopy()
+                .set("auth_method", "mongodb-cr")
+                .loadConfig(PluginTask.class);
+
+        Method createCredential = MongodbInputPlugin.class.getDeclaredMethod("createCredential", PluginTask.class);
+        createCredential.setAccessible(true);
+        MongoCredential credential = (MongoCredential) createCredential.invoke(plugin, task);
+        assertThat("MONGODB-CR", is(credential.getMechanism()));
+    }
+
     @Test
     public void testResume()
     {
@@ -205,8 +250,8 @@ public class TestMongodbInputPlugin
     {
         PluginTask task = config.loadConfig(PluginTask.class);
 
-        dropCollection(task,
-        createCollection(task,
+        dropCollection(task, mongoCollection);
+        createCollection(task, mongoCollection);
         insertDocument(task, createValidDocuments());
 
         plugin.transaction(config, new Control());
@@ -217,13 +262,13 @@ public class TestMongodbInputPlugin
     public void testRunWithLimit() throws Exception
     {
         ConfigSource config = Exec.newConfigSource()
-                .set("uri",
-                .set("collection",
+                .set("uri", mongoUri)
+                .set("collection", mongoCollection)
                 .set("limit", 1);
         PluginTask task = config.loadConfig(PluginTask.class);
 
-        dropCollection(task,
-        createCollection(task,
+        dropCollection(task, mongoCollection);
+        createCollection(task, mongoCollection);
         insertDocument(task, createValidDocuments());
 
         plugin.transaction(config, new Control());
@@ -234,14 +279,14 @@ public class TestMongodbInputPlugin
     public void testRunWithLimitSkip() throws Exception
     {
         ConfigSource config = Exec.newConfigSource()
-                .set("uri",
-                .set("collection",
+                .set("uri", mongoUri)
+                .set("collection", mongoCollection)
                 .set("limit", 3)
                 .set("skip", 1);
         PluginTask task = config.loadConfig(PluginTask.class);
 
-        dropCollection(task,
-        createCollection(task,
+        dropCollection(task, mongoCollection);
+        createCollection(task, mongoCollection);
         insertDocument(task, createValidDocuments());
 
         plugin.transaction(config, new Control());
@@ -251,7 +296,7 @@ public class TestMongodbInputPlugin
     @Test
     public void testRunWithConnectionParams() throws Exception
     {
-        MongoClientURI uri = new MongoClientURI(
+        MongoClientURI uri = new MongoClientURI(mongoUri);
         String host = uri.getHosts().get(0);
         Integer port = (host.split(":")[1] != null) ? Integer.valueOf(host.split(":")[1]) : 27017;
         ConfigSource config = Exec.newConfigSource()
@@ -259,11 +304,11 @@ public class TestMongodbInputPlugin
                 .set("user", uri.getUsername())
                 .set("password", uri.getPassword())
                 .set("database", uri.getDatabase())
-                .set("collection",
+                .set("collection", mongoCollection);
         PluginTask task = config.loadConfig(PluginTask.class);
 
-        dropCollection(task,
-        createCollection(task,
+        dropCollection(task, mongoCollection);
+        createCollection(task, mongoCollection);
         insertDocument(task, createValidDocuments());
 
         plugin.transaction(config, new Control());
@@ -274,13 +319,13 @@ public class TestMongodbInputPlugin
     public void testRunWithIncrementalLoad() throws Exception
     {
         ConfigSource config = Exec.newConfigSource()
-                .set("uri",
-                .set("collection",
+                .set("uri", mongoUri)
+                .set("collection", mongoCollection)
                 .set("incremental_field", Optional.of(Arrays.asList("int32_field")));
         PluginTask task = config.loadConfig(PluginTask.class);
 
-        dropCollection(task,
-        createCollection(task,
+        dropCollection(task, mongoCollection);
+        createCollection(task, mongoCollection);
         insertDocument(task, createValidDocuments());
 
         ConfigDiff diff = plugin.transaction(config, new Control());
@@ -293,15 +338,15 @@ public class TestMongodbInputPlugin
     public void testRunWithLimitIncrementalLoad() throws Exception
     {
         ConfigSource config = Exec.newConfigSource()
-                .set("uri",
-                .set("collection",
+                .set("uri", mongoUri)
+                .set("collection", mongoCollection)
                 .set("id_field_name", "int32_field")
                 .set("incremental_field", Optional.of(Arrays.asList("int32_field", "double_field", "datetime_field", "boolean_field")))
                 .set("limit", 1);
         PluginTask task = config.loadConfig(PluginTask.class);
 
-        dropCollection(task,
-        createCollection(task,
+        dropCollection(task, mongoCollection);
+        createCollection(task, mongoCollection);
         insertDocument(task, createValidDocuments());
 
         ConfigDiff diff = plugin.transaction(config, new Control());
@@ -321,8 +366,8 @@ public class TestMongodbInputPlugin
         previousLastRecord.put("datetime_field", "{$date=2015-01-27T10:23:49.000Z}");
         previousLastRecord.put("boolean_field", true);
         ConfigSource config = Exec.newConfigSource()
-                .set("uri",
-                .set("collection",
+                .set("uri", mongoUri)
+                .set("collection", mongoCollection)
                 .set("id_field_name", "int32_field")
                 .set("query", "{\"double_field\":{\"$gte\": 1.23}}")
                 .set("incremental_field", Optional.of(Arrays.asList("int32_field", "datetime_field", "boolean_field")))
@@ -330,8 +375,8 @@ public class TestMongodbInputPlugin
 
         PluginTask task = config.loadConfig(PluginTask.class);
 
-        dropCollection(task,
-        createCollection(task,
+        dropCollection(task, mongoCollection);
+        createCollection(task, mongoCollection);
         insertDocument(task, createValidDocuments());
 
         ConfigDiff diff = plugin.transaction(config, new Control());
@@ -346,13 +391,13 @@ public class TestMongodbInputPlugin
     public void testRunWithIncrementalLoadUnsupportedType() throws Exception
     {
         ConfigSource config = Exec.newConfigSource()
-                .set("uri",
-                .set("collection",
+                .set("uri", mongoUri)
+                .set("collection", mongoCollection)
                 .set("incremental_field", Optional.of(Arrays.asList("document_field")));
         PluginTask task = config.loadConfig(PluginTask.class);
 
-        dropCollection(task,
-        createCollection(task,
+        dropCollection(task, mongoCollection);
+        createCollection(task, mongoCollection);
         insertDocument(task, createValidDocuments());
 
         plugin.transaction(config, new Control());
@@ -362,14 +407,14 @@ public class TestMongodbInputPlugin
     public void testRunWithUnsupportedType() throws Exception
     {
         ConfigSource config = Exec.newConfigSource()
-                .set("uri",
-                .set("collection",
+                .set("uri", mongoUri)
+                .set("collection", mongoCollection)
                 .set("stop_on_invalid_record", true);
 
         PluginTask task = config.loadConfig(PluginTask.class);
 
-        dropCollection(task,
-        createCollection(task,
+        dropCollection(task, mongoCollection);
+        createCollection(task, mongoCollection);
 
         List<Document> documents = new ArrayList<>();
         documents.add(
@@ -380,6 +425,25 @@ public class TestMongodbInputPlugin
         plugin.transaction(config, new Control());
     }
 
+    @Test
+    public void testRunWithAggregation() throws Exception
+    {
+        ConfigSource config = Exec.newConfigSource()
+                .set("uri", mongoUri)
+                .set("collection", mongoCollection)
+                .set("id_field_name", "int32_field")
+                .set("aggregation", "{ $match: {\"int32_field\":{\"$gte\":5 },} }");
+
+        PluginTask task = config.loadConfig(PluginTask.class);
+
+        dropCollection(task, mongoCollection);
+        createCollection(task, mongoCollection);
+        insertDocument(task, createValidDocuments());
+
+        plugin.transaction(config, new Control());
+        assertValidRecordsForAggregation(getFieldSchema(), output);
+    }
+
     @Test
     public void testNormalize() throws Exception
     {
@@ -425,16 +489,16 @@ public class TestMongodbInputPlugin
     public void testBuildIncrementalCondition() throws Exception
     {
         PluginTask task = config().loadConfig(PluginTask.class);
-        dropCollection(task,
-        createCollection(task,
+        dropCollection(task, mongoCollection);
+        createCollection(task, mongoCollection);
         insertDocument(task, createValidDocuments());
 
         Method method = MongodbInputPlugin.class.getDeclaredMethod("buildIncrementalCondition", PluginTask.class);
         method.setAccessible(true);
 
         ConfigSource config = Exec.newConfigSource()
-                .set("uri",
-                .set("collection",
+                .set("uri", mongoUri)
+                .set("collection", mongoCollection)
                 .set("incremental_field", Optional.of(Arrays.asList("account")));
         task = config.loadConfig(PluginTask.class);
         Map<String, String> actual = (Map<String, String>) method.invoke(plugin, task);
@@ -452,8 +516,8 @@ public class TestMongodbInputPlugin
         innerRecord.put("$date", "2015-01-27T19:23:49Z");
         lastRecord.put("datetime_field", innerRecord);
         config = Exec.newConfigSource()
-                .set("uri",
-                .set("collection",
+                .set("uri", mongoUri)
+                .set("collection", mongoCollection)
                 .set("query", "{\"double_field\":{\"$gte\": 1.23}}")
                 .set("incremental_field", Optional.of(Arrays.asList("_id", "int32_field", "datetime_field")))
                 .set("last_record", Optional.of(lastRecord));
@@ -471,14 +535,14 @@ public class TestMongodbInputPlugin
         lastRecord.put("double_field", "0");
 
         ConfigSource config = Exec.newConfigSource()
-                .set("uri",
-                .set("collection",
+                .set("uri", mongoUri)
+                .set("collection", mongoCollection)
                 .set("query", "{\"double_field\":{\"$gte\": 1.23}}")
                 .set("incremental_field", Optional.of(Arrays.asList("double_field")))
                 .set("last_record", Optional.of(lastRecord));
         PluginTask task = config.loadConfig(PluginTask.class);
-        dropCollection(task,
-        createCollection(task,
+        dropCollection(task, mongoCollection);
+        createCollection(task, mongoCollection);
         insertDocument(task, createValidDocuments());
 
         Method method = MongodbInputPlugin.class.getDeclaredMethod("buildIncrementalCondition", PluginTask.class);
@@ -498,13 +562,13 @@ public class TestMongodbInputPlugin
         lastRecord.put("double_field", "0");
 
         ConfigSource config = Exec.newConfigSource()
-                .set("uri",
-                .set("collection",
+                .set("uri", mongoUri)
+                .set("collection", mongoCollection)
                 .set("incremental_field", Optional.of(Arrays.asList("invalid_field")))
                 .set("last_record", Optional.of(lastRecord));
         PluginTask task = config.loadConfig(PluginTask.class);
-        dropCollection(task,
-        createCollection(task,
+        dropCollection(task, mongoCollection);
+        createCollection(task, mongoCollection);
 
         Method method = MongodbInputPlugin.class.getDeclaredMethod("buildIncrementalCondition", PluginTask.class);
         method.setAccessible(true);
@@ -542,8 +606,17 @@ public class TestMongodbInputPlugin
     private ConfigSource config()
     {
         return Exec.newConfigSource()
-                .set("uri",
-                .set("collection",
+                .set("uri", mongoUri)
+                .set("collection", mongoCollection);
+    }
+
+    private ConfigSource configForAuth()
+    {
+        return Exec.newConfigSource()
+                .set("database", "db")
+                .set("collection", mongoCollection)
+                .set("user", "abcde")
+                .set("password", "passw0rd");
     }
 
     private List<Document> createValidDocuments() throws Exception
@@ -604,6 +677,11 @@ public class TestMongodbInputPlugin
         assertValidRecords(schema, output, 5, 0);
     }
 
+    private void assertValidRecordsForAggregation(Schema schema, MockPageOutput output) throws Exception
+    {
+        assertValidRecords(schema, output, 1, 4);
+    }
+
     private void assertValidRecords(Schema schema, MockPageOutput output, int limit, int skip) throws Exception
     {
         int maxRecordSize = 5;
```
data/src/test/resources/full.yml CHANGED
metadata CHANGED

```diff
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: embulk-input-mongodb
 version: !ruby/object:Gem::Version
-  version: 0.
+  version: 0.7.0
 platform: ruby
 authors:
 - Kazuyuki Honda
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2018-
+date: 2018-12-14 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -53,11 +53,13 @@ files:
 - build.gradle
 - config/checkstyle/checkstyle.xml
 - config/checkstyle/default.xml
+- docker-compose.yml
 - gradle/wrapper/gradle-wrapper.jar
 - gradle/wrapper/gradle-wrapper.properties
 - gradlew
 - gradlew.bat
 - lib/embulk/input/mongodb.rb
+- src/main/java/org/embulk/input/mongodb/AuthMethod.java
 - src/main/java/org/embulk/input/mongodb/HostTask.java
 - src/main/java/org/embulk/input/mongodb/MongodbInputPlugin.java
 - src/main/java/org/embulk/input/mongodb/PluginTask.java
@@ -70,8 +72,8 @@ files:
 - src/test/resources/id_field_name.yml
 - src/test/resources/id_field_name_expected.csv
 - src/test/resources/my_collection.jsonl
-- classpath/embulk-input-mongodb-0.
-- classpath/mongo-java-driver-3.
+- classpath/embulk-input-mongodb-0.7.0.jar
+- classpath/mongo-java-driver-3.8.1.jar
 homepage: https://github.com/hakobera/embulk-input-mongodb
 licenses:
 - MIT
```