airbyte-source-convex 0.4.0__tar.gz
Sign up to get free protection for your applications and to get access to all the features.
- airbyte-source-convex-0.4.0/PKG-INFO +100 -0
- airbyte-source-convex-0.4.0/README.md +100 -0
- airbyte-source-convex-0.4.0/airbyte_source_convex.egg-info/PKG-INFO +100 -0
- airbyte-source-convex-0.4.0/airbyte_source_convex.egg-info/SOURCES.txt +24 -0
- airbyte-source-convex-0.4.0/airbyte_source_convex.egg-info/dependency_links.txt +1 -0
- airbyte-source-convex-0.4.0/airbyte_source_convex.egg-info/entry_points.txt +2 -0
- airbyte-source-convex-0.4.0/airbyte_source_convex.egg-info/requires.txt +7 -0
- airbyte-source-convex-0.4.0/airbyte_source_convex.egg-info/top_level.txt +3 -0
- airbyte-source-convex-0.4.0/integration_tests/__init__.py +3 -0
- airbyte-source-convex-0.4.0/integration_tests/abnormal_state.json +12 -0
- airbyte-source-convex-0.4.0/integration_tests/acceptance.py +14 -0
- airbyte-source-convex-0.4.0/integration_tests/configured_catalog.json +56 -0
- airbyte-source-convex-0.4.0/integration_tests/invalid_config.json +4 -0
- airbyte-source-convex-0.4.0/integration_tests/sample_config.json +4 -0
- airbyte-source-convex-0.4.0/integration_tests/sample_state.json +12 -0
- airbyte-source-convex-0.4.0/setup.cfg +97 -0
- airbyte-source-convex-0.4.0/setup.py +44 -0
- airbyte-source-convex-0.4.0/source_convex/__init__.py +8 -0
- airbyte-source-convex-0.4.0/source_convex/run.py +14 -0
- airbyte-source-convex-0.4.0/source_convex/source.py +243 -0
- airbyte-source-convex-0.4.0/source_convex/spec.yaml +21 -0
- airbyte-source-convex-0.4.0/unit_tests/__init__.py +3 -0
- airbyte-source-convex-0.4.0/unit_tests/test_incremental_streams.py +88 -0
- airbyte-source-convex-0.4.0/unit_tests/test_source.py +115 -0
- airbyte-source-convex-0.4.0/unit_tests/test_streams.py +169 -0
@@ -0,0 +1,100 @@
|
|
1
|
+
Metadata-Version: 2.1
|
2
|
+
Name: airbyte-source-convex
|
3
|
+
Version: 0.4.0
|
4
|
+
Summary: Source implementation for Convex.
|
5
|
+
Author: Airbyte
|
6
|
+
Author-email: contact@airbyte.io
|
7
|
+
Description-Content-Type: text/markdown
|
8
|
+
Requires-Dist: airbyte-cdk~=0.2
|
9
|
+
Provides-Extra: tests
|
10
|
+
Requires-Dist: requests-mock~=1.9.3; extra == "tests"
|
11
|
+
Requires-Dist: pytest~=6.1; extra == "tests"
|
12
|
+
Requires-Dist: pytest-mock~=3.6.1; extra == "tests"
|
13
|
+
Requires-Dist: responses~=0.13.3; extra == "tests"
|
14
|
+
|
15
|
+
# Convex Source
|
16
|
+
|
17
|
+
This is the repository for the Convex source connector, written in Python.
|
18
|
+
For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/convex).
|
19
|
+
|
20
|
+
|
21
|
+
**To iterate on this connector, make sure to complete this prerequisites section.**
|
22
|
+
|
23
|
+
|
24
|
+
From this connector directory, create a virtual environment:
|
25
|
+
```
|
26
|
+
python -m venv .venv
|
27
|
+
```
|
28
|
+
|
29
|
+
This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your
|
30
|
+
development environment of choice. To activate it from the terminal, run:
|
31
|
+
```
|
32
|
+
source .venv/bin/activate
|
33
|
+
pip install -r requirements.txt
|
34
|
+
pip install '.[tests]'
|
35
|
+
```
|
36
|
+
If you are in an IDE, follow your IDE's instructions to activate the virtualenv.
|
37
|
+
|
38
|
+
Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is
|
39
|
+
used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`.
|
40
|
+
If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything
|
41
|
+
should work as you expect.
|
42
|
+
|
43
|
+
**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/convex)
|
44
|
+
to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_convex/spec.yaml` file.
|
45
|
+
Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information.
|
46
|
+
See `integration_tests/sample_config.json` for a sample config file.
|
47
|
+
|
48
|
+
**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source convex test creds`
|
49
|
+
and place them into `secrets/config.json`.
|
50
|
+
|
51
|
+
```
|
52
|
+
python main.py spec
|
53
|
+
python main.py check --config secrets/config.json
|
54
|
+
python main.py discover --config secrets/config.json
|
55
|
+
python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json
|
56
|
+
```
|
57
|
+
|
58
|
+
|
59
|
+
|
60
|
+
**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):**
|
61
|
+
```bash
|
62
|
+
airbyte-ci connectors --name=source-convex build
|
63
|
+
```
|
64
|
+
|
65
|
+
An image will be built with the tag `airbyte/source-convex:dev`.
|
66
|
+
|
67
|
+
**Via `docker build`:**
|
68
|
+
```bash
|
69
|
+
docker build -t airbyte/source-convex:dev .
|
70
|
+
```
|
71
|
+
|
72
|
+
Then run any of the connector commands as follows:
|
73
|
+
```
|
74
|
+
docker run --rm airbyte/source-convex:dev spec
|
75
|
+
docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-convex:dev check --config /secrets/config.json
|
76
|
+
docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-convex:dev discover --config /secrets/config.json
|
77
|
+
docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-convex:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
|
78
|
+
```
|
79
|
+
|
80
|
+
You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md):
|
81
|
+
```bash
|
82
|
+
airbyte-ci connectors --name=source-convex test
|
83
|
+
```
|
84
|
+
|
85
|
+
Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
|
86
|
+
If your connector requires you to create or destroy resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py.
|
87
|
+
|
88
|
+
All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
|
89
|
+
We split dependencies between two groups, dependencies that are:
|
90
|
+
* required for your connector to work need to go to `MAIN_REQUIREMENTS` list.
|
91
|
+
* required for the testing need to go to `TEST_REQUIREMENTS` list
|
92
|
+
|
93
|
+
You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
|
94
|
+
1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-convex test`
|
95
|
+
2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
|
96
|
+
3. Make sure the `metadata.yaml` content is up to date.
|
97
|
+
4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/convex.md`).
|
98
|
+
5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
|
99
|
+
6. Pat yourself on the back for being an awesome contributor.
|
100
|
+
7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
|
@@ -0,0 +1,100 @@
|
|
1
|
+
# Convex Source
|
2
|
+
|
3
|
+
This is the repository for the Convex source connector, written in Python.
|
4
|
+
For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/convex).
|
5
|
+
|
6
|
+
## Local development
|
7
|
+
|
8
|
+
### Prerequisites
|
9
|
+
**To iterate on this connector, make sure to complete this prerequisites section.**
|
10
|
+
|
11
|
+
#### Minimum Python version required `= 3.9.0`
|
12
|
+
|
13
|
+
#### Build & Activate Virtual Environment and install dependencies
|
14
|
+
From this connector directory, create a virtual environment:
|
15
|
+
```
|
16
|
+
python -m venv .venv
|
17
|
+
```
|
18
|
+
|
19
|
+
This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your
|
20
|
+
development environment of choice. To activate it from the terminal, run:
|
21
|
+
```
|
22
|
+
source .venv/bin/activate
|
23
|
+
pip install -r requirements.txt
|
24
|
+
pip install '.[tests]'
|
25
|
+
```
|
26
|
+
If you are in an IDE, follow your IDE's instructions to activate the virtualenv.
|
27
|
+
|
28
|
+
Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is
|
29
|
+
used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`.
|
30
|
+
If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything
|
31
|
+
should work as you expect.
|
32
|
+
|
33
|
+
#### Create credentials
|
34
|
+
**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/convex)
|
35
|
+
to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_convex/spec.yaml` file.
|
36
|
+
Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information.
|
37
|
+
See `integration_tests/sample_config.json` for a sample config file.
|
38
|
+
|
39
|
+
**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source convex test creds`
|
40
|
+
and place them into `secrets/config.json`.
|
41
|
+
|
42
|
+
### Locally running the connector
|
43
|
+
```
|
44
|
+
python main.py spec
|
45
|
+
python main.py check --config secrets/config.json
|
46
|
+
python main.py discover --config secrets/config.json
|
47
|
+
python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json
|
48
|
+
```
|
49
|
+
|
50
|
+
### Locally running the connector docker image
|
51
|
+
|
52
|
+
|
53
|
+
#### Build
|
54
|
+
**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):**
|
55
|
+
```bash
|
56
|
+
airbyte-ci connectors --name=source-convex build
|
57
|
+
```
|
58
|
+
|
59
|
+
An image will be built with the tag `airbyte/source-convex:dev`.
|
60
|
+
|
61
|
+
**Via `docker build`:**
|
62
|
+
```bash
|
63
|
+
docker build -t airbyte/source-convex:dev .
|
64
|
+
```
|
65
|
+
|
66
|
+
#### Run
|
67
|
+
Then run any of the connector commands as follows:
|
68
|
+
```
|
69
|
+
docker run --rm airbyte/source-convex:dev spec
|
70
|
+
docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-convex:dev check --config /secrets/config.json
|
71
|
+
docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-convex:dev discover --config /secrets/config.json
|
72
|
+
docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-convex:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
|
73
|
+
```
|
74
|
+
|
75
|
+
## Testing
|
76
|
+
You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md):
|
77
|
+
```bash
|
78
|
+
airbyte-ci connectors --name=source-convex test
|
79
|
+
```
|
80
|
+
|
81
|
+
### Customizing acceptance Tests
|
82
|
+
Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
|
83
|
+
If your connector requires you to create or destroy resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py.
|
84
|
+
|
85
|
+
## Dependency Management
|
86
|
+
All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
|
87
|
+
We split dependencies between two groups, dependencies that are:
|
88
|
+
* required for your connector to work need to go to `MAIN_REQUIREMENTS` list.
|
89
|
+
* required for the testing need to go to `TEST_REQUIREMENTS` list
|
90
|
+
|
91
|
+
### Publishing a new version of the connector
|
92
|
+
You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
|
93
|
+
1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-convex test`
|
94
|
+
2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
|
95
|
+
3. Make sure the `metadata.yaml` content is up to date.
|
96
|
+
4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/convex.md`).
|
97
|
+
5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
|
98
|
+
6. Pat yourself on the back for being an awesome contributor.
|
99
|
+
7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
|
100
|
+
|
@@ -0,0 +1,100 @@
|
|
1
|
+
Metadata-Version: 2.1
|
2
|
+
Name: airbyte-source-convex
|
3
|
+
Version: 0.4.0
|
4
|
+
Summary: Source implementation for Convex.
|
5
|
+
Author: Airbyte
|
6
|
+
Author-email: contact@airbyte.io
|
7
|
+
Description-Content-Type: text/markdown
|
8
|
+
Requires-Dist: airbyte-cdk~=0.2
|
9
|
+
Provides-Extra: tests
|
10
|
+
Requires-Dist: requests-mock~=1.9.3; extra == "tests"
|
11
|
+
Requires-Dist: pytest~=6.1; extra == "tests"
|
12
|
+
Requires-Dist: pytest-mock~=3.6.1; extra == "tests"
|
13
|
+
Requires-Dist: responses~=0.13.3; extra == "tests"
|
14
|
+
|
15
|
+
# Convex Source
|
16
|
+
|
17
|
+
This is the repository for the Convex source connector, written in Python.
|
18
|
+
For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/convex).
|
19
|
+
|
20
|
+
|
21
|
+
**To iterate on this connector, make sure to complete this prerequisites section.**
|
22
|
+
|
23
|
+
|
24
|
+
From this connector directory, create a virtual environment:
|
25
|
+
```
|
26
|
+
python -m venv .venv
|
27
|
+
```
|
28
|
+
|
29
|
+
This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your
|
30
|
+
development environment of choice. To activate it from the terminal, run:
|
31
|
+
```
|
32
|
+
source .venv/bin/activate
|
33
|
+
pip install -r requirements.txt
|
34
|
+
pip install '.[tests]'
|
35
|
+
```
|
36
|
+
If you are in an IDE, follow your IDE's instructions to activate the virtualenv.
|
37
|
+
|
38
|
+
Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is
|
39
|
+
used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`.
|
40
|
+
If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything
|
41
|
+
should work as you expect.
|
42
|
+
|
43
|
+
**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/convex)
|
44
|
+
to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_convex/spec.yaml` file.
|
45
|
+
Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information.
|
46
|
+
See `integration_tests/sample_config.json` for a sample config file.
|
47
|
+
|
48
|
+
**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source convex test creds`
|
49
|
+
and place them into `secrets/config.json`.
|
50
|
+
|
51
|
+
```
|
52
|
+
python main.py spec
|
53
|
+
python main.py check --config secrets/config.json
|
54
|
+
python main.py discover --config secrets/config.json
|
55
|
+
python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json
|
56
|
+
```
|
57
|
+
|
58
|
+
|
59
|
+
|
60
|
+
**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):**
|
61
|
+
```bash
|
62
|
+
airbyte-ci connectors --name=source-convex build
|
63
|
+
```
|
64
|
+
|
65
|
+
An image will be built with the tag `airbyte/source-convex:dev`.
|
66
|
+
|
67
|
+
**Via `docker build`:**
|
68
|
+
```bash
|
69
|
+
docker build -t airbyte/source-convex:dev .
|
70
|
+
```
|
71
|
+
|
72
|
+
Then run any of the connector commands as follows:
|
73
|
+
```
|
74
|
+
docker run --rm airbyte/source-convex:dev spec
|
75
|
+
docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-convex:dev check --config /secrets/config.json
|
76
|
+
docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-convex:dev discover --config /secrets/config.json
|
77
|
+
docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-convex:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
|
78
|
+
```
|
79
|
+
|
80
|
+
You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md):
|
81
|
+
```bash
|
82
|
+
airbyte-ci connectors --name=source-convex test
|
83
|
+
```
|
84
|
+
|
85
|
+
Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
|
86
|
+
If your connector requires you to create or destroy resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py.
|
87
|
+
|
88
|
+
All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
|
89
|
+
We split dependencies between two groups, dependencies that are:
|
90
|
+
* required for your connector to work need to go to `MAIN_REQUIREMENTS` list.
|
91
|
+
* required for the testing need to go to `TEST_REQUIREMENTS` list
|
92
|
+
|
93
|
+
You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
|
94
|
+
1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-convex test`
|
95
|
+
2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
|
96
|
+
3. Make sure the `metadata.yaml` content is up to date.
|
97
|
+
4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/convex.md`).
|
98
|
+
5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
|
99
|
+
6. Pat yourself on the back for being an awesome contributor.
|
100
|
+
7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
|
@@ -0,0 +1,24 @@
|
|
1
|
+
README.md
|
2
|
+
setup.cfg
|
3
|
+
setup.py
|
4
|
+
airbyte_source_convex.egg-info/PKG-INFO
|
5
|
+
airbyte_source_convex.egg-info/SOURCES.txt
|
6
|
+
airbyte_source_convex.egg-info/dependency_links.txt
|
7
|
+
airbyte_source_convex.egg-info/entry_points.txt
|
8
|
+
airbyte_source_convex.egg-info/requires.txt
|
9
|
+
airbyte_source_convex.egg-info/top_level.txt
|
10
|
+
integration_tests/__init__.py
|
11
|
+
integration_tests/abnormal_state.json
|
12
|
+
integration_tests/acceptance.py
|
13
|
+
integration_tests/configured_catalog.json
|
14
|
+
integration_tests/invalid_config.json
|
15
|
+
integration_tests/sample_config.json
|
16
|
+
integration_tests/sample_state.json
|
17
|
+
source_convex/__init__.py
|
18
|
+
source_convex/run.py
|
19
|
+
source_convex/source.py
|
20
|
+
source_convex/spec.yaml
|
21
|
+
unit_tests/__init__.py
|
22
|
+
unit_tests/test_incremental_streams.py
|
23
|
+
unit_tests/test_source.py
|
24
|
+
unit_tests/test_streams.py
|
@@ -0,0 +1 @@
|
|
1
|
+
|
@@ -0,0 +1,14 @@
|
|
1
|
+
#
|
2
|
+
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
|
3
|
+
#
|
4
|
+
|
5
|
+
|
6
|
+
import pytest
|
7
|
+
|
8
|
+
pytest_plugins = ("connector_acceptance_test.plugin",)
|
9
|
+
|
10
|
+
|
11
|
+
@pytest.fixture(scope="session", autouse=True)
def connector_setup():
    """Session-wide placeholder fixture.

    Acceptance tests that need external resources (test deployments,
    seeded data, ...) can extend this fixture with setup before the
    yield and teardown after it. By default it does nothing.
    """
    yield None
|
@@ -0,0 +1,56 @@
|
|
1
|
+
{
|
2
|
+
"streams": [
|
3
|
+
{
|
4
|
+
"sync_mode": "incremental",
|
5
|
+
"destination_sync_mode": "append",
|
6
|
+
"stream": {
|
7
|
+
"name": "posts",
|
8
|
+
"json_schema": {
|
9
|
+
"type": "object",
|
10
|
+
"properties": {
|
11
|
+
"_creationTime": { "type": "number" },
|
12
|
+
"_id": {
|
13
|
+
"type": "object",
|
14
|
+
"properties": { "$id": { "type": "string" } }
|
15
|
+
},
|
16
|
+
"author": {
|
17
|
+
"type": "object",
|
18
|
+
"properties": { "$id": { "type": "string" } }
|
19
|
+
},
|
20
|
+
"body": { "type": "string" },
|
21
|
+
"time": { "type": "number" },
|
22
|
+
"_ts": { "type": "number" }
|
23
|
+
}
|
24
|
+
},
|
25
|
+
"supported_sync_modes": ["full_refresh", "incremental"],
|
26
|
+
"source_defined_cursor": true,
|
27
|
+
"default_cursor_field": ["_ts"],
|
28
|
+
"source_defined_primary_key": [["_id"]]
|
29
|
+
}
|
30
|
+
},
|
31
|
+
{
|
32
|
+
"sync_mode": "incremental",
|
33
|
+
"destination_sync_mode": "append",
|
34
|
+
"stream": {
|
35
|
+
"name": "users",
|
36
|
+
"json_schema": {
|
37
|
+
"type": "object",
|
38
|
+
"properties": {
|
39
|
+
"_creationTime": { "type": "number" },
|
40
|
+
"_id": {
|
41
|
+
"type": "object",
|
42
|
+
"properties": { "$id": { "type": "string" } }
|
43
|
+
},
|
44
|
+
"name": { "type": "string" },
|
45
|
+
"tokenIdentifier": { "type": "string" },
|
46
|
+
"_ts": { "type": "number" }
|
47
|
+
}
|
48
|
+
},
|
49
|
+
"supported_sync_modes": ["full_refresh", "incremental"],
|
50
|
+
"source_defined_cursor": true,
|
51
|
+
"default_cursor_field": ["_ts"],
|
52
|
+
"source_defined_primary_key": [["_id"]]
|
53
|
+
}
|
54
|
+
}
|
55
|
+
]
|
56
|
+
}
|
@@ -0,0 +1,97 @@
|
|
1
|
+
[metadata]
|
2
|
+
name = airbyte-source-convex
|
3
|
+
version = 0.4.0
|
4
|
+
author = Airbyte
|
5
|
+
author_email = contact@airbyte.io
|
6
|
+
long_description = # Convex Source
|
7
|
+
|
8
|
+
This is the repository for the Convex source connector, written in Python.
|
9
|
+
For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/convex).
|
10
|
+
|
11
|
+
|
12
|
+
**To iterate on this connector, make sure to complete this prerequisites section.**
|
13
|
+
|
14
|
+
|
15
|
+
From this connector directory, create a virtual environment:
|
16
|
+
```
|
17
|
+
python -m venv .venv
|
18
|
+
```
|
19
|
+
|
20
|
+
This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your
|
21
|
+
development environment of choice. To activate it from the terminal, run:
|
22
|
+
```
|
23
|
+
source .venv/bin/activate
|
24
|
+
pip install -r requirements.txt
|
25
|
+
pip install '.[tests]'
|
26
|
+
```
|
27
|
+
If you are in an IDE, follow your IDE's instructions to activate the virtualenv.
|
28
|
+
|
29
|
+
Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is
|
30
|
+
used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`.
|
31
|
+
If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything
|
32
|
+
should work as you expect.
|
33
|
+
|
34
|
+
**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/convex)
|
35
|
+
to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_convex/spec.yaml` file.
|
36
|
+
Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information.
|
37
|
+
See `integration_tests/sample_config.json` for a sample config file.
|
38
|
+
|
39
|
+
**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source convex test creds`
|
40
|
+
and place them into `secrets/config.json`.
|
41
|
+
|
42
|
+
```
|
43
|
+
python main.py spec
|
44
|
+
python main.py check --config secrets/config.json
|
45
|
+
python main.py discover --config secrets/config.json
|
46
|
+
python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json
|
47
|
+
```
|
48
|
+
|
49
|
+
|
50
|
+
|
51
|
+
**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):**
|
52
|
+
```bash
|
53
|
+
airbyte-ci connectors --name=source-convex build
|
54
|
+
```
|
55
|
+
|
56
|
+
An image will be built with the tag `airbyte/source-convex:dev`.
|
57
|
+
|
58
|
+
**Via `docker build`:**
|
59
|
+
```bash
|
60
|
+
docker build -t airbyte/source-convex:dev .
|
61
|
+
```
|
62
|
+
|
63
|
+
Then run any of the connector commands as follows:
|
64
|
+
```
|
65
|
+
docker run --rm airbyte/source-convex:dev spec
|
66
|
+
docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-convex:dev check --config /secrets/config.json
|
67
|
+
docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-convex:dev discover --config /secrets/config.json
|
68
|
+
docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-convex:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
|
69
|
+
```
|
70
|
+
|
71
|
+
You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md):
|
72
|
+
```bash
|
73
|
+
airbyte-ci connectors --name=source-convex test
|
74
|
+
```
|
75
|
+
|
76
|
+
Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
|
77
|
+
If your connector requires you to create or destroy resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py.
|
78
|
+
|
79
|
+
All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
|
80
|
+
We split dependencies between two groups, dependencies that are:
|
81
|
+
* required for your connector to work need to go to `MAIN_REQUIREMENTS` list.
|
82
|
+
* required for the testing need to go to `TEST_REQUIREMENTS` list
|
83
|
+
|
84
|
+
You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
|
85
|
+
1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-convex test`
|
86
|
+
2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
|
87
|
+
3. Make sure the `metadata.yaml` content is up to date.
|
88
|
+
4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/convex.md`).
|
89
|
+
5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
|
90
|
+
6. Pat yourself on the back for being an awesome contributor.
|
91
|
+
7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
|
92
|
+
long_description_content_type = text/markdown
|
93
|
+
|
94
|
+
[egg_info]
|
95
|
+
tag_build =
|
96
|
+
tag_date = 0
|
97
|
+
|
@@ -0,0 +1,44 @@
|
|
1
|
+
#
|
2
|
+
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
|
3
|
+
#
|
4
|
+
|
5
|
+
|
6
|
+
from setuptools import find_packages, setup
|
7
|
+
|
8
|
+
# Dependencies required at runtime for the connector to work.
MAIN_REQUIREMENTS = [
    "airbyte-cdk~=0.2",
]

# Dependencies required only for running the test suite
# (installed via `pip install '.[tests]'`).
TEST_REQUIREMENTS = [
    "requests-mock~=1.9.3",
    "pytest~=6.1",
    "pytest-mock~=3.6.1",
    "responses~=0.13.3",
]

# Package metadata (name, version, author, long_description) lives in
# setup.cfg; this call supplies the code-level packaging configuration.
setup(
    entry_points={
        "console_scripts": [
            # Installs a `source-convex` CLI that dispatches to source_convex.run:run.
            "source-convex=source_convex.run:run",
        ],
    },
    description="Source implementation for Convex.",
    packages=find_packages(),
    install_requires=MAIN_REQUIREMENTS,
    package_data={
        "": [
            # Include yaml files in the package (if any)
            "*.yml",
            "*.yaml",
            # Include all json files in the package, up to 4 levels deep
            "*.json",
            "*/*.json",
            "*/*/*.json",
            "*/*/*/*.json",
            "*/*/*/*/*.json",
        ]
    },
    extras_require={
        "tests": TEST_REQUIREMENTS,
    },
)
|
@@ -0,0 +1,243 @@
|
|
1
|
+
#
|
2
|
+
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
|
3
|
+
#
|
4
|
+
|
5
|
+
|
6
|
+
from datetime import datetime
|
7
|
+
from json import JSONDecodeError
|
8
|
+
from typing import Any, Dict, Iterable, Iterator, List, Mapping, MutableMapping, Optional, Tuple, TypedDict, cast
|
9
|
+
|
10
|
+
import requests
|
11
|
+
from airbyte_cdk.models import SyncMode
|
12
|
+
from airbyte_cdk.sources import AbstractSource
|
13
|
+
from airbyte_cdk.sources.streams import IncrementalMixin, Stream
|
14
|
+
from airbyte_cdk.sources.streams.http import HttpStream
|
15
|
+
from airbyte_cdk.sources.streams.http.requests_native_auth.token import TokenAuthenticator
|
16
|
+
|
17
|
+
class ConvexConfig(TypedDict):
    """User-supplied connector configuration, per source_convex/spec.yaml."""

    # Base URL of the Convex deployment, e.g. https://<name>.convex.cloud
    deployment_url: str
    # Deploy key used to authenticate against the deployment's export API.
    access_key: str
|
24
|
+
|
25
|
+
class ConvexState(TypedDict):
    """Per-stream incremental sync state persisted between runs."""

    # Pagination cursor within the initial snapshot export; None before the
    # first snapshot page has been fetched.
    snapshot_cursor: Optional[str]
    # True while more snapshot pages remain to be read.
    snapshot_has_more: bool
    # Timestamp cursor for the delta (change) stream; None until the
    # snapshot phase completes.
    delta_cursor: Optional[int]
|
33
|
+
|
34
|
+
# Version string reported to Convex in the Convex-Client request header;
# keep in sync with the package version in setup.cfg.
CONVEX_CLIENT_VERSION = "0.4.0"
|
35
|
+
|
36
|
+
|
37
|
+
# Source
class SourceConvex(AbstractSource):
    """Airbyte source that exposes one stream per table of a Convex deployment."""

    def _json_schemas(self, config: ConvexConfig) -> requests.Response:
        """GET the per-table JSON schemas from the deployment's json_schemas endpoint."""
        deployment_url = config["deployment_url"]
        access_key = config["access_key"]
        headers = {
            "Authorization": f"Convex {access_key}",
            "Convex-Client": f"airbyte-export-{CONVEX_CLIENT_VERSION}",
        }
        url = f"{deployment_url}/api/json_schemas?deltaSchema=true&format=json"
        return requests.get(url, headers=headers)

    def check_connection(self, logger: Any, config: Mapping[str, Any]) -> Tuple[bool, Any]:
        """
        Connection check to validate that the user-provided config can be used to connect to the underlying API

        :param config: the user-input config object conforming to the connector's spec.yaml
        :param logger: logger object
        :return Tuple[bool, any]: (True, None) if the input config can be used to connect to the API successfully, (False, error) otherwise.
        """
        resp = self._json_schemas(cast(ConvexConfig, config))
        if resp.status_code != 200:
            return False, format_http_error("Connection to Convex via json_schemas endpoint failed", resp)
        return True, None

    def streams(self, config: Mapping[str, Any]) -> List[Stream]:
        """
        :param config: A Mapping of the user input configuration as defined in the connector spec.
        """
        convex_config = cast(ConvexConfig, config)
        resp = self._json_schemas(convex_config)
        if resp.status_code != 200:
            raise Exception(format_http_error("Failed request to json_schemas", resp))
        json_schemas = resp.json()
        # One stream per table, each carrying its own delta-enabled schema.
        return [
            ConvexStream(
                convex_config["deployment_url"],
                convex_config["access_key"],
                "json",  # Use `json` export format
                table_name,
                table_schema,
            )
            for table_name, table_schema in json_schemas.items()
        ]
|
84
|
+
|
85
|
+
|
86
|
+
class ConvexStream(HttpStream, IncrementalMixin):
    """Incremental HTTP stream over a single Convex table.

    Reading happens in two phases:
      1. Snapshot: page through ``/api/list_snapshot`` until ``hasMore`` is false.
      2. Deltas: page through ``/api/document_deltas`` starting at the snapshot's
         timestamp (incremental syncs only).
    The triple (snapshot_cursor, snapshot_has_more, delta_cursor) is the stream state.
    """

    def __init__(
        self,
        deployment_url: str,
        access_key: str,
        fmt: str,
        table_name: str,
        json_schema: Dict[str, Any],
    ):
        self.deployment_url = deployment_url
        self.fmt = fmt
        self.table_name = table_name
        if json_schema:
            json_schema["additionalProperties"] = True
            # CDC columns appended to every record in read_records below.
            json_schema["properties"]["_ab_cdc_lsn"] = {"type": "number"}
            json_schema["properties"]["_ab_cdc_updated_at"] = {"type": "string"}
            json_schema["properties"]["_ab_cdc_deleted_at"] = {"anyOf": [{"type": "string"}, {"type": "null"}]}
        else:
            json_schema = {}
        self.json_schema = json_schema
        self._snapshot_cursor_value: Optional[str] = None
        self._snapshot_has_more = True
        self._delta_cursor_value: Optional[int] = None
        self._delta_has_more = True
        super().__init__(TokenAuthenticator(access_key, "Convex"))

    @property
    def name(self) -> str:
        # Stream name is the Convex table name.
        return self.table_name

    @property
    def url_base(self) -> str:
        return self.deployment_url

    def get_json_schema(self) -> Mapping[str, Any]:  # type: ignore[override]
        return self.json_schema

    primary_key = "_id"
    cursor_field = "_ts"

    # Checkpoint stream reads after this many records. This prevents re-reading of data if the stream fails for any reason.
    state_checkpoint_interval = 128

    @property
    def state(self) -> MutableMapping[str, Any]:
        """Serialize the paging cursors as the Airbyte stream state."""
        value: ConvexState = {
            "snapshot_cursor": self._snapshot_cursor_value,
            "snapshot_has_more": self._snapshot_has_more,
            "delta_cursor": self._delta_cursor_value,
        }
        return cast(MutableMapping[str, Any], value)

    @state.setter
    def state(self, value: MutableMapping[str, Any]) -> None:
        state = cast(ConvexState, value)
        self._snapshot_cursor_value = state["snapshot_cursor"]
        self._snapshot_has_more = state["snapshot_has_more"]
        self._delta_cursor_value = state["delta_cursor"]

    def next_page_token(self, response: requests.Response) -> Optional[ConvexState]:
        """Advance the phase-appropriate cursor; return None once both phases are drained."""
        if response.status_code != 200:
            raise Exception(format_http_error("Failed request", response))
        resp_json = response.json()
        if self._snapshot_has_more:
            self._snapshot_cursor_value = resp_json["cursor"]
            self._snapshot_has_more = resp_json["hasMore"]
            # The snapshot's timestamp is where the delta phase picks up.
            self._delta_cursor_value = resp_json["snapshot"]
        else:
            self._delta_cursor_value = resp_json["cursor"]
            self._delta_has_more = resp_json["hasMore"]
        has_more = self._snapshot_has_more or self._delta_has_more
        return cast(ConvexState, self.state) if has_more else None

    def path(
        self,
        stream_state: Optional[Mapping[str, Any]] = None,
        stream_slice: Optional[Mapping[str, Any]] = None,
        next_page_token: Optional[Mapping[str, Any]] = None,
    ) -> str:
        # https://docs.convex.dev/http-api/#sync
        if self._snapshot_has_more:
            return "/api/list_snapshot"
        else:
            return "/api/document_deltas"

    def parse_response(
        self,
        response: requests.Response,
        stream_state: Mapping[str, Any],
        stream_slice: Optional[Mapping[str, Any]] = None,
        next_page_token: Optional[Mapping[str, Any]] = None,
    ) -> Iterable[Mapping[str, Any]]:
        """Return the page's documents; both endpoints wrap them in `values`."""
        if response.status_code != 200:
            raise Exception(format_http_error("Failed request", response))
        resp_json = response.json()
        return list(resp_json["values"])

    def request_params(
        self,
        stream_state: Optional[Mapping[str, Any]],
        stream_slice: Optional[Mapping[str, Any]] = None,
        next_page_token: Optional[Mapping[str, Any]] = None,
    ) -> MutableMapping[str, Any]:
        """Query params for the current phase's endpoint (cursor + snapshot pin)."""
        params: Dict[str, Any] = {"tableName": self.table_name, "format": self.fmt}
        if self._snapshot_has_more:
            if self._snapshot_cursor_value:
                params["cursor"] = self._snapshot_cursor_value
            if self._delta_cursor_value:
                params["snapshot"] = self._delta_cursor_value
        else:
            if self._delta_cursor_value:
                params["cursor"] = self._delta_cursor_value
        return params

    def request_headers(
        self,
        stream_state: Optional[Mapping[str, Any]],
        stream_slice: Optional[Mapping[str, Any]] = None,
        next_page_token: Optional[Mapping[str, Any]] = None,
    ) -> Dict[str, str]:
        """
        Custom headers for each HTTP request, not including Authorization.
        """
        return {
            "Convex-Client": f"airbyte-export-{CONVEX_CLIENT_VERSION}",
        }

    def get_updated_state(self, current_stream_state: ConvexState, latest_record: Mapping[str, Any]) -> ConvexState:
        """
        This (deprecated) method is still used by AbstractSource to update state between calls to `read_records`.
        """
        return cast(ConvexState, self.state)

    def read_records(self, sync_mode: SyncMode, *args: Any, **kwargs: Any) -> Iterator[Any]:
        """Yield records decorated with the Airbyte CDC columns derived from `_ts`."""
        # Full refresh stops after the snapshot phase; incremental also tails deltas.
        self._delta_has_more = sync_mode == SyncMode.incremental
        for read_record in super().read_records(sync_mode, *args, **kwargs):
            record = dict(read_record)
            ts_ns = record["_ts"]
            ts_seconds = ts_ns / 1e9  # convert from nanoseconds.
            # equivalent of java's `new Timestamp(transactionMillis).toInstant().toString()`
            # datetime.utcfromtimestamp() is deprecated (removed semantics in 3.12+);
            # build an aware UTC datetime and drop tzinfo so isoformat() output is
            # byte-identical to the old naive form (no "+00:00" suffix).
            ts_datetime = datetime.fromtimestamp(ts_seconds, tz=timezone.utc).replace(tzinfo=None)
            ts = ts_datetime.isoformat()
            # DebeziumEventUtils.CDC_LSN
            record["_ab_cdc_lsn"] = ts_ns
            # DebeziumEventUtils.CDC_UPDATED_AT
            record["_ab_cdc_updated_at"] = ts
            record["_deleted"] = "_deleted" in record and record["_deleted"]
            # DebeziumEventUtils.CDC_DELETED_AT
            record["_ab_cdc_deleted_at"] = ts if record["_deleted"] else None
            yield record
|
236
|
+
|
237
|
+
|
238
|
+
def format_http_error(context: str, resp: requests.Response) -> str:
    """Render a failed HTTP response as one human-readable line.

    Expects the Convex error shape {"code": ..., "message": ...}; falls back to
    the raw response body when the payload is not JSON or lacks those keys.
    """
    try:
        err = resp.json()
        code, message = err["code"], err["message"]
    except (JSONDecodeError, KeyError):
        return f"{context}: {resp.text}"
    return f"{context}: {resp.status_code}: {code}: {message}"
|
@@ -0,0 +1,21 @@
|
|
1
|
+
documentationUrl: https://docs.airbyte.com/integrations/sources/convex
|
2
|
+
connectionSpecification:
|
3
|
+
additionalProperties: true
|
4
|
+
$schema: http://json-schema.org/draft-07/schema#
|
5
|
+
title: Convex Source Spec
|
6
|
+
type: object
|
7
|
+
required:
|
8
|
+
- deployment_url
|
9
|
+
- access_key
|
10
|
+
properties:
|
11
|
+
deployment_url:
|
12
|
+
type: string
|
13
|
+
title: Deployment Url
|
14
|
+
examples:
|
15
|
+
- https://murky-swan-635.convex.cloud
|
16
|
+
- https://cluttered-owl-337.convex.cloud
|
17
|
+
access_key:
|
18
|
+
type: string
|
19
|
+
title: Access Key
|
20
|
+
description: API access key used to retrieve data from Convex.
|
21
|
+
airbyte_secret: true
|
@@ -0,0 +1,88 @@
|
|
1
|
+
#
|
2
|
+
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
|
3
|
+
#
|
4
|
+
|
5
|
+
|
6
|
+
from unittest.mock import MagicMock
|
7
|
+
|
8
|
+
from airbyte_cdk.models import SyncMode
|
9
|
+
from pytest import fixture
|
10
|
+
from source_convex.source import ConvexStream
|
11
|
+
|
12
|
+
|
13
|
+
@fixture
def patch_incremental_base_class(mocker):
    """Stub abstract members so the abstract ConvexStream can be instantiated."""
    for attr, stub in (
        ("path", "v0/example_endpoint"),
        ("primary_key", "test_primary_key"),
        ("__abstractmethods__", set()),
    ):
        mocker.patch.object(ConvexStream, attr, stub)
|
19
|
+
|
20
|
+
|
21
|
+
def test_cursor_field(patch_incremental_base_class):
    """The stream's cursor is the Convex modification-timestamp column."""
    stream = ConvexStream("murky-swan-635", "accesskey", "json", "messages", None)
    assert stream.cursor_field == "_ts"
|
25
|
+
|
26
|
+
|
27
|
+
def test_get_updated_state(patch_incremental_base_class):
    """State tracks the snapshot cursor while paging, then switches to the delta cursor."""
    stream = ConvexStream("murky-swan-635", "accesskey", "json", "messages", None)
    resp = MagicMock()
    resp.status_code = 200

    def feed(payload):
        # Simulate one HTTP page: parse it and advance the paging state.
        resp.json = lambda: payload
        stream.parse_response(resp, {})
        stream.next_page_token(resp)

    feed({"values": [{"_id": "my_id", "field": "f", "_ts": 123}], "cursor": 1234, "snapshot": 3000, "hasMore": True})
    assert stream.get_updated_state(None, None) == {
        "snapshot_cursor": 1234,
        "snapshot_has_more": True,
        "delta_cursor": 3000,
    }
    feed({"values": [{"_id": "my_id", "field": "f", "_ts": 1235}], "cursor": 1235, "snapshot": 3000, "hasMore": False})
    assert stream.get_updated_state(None, None) == {
        "snapshot_cursor": 1235,
        "snapshot_has_more": False,
        "delta_cursor": 3000,
    }
    feed({"values": [{"_id": "my_id", "field": "f", "_ts": 1235}], "cursor": 8000, "hasMore": True})
    assert stream.get_updated_state(None, None) == {
        "snapshot_cursor": 1235,
        "snapshot_has_more": False,
        "delta_cursor": 8000,
    }
    assert stream._delta_has_more is True
    feed({"values": [{"_id": "my_id", "field": "f", "_ts": 1235}], "cursor": 9000, "hasMore": False})
    assert stream.get_updated_state(None, None) == {
        "snapshot_cursor": 1235,
        "snapshot_has_more": False,
        "delta_cursor": 9000,
    }
    assert stream._delta_has_more is False
|
65
|
+
|
66
|
+
|
67
|
+
def test_stream_slices(patch_incremental_base_class):
    """A Convex table has no sub-slices: exactly one (None) slice."""
    stream = ConvexStream("murky-swan-635", "accesskey", "json", "messages", None)
    slices = stream.stream_slices(sync_mode=SyncMode.incremental, cursor_field=[], stream_state={})
    assert slices == [None]
|
72
|
+
|
73
|
+
|
74
|
+
def test_supports_incremental(patch_incremental_base_class, mocker):
    """Having any cursor field makes the stream incremental-capable."""
    mocker.patch.object(ConvexStream, "cursor_field", "dummy_field")
    stream = ConvexStream("murky-swan-635", "accesskey", "json", "messages", None)
    assert stream.supports_incremental
|
78
|
+
|
79
|
+
|
80
|
+
def test_source_defined_cursor(patch_incremental_base_class):
    """The cursor is fixed by the source, not chosen by the user."""
    assert ConvexStream("murky-swan-635", "accesskey", "json", "messages", None).source_defined_cursor
|
83
|
+
|
84
|
+
|
85
|
+
def test_stream_checkpoint_interval(patch_incremental_base_class):
    """State is checkpointed every 128 records."""
    stream = ConvexStream("murky-swan-635", "accesskey", "json", "messages", None)
    assert stream.state_checkpoint_interval == 128
|
@@ -0,0 +1,115 @@
|
|
1
|
+
#
|
2
|
+
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
|
3
|
+
#
|
4
|
+
|
5
|
+
from unittest.mock import MagicMock
|
6
|
+
|
7
|
+
import responses
|
8
|
+
from source_convex.source import SourceConvex
|
9
|
+
|
10
|
+
|
11
|
+
def setup_responses():
    """Mock json_schemas for one healthy deployment and one that returns a 400 error."""
    posts_schema = {
        "type": "object",
        "$schema": "http://json-schema.org/draft-07/schema#",
        "properties": {
            "_creationTime": {"type": "number"},
            "_id": {"$description": "Id(posts)", "type": "object", "properties": {"$id": {"type": "string"}}},
            "author": {"$description": "Id(users)", "type": "object", "properties": {"$id": {"type": "string"}}},
            "body": {"type": "string"},
            "_ts": {"type": "integer"},
            "_deleted": {"type": "boolean"},
        },
    }
    users_schema = {
        "type": "object",
        "$schema": "http://json-schema.org/draft-07/schema#",
        "properties": {
            "_creationTime": {"type": "number"},
            "_id": {"$description": "Id(users)", "type": "object", "properties": {"$id": {"type": "string"}}},
            "name": {"type": "string"},
            "tokenIdentifier": {"type": "string"},
            "_ts": {"type": "integer"},
            "_deleted": {"type": "boolean"},
        },
    }
    responses.add(
        responses.GET,
        "https://murky-swan-635.convex.cloud/api/json_schemas?deltaSchema=true&format=json",
        json={"posts": posts_schema, "users": users_schema},
    )
    responses.add(
        responses.GET,
        "https://curious-giraffe-964.convex.cloud/api/json_schemas?deltaSchema=true&format=json",
        json={"code": "Error code", "message": "Error message"},
        status=400,
    )
|
49
|
+
|
50
|
+
|
51
|
+
@responses.activate
def test_check_connection(mocker):
    """A 200 from json_schemas means the connection check passes."""
    setup_responses()
    result = SourceConvex().check_connection(
        MagicMock(),
        {
            "deployment_url": "https://murky-swan-635.convex.cloud",
            "access_key": "test_api_key",
        },
    )
    assert result == (True, None)
|
63
|
+
|
64
|
+
|
65
|
+
@responses.activate
def test_check_bad_connection(mocker):
    """A non-200 from json_schemas surfaces a formatted error message."""
    setup_responses()
    result = SourceConvex().check_connection(
        MagicMock(),
        {
            "deployment_url": "https://curious-giraffe-964.convex.cloud",
            "access_key": "test_api_key",
        },
    )
    assert result == (False, "Connection to Convex via json_schemas endpoint failed: 400: Error code: Error message")
|
77
|
+
|
78
|
+
|
79
|
+
@responses.activate
def test_streams(mocker):
    """Streams are built one-per-table with auth, schemas, and CDC columns wired in."""
    setup_responses()
    source = SourceConvex()
    streams = source.streams(
        {
            "deployment_url": "https://murky-swan-635.convex.cloud",
            "access_key": "test_api_key",
        }
    )
    assert len(streams) == 2
    streams.sort(key=lambda stream: stream.table_name)
    assert streams[0].table_name == "posts"
    assert streams[1].table_name == "users"
    assert all(stream.deployment_url == "https://murky-swan-635.convex.cloud" for stream in streams)
    assert all(stream._session.auth.get_auth_header() == {"Authorization": "Convex test_api_key"} for stream in streams)
    shapes = [stream.get_json_schema() for stream in streams]
    assert all(shape["type"] == "object" for shape in shapes)
    properties = [shape["properties"] for shape in shapes]
    # These three checks previously asserted a non-empty list comprehension
    # (`assert [expr for ...]`), which is always truthy and could never fail.
    # Use all(...) so each table's schema is actually verified. Note `_id` also
    # carries a per-table `$description`, and `_ts` is an integer in the schema.
    assert all(
        props["_id"]["type"] == "object" and props["_id"]["properties"] == {"$id": {"type": "string"}}
        for props in properties
    )
    assert all(props["_ts"] == {"type": "integer"} for props in properties)
    assert all(props["_creationTime"] == {"type": "number"} for props in properties)
    assert set(properties[0].keys()) == set(
        ["_id", "_ts", "_deleted", "_creationTime", "author", "body", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at"]
    )
    assert set(properties[1].keys()) == set(
        ["_id", "_ts", "_deleted", "_creationTime", "name", "tokenIdentifier", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at"]
    )
|
@@ -0,0 +1,169 @@
|
|
1
|
+
#
|
2
|
+
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
|
3
|
+
#
|
4
|
+
|
5
|
+
from http import HTTPStatus
|
6
|
+
from unittest.mock import MagicMock
|
7
|
+
|
8
|
+
import pytest
|
9
|
+
import requests
|
10
|
+
import responses
|
11
|
+
from airbyte_cdk.models import SyncMode
|
12
|
+
from source_convex.source import ConvexStream
|
13
|
+
|
14
|
+
|
15
|
+
@pytest.fixture
def patch_base_class(mocker):
    """Stub abstract members so ConvexStream can be instantiated directly."""
    for attr, stub in (
        ("primary_key", "test_primary_key"),
        ("__abstractmethods__", set()),
    ):
        mocker.patch.object(ConvexStream, attr, stub)
|
20
|
+
|
21
|
+
|
22
|
+
def test_request_params(patch_base_class):
    """Snapshot phase sends cursor + snapshot pin; delta phase sends only its cursor."""
    stream = ConvexStream("murky-swan-635", "accesskey", "json", "messages", None)
    kwargs = {"stream_slice": None, "stream_state": None, "next_page_token": None}
    assert stream.request_params(**kwargs) == {"tableName": "messages", "format": "json"}
    stream._snapshot_cursor_value = 1234
    assert stream.request_params(**kwargs) == {"tableName": "messages", "format": "json", "cursor": 1234}
    stream._snapshot_has_more = False
    stream._delta_cursor_value = 2345
    assert stream.request_params(**kwargs) == {"tableName": "messages", "format": "json", "cursor": 2345}
|
34
|
+
|
35
|
+
|
36
|
+
def test_next_page_token(patch_base_class):
    """Paging returns state until both snapshot and delta phases are exhausted."""
    stream = ConvexStream("murky-swan-635", "accesskey", "json", "messages", None)
    resp = MagicMock()
    resp.status_code = 200

    def page(payload):
        # Simulate one HTTP page and return the resulting page token.
        resp.json = lambda: payload
        stream.parse_response(resp, {})
        return stream.next_page_token(resp)

    token = page({"values": [{"_id": "my_id", "field": "f", "_ts": 123}], "cursor": 1234, "snapshot": 5000, "hasMore": True})
    assert token == {"snapshot_cursor": 1234, "snapshot_has_more": True, "delta_cursor": 5000}
    token = page({"values": [{"_id": "my_id", "field": "f", "_ts": 1235}], "cursor": 1235, "snapshot": 5000, "hasMore": False})
    assert token == {"snapshot_cursor": 1235, "snapshot_has_more": False, "delta_cursor": 5000}
    token = page({"values": [{"_id": "my_id", "field": "f", "_ts": 1235}], "cursor": 6000, "hasMore": True})
    assert token == {"snapshot_cursor": 1235, "snapshot_has_more": False, "delta_cursor": 6000}
    token = page({"values": [{"_id": "my_id", "field": "f", "_ts": 1235}], "cursor": 7000, "hasMore": False})
    assert token is None
    assert stream.state == {"snapshot_cursor": 1235, "snapshot_has_more": False, "delta_cursor": 7000}
|
65
|
+
|
66
|
+
|
67
|
+
@responses.activate
def test_read_records_full_refresh(patch_base_class):
    """Full refresh pages /api/list_snapshot to completion and never reads deltas."""
    stream = ConvexStream("http://mocked_base_url:8080", "accesskey", "json", "messages", None)
    snapshot_pages = [
        (
            "http://mocked_base_url:8080/api/list_snapshot?tableName=messages&format=json",
            {"values": [{"_id": "my_id", "field": "f", "_ts": 123}], "cursor": 1234, "snapshot": 5000, "hasMore": True},
        ),
        (
            "http://mocked_base_url:8080/api/list_snapshot?tableName=messages&format=json&cursor=1234&snapshot=5000",
            {"values": [{"_id": "an_id", "field": "b", "_ts": 100}], "cursor": 2345, "snapshot": 5000, "hasMore": True},
        ),
        (
            "http://mocked_base_url:8080/api/list_snapshot?tableName=messages&format=json&cursor=2345&snapshot=5000",
            {"values": [{"_id": "a_id", "field": "x", "_ts": 300}], "cursor": 3456, "snapshot": 5000, "hasMore": False},
        ),
    ]
    for url, payload in snapshot_pages:
        responses.add(responses.GET, url, json=payload)
    records = list(stream.read_records(SyncMode.full_refresh))
    assert len(records) == 3
    assert [record["field"] for record in records] == ["f", "b", "x"]
    assert stream.state == {"delta_cursor": 5000, "snapshot_cursor": 3456, "snapshot_has_more": False}
|
92
|
+
|
93
|
+
|
94
|
+
@responses.activate
def test_read_records_incremental(patch_base_class):
    """Incremental drains the snapshot, then follows document_deltas until hasMore is false."""
    stream = ConvexStream("http://mocked_base_url:8080", "accesskey", "json", "messages", None)
    mocked_pages = [
        (
            "http://mocked_base_url:8080/api/list_snapshot?tableName=messages&format=json",
            {"values": [{"_id": "my_id", "field": "f", "_ts": 123}], "cursor": 1234, "snapshot": 5000, "hasMore": True},
        ),
        (
            "http://mocked_base_url:8080/api/list_snapshot?tableName=messages&format=json&cursor=1234&snapshot=5000",
            {"values": [{"_id": "an_id", "field": "b", "_ts": 100}], "cursor": 2345, "snapshot": 5000, "hasMore": False},
        ),
        (
            "http://mocked_base_url:8080/api/document_deltas?tableName=messages&format=json&cursor=5000",
            {"values": [{"_id": "a_id", "field": "x", "_ts": 300}], "cursor": 6000, "hasMore": True},
        ),
        (
            "http://mocked_base_url:8080/api/document_deltas?tableName=messages&format=json&cursor=6000",
            {"values": [{"_id": "a_id", "field": "x", "_ts": 400}], "cursor": 7000, "hasMore": False},
        ),
    ]
    for url, payload in mocked_pages:
        responses.add(responses.GET, url, json=payload)
    records = list(stream.read_records(SyncMode.incremental))
    assert len(records) == 4
    assert [record["field"] for record in records] == ["f", "b", "x", "x"]
    assert stream.state == {"delta_cursor": 7000, "snapshot_cursor": 2345, "snapshot_has_more": False}
|
125
|
+
|
126
|
+
|
127
|
+
def test_parse_response(patch_base_class):
    """parse_response returns the page's `values` array untouched."""
    stream = ConvexStream("murky-swan-635", "accesskey", "json", "messages", None)
    resp = MagicMock()
    resp.status_code = 200
    resp.json = lambda: {"values": [{"_id": "my_id", "field": "f", "_ts": 1234}], "cursor": 1234, "snapshot": 2000, "hasMore": True}
    parsed = stream.parse_response(response=resp, stream_state={})
    assert parsed == [{"_id": "my_id", "field": "f", "_ts": 1234}]
|
135
|
+
|
136
|
+
|
137
|
+
def test_request_headers(patch_base_class):
    """Every request advertises the Convex client version (auth is added separately)."""
    stream = ConvexStream("murky-swan-635", "accesskey", "json", "messages", None)
    headers = stream.request_headers(stream_slice=None, stream_state=None, next_page_token=None)
    assert headers == {"Convex-Client": "airbyte-export-0.4.0"}
|
141
|
+
|
142
|
+
|
143
|
+
def test_http_method(patch_base_class):
    """All Convex sync endpoints are read via GET."""
    stream = ConvexStream("murky-swan-635", "accesskey", "json", "messages", None)
    assert stream.http_method == "GET"
|
147
|
+
|
148
|
+
|
149
|
+
@pytest.mark.parametrize(
    ("http_status", "should_retry"),
    [
        (HTTPStatus.OK, False),
        (HTTPStatus.BAD_REQUEST, False),
        (HTTPStatus.TOO_MANY_REQUESTS, True),
        (HTTPStatus.INTERNAL_SERVER_ERROR, True),
    ],
)
def test_should_retry(patch_base_class, http_status, should_retry):
    """Retry throttling/server errors; client errors and success are not retried."""
    response_mock = MagicMock(status_code=http_status)
    stream = ConvexStream("murky-swan-635", "accesskey", "json", "messages", None)
    assert stream.should_retry(response_mock) == should_retry
|
163
|
+
|
164
|
+
|
165
|
+
def test_backoff_time(patch_base_class):
    """No custom backoff: defer to the CDK's default exponential policy."""
    stream = ConvexStream("murky-swan-635", "accesskey", "json", "messages", None)
    assert stream.backoff_time(MagicMock()) is None
|