airbyte-source-microsoft-dataverse 0.1.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airbyte-source-microsoft-dataverse-0.1.2/PKG-INFO +99 -0
- airbyte-source-microsoft-dataverse-0.1.2/README.md +100 -0
- airbyte-source-microsoft-dataverse-0.1.2/airbyte_source_microsoft_dataverse.egg-info/PKG-INFO +99 -0
- airbyte-source-microsoft-dataverse-0.1.2/airbyte_source_microsoft_dataverse.egg-info/SOURCES.txt +27 -0
- airbyte-source-microsoft-dataverse-0.1.2/airbyte_source_microsoft_dataverse.egg-info/dependency_links.txt +1 -0
- airbyte-source-microsoft-dataverse-0.1.2/airbyte_source_microsoft_dataverse.egg-info/entry_points.txt +2 -0
- airbyte-source-microsoft-dataverse-0.1.2/airbyte_source_microsoft_dataverse.egg-info/requires.txt +6 -0
- airbyte-source-microsoft-dataverse-0.1.2/airbyte_source_microsoft_dataverse.egg-info/top_level.txt +3 -0
- airbyte-source-microsoft-dataverse-0.1.2/integration_tests/__init__.py +3 -0
- airbyte-source-microsoft-dataverse-0.1.2/integration_tests/abnormal_state.json +5 -0
- airbyte-source-microsoft-dataverse-0.1.2/integration_tests/acceptance.py +16 -0
- airbyte-source-microsoft-dataverse-0.1.2/integration_tests/configured_catalog.json +15 -0
- airbyte-source-microsoft-dataverse-0.1.2/integration_tests/invalid_config.json +7 -0
- airbyte-source-microsoft-dataverse-0.1.2/integration_tests/sample_config.json +7 -0
- airbyte-source-microsoft-dataverse-0.1.2/integration_tests/sample_state.json +5 -0
- airbyte-source-microsoft-dataverse-0.1.2/setup.cfg +97 -0
- airbyte-source-microsoft-dataverse-0.1.2/setup.py +43 -0
- airbyte-source-microsoft-dataverse-0.1.2/source_microsoft_dataverse/__init__.py +8 -0
- airbyte-source-microsoft-dataverse-0.1.2/source_microsoft_dataverse/dataverse.py +80 -0
- airbyte-source-microsoft-dataverse-0.1.2/source_microsoft_dataverse/run.py +14 -0
- airbyte-source-microsoft-dataverse-0.1.2/source_microsoft_dataverse/source.py +102 -0
- airbyte-source-microsoft-dataverse-0.1.2/source_microsoft_dataverse/spec.yaml +47 -0
- airbyte-source-microsoft-dataverse-0.1.2/source_microsoft_dataverse/streams.py +152 -0
- airbyte-source-microsoft-dataverse-0.1.2/unit_tests/__init__.py +3 -0
- airbyte-source-microsoft-dataverse-0.1.2/unit_tests/test_dataverse.py +15 -0
- airbyte-source-microsoft-dataverse-0.1.2/unit_tests/test_incremental_streams.py +101 -0
- airbyte-source-microsoft-dataverse-0.1.2/unit_tests/test_source.py +148 -0
- airbyte-source-microsoft-dataverse-0.1.2/unit_tests/test_streams.py +107 -0
@@ -0,0 +1,99 @@
|
|
1
|
+
Metadata-Version: 2.1
|
2
|
+
Name: airbyte-source-microsoft-dataverse
|
3
|
+
Version: 0.1.2
|
4
|
+
Summary: Source implementation for Microsoft Dataverse.
|
5
|
+
Author: Airbyte
|
6
|
+
Author-email: contact@airbyte.io
|
7
|
+
Description-Content-Type: text/markdown
|
8
|
+
Requires-Dist: airbyte-cdk~=0.2
|
9
|
+
Provides-Extra: tests
|
10
|
+
Requires-Dist: requests-mock~=1.9.3; extra == "tests"
|
11
|
+
Requires-Dist: pytest~=6.1; extra == "tests"
|
12
|
+
Requires-Dist: pytest-mock~=3.6.1; extra == "tests"
|
13
|
+
|
14
|
+
# Microsoft Dataverse Source
|
15
|
+
|
16
|
+
This is the repository for the Microsoft Dataverse source connector, written in Python.
|
17
|
+
For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/microsoft-dataverse).
|
18
|
+
|
19
|
+
|
20
|
+
**To iterate on this connector, make sure to complete this prerequisites section.**
|
21
|
+
|
22
|
+
|
23
|
+
From this connector directory, create a virtual environment:
|
24
|
+
```
|
25
|
+
python -m venv .venv
|
26
|
+
```
|
27
|
+
|
28
|
+
This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your
|
29
|
+
development environment of choice. To activate it from the terminal, run:
|
30
|
+
```
|
31
|
+
source .venv/bin/activate
|
32
|
+
pip install -r requirements.txt
|
33
|
+
pip install '.[tests]'
|
34
|
+
```
|
35
|
+
If you are in an IDE, follow your IDE's instructions to activate the virtualenv.
|
36
|
+
|
37
|
+
Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is
|
38
|
+
used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`.
|
39
|
+
If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything
|
40
|
+
should work as you expect.
|
41
|
+
|
42
|
+
**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/microsoft-dataverse)
|
43
|
+
to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_microsoft_dataverse/spec.yaml` file.
|
44
|
+
Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information.
|
45
|
+
See `integration_tests/sample_config.json` for a sample config file.
|
46
|
+
|
47
|
+
**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source microsoft-dataverse test creds`
|
48
|
+
and place them into `secrets/config.json`.
|
49
|
+
|
50
|
+
```
|
51
|
+
python main.py spec
|
52
|
+
python main.py check --config secrets/config.json
|
53
|
+
python main.py discover --config secrets/config.json
|
54
|
+
python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json
|
55
|
+
```
|
56
|
+
|
57
|
+
|
58
|
+
|
59
|
+
**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):**
|
60
|
+
```bash
|
61
|
+
airbyte-ci connectors --name=source-microsoft-dataverse build
|
62
|
+
```
|
63
|
+
|
64
|
+
An image will be built with the tag `airbyte/source-microsoft-dataverse:dev`.
|
65
|
+
|
66
|
+
**Via `docker build`:**
|
67
|
+
```bash
|
68
|
+
docker build -t airbyte/source-microsoft-dataverse:dev .
|
69
|
+
```
|
70
|
+
|
71
|
+
Then run any of the connector commands as follows:
|
72
|
+
```
|
73
|
+
docker run --rm airbyte/source-microsoft-dataverse:dev spec
|
74
|
+
docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-microsoft-dataverse:dev check --config /secrets/config.json
|
75
|
+
docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-microsoft-dataverse:dev discover --config /secrets/config.json
|
76
|
+
docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-microsoft-dataverse:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
|
77
|
+
```
|
78
|
+
|
79
|
+
You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md):
|
80
|
+
```bash
|
81
|
+
airbyte-ci connectors --name=source-microsoft-dataverse test
|
82
|
+
```
|
83
|
+
|
84
|
+
Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
|
85
|
+
If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py.
|
86
|
+
|
87
|
+
All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
|
88
|
+
We split dependencies between two groups, dependencies that are:
|
89
|
+
* required for your connector to work need to go to `MAIN_REQUIREMENTS` list.
|
90
|
+
* required for the testing need to go to `TEST_REQUIREMENTS` list
|
91
|
+
|
92
|
+
You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
|
93
|
+
1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-microsoft-dataverse test`
|
94
|
+
2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
|
95
|
+
3. Make sure the `metadata.yaml` content is up to date.
|
96
|
+
4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/microsoft-dataverse.md`).
|
97
|
+
5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
|
98
|
+
6. Pat yourself on the back for being an awesome contributor.
|
99
|
+
7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
|
@@ -0,0 +1,100 @@
|
|
1
|
+
# Microsoft Dataverse Source
|
2
|
+
|
3
|
+
This is the repository for the Microsoft Dataverse source connector, written in Python.
|
4
|
+
For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/microsoft-dataverse).
|
5
|
+
|
6
|
+
## Local development
|
7
|
+
|
8
|
+
### Prerequisites
|
9
|
+
**To iterate on this connector, make sure to complete this prerequisites section.**
|
10
|
+
|
11
|
+
#### Minimum Python version required `= 3.9.0`
|
12
|
+
|
13
|
+
#### Build & Activate Virtual Environment and install dependencies
|
14
|
+
From this connector directory, create a virtual environment:
|
15
|
+
```
|
16
|
+
python -m venv .venv
|
17
|
+
```
|
18
|
+
|
19
|
+
This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your
|
20
|
+
development environment of choice. To activate it from the terminal, run:
|
21
|
+
```
|
22
|
+
source .venv/bin/activate
|
23
|
+
pip install -r requirements.txt
|
24
|
+
pip install '.[tests]'
|
25
|
+
```
|
26
|
+
If you are in an IDE, follow your IDE's instructions to activate the virtualenv.
|
27
|
+
|
28
|
+
Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is
|
29
|
+
used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`.
|
30
|
+
If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything
|
31
|
+
should work as you expect.
|
32
|
+
|
33
|
+
#### Create credentials
|
34
|
+
**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/microsoft-dataverse)
|
35
|
+
to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_microsoft_dataverse/spec.yaml` file.
|
36
|
+
Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information.
|
37
|
+
See `integration_tests/sample_config.json` for a sample config file.
|
38
|
+
|
39
|
+
**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source microsoft-dataverse test creds`
|
40
|
+
and place them into `secrets/config.json`.
|
41
|
+
|
42
|
+
### Locally running the connector
|
43
|
+
```
|
44
|
+
python main.py spec
|
45
|
+
python main.py check --config secrets/config.json
|
46
|
+
python main.py discover --config secrets/config.json
|
47
|
+
python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json
|
48
|
+
```
|
49
|
+
|
50
|
+
### Locally running the connector docker image
|
51
|
+
|
52
|
+
|
53
|
+
#### Build
|
54
|
+
**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):**
|
55
|
+
```bash
|
56
|
+
airbyte-ci connectors --name=source-microsoft-dataverse build
|
57
|
+
```
|
58
|
+
|
59
|
+
An image will be built with the tag `airbyte/source-microsoft-dataverse:dev`.
|
60
|
+
|
61
|
+
**Via `docker build`:**
|
62
|
+
```bash
|
63
|
+
docker build -t airbyte/source-microsoft-dataverse:dev .
|
64
|
+
```
|
65
|
+
|
66
|
+
#### Run
|
67
|
+
Then run any of the connector commands as follows:
|
68
|
+
```
|
69
|
+
docker run --rm airbyte/source-microsoft-dataverse:dev spec
|
70
|
+
docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-microsoft-dataverse:dev check --config /secrets/config.json
|
71
|
+
docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-microsoft-dataverse:dev discover --config /secrets/config.json
|
72
|
+
docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-microsoft-dataverse:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
|
73
|
+
```
|
74
|
+
|
75
|
+
## Testing
|
76
|
+
You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md):
|
77
|
+
```bash
|
78
|
+
airbyte-ci connectors --name=source-microsoft-dataverse test
|
79
|
+
```
|
80
|
+
|
81
|
+
### Customizing acceptance Tests
|
82
|
+
Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
|
83
|
+
If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py.
|
84
|
+
|
85
|
+
## Dependency Management
|
86
|
+
All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
|
87
|
+
We split dependencies between two groups, dependencies that are:
|
88
|
+
* required for your connector to work need to go to `MAIN_REQUIREMENTS` list.
|
89
|
+
* required for the testing need to go to `TEST_REQUIREMENTS` list
|
90
|
+
|
91
|
+
### Publishing a new version of the connector
|
92
|
+
You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
|
93
|
+
1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-microsoft-dataverse test`
|
94
|
+
2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
|
95
|
+
3. Make sure the `metadata.yaml` content is up to date.
|
96
|
+
4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/microsoft-dataverse.md`).
|
97
|
+
5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
|
98
|
+
6. Pat yourself on the back for being an awesome contributor.
|
99
|
+
7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
|
100
|
+
|
@@ -0,0 +1,99 @@
|
|
1
|
+
Metadata-Version: 2.1
|
2
|
+
Name: airbyte-source-microsoft-dataverse
|
3
|
+
Version: 0.1.2
|
4
|
+
Summary: Source implementation for Microsoft Dataverse.
|
5
|
+
Author: Airbyte
|
6
|
+
Author-email: contact@airbyte.io
|
7
|
+
Description-Content-Type: text/markdown
|
8
|
+
Requires-Dist: airbyte-cdk~=0.2
|
9
|
+
Provides-Extra: tests
|
10
|
+
Requires-Dist: requests-mock~=1.9.3; extra == "tests"
|
11
|
+
Requires-Dist: pytest~=6.1; extra == "tests"
|
12
|
+
Requires-Dist: pytest-mock~=3.6.1; extra == "tests"
|
13
|
+
|
14
|
+
# Microsoft Dataverse Source
|
15
|
+
|
16
|
+
This is the repository for the Microsoft Dataverse source connector, written in Python.
|
17
|
+
For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/microsoft-dataverse).
|
18
|
+
|
19
|
+
|
20
|
+
**To iterate on this connector, make sure to complete this prerequisites section.**
|
21
|
+
|
22
|
+
|
23
|
+
From this connector directory, create a virtual environment:
|
24
|
+
```
|
25
|
+
python -m venv .venv
|
26
|
+
```
|
27
|
+
|
28
|
+
This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your
|
29
|
+
development environment of choice. To activate it from the terminal, run:
|
30
|
+
```
|
31
|
+
source .venv/bin/activate
|
32
|
+
pip install -r requirements.txt
|
33
|
+
pip install '.[tests]'
|
34
|
+
```
|
35
|
+
If you are in an IDE, follow your IDE's instructions to activate the virtualenv.
|
36
|
+
|
37
|
+
Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is
|
38
|
+
used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`.
|
39
|
+
If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything
|
40
|
+
should work as you expect.
|
41
|
+
|
42
|
+
**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/microsoft-dataverse)
|
43
|
+
to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_microsoft_dataverse/spec.yaml` file.
|
44
|
+
Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information.
|
45
|
+
See `integration_tests/sample_config.json` for a sample config file.
|
46
|
+
|
47
|
+
**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source microsoft-dataverse test creds`
|
48
|
+
and place them into `secrets/config.json`.
|
49
|
+
|
50
|
+
```
|
51
|
+
python main.py spec
|
52
|
+
python main.py check --config secrets/config.json
|
53
|
+
python main.py discover --config secrets/config.json
|
54
|
+
python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json
|
55
|
+
```
|
56
|
+
|
57
|
+
|
58
|
+
|
59
|
+
**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):**
|
60
|
+
```bash
|
61
|
+
airbyte-ci connectors --name=source-microsoft-dataverse build
|
62
|
+
```
|
63
|
+
|
64
|
+
An image will be built with the tag `airbyte/source-microsoft-dataverse:dev`.
|
65
|
+
|
66
|
+
**Via `docker build`:**
|
67
|
+
```bash
|
68
|
+
docker build -t airbyte/source-microsoft-dataverse:dev .
|
69
|
+
```
|
70
|
+
|
71
|
+
Then run any of the connector commands as follows:
|
72
|
+
```
|
73
|
+
docker run --rm airbyte/source-microsoft-dataverse:dev spec
|
74
|
+
docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-microsoft-dataverse:dev check --config /secrets/config.json
|
75
|
+
docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-microsoft-dataverse:dev discover --config /secrets/config.json
|
76
|
+
docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-microsoft-dataverse:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
|
77
|
+
```
|
78
|
+
|
79
|
+
You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md):
|
80
|
+
```bash
|
81
|
+
airbyte-ci connectors --name=source-microsoft-dataverse test
|
82
|
+
```
|
83
|
+
|
84
|
+
Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
|
85
|
+
If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py.
|
86
|
+
|
87
|
+
All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
|
88
|
+
We split dependencies between two groups, dependencies that are:
|
89
|
+
* required for your connector to work need to go to `MAIN_REQUIREMENTS` list.
|
90
|
+
* required for the testing need to go to `TEST_REQUIREMENTS` list
|
91
|
+
|
92
|
+
You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
|
93
|
+
1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-microsoft-dataverse test`
|
94
|
+
2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
|
95
|
+
3. Make sure the `metadata.yaml` content is up to date.
|
96
|
+
4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/microsoft-dataverse.md`).
|
97
|
+
5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
|
98
|
+
6. Pat yourself on the back for being an awesome contributor.
|
99
|
+
7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
|
airbyte-source-microsoft-dataverse-0.1.2/airbyte_source_microsoft_dataverse.egg-info/SOURCES.txt
ADDED
@@ -0,0 +1,27 @@
|
|
1
|
+
README.md
|
2
|
+
setup.cfg
|
3
|
+
setup.py
|
4
|
+
airbyte_source_microsoft_dataverse.egg-info/PKG-INFO
|
5
|
+
airbyte_source_microsoft_dataverse.egg-info/SOURCES.txt
|
6
|
+
airbyte_source_microsoft_dataverse.egg-info/dependency_links.txt
|
7
|
+
airbyte_source_microsoft_dataverse.egg-info/entry_points.txt
|
8
|
+
airbyte_source_microsoft_dataverse.egg-info/requires.txt
|
9
|
+
airbyte_source_microsoft_dataverse.egg-info/top_level.txt
|
10
|
+
integration_tests/__init__.py
|
11
|
+
integration_tests/abnormal_state.json
|
12
|
+
integration_tests/acceptance.py
|
13
|
+
integration_tests/configured_catalog.json
|
14
|
+
integration_tests/invalid_config.json
|
15
|
+
integration_tests/sample_config.json
|
16
|
+
integration_tests/sample_state.json
|
17
|
+
source_microsoft_dataverse/__init__.py
|
18
|
+
source_microsoft_dataverse/dataverse.py
|
19
|
+
source_microsoft_dataverse/run.py
|
20
|
+
source_microsoft_dataverse/source.py
|
21
|
+
source_microsoft_dataverse/spec.yaml
|
22
|
+
source_microsoft_dataverse/streams.py
|
23
|
+
unit_tests/__init__.py
|
24
|
+
unit_tests/test_dataverse.py
|
25
|
+
unit_tests/test_incremental_streams.py
|
26
|
+
unit_tests/test_source.py
|
27
|
+
unit_tests/test_streams.py
|
@@ -0,0 +1 @@
|
|
1
|
+
|
@@ -0,0 +1,16 @@
|
|
1
|
+
#
|
2
|
+
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
|
3
|
+
#
|
4
|
+
|
5
|
+
|
6
|
+
import pytest

# Register Airbyte's connector acceptance-test plugin with pytest.
pytest_plugins = ("connector_acceptance_test.plugin",)


@pytest.fixture(scope="session", autouse=True)
def connector_setup():
    """This fixture is a placeholder for external resources that acceptance test might require."""
    # TODO: setup test dependencies if needed. otherwise remove the TODO comments
    yield
    # TODO: clean up test dependencies
|
@@ -0,0 +1,15 @@
|
|
1
|
+
{
|
2
|
+
"streams": [
|
3
|
+
{
|
4
|
+
"stream": {
|
5
|
+
"name": "<entity_name>",
|
6
|
+
"json_schema": {},
|
7
|
+
"supported_sync_modes": ["full_refresh", "incremental"]
|
8
|
+
},
|
9
|
+
"cursor_field": ["modifiedon"],
|
10
|
+
"primary_key": [["<entity_primary_key>"]],
|
11
|
+
"sync_mode": "incremental",
|
12
|
+
"destination_sync_mode": "append"
|
13
|
+
}
|
14
|
+
]
|
15
|
+
}
|
@@ -0,0 +1,97 @@
|
|
1
|
+
[metadata]
|
2
|
+
name = airbyte-source-microsoft-dataverse
|
3
|
+
version = 0.1.2
|
4
|
+
author = Airbyte
|
5
|
+
author_email = contact@airbyte.io
|
6
|
+
long_description = # Microsoft Dataverse Source
|
7
|
+
|
8
|
+
This is the repository for the Microsoft Dataverse source connector, written in Python.
|
9
|
+
For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/microsoft-dataverse).
|
10
|
+
|
11
|
+
|
12
|
+
**To iterate on this connector, make sure to complete this prerequisites section.**
|
13
|
+
|
14
|
+
|
15
|
+
From this connector directory, create a virtual environment:
|
16
|
+
```
|
17
|
+
python -m venv .venv
|
18
|
+
```
|
19
|
+
|
20
|
+
This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your
|
21
|
+
development environment of choice. To activate it from the terminal, run:
|
22
|
+
```
|
23
|
+
source .venv/bin/activate
|
24
|
+
pip install -r requirements.txt
|
25
|
+
pip install '.[tests]'
|
26
|
+
```
|
27
|
+
If you are in an IDE, follow your IDE's instructions to activate the virtualenv.
|
28
|
+
|
29
|
+
Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is
|
30
|
+
used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`.
|
31
|
+
If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything
|
32
|
+
should work as you expect.
|
33
|
+
|
34
|
+
**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/microsoft-dataverse)
|
35
|
+
to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_microsoft_dataverse/spec.yaml` file.
|
36
|
+
Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information.
|
37
|
+
See `integration_tests/sample_config.json` for a sample config file.
|
38
|
+
|
39
|
+
**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source microsoft-dataverse test creds`
|
40
|
+
and place them into `secrets/config.json`.
|
41
|
+
|
42
|
+
```
|
43
|
+
python main.py spec
|
44
|
+
python main.py check --config secrets/config.json
|
45
|
+
python main.py discover --config secrets/config.json
|
46
|
+
python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json
|
47
|
+
```
|
48
|
+
|
49
|
+
|
50
|
+
|
51
|
+
**Via [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) (recommended):**
|
52
|
+
```bash
|
53
|
+
airbyte-ci connectors --name=source-microsoft-dataverse build
|
54
|
+
```
|
55
|
+
|
56
|
+
An image will be built with the tag `airbyte/source-microsoft-dataverse:dev`.
|
57
|
+
|
58
|
+
**Via `docker build`:**
|
59
|
+
```bash
|
60
|
+
docker build -t airbyte/source-microsoft-dataverse:dev .
|
61
|
+
```
|
62
|
+
|
63
|
+
Then run any of the connector commands as follows:
|
64
|
+
```
|
65
|
+
docker run --rm airbyte/source-microsoft-dataverse:dev spec
|
66
|
+
docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-microsoft-dataverse:dev check --config /secrets/config.json
|
67
|
+
docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-microsoft-dataverse:dev discover --config /secrets/config.json
|
68
|
+
docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-microsoft-dataverse:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
|
69
|
+
```
|
70
|
+
|
71
|
+
You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md):
|
72
|
+
```bash
|
73
|
+
airbyte-ci connectors --name=source-microsoft-dataverse test
|
74
|
+
```
|
75
|
+
|
76
|
+
Customize `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information.
|
77
|
+
If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py.
|
78
|
+
|
79
|
+
All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
|
80
|
+
We split dependencies between two groups, dependencies that are:
|
81
|
+
* required for your connector to work need to go to `MAIN_REQUIREMENTS` list.
|
82
|
+
* required for the testing need to go to `TEST_REQUIREMENTS` list
|
83
|
+
|
84
|
+
You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
|
85
|
+
1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-microsoft-dataverse test`
|
86
|
+
2. Bump the connector version in `metadata.yaml`: increment the `dockerImageTag` value. Please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors).
|
87
|
+
3. Make sure the `metadata.yaml` content is up to date.
|
88
|
+
4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/microsoft-dataverse.md`).
|
89
|
+
5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention).
|
90
|
+
6. Pat yourself on the back for being an awesome contributor.
|
91
|
+
7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
|
92
|
+
long_description_content_type = text/markdown
|
93
|
+
|
94
|
+
[egg_info]
|
95
|
+
tag_build =
|
96
|
+
tag_date = 0
|
97
|
+
|
@@ -0,0 +1,43 @@
|
|
1
|
+
#
|
2
|
+
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
|
3
|
+
#
|
4
|
+
|
5
|
+
|
6
|
+
from setuptools import find_packages, setup

# Runtime dependencies; keep in sync with the package metadata (requires.txt).
MAIN_REQUIREMENTS = [
    "airbyte-cdk~=0.2",
]

# Dependencies used only by the test suite (installed via `pip install '.[tests]'`).
TEST_REQUIREMENTS = [
    "requests-mock~=1.9.3",
    "pytest~=6.1",
    "pytest-mock~=3.6.1",
]

# Include json files in the package up to four directory levels deep:
# "*.json", "*/*.json", ..., "*/*/*/*/*.json".
_JSON_GLOBS = ["*/" * depth + "*.json" for depth in range(5)]

setup(
    entry_points={
        "console_scripts": [
            "source-microsoft-dataverse=source_microsoft_dataverse.run:run",
        ],
    },
    description="Source implementation for Microsoft Dataverse.",
    packages=find_packages(),
    install_requires=MAIN_REQUIREMENTS,
    # Ship yaml spec files and any json schema files alongside the code.
    package_data={
        "": [
            "*.yml",
            "*.yaml",
            *_JSON_GLOBS,
        ]
    },
    extras_require={
        "tests": TEST_REQUIREMENTS,
    },
)
|
@@ -0,0 +1,80 @@
|
|
1
|
+
#
|
2
|
+
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
|
3
|
+
#
|
4
|
+
|
5
|
+
from enum import Enum
|
6
|
+
from typing import Any, Mapping, MutableMapping, Optional
|
7
|
+
|
8
|
+
import requests
|
9
|
+
from airbyte_cdk.sources.streams.http.requests_native_auth.oauth import Oauth2Authenticator
|
10
|
+
|
11
|
+
|
12
|
+
class MicrosoftOauth2Authenticator(Oauth2Authenticator):
    """Authenticator for the Microsoft identity platform using the client-credentials grant."""

    def build_refresh_request_body(self) -> Mapping[str, Any]:
        """Return the form body sent to the token endpoint when fetching an access token.

        The client-credentials flow has no refresh token: each "refresh" simply
        requests a new token for the configured scope.
        """
        return {
            "grant_type": "client_credentials",
            "client_id": self.get_client_id(),
            "client_secret": self.get_client_secret(),
            "scope": self.get_scopes(),
        }
|
25
|
+
|
26
|
+
|
27
|
+
class AirbyteType(Enum):
    """JSON-schema fragments for the Airbyte column types this connector emits."""

    String = {"type": ["null", "string"]}
    Boolean = {"type": ["null", "boolean"]}
    Timestamp = {"type": ["null", "string"], "format": "date-time", "airbyte_type": "timestamp_with_timezone"}
    Integer = {"type": ["null", "integer"]}
    Number = {"type": ["null", "number"]}


class DataverseType(Enum):
    """Maps Dataverse attribute type names onto AirbyteType members.

    NOTE: members sharing a value (e.g. Uniqueidentifier and String) are enum
    aliases. That is fine here because lookup is by name (DataverseType[...]),
    which resolves aliases; do not reorder members, as order determines which
    name is canonical for a shared value.
    """

    String = AirbyteType.String
    Uniqueidentifier = AirbyteType.String
    DateTime = AirbyteType.Timestamp
    Integer = AirbyteType.Integer
    BigInt = AirbyteType.Integer
    Money = AirbyteType.Number
    Boolean = AirbyteType.Boolean
    Double = AirbyteType.Number
    Decimal = AirbyteType.Number
    Status = AirbyteType.Integer
    State = AirbyteType.Integer
    Picklist = AirbyteType.Integer
    Lookup = AirbyteType.String
    Virtual = None  # carries no data; conversion yields None so the field is skipped
|
52
|
+
|
53
|
+
|
54
|
+
def get_auth(config: Mapping[str, Any]) -> MicrosoftOauth2Authenticator:
    """Build an authenticator for the tenant and app registration described by *config*.

    Reads "tenant_id", "client_id", "client_secret_value" and "url" from the config.
    """
    tenant = config["tenant_id"]
    token_endpoint = f"https://login.microsoftonline.com/{tenant}/oauth2/v2.0/token"
    # Client-credentials flow: scope is the resource's /.default and there is
    # no refresh token, hence the empty string.
    return MicrosoftOauth2Authenticator(
        token_refresh_endpoint=token_endpoint,
        client_id=config["client_id"],
        client_secret=config["client_secret_value"],
        scopes=[f'{config["url"]}/.default'],
        refresh_token="",
    )
|
62
|
+
|
63
|
+
|
64
|
+
def do_request(config: Mapping[str, Any], path: str):
    """Issue an authenticated GET against the Dataverse Web API (v9.2) and return the response."""
    headers = get_auth(config).get_auth_header()
    url = f'{config["url"]}/api/data/v9.2/{path}'
    # Call the protected API with the acquired access token.
    return requests.get(url, headers=headers)
|
72
|
+
|
73
|
+
|
74
|
+
def convert_dataverse_type(dataverse_type: str) -> Optional[dict]:
    """Translate a Dataverse attribute type name into an Airbyte JSON-schema fragment.

    Returns None for types that carry no data (DataverseType.Virtual); unknown
    type names fall back to the string schema.

    The original `if enum_type:` guard was dead code — Enum members are always
    truthy regardless of their value — so the lookup is now a plain EAFP access.
    """
    try:
        mapped = DataverseType[dataverse_type].value
    except KeyError:
        # Unmapped Dataverse type: default to the permissive string schema.
        return AirbyteType.String.value
    # `mapped` is an AirbyteType member, or None for DataverseType.Virtual.
    return None if mapped is None else mapped.value
|