airbyte-cdk 6.59.0__py3-none-any.whl → 6.59.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airbyte_cdk/manifest_migrations/README.md +4 -3
- airbyte_cdk/manifest_migrations/migrations/registry.yaml +1 -1
- airbyte_cdk/sources/declarative/declarative_component_schema.yaml +105 -20
- airbyte_cdk/sources/declarative/models/declarative_component_schema.py +63 -35
- airbyte_cdk/test/entrypoint_wrapper.py +0 -1
- airbyte_cdk/test/models/scenario.py +9 -0
- airbyte_cdk/test/standard_tests/connector_base.py +0 -8
- airbyte_cdk/test/standard_tests/docker_base.py +62 -26
- airbyte_cdk/test/standard_tests/source_base.py +7 -1
- airbyte_cdk/utils/connector_paths.py +1 -2
- {airbyte_cdk-6.59.0.dist-info → airbyte_cdk-6.59.2.dist-info}/METADATA +1 -1
- {airbyte_cdk-6.59.0.dist-info → airbyte_cdk-6.59.2.dist-info}/RECORD +16 -16
- {airbyte_cdk-6.59.0.dist-info → airbyte_cdk-6.59.2.dist-info}/LICENSE.txt +0 -0
- {airbyte_cdk-6.59.0.dist-info → airbyte_cdk-6.59.2.dist-info}/LICENSE_SHORT +0 -0
- {airbyte_cdk-6.59.0.dist-info → airbyte_cdk-6.59.2.dist-info}/WHEEL +0 -0
- {airbyte_cdk-6.59.0.dist-info → airbyte_cdk-6.59.2.dist-info}/entry_points.txt +0 -0
@@ -20,14 +20,15 @@ This directory contains the logic and registry for manifest migrations in the Ai
 
 3. **Register the Migration:**
    - Open `migrations/registry.yaml`.
-   - Add an entry under the appropriate version, or create a new version section if needed.
-   - Version can be: "*", "==6.48.3", "~=1.2", ">=1.0.0,<2.0.0", "6.48.3"
+   - Add an entry under the appropriate version, or create a new version section if needed.
+   - Version can be: "*", "==6.48.3", "~=1.2", ">=1.0.0,<2.0.0", "6.48.3"
    - Each migration entry should include:
      - `name`: The filename (without `.py`)
      - `order`: The order in which this migration should be applied for the version
      - `description`: A short description of the migration
 
    Example:
+
    ```yaml
    manifest_migrations:
      - version: 6.45.2
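The version keys shown above ("*", "==6.48.3", "~=1.2", ">=1.0.0,<2.0.0", "6.48.3") read like PEP 440 version specifiers. A minimal sketch of how the operator-style forms can be checked with the `packaging` library; this is illustrative only, the wildcard and bare-version forms are special cases as the README describes, and this is not necessarily the CDK's own resolution code:

```python
from packaging.specifiers import SpecifierSet
from packaging.version import Version

# Operator-style registry constraints checked against a hypothetical CDK version.
for spec, cdk_version in [("==6.48.3", "6.48.3"), ("~=1.2", "1.2.5"), (">=1.0.0,<2.0.0", "1.9.9")]:
    print(f"{spec!r} matches {cdk_version}: {Version(cdk_version) in SpecifierSet(spec)}")
```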
@@ -71,4 +72,4 @@ class ExampleMigration(ManifestMigration):
 
 ---
 
-For more details, see the docstrings in `manifest_migration.py` and the examples in the `migrations/` folder.
+For more details, see the docstrings in `manifest_migration.py` and the examples in the `migrations/` folder.
@@ -940,14 +940,53 @@ definitions:
     - "{{ config['record_cursor'] }}"
 cursor_datetime_formats:
   title: Cursor Datetime Formats
-  description: The possible formats for the cursor field, in order of preference. The first format that matches the cursor field value will be used to parse it. If not provided, the `datetime_format` will be used.
   type: array
   items:
     type: string
-
-
-
-
+  description: |
+    The possible formats for the cursor field, in order of preference. The first format that matches the cursor field value will be used to parse it. If not provided, the Outgoing Datetime Format will be used.
+    Use placeholders starting with "%" to describe the format the API is using. The following placeholders are available:
+      * **%s**: Epoch unix timestamp - `1686218963`
+      * **%s_as_float**: Epoch unix timestamp in seconds as float with microsecond precision - `1686218963.123456`
+      * **%ms**: Epoch unix timestamp - `1686218963123`
+      * **%a**: Weekday (abbreviated) - `Sun`
+      * **%A**: Weekday (full) - `Sunday`
+      * **%w**: Weekday (decimal) - `0` (Sunday), `6` (Saturday)
+      * **%d**: Day of the month (zero-padded) - `01`, `02`, ..., `31`
+      * **%b**: Month (abbreviated) - `Jan`
+      * **%B**: Month (full) - `January`
+      * **%m**: Month (zero-padded) - `01`, `02`, ..., `12`
+      * **%y**: Year (without century, zero-padded) - `00`, `01`, ..., `99`
+      * **%Y**: Year (with century) - `0001`, `0002`, ..., `9999`
+      * **%H**: Hour (24-hour, zero-padded) - `00`, `01`, ..., `23`
+      * **%I**: Hour (12-hour, zero-padded) - `01`, `02`, ..., `12`
+      * **%p**: AM/PM indicator
+      * **%M**: Minute (zero-padded) - `00`, `01`, ..., `59`
+      * **%S**: Second (zero-padded) - `00`, `01`, ..., `59`
+      * **%f**: Microsecond (zero-padded to 6 digits) - `000000`, `000001`, ..., `999999`
+      * **%_ms**: Millisecond (zero-padded to 3 digits) - `000`, `001`, ..., `999`
+      * **%z**: UTC offset - `(empty)`, `+0000`, `-04:00`
+      * **%Z**: Time zone name - `(empty)`, `UTC`, `GMT`
+      * **%j**: Day of the year (zero-padded) - `001`, `002`, ..., `366`
+      * **%U**: Week number of the year (Sunday as first day) - `00`, `01`, ..., `53`
+      * **%W**: Week number of the year (Monday as first day) - `00`, `01`, ..., `53`
+      * **%c**: Date and time representation - `Tue Aug 16 21:30:00 1988`
+      * **%x**: Date representation - `08/16/1988`
+      * **%X**: Time representation - `21:30:00`
+      * **%%**: Literal '%' character
+
+    Some placeholders depend on the locale of the underlying system - in most cases this locale is configured as en/US. For more information see the [Python documentation](https://docs.python.org/3/library/datetime.html#strftime-and-strptime-format-codes).
+  examples:
+    - "%Y-%m-%d"
+    - "%Y-%m-%d %H:%M:%S"
+    - "%Y-%m-%dT%H:%M:%S"
+    - "%Y-%m-%dT%H:%M:%SZ"
+    - "%Y-%m-%dT%H:%M:%S%z"
+    - "%Y-%m-%dT%H:%M:%S.%fZ"
+    - "%Y-%m-%dT%H:%M:%S.%f%z"
+    - "%Y-%m-%d %H:%M:%S.%f+00:00"
+    - "%s"
+    - "%ms"
 start_datetime:
   title: Start Datetime
   description: The datetime that determines the earliest record that should be synced.
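Most of the placeholders listed above map directly onto Python's `strftime`/`strptime` codes, so a candidate entry for `cursor_datetime_formats` can be sanity-checked against a sample cursor value with the standard library. A minimal sketch (the sample values are made up; the CDK-specific placeholders `%s`, `%s_as_float`, `%ms` and `%_ms` are handled by the CDK itself rather than by plain `strptime`):

```python
from datetime import datetime

# Candidate formats, in order of preference, mirroring `cursor_datetime_formats`.
CANDIDATE_FORMATS = ["%Y-%m-%dT%H:%M:%S.%f%z", "%Y-%m-%dT%H:%M:%SZ", "%Y-%m-%d"]

def parse_cursor(value: str) -> datetime:
    """Return the first successful parse, i.e. the 'first format that matches' rule above."""
    for fmt in CANDIDATE_FORMATS:
        try:
            return datetime.strptime(value, fmt)
        except ValueError:
            continue
    raise ValueError(f"No candidate format matched {value!r}")

print(parse_cursor("2023-06-08T10:09:23Z"))  # parsed by the second format
print(parse_cursor("2023-06-08"))            # parsed by the third format
```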
@@ -1024,33 +1063,45 @@ definitions:
     - "%s_as_float"
 cursor_granularity:
   title: Cursor Granularity
-  description:
+  description: |
     Smallest increment the datetime_format has (ISO 8601 duration) that is used to ensure the start of a slice does not overlap with the end of the previous one, e.g. for %Y-%m-%d the granularity should
     be P1D, for %Y-%m-%dT%H:%M:%SZ the granularity should be PT1S. Given this field is provided, `step` needs to be provided as well.
+    * **PT0.000001S**: 1 microsecond
+    * **PT0.001S**: 1 millisecond
+    * **PT1S**: 1 second
+    * **PT1M**: 1 minute
+    * **PT1H**: 1 hour
+    * **P1D**: 1 day
   type: string
   examples:
     - "PT1S"
 is_data_feed:
-  title:
+  title: Data Feed API
   description: A data feed API is an API that does not allow filtering and paginates the content from the most recent to the least recent. Given this, the CDK needs to know when to stop paginating and this field will generate a stop condition for pagination.
   type: boolean
 is_client_side_incremental:
-  title:
-  description:
+  title: Client-side Incremental Filtering
+  description: Set to True if the target API endpoint does not take cursor values to filter records and returns all records anyway. This will cause the connector to filter out records locally, and only emit new records from the last sync, hence incremental. This means that all records would be read from the API, but only new records will be emitted to the destination.
   type: boolean
 is_compare_strictly:
-  title:
-  description: Set to True if the target API does not accept queries where the start time equal the end time.
+  title: Strict Start-End Time Comparison
+  description: Set to True if the target API does not accept queries where the start time equal the end time. This will cause those requests to be skipped.
   type: boolean
   default: False
 global_substream_cursor:
-  title:
-  description:
+  title: Global Substream Cursor
+  description: Setting to True causes the connector to store the cursor as one value, instead of per-partition. This setting optimizes performance when the parent stream has thousands of partitions. Notably, the substream state is updated only at the end of the sync, which helps prevent data loss in case of a sync failure. See more info in the [docs](https://docs.airbyte.com/connector-development/config-based/understanding-the-yaml-file/incremental-syncs).
   type: boolean
   default: false
 lookback_window:
   title: Lookback Window
-  description:
+  description: |
+    Time interval (ISO8601 duration) before the start_datetime to read data for, e.g. P1M for looking back one month.
+    * **PT1H**: 1 hour
+    * **P1D**: 1 day
+    * **P1W**: 1 week
+    * **P1M**: 1 month
+    * **P1Y**: 1 year
   type: string
   interpolation_context:
     - config
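The granularity exists to keep adjacent time slices from overlapping: each new slice starts one `cursor_granularity` after the end of the previous one. A small sketch of that arithmetic with `%Y-%m-%d` dates, a `P1W` step and `P1D` granularity (plain standard library, not the CDK's slicer, and the dates are arbitrary):

```python
from datetime import date, timedelta

step = timedelta(weeks=1)        # step: P1W
granularity = timedelta(days=1)  # cursor_granularity: P1D

start, end = date(2024, 1, 1), date(2024, 1, 20)

slices = []
cursor = start
while cursor <= end:
    slice_end = min(cursor + step - granularity, end)
    slices.append((cursor.isoformat(), slice_end.isoformat()))
    cursor = slice_end + granularity  # next slice starts one granularity after the previous end

print(slices)
# [('2024-01-01', '2024-01-07'), ('2024-01-08', '2024-01-14'), ('2024-01-15', '2024-01-20')]
```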
@@ -1071,7 +1122,13 @@ definitions:
     - "starting_time"
 step:
   title: Step
-  description:
+  description: |
+    The size of the time window (ISO8601 duration). Given this field is provided, `cursor_granularity` needs to be provided as well.
+    * **PT1H**: 1 hour
+    * **P1D**: 1 day
+    * **P1W**: 1 week
+    * **P1M**: 1 month
+    * **P1Y**: 1 year
   type: string
   examples:
     - "P1W"
@@ -1095,6 +1152,8 @@ definitions:
   title: Secret Key
   type: string
   description: Secret used to sign the JSON web token.
+  interpolation_context:
+    - config
   examples:
     - "{{ config['secret_key'] }}"
 base64_encode_secret_key:
@@ -1224,8 +1283,10 @@ definitions:
   title: Client ID
   description: The OAuth client ID. Fill it in the user inputs.
   type: string
+  interpolation_context:
+    - config
   examples:
-    - "{{ config['client_id }}"
+    - "{{ config['client_id'] }}"
    - "{{ config['credentials']['client_id }}"
 client_secret_name:
   title: Client Secret Property Name
@@ -1238,8 +1299,10 @@ definitions:
   title: Client Secret
   description: The OAuth client secret. Fill it in the user inputs.
   type: string
+  interpolation_context:
+    - config
   examples:
-    - "{{ config['client_secret }}"
+    - "{{ config['client_secret'] }}"
    - "{{ config['credentials']['client_secret }}"
 refresh_token_name:
   title: Refresh Token Property Name
@@ -1252,6 +1315,8 @@ definitions:
   title: Refresh Token
   description: Credential artifact used to get a new access token.
   type: string
+  interpolation_context:
+    - config
   examples:
     - "{{ config['refresh_token'] }}"
     - "{{ config['credentials]['refresh_token'] }}"
@@ -1272,6 +1337,8 @@ definitions:
   title: Access Token Value
   description: The value of the access_token to bypass the token refreshing using `refresh_token`.
   type: string
+  interpolation_context:
+    - config
   examples:
     - secret_access_token_value
 expires_in_name:
@@ -1444,6 +1511,7 @@ definitions:
 incremental_sync:
   title: Incremental Sync
   description: Component used to fetch data incrementally based on a time field in the data.
+  linkable: true
   anyOf:
     - "$ref": "#/definitions/DatetimeBasedCursor"
     - "$ref": "#/definitions/IncrementingCountCursor"
@@ -1476,6 +1544,7 @@ definitions:
 transformations:
   title: Transformations
   description: A list of transformations to be applied to each output record.
+  linkable: true
   type: array
   items:
     anyOf:
@@ -1499,6 +1568,7 @@ definitions:
 file_uploader:
   title: File Uploader
   description: (experimental) Describes how to fetch a file
+  linkable: true
   type: object
   required:
     - type
@@ -1901,7 +1971,13 @@ definitions:
   type: string
 expiration_duration:
   title: Expiration Duration
-  description:
+  description: |
+    The duration in ISO 8601 duration notation after which the session token expires, starting from the time it was obtained. Omitting it will result in the session token being refreshed for every request.
+    * **PT1H**: 1 hour
+    * **P1D**: 1 day
+    * **P1W**: 1 week
+    * **P1M**: 1 month
+    * **P1Y**: 1 year
   type: string
   examples:
     - "PT1H"
@@ -2055,6 +2131,7 @@ definitions:
 request_parameters:
   title: Query Parameters
   description: Specifies the query parameters that should be set on an outgoing HTTP request given the inputs.
+  linkable: true
   anyOf:
     - type: object
       title: Key/Value Pairs
@@ -2077,6 +2154,7 @@ definitions:
 request_headers:
   title: Request Headers
   description: Return any non-auth headers. Authentication headers will overwrite any overlapping headers returned from this method.
+  linkable: true
   anyOf:
     - type: object
       title: Key/Value Pairs
@@ -2136,6 +2214,7 @@ definitions:
 request_body:
   title: Request Body
   description: Specifies how to populate the body of the request with a payload. Can contain nested objects.
+  linkable: true
   anyOf:
     - "$ref": "#/definitions/RequestBodyPlainText"
     - "$ref": "#/definitions/RequestBodyUrlEncodedForm"
@@ -2149,6 +2228,7 @@ definitions:
 error_handler:
   title: Error Handler
   description: Error handler component that defines how to handle errors.
+  linkable: true
   anyOf:
     - "$ref": "#/definitions/DefaultErrorHandler"
     - "$ref": "#/definitions/CompositeErrorHandler"
@@ -2683,6 +2763,7 @@ definitions:
     - 2021-01-01
     - 2021-01-01T00:00:00Z
     - "{{ config['start_time'] }}"
+    - "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}"
 datetime_format:
   title: Datetime Format
   description: |
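The new example uses the `now_utc()` interpolation macro to format the current UTC time down to the second. Outside of a manifest, the same string can be produced with the standard library:

```python
from datetime import datetime, timezone

# Equivalent of "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}" evaluated at sync time.
print(datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"))  # e.g. 2025-01-01T12:34:56Z
```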
@@ -3067,7 +3148,7 @@ definitions:
     - 100
     - "{{ config['page_size'] }}"
 inject_on_first_request:
-  title: Inject Offset
+  title: Inject Offset on First Request
   description: Using the `offset` with value `0` during the first request
   type: boolean
   default: false
@@ -3107,7 +3188,7 @@ definitions:
     - 0
     - 1
 inject_on_first_request:
-  title: Inject Page Number
+  title: Inject Page Number on First Request
   description: Using the `page number` with value defined by `start_from_page` during the first request
   type: boolean
   default: false
@@ -3398,6 +3479,7 @@ definitions:
   description: Array of paths defining the field to remove. Each item is an array whose field describe the path of a field to remove.
   type: array
   items:
+    type: array
     items:
       type: string
   examples:
@@ -3589,6 +3671,7 @@ definitions:
   "$ref": "#/definitions/RecordSelector"
 paginator:
   description: Paginator component that describes how to navigate through the API's pages.
+  linkable: true
   anyOf:
     - "$ref": "#/definitions/DefaultPaginator"
     - "$ref": "#/definitions/NoPagination"
@@ -3599,6 +3682,7 @@ definitions:
 partition_router:
   title: Partition Router
   description: Used to iteratively execute requests over a set of values, such as a parent stream's records or a list of constant values.
+  linkable: true
   anyOf:
     - "$ref": "#/definitions/SubstreamPartitionRouter"
     - "$ref": "#/definitions/ListPartitionRouter"
@@ -4546,6 +4630,7 @@ definitions:
   description: A list of field pointers to be removed from the config.
   type: array
   items:
+    type: array
     items:
       type: string
   examples:
@@ -1,5 +1,3 @@
-# Copyright (c) 2025 Airbyte, Inc., all rights reserved.
-
 # generated by datamodel-codegen:
 # filename: declarative_component_schema.yaml
 
@@ -160,20 +158,6 @@ class CustomBackoffStrategy(BaseModel):
     parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")
 
 
-class CustomConfigTransformation(BaseModel):
-    class Config:
-        extra = Extra.allow
-
-    type: Literal["CustomConfigTransformation"]
-    class_name: str = Field(
-        ...,
-        description="Fully-qualified name of the class that will be implementing the custom config transformation. The format is `source_<name>.<package>.<class_name>`.",
-        examples=["source_declarative_manifest.components.MyCustomConfigTransformation"],
-        title="Class Name",
-    )
-    parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")
-
-
 class CustomErrorHandler(BaseModel):
     class Config:
         extra = Extra.allow
@@ -539,7 +523,10 @@ class OAuthAuthenticator(BaseModel):
     client_id: Optional[str] = Field(
         None,
         description="The OAuth client ID. Fill it in the user inputs.",
-        examples=[
+        examples=[
+            "{{ config['client_id'] }}",
+            "{{ config['credentials']['client_id }}",
+        ],
         title="Client ID",
     )
     client_secret_name: Optional[str] = Field(
@@ -552,7 +539,7 @@ class OAuthAuthenticator(BaseModel):
         None,
         description="The OAuth client secret. Fill it in the user inputs.",
         examples=[
-            "{{ config['client_secret }}",
+            "{{ config['client_secret'] }}",
             "{{ config['credentials']['client_secret }}",
         ],
         title="Client Secret",
@@ -996,7 +983,12 @@ class MinMaxDatetime(BaseModel):
     datetime: str = Field(
         ...,
         description="Datetime value.",
-        examples=[
+        examples=[
+            "2021-01-01",
+            "2021-01-01T00:00:00Z",
+            "{{ config['start_time'] }}",
+            "{{ now_utc().strftime('%Y-%m-%dT%H:%M:%SZ') }}",
+        ],
         title="Datetime",
     )
     datetime_format: Optional[str] = Field(
@@ -1208,7 +1200,7 @@ class OffsetIncrement(BaseModel):
     inject_on_first_request: Optional[bool] = Field(
         False,
         description="Using the `offset` with value `0` during the first request",
-        title="Inject Offset",
+        title="Inject Offset on First Request",
     )
     parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")
 
@@ -1230,7 +1222,7 @@ class PageIncrement(BaseModel):
     inject_on_first_request: Optional[bool] = Field(
         False,
         description="Using the `page number` with value defined by `start_from_page` during the first request",
-        title="Inject Page Number",
+        title="Inject Page Number on First Request",
     )
     parameters: Optional[Dict[str, Any]] = Field(None, alias="$parameters")
 
@@ -1657,6 +1649,20 @@ class ConfigRemoveFields(BaseModel):
     )
 
 
+class CustomConfigTransformation(BaseModel):
+    type: Literal["CustomConfigTransformation"]
+    class_name: str = Field(
+        ...,
+        description="Fully-qualified name of the class that will be implementing the custom config transformation. The format is `source_<name>.<package>.<class_name>`.",
+        examples=["source_declarative_manifest.components.MyCustomConfigTransformation"],
+    )
+    parameters: Optional[Dict[str, Any]] = Field(
+        None,
+        alias="$parameters",
+        description="Additional parameters to be passed to the custom config transformation.",
+    )
+
+
 class AddedFieldDefinition(BaseModel):
     type: Literal["AddedFieldDefinition"]
     path: List[str] = Field(
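`CustomConfigTransformation` is the model that now backs custom entries in config migrations and normalization rules (see the `ConfigMigration` and `ConfigNormalizationRules` unions further below). A minimal usage sketch, assuming the generated model is imported from the module listed in the RECORD section of this diff:

```python
from airbyte_cdk.sources.declarative.models.declarative_component_schema import (
    CustomConfigTransformation,  # added in 6.59.2
)

# class_name points at a user-supplied class inside the connector package,
# following the `source_<name>.<package>.<class_name>` format from the field description.
transformation = CustomConfigTransformation(
    type="CustomConfigTransformation",
    class_name="source_declarative_manifest.components.MyCustomConfigTransformation",
)
print(transformation.class_name)
```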
@@ -1796,7 +1802,19 @@ class DatetimeBasedCursor(BaseModel):
     )
     cursor_datetime_formats: Optional[List[str]] = Field(
         None,
-        description="The possible formats for the cursor field, in order of preference. The first format that matches the cursor field value will be used to parse it. If not provided, the
+        description="The possible formats for the cursor field, in order of preference. The first format that matches the cursor field value will be used to parse it. If not provided, the Outgoing Datetime Format will be used.\nUse placeholders starting with \"%\" to describe the format the API is using. The following placeholders are available:\n * **%s**: Epoch unix timestamp - `1686218963`\n * **%s_as_float**: Epoch unix timestamp in seconds as float with microsecond precision - `1686218963.123456`\n * **%ms**: Epoch unix timestamp - `1686218963123`\n * **%a**: Weekday (abbreviated) - `Sun`\n * **%A**: Weekday (full) - `Sunday`\n * **%w**: Weekday (decimal) - `0` (Sunday), `6` (Saturday)\n * **%d**: Day of the month (zero-padded) - `01`, `02`, ..., `31`\n * **%b**: Month (abbreviated) - `Jan`\n * **%B**: Month (full) - `January`\n * **%m**: Month (zero-padded) - `01`, `02`, ..., `12`\n * **%y**: Year (without century, zero-padded) - `00`, `01`, ..., `99`\n * **%Y**: Year (with century) - `0001`, `0002`, ..., `9999`\n * **%H**: Hour (24-hour, zero-padded) - `00`, `01`, ..., `23`\n * **%I**: Hour (12-hour, zero-padded) - `01`, `02`, ..., `12`\n * **%p**: AM/PM indicator\n * **%M**: Minute (zero-padded) - `00`, `01`, ..., `59`\n * **%S**: Second (zero-padded) - `00`, `01`, ..., `59`\n * **%f**: Microsecond (zero-padded to 6 digits) - `000000`, `000001`, ..., `999999`\n * **%_ms**: Millisecond (zero-padded to 3 digits) - `000`, `001`, ..., `999`\n * **%z**: UTC offset - `(empty)`, `+0000`, `-04:00`\n * **%Z**: Time zone name - `(empty)`, `UTC`, `GMT`\n * **%j**: Day of the year (zero-padded) - `001`, `002`, ..., `366`\n * **%U**: Week number of the year (Sunday as first day) - `00`, `01`, ..., `53`\n * **%W**: Week number of the year (Monday as first day) - `00`, `01`, ..., `53`\n * **%c**: Date and time representation - `Tue Aug 16 21:30:00 1988`\n * **%x**: Date representation - `08/16/1988`\n * **%X**: Time representation - `21:30:00`\n * **%%**: Literal '%' character\n\n Some placeholders depend on the locale of the underlying system - in most cases this locale is configured as en/US. For more information see the [Python documentation](https://docs.python.org/3/library/datetime.html#strftime-and-strptime-format-codes).\n",
+        examples=[
+            "%Y-%m-%d",
+            "%Y-%m-%d %H:%M:%S",
+            "%Y-%m-%dT%H:%M:%S",
+            "%Y-%m-%dT%H:%M:%SZ",
+            "%Y-%m-%dT%H:%M:%S%z",
+            "%Y-%m-%dT%H:%M:%S.%fZ",
+            "%Y-%m-%dT%H:%M:%S.%f%z",
+            "%Y-%m-%d %H:%M:%S.%f+00:00",
+            "%s",
+            "%ms",
+        ],
         title="Cursor Datetime Formats",
     )
     start_datetime: Union[MinMaxDatetime, str] = Field(
@@ -1829,33 +1847,33 @@ class DatetimeBasedCursor(BaseModel):
     )
     cursor_granularity: Optional[str] = Field(
         None,
-        description="Smallest increment the datetime_format has (ISO 8601 duration) that is used to ensure the start of a slice does not overlap with the end of the previous one, e.g. for %Y-%m-%d the granularity should
+        description="Smallest increment the datetime_format has (ISO 8601 duration) that is used to ensure the start of a slice does not overlap with the end of the previous one, e.g. for %Y-%m-%d the granularity should\nbe P1D, for %Y-%m-%dT%H:%M:%SZ the granularity should be PT1S. Given this field is provided, `step` needs to be provided as well.\n * **PT0.000001S**: 1 microsecond\n * **PT0.001S**: 1 millisecond\n * **PT1S**: 1 second\n * **PT1M**: 1 minute\n * **PT1H**: 1 hour\n * **P1D**: 1 day\n",
         examples=["PT1S"],
         title="Cursor Granularity",
     )
     is_data_feed: Optional[bool] = Field(
         None,
         description="A data feed API is an API that does not allow filtering and paginates the content from the most recent to the least recent. Given this, the CDK needs to know when to stop paginating and this field will generate a stop condition for pagination.",
-        title="
+        title="Data Feed API",
     )
     is_client_side_incremental: Optional[bool] = Field(
         None,
-        description="
-        title="
+        description="Set to True if the target API endpoint does not take cursor values to filter records and returns all records anyway. This will cause the connector to filter out records locally, and only emit new records from the last sync, hence incremental. This means that all records would be read from the API, but only new records will be emitted to the destination.",
+        title="Client-side Incremental Filtering",
     )
     is_compare_strictly: Optional[bool] = Field(
         False,
-        description="Set to True if the target API does not accept queries where the start time equal the end time.",
-        title="
+        description="Set to True if the target API does not accept queries where the start time equal the end time. This will cause those requests to be skipped.",
+        title="Strict Start-End Time Comparison",
     )
     global_substream_cursor: Optional[bool] = Field(
         False,
-        description="
-        title="
+        description="Setting to True causes the connector to store the cursor as one value, instead of per-partition. This setting optimizes performance when the parent stream has thousands of partitions. Notably, the substream state is updated only at the end of the sync, which helps prevent data loss in case of a sync failure. See more info in the [docs](https://docs.airbyte.com/connector-development/config-based/understanding-the-yaml-file/incremental-syncs).",
+        title="Global Substream Cursor",
     )
     lookback_window: Optional[str] = Field(
         None,
-        description="Time interval before the start_datetime to read data for, e.g. P1M for looking back one month
+        description="Time interval (ISO8601 duration) before the start_datetime to read data for, e.g. P1M for looking back one month.\n * **PT1H**: 1 hour\n * **P1D**: 1 day\n * **P1W**: 1 week\n * **P1M**: 1 month\n * **P1Y**: 1 year\n",
         examples=["P1D", "P{{ config['lookback_days'] }}D"],
         title="Lookback Window",
     )
@@ -1873,7 +1891,7 @@ class DatetimeBasedCursor(BaseModel):
     )
     step: Optional[str] = Field(
         None,
-        description="The size of the time window (ISO8601 duration). Given this field is provided, `cursor_granularity` needs to be provided as well
+        description="The size of the time window (ISO8601 duration). Given this field is provided, `cursor_granularity` needs to be provided as well.\n * **PT1H**: 1 hour\n * **P1D**: 1 day\n * **P1W**: 1 week\n * **P1M**: 1 month\n * **P1Y**: 1 year\n",
         examples=["P1W", "{{ config['step_increment'] }}"],
         title="Step",
     )
@@ -2164,7 +2182,12 @@ class ConfigMigration(BaseModel):
         None, description="The description/purpose of the config migration."
     )
     transformations: List[
-        Union[
+        Union[
+            ConfigRemapField,
+            ConfigAddFields,
+            ConfigRemoveFields,
+            CustomConfigTransformation,
+        ]
     ] = Field(
         ...,
         description="The list of transformations that will attempt to be applied on an incoming unmigrated config. The transformations will be applied in the order they are defined.",
@@ -2184,7 +2207,12 @@ class ConfigNormalizationRules(BaseModel):
     )
     transformations: Optional[
         List[
-            Union[
+            Union[
+                ConfigRemapField,
+                ConfigAddFields,
+                ConfigRemoveFields,
+                CustomConfigTransformation,
+            ]
         ]
     ] = Field(
         [],
@@ -2481,7 +2509,7 @@ class SessionTokenAuthenticator(BaseModel):
     )
     expiration_duration: Optional[str] = Field(
         None,
-        description="The duration in ISO 8601 duration notation after which the session token expires, starting from the time it was obtained. Omitting it will result in the session token being refreshed for every request
+        description="The duration in ISO 8601 duration notation after which the session token expires, starting from the time it was obtained. Omitting it will result in the session token being refreshed for every request.\n * **PT1H**: 1 hour\n * **P1D**: 1 day\n * **P1W**: 1 week\n * **P1M**: 1 month\n * **P1Y**: 1 year\n",
         examples=["PT1H", "P1D"],
         title="Expiration Duration",
     )
@@ -44,12 +44,21 @@ class ConnectorTestScenario(BaseModel):
     skip_test: bool
     bypass_reason: str
 
+    class AcceptanceTestEmptyStream(BaseModel):
+        name: str
+        bypass_reason: str | None = None
+
+        # bypass reason does not affect equality
+        def __hash__(self) -> int:
+            return hash(self.name)
+
     config_path: Path | None = None
     config_dict: dict[str, Any] | None = None
 
     _id: str | None = None  # Used to override the default ID generation
 
     configured_catalog_path: Path | None = None
+    empty_streams: list[AcceptanceTestEmptyStream] | None = None
     timeout_seconds: int | None = None
     expect_records: AcceptanceTestExpectRecords | None = None
     file_types: AcceptanceTestFileTypes | None = None
@@ -10,20 +10,12 @@ from typing import TYPE_CHECKING, cast
 
 from boltons.typeutils import classproperty
 
-from airbyte_cdk.models import (
-    AirbyteMessage,
-    Type,
-)
 from airbyte_cdk.test import entrypoint_wrapper
 from airbyte_cdk.test.models import (
     ConnectorTestScenario,
 )
 from airbyte_cdk.test.standard_tests._job_runner import IConnector, run_test_job
 from airbyte_cdk.test.standard_tests.docker_base import DockerConnectorTestSuite
-from airbyte_cdk.utils.connector_paths import (
-    ACCEPTANCE_TEST_CONFIG,
-    find_connector_root,
-)
 
 if TYPE_CHECKING:
     from collections.abc import Callable
@@ -10,8 +10,7 @@ import tempfile
 import warnings
 from dataclasses import asdict
 from pathlib import Path
-from
-from typing import Literal, cast
+from typing import Any, Literal, cast
 
 import orjson
 import pytest
@@ -35,7 +34,6 @@ from airbyte_cdk.utils.connector_paths import (
 from airbyte_cdk.utils.docker import (
     build_connector_image,
     run_docker_airbyte_command,
-    run_docker_command,
 )
 
 
@@ -66,13 +64,57 @@ class DockerConnectorTestSuite:
         return cast(str, cls.connector_name).startswith("destination-")
 
     @classproperty
-    def
-        """Get the
-        result = cls.get_connector_root_dir() / ACCEPTANCE_TEST_CONFIG
-        if result.exists():
-            return result
+    def acceptance_test_config(cls) -> Any:
+        """Get the contents of acceptance test config file.
 
-
+        Also perform some basic validation that the file has the expected structure.
+        """
+        acceptance_test_config_path = cls.get_connector_root_dir() / ACCEPTANCE_TEST_CONFIG
+        if not acceptance_test_config_path.exists():
+            raise FileNotFoundError(
+                f"Acceptance test config file not found at: {str(acceptance_test_config_path)}"
+            )
+
+        tests_config = yaml.safe_load(acceptance_test_config_path.read_text())
+
+        if "acceptance_tests" not in tests_config:
+            raise ValueError(
+                f"Acceptance tests config not found in {acceptance_test_config_path}."
+                f" Found only: {str(tests_config)}."
+            )
+        return tests_config
+
+    @staticmethod
+    def _dedup_scenarios(scenarios: list[ConnectorTestScenario]) -> list[ConnectorTestScenario]:
+        """
+        For FAST tests, we treat each config as a separate test scenario to run against, whereas CATs defined
+        a series of more granular scenarios specifying a config_path and empty_streams among other things.
+
+        This method deduplicates the CATs scenarios based on their config_path. In doing so, we choose to
+        take the union of any defined empty_streams, to have high confidence that runnning a read with the
+        config will not error on the lack of data in the empty streams or lack of permissions to read them.
+
+        """
+        deduped_scenarios: list[ConnectorTestScenario] = []
+
+        for scenario in scenarios:
+            for existing_scenario in deduped_scenarios:
+                if scenario.config_path == existing_scenario.config_path:
+                    # If a scenario with the same config_path already exists, we merge the empty streams.
+                    # scenarios are immutable, so we create a new one.
+                    all_empty_streams = (existing_scenario.empty_streams or []) + (
+                        scenario.empty_streams or []
+                    )
+                    merged_scenario = existing_scenario.model_copy(
+                        update={"empty_streams": list(set(all_empty_streams))}
+                    )
+                    deduped_scenarios.remove(existing_scenario)
+                    deduped_scenarios.append(merged_scenario)
+                    break
+            else:
+                # If a scenario does not exist with the config, add the new scenario to the list.
+                deduped_scenarios.append(scenario)
+        return deduped_scenarios
 
     @classmethod
     def get_scenarios(
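The docstring above describes the merge rule: scenarios that share a `config_path` collapse into one scenario whose `empty_streams` is the union of the originals. A plain-dict stand-in for that behaviour (not the CDK classes; paths and stream names are made up, and set ordering in the output may vary):

```python
# Stand-ins for ConnectorTestScenario entries: (config_path, set of empty stream names)
scenarios = [
    ("secrets/config.json", {"stream_a"}),
    ("secrets/config_oauth.json", set()),
    ("secrets/config.json", {"stream_b"}),
]

merged: dict[str, set[str]] = {}
for config_path, empty_streams in scenarios:
    merged.setdefault(config_path, set()).update(empty_streams)

print(merged)
# {'secrets/config.json': {'stream_a', 'stream_b'}, 'secrets/config_oauth.json': set()}
```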
@@ -83,9 +125,8 @@ class DockerConnectorTestSuite:
         This has to be a separate function because pytest does not allow
         parametrization of fixtures with arguments from the test class itself.
         """
-        categories = ["connection", "spec"]
         try:
-
+            all_tests_config = cls.acceptance_test_config
         except FileNotFoundError as e:
             # Destinations sometimes do not have an acceptance tests file.
             warnings.warn(
@@ -95,15 +136,9 @@ class DockerConnectorTestSuite:
             )
             return []
 
-        all_tests_config = yaml.safe_load(cls.acceptance_test_config_path.read_text())
-        if "acceptance_tests" not in all_tests_config:
-            raise ValueError(
-                f"Acceptance tests config not found in {cls.acceptance_test_config_path}."
-                f" Found only: {str(all_tests_config)}."
-            )
-
         test_scenarios: list[ConnectorTestScenario] = []
-
+        # we look in the basic_read section to find any empty streams
+        for category in ["spec", "connection", "basic_read"]:
             if (
                 category not in all_tests_config["acceptance_tests"]
                 or "tests" not in all_tests_config["acceptance_tests"][category]
@@ -121,15 +156,11 @@ class DockerConnectorTestSuite:
 
                 scenario = ConnectorTestScenario.model_validate(test)
 
-                if scenario.config_path and scenario.config_path in [
-                    s.config_path for s in test_scenarios
-                ]:
-                    # Skip duplicate scenarios based on config_path
-                    continue
-
                 test_scenarios.append(scenario)
 
-
+        deduped_test_scenarios = cls._dedup_scenarios(test_scenarios)
+
+        return deduped_test_scenarios
 
     @pytest.mark.skipif(
         shutil.which("docker") is None,
@@ -332,6 +363,11 @@ class DockerConnectorTestSuite:
             # If `read_from_streams` is a list, we filter the discovered streams.
             streams_list = list(set(streams_list) & set(read_from_streams))
 
+        if scenario.empty_streams:
+            # Filter out streams marked as empty in the scenario.
+            empty_stream_names = [stream.name for stream in scenario.empty_streams]
+            streams_list = [s for s in streams_list if s.name not in empty_stream_names]
+
         configured_catalog: ConfiguredAirbyteCatalog = ConfiguredAirbyteCatalog(
             streams=[
                 ConfiguredAirbyteStream(
@@ -120,6 +120,12 @@ class SourceTestSuiteBase(ConnectorTestSuiteBase):
         if scenario.expected_outcome.expect_exception() and discover_result.errors:
             # Failed as expected; we're done.
             return
+        streams = discover_result.catalog.catalog.streams  # type: ignore [reportOptionalMemberAccess, union-attr]
+
+        if scenario.empty_streams:
+            # Filter out streams marked as empty in the scenario.
+            empty_stream_names = [stream.name for stream in scenario.empty_streams]
+            streams = [s for s in streams if s.name not in empty_stream_names]
 
         configured_catalog = ConfiguredAirbyteCatalog(
             streams=[
@@ -128,7 +134,7 @@ class SourceTestSuiteBase(ConnectorTestSuiteBase):
                     sync_mode=SyncMode.full_refresh,
                     destination_sync_mode=DestinationSyncMode.append_dedup,
                 )
-                for stream in
+                for stream in streams
             ]
         )
         result = run_test_job(
@@ -86,8 +86,6 @@ def resolve_airbyte_repo_root(
 
 def resolve_connector_name_and_directory(
     connector_ref: str | Path | None = None,
-    *,
-    connector_directory: Path | None = None,
 ) -> tuple[str, Path]:
     """Resolve the connector name and directory.
 
@@ -104,6 +102,7 @@ def resolve_connector_name_and_directory(
         FileNotFoundError: If the connector directory does not exist or cannot be found.
     """
     connector_name: str | None = None
+    connector_directory: Path | None = None
 
     # Resolve connector_ref to connector_name or connector_directory (if provided)
     if connector_ref:
@@ -37,7 +37,7 @@ airbyte_cdk/destinations/vector_db_based/writer.py,sha256=nZ00xPiohElJmYktEZZIhr
 airbyte_cdk/entrypoint.py,sha256=R2kAsAnCAI7eZCctQpMCImLhFFwo7PniJVA0e7RhJVI,19774
 airbyte_cdk/exception_handler.py,sha256=D_doVl3Dt60ASXlJsfviOCswxGyKF2q0RL6rif3fNks,2013
 airbyte_cdk/logger.py,sha256=1cURbvawbunCAV178q-XhTHcbAQZTSf07WhU7U9AXWU,3744
-airbyte_cdk/manifest_migrations/README.md,sha256=
+airbyte_cdk/manifest_migrations/README.md,sha256=YX1h0xyc4jHdwH3I25ZHqB7R3hcUUCHMvnexpfzF2E8,3020
 airbyte_cdk/manifest_migrations/__init__.py,sha256=0eq9ic_6GGXMwzE31eAOSA7PLtBauMfgM9XshjYHF84,61
 airbyte_cdk/manifest_migrations/exceptions.py,sha256=mmMZaCVEkYSGykVL5jKA0xsDWWkybRdQwnh9pGb7VG0,300
 airbyte_cdk/manifest_migrations/manifest_migration.py,sha256=4ohLfbj2PeuPSgCMVbCArb0d-YdaZIllX4ieXQNiRRw,4420
@@ -46,7 +46,7 @@ airbyte_cdk/manifest_migrations/migrations/__init__.py,sha256=HRN7fMMbTuM9W1vmyc
 airbyte_cdk/manifest_migrations/migrations/http_requester_path_to_url.py,sha256=IIn2SjRh1v2yaSBFUCDyBHpX6mBhlckhvbsSg55mREI,2153
 airbyte_cdk/manifest_migrations/migrations/http_requester_request_body_json_data_to_request_body.py,sha256=hwz2JZYSUibpoD983DMrOdcFYApiHkTVayBkeG2Kx9w,2731
 airbyte_cdk/manifest_migrations/migrations/http_requester_url_base_to_url.py,sha256=EX1MVYVpoWypA28qoH48wA0SYZjGdlR8bcSixTDzfgo,1346
-airbyte_cdk/manifest_migrations/migrations/registry.yaml,sha256=
+airbyte_cdk/manifest_migrations/migrations/registry.yaml,sha256=F-hdapvl_vZnsI7CQsV00Rb7g7j4Nt2zaM83-Tbwgbg,956
 airbyte_cdk/manifest_migrations/migrations_registry.py,sha256=zly2fwaOxDukqC7eowzrDlvhA2v71FjW74kDzvRXhSY,2619
 airbyte_cdk/models/__init__.py,sha256=Et9wJWs5VOWynGbb-3aJRhsdAHAiLkNNLxdwqJAuqkw,2114
 airbyte_cdk/models/airbyte_protocol.py,sha256=oZdKsZ7yPjUt9hvxdWNpxCtgjSV2RWhf4R9Np03sqyY,3613
@@ -90,7 +90,7 @@ airbyte_cdk/sources/declarative/concurrent_declarative_source.py,sha256=rQz9gXp3
 airbyte_cdk/sources/declarative/datetime/__init__.py,sha256=4Hw-PX1-VgESLF16cDdvuYCzGJtHntThLF4qIiULWeo,61
 airbyte_cdk/sources/declarative/datetime/datetime_parser.py,sha256=_zGNGq31RNy_0QBLt_EcTvgPyhj7urPdx6oA3M5-r3o,3150
 airbyte_cdk/sources/declarative/datetime/min_max_datetime.py,sha256=0BHBtDNQZfvwM45-tY5pNlTcKAFSGGNxemoi0Jic-0E,5785
-airbyte_cdk/sources/declarative/declarative_component_schema.yaml,sha256=
+airbyte_cdk/sources/declarative/declarative_component_schema.yaml,sha256=tGte90Y3mTtsdyT5nUNgxBgV-CD9Nk_zrMU0mL1M3DE,186143
 airbyte_cdk/sources/declarative/declarative_source.py,sha256=qmyMnnet92eGc3C22yBtpvD5UZjqdhsAafP_zxI5wp8,1814
 airbyte_cdk/sources/declarative/declarative_stream.py,sha256=dCRlddBUSaJmBNBz1pSO1r2rTw8AP5d2_vlmIeGs2gg,10767
 airbyte_cdk/sources/declarative/decoders/__init__.py,sha256=JHb_0d3SE6kNY10mxA5YBEKPeSbsWYjByq1gUQxepoE,953
@@ -134,7 +134,7 @@ airbyte_cdk/sources/declarative/migrations/legacy_to_per_partition_state_migrati
 airbyte_cdk/sources/declarative/migrations/state_migration.py,sha256=KWPjealMLKSMtajXgkdGgKg7EmTLR-CqqD7UIh0-eDU,794
 airbyte_cdk/sources/declarative/models/__init__.py,sha256=nUFxNCiKeYRVXuZEKA7GD-lTHxsiKcQ8FitZjKhPIvE,100
 airbyte_cdk/sources/declarative/models/base_model_with_deprecations.py,sha256=Imnj3yef0aqRdLfaUxkIYISUb8YkiPrRH_wBd-x8HjM,5999
-airbyte_cdk/sources/declarative/models/declarative_component_schema.py,sha256=
+airbyte_cdk/sources/declarative/models/declarative_component_schema.py,sha256=5M8q0qBoFTIqyHXs5RniGyMDurDgODZ0Ad_OuIhpdio,131664
 airbyte_cdk/sources/declarative/parsers/__init__.py,sha256=ZnqYNxHsKCgO38IwB34RQyRMXTs4GTvlRi3ImKnIioo,61
 airbyte_cdk/sources/declarative/parsers/custom_code_compiler.py,sha256=nlVvHC511NUyDEEIRBkoeDTAvLqKNp-hRy8D19z8tdk,5941
 airbyte_cdk/sources/declarative/parsers/custom_exceptions.py,sha256=wnRUP0Xeru9Rbu5OexXSDN9QWDo8YU4tT9M2LDVOgGA,802
@@ -380,7 +380,7 @@ airbyte_cdk/sql/shared/sql_processor.py,sha256=jR-hdLZsPf2sNBa_wvWKLvys8ZJ-SQCIi
 airbyte_cdk/sql/types.py,sha256=XEIhRAo_ASd0kVLBkdLf5bHiRhNple-IJrC9TibcDdY,5880
 airbyte_cdk/test/__init__.py,sha256=f_XdkOg4_63QT2k3BbKY34209lppwgw-svzfZstQEq4,199
 airbyte_cdk/test/catalog_builder.py,sha256=-y05Cz1x0Dlk6oE9LSKhCozssV2gYBNtMdV5YYOPOtk,3015
-airbyte_cdk/test/entrypoint_wrapper.py,sha256=
+airbyte_cdk/test/entrypoint_wrapper.py,sha256=wNxoUP8lUIc_8-owLF9SlzLgkH6yH_Bsow8K0wLRB5I,17889
 airbyte_cdk/test/mock_http/__init__.py,sha256=jE5kC6CQ0OXkTqKhciDnNVZHesBFVIA2YvkdFGwva7k,322
 airbyte_cdk/test/mock_http/matcher.py,sha256=4Qj8UnJKZIs-eodshryce3SN1Ayc8GZpBETmP6hTEyc,1446
 airbyte_cdk/test/mock_http/mocker.py,sha256=XgsjMtVoeMpRELPyALgrkHFauH9H5irxrz1Kcxh2yFY,8013
@@ -389,15 +389,15 @@ airbyte_cdk/test/mock_http/response.py,sha256=s4-cQQqTtmeej0pQDWqmG0vUWpHS-93lIW
 airbyte_cdk/test/mock_http/response_builder.py,sha256=F-v7ebftqGj7YVIMLKdodmU9U8Dq8aIyllWGo2NGwHc,8331
 airbyte_cdk/test/models/__init__.py,sha256=5f5oFcuUA3dyNTfvvTWav2pTD8WX4nznObKgMTmvdus,290
 airbyte_cdk/test/models/outcome.py,sha256=niSX6gkP4P-_kQUF1jkbBXq72FC3Rtkvtdl0gJsUyho,2263
-airbyte_cdk/test/models/scenario.py,sha256=
+airbyte_cdk/test/models/scenario.py,sha256=sXzqDRv1SOS16PR9nF_6Yy6e0AiD8YFqHF8dREQitpo,6233
 airbyte_cdk/test/standard_tests/__init__.py,sha256=TGCSc9bqfiEhdfyz7SVqwBog2CsCY1unCXocSXswtV0,1369
 airbyte_cdk/test/standard_tests/_job_runner.py,sha256=PF3ffgaB8ZQX5bdNLL37wq7S9P3VJhGBXsNIIv6JSb4,5639
-airbyte_cdk/test/standard_tests/connector_base.py,sha256=
+airbyte_cdk/test/standard_tests/connector_base.py,sha256=AhM856o5cFYN6bKYvyTdNLP7NFKYWXR_-U6kXqDAHdQ,4994
 airbyte_cdk/test/standard_tests/declarative_sources.py,sha256=4lmXKVJEhYeZAYaaXODwkn-DoJt_V--Thbea0kzOqdc,3502
 airbyte_cdk/test/standard_tests/destination_base.py,sha256=MARZip2mdo_PzGvzf2VBTAfrP4tbjrJYgeJUApnAArA,731
-airbyte_cdk/test/standard_tests/docker_base.py,sha256=
+airbyte_cdk/test/standard_tests/docker_base.py,sha256=zWrtv4aKKLXc4cLuAp0c2BpLSGu8-PY94Ytf_nEfx9M,16016
 airbyte_cdk/test/standard_tests/pytest_hooks.py,sha256=qUrRN36PVHdaGQXLnb3y4CzQhkktq7qnRQppnOfSQh4,5773
-airbyte_cdk/test/standard_tests/source_base.py,sha256
+airbyte_cdk/test/standard_tests/source_base.py,sha256=-V8vOJhPndIYUOhBkM85mHSee4jCN0JvGTrF_AaSTaQ,7010
 airbyte_cdk/test/standard_tests/util.py,sha256=ncXVo6f_gJS2z_Pn6d_OhkuSVRiTy1D5SsPpRYAYWm4,3267
 airbyte_cdk/test/state_builder.py,sha256=kLPql9lNzUJaBg5YYRLJlY_Hy5JLHJDVyKPMZMoYM44,946
 airbyte_cdk/test/utils/__init__.py,sha256=Hu-1XT2KDoYjDF7-_ziDwv5bY3PueGjANOCbzeOegDg,57
@@ -408,7 +408,7 @@ airbyte_cdk/test/utils/reading.py,sha256=9ReW2uoITE7NCpVBKn6EfM9yi9_SvqhsNLb-5LO
 airbyte_cdk/utils/__init__.py,sha256=qhnC02DbS35OY8oB_tkYHwZzHed2FZeBM__G8IOgckY,347
 airbyte_cdk/utils/airbyte_secrets_utils.py,sha256=wEtRnl5KRhN6eLJwrDrC4FJjyqt_4vkA1F65mdl8c24,3142
 airbyte_cdk/utils/analytics_message.py,sha256=bi3uugQ2NjecnwTnz63iD5D1M8ZR8mXPbdtt6w5cC4s,653
-airbyte_cdk/utils/connector_paths.py,sha256=
+airbyte_cdk/utils/connector_paths.py,sha256=MXj0RBi3HpuvQTWH6-vc62BZZ3XguHMq0CVIkjhS3qs,8779
 airbyte_cdk/utils/constants.py,sha256=QzCi7j5SqpI5I06uRvQ8FC73JVJi7rXaRnR3E_gro5c,108
 airbyte_cdk/utils/datetime_format_inferrer.py,sha256=Ne2cpk7Tx3eZDEW2Q3O7jnNOY9g-w-AUMt3Ltvwg1tY,3989
 airbyte_cdk/utils/datetime_helpers.py,sha256=8mqzZ67Or2PBp7tLtrhh6XFv4wFzYsjCL_DOQJRaftI,17751
@@ -424,9 +424,9 @@ airbyte_cdk/utils/slice_hasher.py,sha256=EDxgROHDbfG-QKQb59m7h_7crN1tRiawdf5uU7G
 airbyte_cdk/utils/spec_schema_transformations.py,sha256=-5HTuNsnDBAhj-oLeQXwpTGA0HdcjFOf2zTEMUTTg_Y,816
 airbyte_cdk/utils/stream_status_utils.py,sha256=ZmBoiy5HVbUEHAMrUONxZvxnvfV9CesmQJLDTAIWnWw,1171
 airbyte_cdk/utils/traced_exception.py,sha256=C8uIBuCL_E4WnBAOPSxBicD06JAldoN9fGsQDp463OY,6292
-airbyte_cdk-6.59.
-airbyte_cdk-6.59.
-airbyte_cdk-6.59.
-airbyte_cdk-6.59.
-airbyte_cdk-6.59.
-airbyte_cdk-6.59.
+airbyte_cdk-6.59.2.dist-info/LICENSE.txt,sha256=Wfe61S4BaGPj404v8lrAbvhjYR68SHlkzeYrg3_bbuM,1051
+airbyte_cdk-6.59.2.dist-info/LICENSE_SHORT,sha256=aqF6D1NcESmpn-cqsxBtszTEnHKnlsp8L4x9wAh3Nxg,55
+airbyte_cdk-6.59.2.dist-info/METADATA,sha256=v-WEUpwObclpURlDpPC98g6GMfs_9Xl8gdL6KGpctos,6477
+airbyte_cdk-6.59.2.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+airbyte_cdk-6.59.2.dist-info/entry_points.txt,sha256=AKWbEkHfpzzk9nF9tqBUaw1MbvTM4mGtEzmZQm0ZWvM,139
+airbyte_cdk-6.59.2.dist-info/RECORD,,
File without changes
File without changes
File without changes
File without changes