omnata-plugin-runtime 0.4.11a111.tar.gz → 0.5.0a113.tar.gz

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: omnata-plugin-runtime
- Version: 0.4.11a111
+ Version: 0.5.0a113
  Summary: Classes and common runtime components for building and running Omnata Plugins
  Author: James Weakley
  Author-email: james.weakley@omnata.com
@@ -9,26 +9,32 @@ Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.8
  Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
- Requires-Dist: certifi (<=2023.11.17)
+ Requires-Dist: annotated-types (<=0.6.0)
+ Requires-Dist: certifi (<=2024.7.4)
  Requires-Dist: charset-normalizer (<=2.0.4)
- Requires-Dist: idna (<=3.4)
+ Requires-Dist: cryptography (<=42.0.5)
+ Requires-Dist: filelock (<=3.13.1)
+ Requires-Dist: idna (<=3.7)
  Requires-Dist: jinja2 (>=3.1.2,<=3.1.4)
  Requires-Dist: markupsafe (<=2.1.3)
- Requires-Dist: numpy (<=1.26.3)
- Requires-Dist: packaging (<=23.1)
- Requires-Dist: pandas (<=2.1.4)
+ Requires-Dist: numpy (<=1.26.4)
+ Requires-Dist: packaging (<=24.1)
+ Requires-Dist: pandas (<=2.2.2)
  Requires-Dist: platformdirs (<=3.10.0)
- Requires-Dist: pydantic (>=1,<=1.10.12)
- Requires-Dist: pyjwt (<=2.4.0)
- Requires-Dist: pyopenssl (<=23.2.0)
- Requires-Dist: pytz (<=2023.3.post1)
- Requires-Dist: requests (>=2,<=2.31.0)
+ Requires-Dist: pycparser (<=2.21)
+ Requires-Dist: pydantic (>=2,<=2.5.3)
+ Requires-Dist: pydantic-core (<=2.14.6)
+ Requires-Dist: pyjwt (<=2.8.0)
+ Requires-Dist: pyopenssl (<=24.0.0)
+ Requires-Dist: pytz (<=2024.1)
+ Requires-Dist: requests (>=2,<=2.32.2)
  Requires-Dist: setuptools (<=69.5.1)
- Requires-Dist: snowflake-snowpark-python (>=1,<2)
- Requires-Dist: tenacity (>=8,<=8.2.2)
+ Requires-Dist: snowflake-connector-python (>=3,<=3.12.0)
+ Requires-Dist: snowflake-snowpark-python (==1.19.0)
+ Requires-Dist: tenacity (>=8,<=8.2.3)
  Requires-Dist: tomlkit (<=0.11.1)
- Requires-Dist: urllib3 (<=2.1.0)
- Requires-Dist: wheel (<=0.41.2)
+ Requires-Dist: urllib3 (<=2.2.2)
+ Requires-Dist: wheel (<=0.43.0)
  Description-Content-Type: text/markdown

  # omnata-plugin-runtime
@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "omnata-plugin-runtime"
- version = "0.4.11-a111"
+ version = "0.5.0-a113"
  description = "Classes and common runtime components for building and running Omnata Plugins"
  authors = ["James Weakley <james.weakley@omnata.com>"]
  readme = "README.md"
@@ -8,27 +8,33 @@ packages = [{include = "omnata_plugin_runtime", from = "src"}]

  [tool.poetry.dependencies]
  python = ">=3.8, <3.11"
- snowflake-snowpark-python = "^1"
+ snowflake-snowpark-python = "1.19.0" # fixes an issue
+ snowflake-connector-python = "^3, <=3.12.0" # latest version available on Snowflake Anaconda
+ cryptography = "<=42.0.5"
+ annotated-types = "<=0.6.0"
+ pycparser = "<=2.21"
+ filelock = "<=3.13.1"
+ pydantic-core = "<=2.14.6"
  # had to relax some of these thanks to snowcli pinning newer versions
- certifi = "<=2023.11.17" # latest version available on Snowflake Anaconda
+ certifi = "<=2024.7.4" # latest version available on Snowflake Anaconda
  charset-normalizer = "<=2.0.4" # latest version available on Snowflake Anaconda
- idna = "<=3.4" # latest version available on Snowflake Anaconda
+ idna = "<=3.7" # latest version available on Snowflake Anaconda
  jinja2 = ">=3.1.2,<=3.1.4" # 3.1.4 was latest version available on Snowflake Anaconda
  markupsafe = "<=2.1.3" # latest version available on Snowflake Anaconda
- numpy = "<=1.26.3" # latest version available on Snowflake Anaconda
- packaging = "<=23.1" # latest version available on Snowflake Anaconda
- pandas = "<=2.1.4" # latest version available on Snowflake Anaconda
+ numpy = "<=1.26.4" # latest version available on Snowflake Anaconda
+ packaging = "<=24.1" # latest version available on Snowflake Anaconda
+ pandas = "<=2.2.2" # latest version available on Snowflake Anaconda
  platformdirs = "<=3.10.0" # latest version available on Snowflake Anaconda
- pydantic = "^1, <=1.10.12" # latest version available on Snowflake Anaconda
- pyjwt = "<=2.4.0" # latest version available on Snowflake Anaconda
- pyopenssl = "<=23.2.0" # latest version available on Snowflake Anaconda
- pytz = "<=2023.3.post1" # latest version available on Snowflake Anaconda
- requests = "^2, <=2.31.0" # latest version available on Snowflake Anaconda
+ pydantic = "^2, <=2.5.3" # latest version available on Snowflake Anaconda
+ pyjwt = "<=2.8.0" # latest version available on Snowflake Anaconda
+ pyopenssl = "<=24.0.0" # latest version available on Snowflake Anaconda
+ pytz = "<=2024.1" # latest version available on Snowflake Anaconda
+ requests = "^2, <=2.32.2" # latest version available on Snowflake Anaconda
  setuptools = "<=69.5.1" # 69.5.1 was latest version available on Snowflake Anaconda
  tomlkit = "<=0.11.1" # 0.11.1 was latest version available on Snowflake Anaconda
- tenacity = "^8, <=8.2.2" # latest version available on Snowflake Anaconda
- urllib3 = "<=2.1.0" # latest version available on Snowflake Anaconda
- wheel = "<=0.41.2" # latest version available on Snowflake Anaconda
+ tenacity = "^8, <=8.2.3" # latest version available on Snowflake Anaconda
+ urllib3 = "<=2.2.2" # latest version available on Snowflake Anaconda
+ wheel = "<=0.43.0" # latest version available on Snowflake Anaconda

  [tool.poetry.dev-dependencies]
  pytest = "^6.2.4"
@@ -67,8 +67,8 @@ class PluginMessageStreamProgressUpdate(BaseModel):
  stream_total_counts: Dict[str, int]
  completed_streams: List[str]
  # older runtime versions didn't have these, so the sync engine can't expect it
- stream_errors: Optional[Dict[str,str]]
- total_records_estimate: Optional[Dict[str,int]]
+ stream_errors: Optional[Dict[str,str]] = None
+ total_records_estimate: Optional[Dict[str,int]] = None


  class PluginMessageCancelledStreams(BaseModel):
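
A note on the "= None" additions above and in the hunks below: Pydantic v2 no longer treats an Optional[...] annotation as implicitly optional, so a field annotated Optional[X] without a default becomes required. A minimal sketch of that behaviour, using a hypothetical ProgressUpdate model rather than the real class:

    from typing import Dict, Optional
    from pydantic import BaseModel

    class ProgressUpdate(BaseModel):
        completed_streams: list
        # without "= None" this field would be required under Pydantic v2
        stream_errors: Optional[Dict[str, str]] = None

    ProgressUpdate(completed_streams=[])  # valid; stream_errors defaults to None
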
@@ -120,14 +120,14 @@ class OutboundSyncRequestPayload(BaseModel):
  logging_level: str
  connection_method: str
  connection_parameters: Dict[str, StoredConfigurationValue]
- oauth_secret_name: Optional[str]
- other_secrets_name: Optional[str]
+ oauth_secret_name: Optional[str] = None
+ other_secrets_name: Optional[str] = None
  sync_direction: Literal["outbound"] = "outbound"
  sync_strategy: OutboundSyncStrategy
  sync_parameters: Dict[str, StoredConfigurationValue]
  api_limit_overrides: List[ApiLimits]
  rate_limits_state: Dict[int,Dict[str,Dict[str,RateLimitState]]]
- field_mappings: Optional[StoredMappingValue]
+ field_mappings: Optional[StoredMappingValue] = None
  time_limit_mins: int = 60 * 4


@@ -138,7 +138,7 @@ class InboundSyncRequestPayload(BaseModel):

  sync_id: int # only used by log handler
  sync_branch_name: str = 'main' # only used by rate limit updater
- sync_branch_id: Optional[int] # only used by log handler
+ sync_branch_id: Optional[int] = None # only used by log handler
  connection_id: int # only used by log handler
  run_id: int # used by log handler and for reporting back run status updates
  source_app_name: str # the name of the app which is invoking this plugin
@@ -147,8 +147,8 @@ class InboundSyncRequestPayload(BaseModel):
  logging_level: str
  connection_method: str
  connection_parameters: Dict[str, StoredConfigurationValue]
- oauth_secret_name: Optional[str]
- other_secrets_name: Optional[str]
+ oauth_secret_name: Optional[str] = None
+ other_secrets_name: Optional[str] = None
  sync_direction: Literal["inbound"] = "inbound"
  sync_parameters: Dict[str, StoredConfigurationValue]
  api_limit_overrides: List[ApiLimits]
@@ -8,7 +8,7 @@ from typing import Any, List, Dict, Literal, Union, Optional
  from enum import Enum

  from abc import ABC
- from pydantic import BaseModel, Field, validator # pylint: disable=no-name-in-module
+ from pydantic import BaseModel, Field, PrivateAttr, validator # pylint: disable=no-name-in-module

  if tuple(sys.version_info[:2]) >= (3, 9):
  # Python 3.9 and above
@@ -424,8 +424,8 @@ class InboundSyncStreamsConfiguration(SubscriptableBaseModel):
  """

  include_new_streams: bool
- new_stream_sync_strategy: Optional[InboundSyncStrategy]
- new_stream_storage_behaviour: Optional[InboundStorageBehaviour]
+ new_stream_sync_strategy: Optional[InboundSyncStrategy] = None
+ new_stream_storage_behaviour: Optional[InboundStorageBehaviour] = None
  included_streams: Dict[str, StoredStreamConfiguration]
  excluded_streams: List[str]
  bulk_configuration: InboundSyncBulkConfiguration = InboundSyncBulkConfiguration.CUSTOMIZE
@@ -536,10 +536,10 @@ class ConnectionConfigurationParameters(SubscriptableBaseModel):
  connection_secrets: Dict[str, StoredConfigurationValue] = None
  ngrok_tunnel_settings: Optional[NgrokTunnelSettings] = None
  access_token_secret_name: Optional[str] = None
- _snowflake: Optional[Any] = None

- class Config:
- underscore_attrs_are_private = True
+ _snowflake: Optional[Any] = PrivateAttr( # or use Any to annotate the type and use Field to initialize
+ default=None
+ )

  @validator("ngrok_tunnel_settings", always=True)
  def validate_ngrok_tunnel_settings(cls, v: str, values: dict[str, Any]) -> Optional[NgrokTunnelSettings]:
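
The Config.underscore_attrs_are_private switch removed above has no Pydantic v2 equivalent; private attributes are instead declared with PrivateAttr, as the replacement does for _snowflake. A small sketch under that assumption (the Wrapper model is hypothetical, not the real class):

    from typing import Any, Optional
    from pydantic import BaseModel, PrivateAttr

    class Wrapper(BaseModel):
        name: str
        # private attribute: excluded from validation and from model_dump()
        _snowflake: Optional[Any] = PrivateAttr(default=None)

    w = Wrapper(name="example")
    w._snowflake = object()  # assigned at runtime, never serialized
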
@@ -849,7 +849,7 @@ class StoredFieldMapping(SubscriptableBaseModel):
  app_metadata: dict = Field(default_factory=dict)


- StoredStreamConfiguration.update_forward_refs()
- InboundSyncStreamsConfiguration.update_forward_refs()
- StoredFieldMappings.update_forward_refs()
- OutboundSyncConfigurationParameters.update_forward_refs()
+ StoredStreamConfiguration.model_rebuild()
+ InboundSyncStreamsConfiguration.model_rebuild()
+ StoredFieldMappings.model_rebuild()
+ OutboundSyncConfigurationParameters.model_rebuild()
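
update_forward_refs() is the Pydantic v1 spelling; v2 renames it to model_rebuild(), which resolves string ("forward") references once the referenced models exist. A minimal sketch (the Node model is hypothetical):

    from typing import List
    from pydantic import BaseModel

    class Node(BaseModel):
        value: int
        children: List["Node"] = []  # forward reference as a string

    Node.model_rebuild()  # v1 equivalent: Node.update_forward_refs()
    Node.model_validate({"value": 1, "children": [{"value": 2}]})
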
@@ -389,8 +389,8 @@ class NewOptionCreator(SubscriptableBaseModel):
  return v.__name__ if isinstance(v, MethodType) else v


- StaticFormOptionsDataSource.update_forward_refs()
- DynamicFormOptionsDataSource.update_forward_refs()
+ StaticFormOptionsDataSource.model_rebuild()
+ DynamicFormOptionsDataSource.model_rebuild()


  class FormFieldMappingSelector(FormFieldWithDataSource, BaseModel):
@@ -30,13 +30,10 @@ from typing import Any, Callable, Dict, Iterable, List, Literal, Optional, Type,

  import jinja2
  import pandas
- import pydantic
- import pydantic.json
- from pydantic import Field, parse_obj_as, root_validator
+ from pydantic_core import to_jsonable_python
+ from pydantic import Field, TypeAdapter, root_validator, BaseModel
  from dateutil.parser import parse
  from jinja2 import Environment
- from pydantic import BaseModel # pylint: disable=no-name-in-module
- from pandas import set_option
  from snowflake.connector.pandas_tools import write_pandas
  from snowflake.connector.version import VERSION
  from snowflake.snowpark import Session
@@ -127,7 +124,7 @@ class PluginInfo(BaseModel):
  manifest: PluginManifest
  anaconda_packages: List[str]
  bundled_packages: List[str]
- icon_source: Optional[str]
+ icon_source: Optional[str] = None
  plugin_class_name: str
  has_custom_validator: bool
  plugin_runtime_version: str
@@ -368,7 +365,7 @@ class SyncRequest(ABC):
  if update_rate_limit_result is not None:
  sync_id:int = update_rate_limit_result["sync_id"]
  sync_branch_name:str = update_rate_limit_result["sync_branch_name"]
- latest_state:Dict[int,Dict[str,Dict[str,RateLimitState]]] = parse_obj_as(Dict[int,Dict[str,Dict[str,RateLimitState]]],update_rate_limit_result["latest_state"])
+ latest_state = TypeAdapter(Dict[int,Dict[str,Dict[str,RateLimitState]]]).validate_python(update_rate_limit_result["latest_state"])
  (rate_limit_state_all, rate_limit_state_this_branch) = RateLimitState.collapse(latest_state,sync_id, sync_branch_name)
  self.rate_limit_state_all = rate_limit_state_all
  self.rate_limit_state_this_sync_and_branch = rate_limit_state_this_branch
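
parse_obj_as is deprecated in Pydantic v2 in favour of TypeAdapter(...).validate_python(...), which is the pattern used above and throughout the entrypoint changes below. A minimal sketch with the nested rate-limit structure stubbed down to ints (hypothetical data, not the real RateLimitState):

    from typing import Dict
    from pydantic import TypeAdapter

    adapter = TypeAdapter(Dict[int, Dict[str, int]])
    # validates and coerces nested keys/values, like parse_obj_as did in v1
    latest_state = adapter.validate_python({"1": {"endpoint": "3"}})
    assert latest_state == {1: {"endpoint": 3}}
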
@@ -466,7 +463,7 @@ class SyncRequest(ABC):
  self._session.sql(
  f"""call {self._source_app_name}.API.PLUGIN_MESSAGE(
  {self._run_id},
- PARSE_JSON($${json.dumps(message,default=pydantic.json.pydantic_encoder)}$$))"""
+ PARSE_JSON($${json.dumps(to_jsonable_python(message))}$$))"""
  ).collect()
  )
  except Exception as e:
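
pydantic.json.pydantic_encoder does not exist in Pydantic v2; pydantic_core.to_jsonable_python converts models, datetimes and other rich types into JSON-safe values before json.dumps, which is what the replacement line above relies on. A minimal sketch (the Msg model is hypothetical):

    import datetime
    import json
    from pydantic import BaseModel
    from pydantic_core import to_jsonable_python

    class Msg(BaseModel):
        run_id: int
        sent_at: datetime.datetime

    msg = Msg(run_id=1, sent_at=datetime.datetime(2024, 1, 1))
    payload = json.dumps(to_jsonable_python(msg))  # model -> dict, datetime -> ISO-8601 string
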
@@ -1955,7 +1952,7 @@ def managed_outbound_processing(concurrency: int, batch_size: int):
  dataframe_arg = method_args[0]
  if dataframe_arg.__class__.__name__ != "DataFrame" and not hasattr(dataframe_arg, "__next__"):
  raise ValueError(
- f"The first argument to a @managed_outbound_processing method must be a DataFrame or DataFrame generator (from outbound_sync_request.get_records). Instead, a {first_arg.__class__.__name__} was provided. Alternatively, you can provide these via the 'dataframe' or 'dataframe_generator' named arguments."
+ f"The first argument to a @managed_outbound_processing method must be a DataFrame or DataFrame generator (from outbound_sync_request.get_records). Instead, a {dataframe_arg.__class__.__name__} was provided. Alternatively, you can provide these via the 'dataframe' or 'dataframe_generator' named arguments."
  )
  method_args = method_args[1:]

@@ -8,8 +8,8 @@ import time
  import threading
  from typing import Dict, List, Optional

- from pydantic import BaseModel, parse_obj_as # pylint: disable=no-name-in-module
- import pydantic.json
+ from pydantic import BaseModel,TypeAdapter # pylint: disable=no-name-in-module
+ from pydantic_core import to_jsonable_python
  from snowflake.snowpark import Session

  from .api import PluginMessageStreamProgressUpdate, SyncRequestPayload, handle_proc_result
@@ -69,7 +69,7 @@ class PluginEntrypoint:

  def sync(self, sync_request: Dict):
  logger.info("Entered sync method")
- request = parse_obj_as(SyncRequestPayload, sync_request)
+ request = TypeAdapter(SyncRequestPayload).validate_python(sync_request)
  connection_secrets = self.get_secrets(
  request.oauth_secret_name, request.other_secrets_name
  )
@@ -93,7 +93,7 @@ class PluginEntrypoint:
  connection_parameters.access_token_secret_name = request.oauth_secret_name
  all_api_limits = self._plugin_instance.api_limits(connection_parameters)
  logger.info(
- f"Default API limits: {json.dumps(all_api_limits, default=pydantic.json.pydantic_encoder)}"
+ f"Default API limits: {json.dumps(to_jsonable_python(all_api_limits))}"
  )
  all_api_limits_by_category = {
  api_limit.endpoint_category: api_limit for api_limit in all_api_limits
@@ -109,7 +109,7 @@ class PluginEntrypoint:
  api_limits = list(all_api_limits_by_category.values())
  return_dict = {}
  logger.info(
- f"Rate limits state: {json.dumps(request.rate_limits_state, default=pydantic.json.pydantic_encoder)}"
+ f"Rate limits state: {json.dumps(to_jsonable_python(request.rate_limits_state))}"
  )
  (rate_limit_state_all, rate_limit_state_this_branch) = RateLimitState.collapse(request.rate_limits_state,request.sync_id, request.sync_branch_name)
  # if any endpoint categories have no state, give them an empty state
@@ -270,19 +270,15 @@ class PluginEntrypoint:
  oauth_secret_name = normalise_nulls(oauth_secret_name)
  other_secrets_name = normalise_nulls(other_secrets_name)
  connection_secrets = self.get_secrets(oauth_secret_name, other_secrets_name)
- connection_parameters = parse_obj_as(
- Dict[str, StoredConfigurationValue], connection_parameters
- )
- sync_parameters = parse_obj_as(
- Dict[str, StoredConfigurationValue], sync_parameters
- )
+ connection_parameters = TypeAdapter(
+ Dict[str, StoredConfigurationValue]).validate_python(connection_parameters)
+ sync_parameters = TypeAdapter(
+ Dict[str, StoredConfigurationValue]).validate_python(sync_parameters)
  form_parameters = None
  if current_form_parameters is not None:
- form_parameters = parse_obj_as(
- Dict[str, StoredConfigurationValue], current_form_parameters
- )
+ form_parameters = TypeAdapter(Dict[str, StoredConfigurationValue]).validate_python(current_form_parameters)
  if sync_direction == "outbound":
- sync_strat = OutboundSyncStrategy.parse_obj(sync_strategy) if sync_strategy is not None else None
+ sync_strat = OutboundSyncStrategy.model_validate(sync_strategy) if sync_strategy is not None else None
  parameters = OutboundSyncConfigurationParameters(
  connection_parameters=connection_parameters,
  connection_secrets=connection_secrets,
@@ -328,12 +324,10 @@ class PluginEntrypoint:
  oauth_secret_name = normalise_nulls(oauth_secret_name)
  other_secrets_name = normalise_nulls(other_secrets_name)
  connection_secrets = self.get_secrets(oauth_secret_name, other_secrets_name)
- connection_parameters = parse_obj_as(
- Dict[str, StoredConfigurationValue], connection_parameters
- )
- sync_parameters = parse_obj_as(
- Dict[str, StoredConfigurationValue], sync_parameters
- )
+ connection_parameters = TypeAdapter(
+ Dict[str, StoredConfigurationValue]).validate_python(connection_parameters)
+ sync_parameters = TypeAdapter(
+ Dict[str, StoredConfigurationValue]).validate_python(sync_parameters)
  parameters = InboundSyncConfigurationParameters(
  connection_parameters=connection_parameters,
  connection_secrets=connection_secrets,
@@ -360,9 +354,8 @@ class PluginEntrypoint:
  stored_values: List[Dict],
  ):
  logger.info("Entered construct_form_option method")
- stored_values_parsed = parse_obj_as(
- List[StoredConfigurationValue], stored_values
- )
+ stored_values_parsed = TypeAdapter(
+ List[StoredConfigurationValue]).validate_python(stored_values)
  the_function = getattr(
  self._plugin_instance,
  function_name,
@@ -382,7 +375,7 @@ class PluginEntrypoint:

  def create_billing_events(self, session, event_request: Dict):
  logger.info("Entered create_billing_events method")
- request = parse_obj_as(BillingEventRequest, event_request)
+ request = BillingEventRequest.model_validate(event_request)
  events: List[SnowflakeBillingEvent] = self._plugin_instance.create_billing_events(
  request
  )
@@ -435,7 +428,7 @@ class PluginEntrypoint:
  other_secrets = json.loads(secret_string_content)
  connection_secrets = {
  **connection_secrets,
- **parse_obj_as(Dict[str, StoredConfigurationValue], other_secrets),
+ **TypeAdapter(Dict[str, StoredConfigurationValue]).validate_python(other_secrets),
  }
  except Exception as exception:
  logger.error(f"Error parsing secrets content for secret {other_secrets_name}: {str(exception)}")
@@ -454,9 +447,8 @@ class PluginEntrypoint:
  oauth_secret_name = normalise_nulls(oauth_secret_name)
  other_secrets_name = normalise_nulls(other_secrets_name)
  connection_secrets = self.get_secrets(oauth_secret_name, other_secrets_name)
- connection_parameters = parse_obj_as(
- Dict[str, StoredConfigurationValue], connection_parameters
- )
+ connection_parameters = TypeAdapter(
+ Dict[str, StoredConfigurationValue]).validate_python(connection_parameters)
  parameters = ConnectionConfigurationParameters(
  connection_method=connection_method,
  connection_parameters=connection_parameters,
@@ -484,9 +476,8 @@ class PluginEntrypoint:
  return self._plugin_instance.network_addresses(
  ConnectionConfigurationParameters(
  connection_method=method,
- connection_parameters=parse_obj_as(
- Dict[str, StoredConfigurationValue], connection_parameters
- ),
+ connection_parameters=TypeAdapter(
+ Dict[str, StoredConfigurationValue]).validate_python(connection_parameters),
  connection_secrets={},
  )
  )
@@ -508,9 +499,8 @@ class PluginEntrypoint:
  )
  parameters = ConnectionConfigurationParameters(
  connection_method=method,
- connection_parameters=parse_obj_as(
- Dict[str, StoredConfigurationValue], connection_parameters
- ),
+ connection_parameters=TypeAdapter(
+ Dict[str, StoredConfigurationValue]).validate_python(connection_parameters),
  connection_secrets=connection_secrets
  )
  if oauth_secret_name is not None:
@@ -555,15 +545,14 @@ class PluginEntrypoint:
  )
  connection_parameters = ConnectionConfigurationParameters(
  connection_method=method,
- connection_parameters=parse_obj_as(
- Dict[str, StoredConfigurationValue], connection_parameters
- ),
+ connection_parameters=TypeAdapter(
+ Dict[str, StoredConfigurationValue]).validate_python(connection_parameters),
  connection_secrets=connection_secrets
  )
  if oauth_secret_name is not None:
  connection_parameters.access_token_secret_name = oauth_secret_name
  response: List[ApiLimits] = self._plugin_instance.api_limits(connection_parameters)
- return [api_limit.dict() for api_limit in response]
+ return [api_limit.model_dump() for api_limit in response]

  def outbound_record_validator(
  self,
@@ -573,12 +562,9 @@ class PluginEntrypoint:
  source_types: Dict[str, str],
  ):
  # There's a bit of parsing here that could possibly be done outside of the handler function, but this shouldn't be too expensive
- sync_parameters: Dict[str, StoredConfigurationValue] = parse_obj_as(
- Dict[str, StoredConfigurationValue], sync_parameters
- )
- field_mappings: StoredMappingValue = parse_obj_as(
- StoredMappingValue, field_mappings
- )
+ sync_parameters: Dict[str, StoredConfigurationValue] = TypeAdapter(
+ Dict[str, StoredConfigurationValue]).validate_python(sync_parameters)
+ field_mappings: StoredMappingValue = StoredMappingValue.model_validate(field_mappings)
  return self._plugin_instance.outbound_record_validator(
  sync_parameters, field_mappings, transformed_record, source_types
  )
@@ -12,8 +12,8 @@ from typing import Any, List, Literal, Optional, Dict, Tuple
  import requests
  import time
  import logging
- from pydantic import Field, root_validator
- from pydantic.json import pydantic_encoder
+ from pydantic import Field, root_validator, PrivateAttr, field_serializer
+ from pydantic_core import to_jsonable_python
  from .configuration import SubscriptableBaseModel
  import pytz
  from requests.adapters import HTTPAdapter
@@ -160,7 +160,7 @@ def epoch_milliseconds_to_datetime(epoch: int) -> datetime.datetime:
  def datetimes_as_ints_encoder(obj):
  if isinstance(obj, datetime.datetime):
  return datetime_to_epoch_milliseconds(obj)
- return pydantic_encoder(obj)
+ return to_jsonable_python(obj)


  class RateLimitState(SubscriptableBaseModel):
@@ -178,7 +178,22 @@ class RateLimitState(SubscriptableBaseModel):
  [],
  description="A list of timestamps where previous requests have been made, used to calculate the next request time",
  )
- _request_timestamps_lock = threading.Lock()
+
+ _request_timestamps_lock: threading.Lock = PrivateAttr( # or use Any to annotate the type and use Field to initialize
+ default_factory=lambda: threading.Lock()
+ )
+
+ @field_serializer('wait_until',when_used='always')
+ def serialize_wait_until(self, value:Optional[datetime.datetime]) -> Optional[int]:
+ # if a datetime is provided, convert it to epoch milliseconds
+ if value is not None:
+ return datetime_to_epoch_milliseconds(value)
+
+ @field_serializer('previous_request_timestamps',when_used='always')
+ def serialize_previous_request_timestamps(self, value:List[datetime.datetime]) -> List[int]:
+ # if a list of datetimes is provided, convert them to epoch milliseconds
+ return [datetime_to_epoch_milliseconds(ts) if ts else None for ts in value]
+

  # Combined root validator
  @root_validator(pre=True)
@@ -545,4 +560,4 @@ def too_many_requests_hook(
  return hook


- ApiLimits.update_forward_refs()
+ ApiLimits.model_rebuild()