omnata-plugin-runtime 0.8.0a186__tar.gz → 0.8.0a188__tar.gz

This diff shows the changes between two publicly released versions of the package, as published to their public registry, and is provided for informational purposes only.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: omnata-plugin-runtime
-Version: 0.8.0a186
+Version: 0.8.0a188
 Summary: Classes and common runtime components for building and running Omnata Plugins
 Author: James Weakley
 Author-email: james.weakley@omnata.com
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "omnata-plugin-runtime"
-version = "0.8.0-a186"
+version = "0.8.0-a188"
 description = "Classes and common runtime components for building and running Omnata Plugins"
 authors = ["James Weakley <james.weakley@omnata.com>"]
 readme = "README.md"
@@ -11,6 +11,7 @@ from enum import Enum

 from abc import ABC
 from pydantic import BaseModel, Field, PrivateAttr, SerializationInfo, TypeAdapter, field_validator, model_serializer, validator # pylint: disable=no-name-in-module
+from .logging import logger

 if tuple(sys.version_info[:2]) >= (3, 9):
     # Python 3.9 and above
@@ -19,8 +20,6 @@ else:
     # Python 3.8 and below
     from typing_extensions import Annotated

-logger = logging.getLogger(__name__)
-

 class MapperType(str, Enum):
     FIELD_MAPPING_SELECTOR = "field_mapping_selector"
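The two hunks above replace a module-local logging.getLogger(__name__) with a single logger imported from the package's .logging module. That module is not included in this diff, so the following is only a sketch of the shape implied by the import and by the logger.add_extra(...) calls that appear in later hunks: a shared logger plus a filter that stamps registered key/value pairs onto every record. The class names and implementation details here are assumptions, not the actual omnata_plugin_runtime code.

    import logging

    class _ContextFilter(logging.Filter):
        """Hypothetical filter that copies registered key/value pairs onto each log record."""
        def __init__(self):
            super().__init__()
            self.extras = {}

        def filter(self, record: logging.LogRecord) -> bool:
            for key, value in self.extras.items():
                setattr(record, key, value)
            return True

    class _PluginLogger(logging.Logger):
        """Hypothetical logger class exposing the add_extra() method used by PluginEntrypoint."""
        def __init__(self, name: str):
            super().__init__(name)
            self._context_filter = _ContextFilter()
            self.addFilter(self._context_filter)

        def add_extra(self, key, value):
            self._context_filter.extras[key] = value

    logging.setLoggerClass(_PluginLogger)
    logger = logging.getLogger("omnata_plugin_runtime")  # imported elsewhere as `from .logging import logger`

    logger.add_extra("omnata.operation", "sync")
    logger.info("Entered sync method")  # the record now carries the omnata.operation attribute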
@@ -24,7 +24,7 @@ from .configuration import (
     ConnectivityOption
 )
 from .forms import ConnectionMethod, FormInputField, FormOption
-from .logging import OmnataPluginLogHandler
+from .logging import OmnataPluginLogHandler, logger
 from .omnata_plugin import (
     SnowflakeBillingEvent,
     BillingEventRequest,
@@ -36,9 +36,7 @@ from .omnata_plugin import (
 )
 from pydantic import TypeAdapter
 from .rate_limiting import ApiLimits, RateLimitState
-
-# set the logger class to our custom logger so that pydantic errors are handled correctly
-logger = logging.getLogger(__name__)
+from opentelemetry import trace

 IMPORT_DIRECTORY_NAME = "snowflake_import_directory"

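The new `from opentelemetry import trace` import above is the standard OpenTelemetry API entry point; the hunks that follow use it to open spans around initialization and sync work and to attach named events to them. Below is a minimal, self-contained illustration of that API; the function and span names are placeholders, and in the Snowflake container the tracer provider is configured by the platform rather than by this snippet. With no SDK configured, get_tracer returns a no-op tracer, so the calls are safe to run anywhere.

    import importlib

    from opentelemetry import trace

    tracer = trace.get_tracer(__name__)

    def load_plugin(module_name: str):
        # a span groups a unit of work; events are timestamped annotations within it
        with tracer.start_as_current_span("plugin_initialization") as span:
            span.add_event("Importing plugin module")
            module = importlib.import_module(module_name)
            span.add_event("Module imported", {"module": module_name})  # events may carry attributes
            return module

    load_plugin("json")  # example usage with a stdlib module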
 
@@ -53,89 +51,108 @@ class PluginEntrypoint:
         self, plugin_fqn: str, session: Session, module_name: str, class_name: str
     ):
         logger.info(f"Initialising plugin entrypoint for {plugin_fqn}")
-        self._session = session
-        import_dir = sys._xoptions[IMPORT_DIRECTORY_NAME]
-        sys.path.append(os.path.join(import_dir, "app.zip"))
-        module = importlib.import_module(module_name)
-        class_obj = getattr(module, class_name)
-        self._plugin_instance: OmnataPlugin = class_obj()
-        self._plugin_instance._session = session # pylint: disable=protected-access
-        # logging defaults
-        snowflake_logger = logging.getLogger("snowflake")
-        snowflake_logger.setLevel(logging.WARN) # we don't want snowflake queries being logged by default
-        # the sync engine can tell the plugin to override log level via a session variable
-        if session is not None:
-            try:
-                v = session.sql("select getvariable('LOG_LEVEL_OVERRIDES')").collect()
-                result = v[0][0]
-                if result is not None:
-                    log_level_overrides:Dict[str,str] = json.loads(result)
-                    for logger_name,level in log_level_overrides.items():
-                        logger_override = logging.getLogger(logger_name)
-                        logger_override.setLevel(level)
-                        logger_override.propagate = False
-                        for handler in logger_override.handlers:
-                            handler.setLevel(level)
-            except Exception as e:
-                logger.error(f"Error setting log level overrides: {str(e)}")
+        self.tracer = trace.get_tracer(__name__)
+        with self.tracer.start_as_current_span("plugin_initialization") as span:
+            self._session = session
+            import_dir = sys._xoptions[IMPORT_DIRECTORY_NAME]
+            span.add_event("Adding plugin zip to path")
+            sys.path.append(os.path.join(import_dir, "app.zip"))
+            span.add_event("Importing plugin module")
+            module = importlib.import_module(module_name)
+            class_obj = getattr(module, class_name)
+            self._plugin_instance: OmnataPlugin = class_obj()
+            self._plugin_instance._session = session # pylint: disable=protected-access
+            # logging defaults
+            snowflake_logger = logging.getLogger("snowflake")
+            snowflake_logger.setLevel(logging.WARN) # we don't want snowflake queries being logged by default
+            # the sync engine can tell the plugin to override log level via a session variable
+            if session is not None:
+                try:
+                    span.add_event("Checking log level overrides")
+                    v = session.sql("select getvariable('LOG_LEVEL_OVERRIDES')").collect()
+                    result = v[0][0]
+                    if result is not None:
+                        log_level_overrides:Dict[str,str] = json.loads(result)
+                        span.add_event("Applying log level overrides",log_level_overrides)
+                        for logger_name,level in log_level_overrides.items():
+                            logger_override = logging.getLogger(logger_name)
+                            logger_override.setLevel(level)
+                            logger_override.propagate = False
+                            for handler in logger_override.handlers:
+                                handler.setLevel(level)
+                except Exception as e:
+                    logger.error(f"Error setting log level overrides: {str(e)}")


     def sync(self, sync_request: Dict):
+        logger.add_extra('omnata.operation', 'sync')
+        logger.add_extra('omnata.sync.id', request.sync_id)
+        logger.add_extra('omnata.sync.direction', request.sync_direction)
+        logger.add_extra('omnata.connection.id', request.connection_id)
+        logger.add_extra('omnata.sync.run_id', request.run_id)
+        logger.add_extra('omnata.sync_branch.id', request.sync_branch_id)
+        logger.add_extra('omnata.sync_branch.name', request.sync_branch_name)
         logger.info("Entered sync method")
-        request = TypeAdapter(SyncRequestPayload).validate_python(sync_request)
-        connection_secrets = get_secrets(
-            request.oauth_secret_name, request.other_secrets_name
-        )
-        omnata_log_handler = OmnataPluginLogHandler(
-            session=self._session,
-            sync_id=request.sync_id,
-            sync_branch_id=request.sync_branch_id,
-            connection_id=request.connection_id,
-            sync_run_id=request.run_id,
-        )
-        omnata_log_handler.register(
-            request.logging_level, self._plugin_instance.additional_loggers()
-        )
-        # construct some connection parameters for the purpose of getting the api limits
-        connection_parameters = ConnectionConfigurationParameters(
-            connection_method=request.connection_method,
-            connectivity_option=request.connectivity_option,
-            connection_parameters=request.connection_parameters,
-            connection_secrets=connection_secrets
-        )
-        if request.oauth_secret_name is not None:
-            connection_parameters.access_token_secret_name = request.oauth_secret_name
-        all_api_limits = self._plugin_instance.api_limits(connection_parameters)
-        logger.info(
-            f"Default API limits: {json.dumps(to_jsonable_python(all_api_limits))}"
-        )
-        all_api_limits_by_category = {
-            api_limit.endpoint_category: api_limit for api_limit in all_api_limits
-        }
-        all_api_limits_by_category.update(
-            {
-                k: v
-                for k, v in [
-                    (x.endpoint_category, x) for x in request.api_limit_overrides
-                ]
+        with self.tracer.start_as_current_span("sync_initialization") as span:
+            span.add_event("Fetching secrets")
+
+            request = TypeAdapter(SyncRequestPayload).validate_python(sync_request)
+            connection_secrets = get_secrets(
+                request.oauth_secret_name, request.other_secrets_name
+            )
+            span.add_event("Configuring log handler")
+            omnata_log_handler = OmnataPluginLogHandler(
+                session=self._session,
+                sync_id=request.sync_id,
+                sync_branch_id=request.sync_branch_id,
+                connection_id=request.connection_id,
+                sync_run_id=request.run_id,
+            )
+
+            omnata_log_handler.register(
+                request.logging_level, self._plugin_instance.additional_loggers()
+            )
+            # construct some connection parameters for the purpose of getting the api limits
+            connection_parameters = ConnectionConfigurationParameters(
+                connection_method=request.connection_method,
+                connectivity_option=request.connectivity_option,
+                connection_parameters=request.connection_parameters,
+                connection_secrets=connection_secrets
+            )
+            if request.oauth_secret_name is not None:
+                connection_parameters.access_token_secret_name = request.oauth_secret_name
+            span.add_event("Configuring API Limits")
+            all_api_limits = self._plugin_instance.api_limits(connection_parameters)
+            logger.info(
+                f"Default API limits: {json.dumps(to_jsonable_python(all_api_limits))}"
+            )
+            all_api_limits_by_category = {
+                api_limit.endpoint_category: api_limit for api_limit in all_api_limits
             }
-        )
-        api_limits = list(all_api_limits_by_category.values())
-        return_dict = {}
-        logger.info(
-            f"Rate limits state: {json.dumps(to_jsonable_python(request.rate_limits_state))}"
-        )
-        (rate_limit_state_all, rate_limit_state_this_branch) = RateLimitState.collapse(request.rate_limits_state,request.sync_id, request.sync_branch_name)
-        # if any endpoint categories have no state, give them an empty state
-        for api_limit in api_limits:
-            if api_limit.endpoint_category not in rate_limit_state_all:
-                rate_limit_state_all[api_limit.endpoint_category] = RateLimitState(
-                    wait_until=None, previous_request_timestamps=[]
-                )
-            if api_limit.endpoint_category not in rate_limit_state_this_branch:
-                rate_limit_state_this_branch[api_limit.endpoint_category] = RateLimitState(
-                    wait_until=None, previous_request_timestamps=[]
-                )
+            all_api_limits_by_category.update(
+                {
+                    k: v
+                    for k, v in [
+                        (x.endpoint_category, x) for x in request.api_limit_overrides
+                    ]
+                }
+            )
+            api_limits = list(all_api_limits_by_category.values())
+            return_dict = {}
+            logger.info(
+                f"Rate limits state: {json.dumps(to_jsonable_python(request.rate_limits_state))}"
+            )
+            (rate_limit_state_all, rate_limit_state_this_branch) = RateLimitState.collapse(request.rate_limits_state,request.sync_id, request.sync_branch_name)
+            # if any endpoint categories have no state, give them an empty state
+            for api_limit in api_limits:
+                if api_limit.endpoint_category not in rate_limit_state_all:
+                    rate_limit_state_all[api_limit.endpoint_category] = RateLimitState(
+                        wait_until=None, previous_request_timestamps=[]
+                    )
+                if api_limit.endpoint_category not in rate_limit_state_this_branch:
+                    rate_limit_state_this_branch[api_limit.endpoint_category] = RateLimitState(
+                        wait_until=None, previous_request_timestamps=[]
+                    )

         if request.sync_direction == "outbound":
             parameters = OutboundSyncConfigurationParameters(
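In the hunk above, the LOG_LEVEL_OVERRIDES session variable is expected to hold a JSON object mapping logger names to level names, and the tracing span records when the overrides are checked and applied. The override loop itself is plain standard-library logging and can be exercised outside Snowflake; this is a reduced sketch of it, with a made-up example payload standing in for the session variable.

    import json
    import logging
    from typing import Dict

    # example payload, standing in for getvariable('LOG_LEVEL_OVERRIDES')
    overrides_json = '{"snowflake": "DEBUG", "urllib3": "ERROR"}'

    log_level_overrides: Dict[str, str] = json.loads(overrides_json)
    for logger_name, level in log_level_overrides.items():
        logger_override = logging.getLogger(logger_name)
        logger_override.setLevel(level)       # setLevel accepts level names such as "DEBUG"
        logger_override.propagate = False     # stop records from also reaching ancestor handlers
        for handler in logger_override.handlers:
            handler.setLevel(level)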
@@ -169,11 +186,13 @@ class PluginEntrypoint:
             )
             try:
                 self._plugin_instance._configuration_parameters = parameters
-                with HttpRateLimiting(outbound_sync_request, parameters):
-                    self._plugin_instance.sync_outbound(parameters, outbound_sync_request)
+                with self.tracer.start_as_current_span("sync_execution") as span:
+                    with HttpRateLimiting(outbound_sync_request, parameters):
+                        self._plugin_instance.sync_outbound(parameters, outbound_sync_request)
                 if self._plugin_instance.disable_background_workers is False:
-                    outbound_sync_request.apply_results_queue()
-                    outbound_sync_request.apply_rate_limit_state()
+                    with self.tracer.start_as_current_span("results_finalization") as span:
+                        outbound_sync_request.apply_results_queue()
+                        outbound_sync_request.apply_rate_limit_state()
                 if outbound_sync_request.deadline_reached:
                     # if we actually hit the deadline, this is flagged by the cancellation checking worker and the cancellation
                     # token is set. We throw it here as an error since that's currently how it flows back to the engine with a DELAYED state
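HttpRateLimiting comes from this package's rate_limiting module and its internals are not shown in this diff; the hunk only relies on it being a context manager that constrains the plugin's HTTP activity while sync_outbound runs, now nested inside a sync_execution span. As a generic illustration of that usage pattern only (a toy stand-in, not the real class), a context manager can bracket a block of work like this:

    import time
    from contextlib import contextmanager

    @contextmanager
    def simple_request_pacing(min_interval_seconds: float):
        """Toy pacing context manager: ensures the wrapped block takes at least
        min_interval_seconds, so consecutive blocks cannot fire faster than that."""
        started = time.monotonic()
        try:
            yield
        finally:
            elapsed = time.monotonic() - started
            if elapsed < min_interval_seconds:
                time.sleep(min_interval_seconds - elapsed)

    # pacing applies for the duration of the with-block, mirroring how HttpRateLimiting is used above
    with simple_request_pacing(1.0):
        pass  # the plugin's outbound API calls would happen here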
@@ -227,19 +246,21 @@ class PluginEntrypoint:
                 inbound_sync_request.update_activity("Invoking plugin")
                 logger.info(f"inbound sync request: {inbound_sync_request}")
                 # plugin_instance._inbound_sync_request = outbound_sync_request
-                with HttpRateLimiting(inbound_sync_request, parameters):
-                    self._plugin_instance.sync_inbound(parameters, inbound_sync_request)
+                with self.tracer.start_as_current_span("sync_execution") as span:
+                    with HttpRateLimiting(inbound_sync_request, parameters):
+                        self._plugin_instance.sync_inbound(parameters, inbound_sync_request)
                 logger.info("Finished invoking plugin")
                 if self._plugin_instance.disable_background_workers is False:
-                    inbound_sync_request.update_activity("Staging remaining records")
-                    logger.info("Calling apply_results_queue")
-                    inbound_sync_request.apply_results_queue()
-                    try:
-                        # this is not critical, we wouldn't fail the sync over rate limit usage capture
-                        logger.info("Calling apply_rate_limit_state")
-                        inbound_sync_request.apply_rate_limit_state()
-                    except Exception as e:
-                        logger.error(f"Error applying rate limit state: {str(e)}")
+                    with self.tracer.start_as_current_span("results_finalization") as span:
+                        inbound_sync_request.update_activity("Staging remaining records")
+                        logger.info("Calling apply_results_queue")
+                        inbound_sync_request.apply_results_queue()
+                        try:
+                            # this is not critical, we wouldn't fail the sync over rate limit usage capture
+                            logger.info("Calling apply_rate_limit_state")
+                            inbound_sync_request.apply_rate_limit_state()
+                        except Exception as e:
+                            logger.error(f"Error applying rate limit state: {str(e)}")
                 # here we used to do a final inbound_sync_request.apply_progress_updates(ignore_errors=False)
                 # but it was erroring too much since there was usually a lot of DDL activity on the Snowflake side
                 # so instead, we'll provide a final progress update via a return value from the proc
@@ -283,6 +304,12 @@ class PluginEntrypoint:
         sync_parameters: Dict,
         current_form_parameters: Optional[Dict],
     ):
+        logger.add_extra('omnata.operation', 'configuration_form')
+        logger.add_extra('omnata.connection.connectivity_option', connectivity_option)
+        logger.add_extra('omnata.connection.connection_method', connection_method)
+        logger.add_extra('omnata.configuration_form.function_name', function_name)
+        logger.add_extra('omnata.sync.direction', sync_direction)
+
         logger.info("Entered configuration_form method")
         sync_strategy = normalise_nulls(sync_strategy)
         oauth_secret_name = normalise_nulls(oauth_secret_name)
@@ -342,6 +369,10 @@ class PluginEntrypoint:
         sync_parameters: Dict,
         selected_streams: Optional[List[str]], # None to return all streams without requiring schema
     ):
+        logger.add_extra('omnata.operation', 'list_streams')
+        logger.add_extra('omnata.connection.connectivity_option', connectivity_option)
+        logger.add_extra('omnata.connection.connection_method', connection_method)
+        logger.add_extra('omnata.sync.direction', 'inbound')
         logger.debug("Entered list_streams method")
         oauth_secret_name = normalise_nulls(oauth_secret_name)
         other_secrets_name = normalise_nulls(other_secrets_name)
@@ -393,6 +424,8 @@ class PluginEntrypoint:
         return results

     def connection_form(self,connectivity_option: str):
+        logger.add_extra('omnata.operation', 'connection_form')
+        logger.add_extra('omnata.connection.connectivity_option', connectivity_option)
         connectivity_option = TypeAdapter(ConnectivityOption).validate_python(connectivity_option)
         logger.info("Entered connection_form method")
         if self._plugin_instance.connection_form.__code__.co_argcount==1:
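TypeAdapter(ConnectivityOption).validate_python(...) in the hunk above is the pydantic v2 pattern for coercing and validating a plain Python value (here, a string passed in from the stored procedure) into a typed object. A small self-contained illustration with a stand-in enum rather than the package's own ConnectivityOption:

    from enum import Enum

    from pydantic import TypeAdapter

    class Direction(str, Enum):  # stand-in for an enum such as ConnectivityOption
        INBOUND = "inbound"
        OUTBOUND = "outbound"

    adapter = TypeAdapter(Direction)
    value = adapter.validate_python("inbound")  # returns Direction.INBOUND
    print(value)

    # invalid input raises pydantic.ValidationError rather than passing through silently
    # adapter.validate_python("sideways")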
@@ -402,6 +435,7 @@ class PluginEntrypoint:
         return [f.model_dump() for f in form]

     def create_billing_events(self, session, event_request: Dict):
+        logger.add_extra('omnata.operation', 'create_billing_events')
         logger.info("Entered create_billing_events method")
         request = TypeAdapter(BillingEventRequest).validate_python(event_request)
         events: List[SnowflakeBillingEvent] = self._plugin_instance.create_billing_events(
@@ -474,6 +508,9 @@ class PluginEntrypoint:
         oauth_secret_name: Optional[str],
         other_secrets_name: Optional[str],
     ):
+        logger.add_extra('omnata.operation', 'connection_test')
+        logger.add_extra('omnata.connection.connectivity_option', connectivity_option)
+        logger.add_extra('omnata.connection.connection_method', method)
         logger.info("Entered connect method")
         logger.info(f"Connection parameters: {connection_parameters}")
         connection_secrets = get_secrets(oauth_secret_name, other_secrets_name)
@@ -524,6 +561,9 @@ class PluginEntrypoint:
         connection_parameters: Dict,
         oauth_secret_name: Optional[str],
         other_secrets_name: Optional[str]):
+        logger.add_extra('omnata.operation', 'api_limits')
+        logger.add_extra('omnata.connection.connectivity_option', connectivity_option)
+        logger.add_extra('omnata.connection.connection_method', method)
         logger.info("Entered api_limits method")
         connection_secrets = get_secrets(oauth_secret_name, other_secrets_name)
         from omnata_plugin_runtime.omnata_plugin import (
@@ -15,12 +15,11 @@ import logging
 from pydantic import Field, root_validator, PrivateAttr, field_serializer
 from pydantic_core import to_jsonable_python
 from .configuration import SubscriptableBaseModel
+from .logging import logger
 import pytz
 from requests.adapters import HTTPAdapter
 from urllib3.util.retry import Retry

-logger = getLogger(__name__)
-
 TimeUnitType = Literal["second", "minute", "hour", "day"]

 HttpMethodType = Literal[
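The imports that remain in this hunk, HTTPAdapter and Retry, are the standard requests/urllib3 building blocks for transport-level retries, which is presumably what the rate_limiting module uses them for; the module body is not shown here. A typical, self-contained way to wire them together is below; the retry counts and status codes are arbitrary examples, not values taken from this package.

    import requests
    from requests.adapters import HTTPAdapter
    from urllib3.util.retry import Retry

    retry = Retry(
        total=3,                                     # up to 3 retries per request
        backoff_factor=0.5,                          # sleep 0.5s, 1s, 2s between attempts
        status_forcelist=[429, 500, 502, 503, 504],  # retry on throttling and server errors
        allowed_methods=["GET", "PUT", "DELETE"],    # retry only idempotent verbs
    )
    session = requests.Session()
    session.mount("https://", HTTPAdapter(max_retries=retry))
    session.mount("http://", HTTPAdapter(max_retries=retry))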