omnata-plugin-runtime 0.8.0a193__py3-none-any.whl → 0.8.0a195__py3-none-any.whl

omnata_plugin_runtime/configuration.py
@@ -11,7 +11,7 @@ from enum import Enum
 
 from abc import ABC
 from pydantic import BaseModel, Field, PrivateAttr, SerializationInfo, TypeAdapter, field_validator, model_serializer, validator # pylint: disable=no-name-in-module
-from .logging import logger
+from .logging import logger, tracer
 
 if tuple(sys.version_info[:2]) >= (3, 9):
     # Python 3.9 and above
@@ -19,9 +19,6 @@ if tuple(sys.version_info[:2]) >= (3, 9):
 else:
     # Python 3.8 and below
     from typing_extensions import Annotated
-from opentelemetry import trace
-
-tracer = trace.get_tracer(__name__)
 
 class MapperType(str, Enum):
     FIELD_MAPPING_SELECTOR = "field_mapping_selector"
omnata_plugin_runtime/logging.py
@@ -11,6 +11,7 @@ from pydantic import ValidationError
 from snowflake import telemetry
 from opentelemetry import trace
 
+tracer = trace.get_tracer('omnata_plugin_runtime')
 
 class CustomLoggerAdapter(logging.LoggerAdapter):
     """
omnata_plugin_runtime/omnata_plugin.py
@@ -47,7 +47,7 @@ from snowflake.snowpark import Session
 from snowflake.snowpark.functions import col
 from tenacity import Retrying, stop_after_attempt, wait_fixed, retry_if_exception_message
 
-from .logging import OmnataPluginLogHandler
+from .logging import OmnataPluginLogHandler, logger, tracer
 
 from .api import (
     PluginMessage,
@@ -88,7 +88,6 @@ from .rate_limiting import (
     RateLimitedSession
 )
 
-logger = getLogger(__name__)
 SortDirectionType = Literal["asc", "desc"]
 
 
@@ -810,22 +809,23 @@ class OutboundSyncRequest(SyncRequest):
         logger.debug("applying results to table")
         # use a random table name with a random string to avoid collisions
         with self._snowflake_query_lock:
-            for attempt in Retrying(stop=stop_after_attempt(30),wait=wait_fixed(2),reraise=True,retry=retry_if_exception_message(match=".*(is being|was) committed.*")):
-                with attempt:
-                    success, nchunks, nrows, _ = write_pandas(
-                        conn=self._session._conn._cursor.connection, # pylint: disable=protected-access
-                        df=self._preprocess_results_dataframe(results_df),
-                        quote_identifiers=False,
-                        table_name=self._full_results_table_name,
-                        auto_create_table=False
-                    )
-                    if not success:
-                        raise ValueError(
-                            f"Failed to write results to table {self._full_results_table_name}"
+            with tracer.start_as_current_span("apply_results"):
+                for attempt in Retrying(stop=stop_after_attempt(30),wait=wait_fixed(2),reraise=True,retry=retry_if_exception_message(match=".*(is being|was) committed.*")):
+                    with attempt:
+                        success, nchunks, nrows, _ = write_pandas(
+                            conn=self._session._conn._cursor.connection, # pylint: disable=protected-access
+                            df=self._preprocess_results_dataframe(results_df),
+                            quote_identifiers=False,
+                            table_name=self._full_results_table_name,
+                            auto_create_table=False
+                        )
+                        if not success:
+                            raise ValueError(
+                                f"Failed to write results to table {self._full_results_table_name}"
+                            )
+                        logger.debug(
+                            f"Wrote {nrows} rows and {nchunks} chunks to table {self._full_results_table_name}"
                         )
-                    logger.debug(
-                        f"Wrote {nrows} rows and {nchunks} chunks to table {self._full_results_table_name}"
-                    )
 
     def __dataframe_wrapper(
         self, data_frame: pandas.DataFrame, render_jinja: bool = True
@@ -1465,36 +1465,37 @@ class InboundSyncRequest(SyncRequest):
         """
         if len(results_df) > 0:
             with self._snowflake_query_lock:
-                for attempt in Retrying(stop=stop_after_attempt(30),wait=wait_fixed(2),reraise=True,retry=retry_if_exception_message(match=".*(is being|was) committed.*")):
-                    with attempt:
-                        logger.debug(
-                            f"Applying {len(results_df)} results to {self._full_results_table_name}"
-                        )
-                        # try setting parquet engine here, since the engine parameter does not seem to make it through to the write_pandas function
-                        success, nchunks, nrows, _ = write_pandas(
-                            conn=self._session._conn._cursor.connection, # pylint: disable=protected-access
-                            df=results_df,
-                            table_name=self._full_results_table_name,
-                            quote_identifiers=False, # already done in get_temp_table_name
-                            # schema='INBOUND_RAW', # it seems to be ok to provide schema in the table name
-                            table_type="transient"
-                        )
-                        if not success:
-                            raise ValueError(
-                                f"Failed to write results to table {self._full_results_table_name}"
+                with tracer.start_as_current_span("apply_results"):
+                    for attempt in Retrying(stop=stop_after_attempt(30),wait=wait_fixed(2),reraise=True,retry=retry_if_exception_message(match=".*(is being|was) committed.*")):
+                        with attempt:
+                            logger.debug(
+                                f"Applying {len(results_df)} results to {self._full_results_table_name}"
                             )
-                        logger.debug(
-                            f"Wrote {nrows} rows and {nchunks} chunks to table {self._full_results_table_name}"
-                        )
-                        # temp tables aren't allowed
-                        # snowflake_df = self._session.create_dataframe(results_df)
-                        # snowflake_df.write.save_as_table(table_name=temp_table,
-                        #     mode='append',
-                        #     column_order='index',
-                        #     #create_temp_table=True
-                        #     )
-                        for stream_name in stream_names:
-                            self._results_exist[stream_name] = True
+                            # try setting parquet engine here, since the engine parameter does not seem to make it through to the write_pandas function
+                            success, nchunks, nrows, _ = write_pandas(
+                                conn=self._session._conn._cursor.connection, # pylint: disable=protected-access
+                                df=results_df,
+                                table_name=self._full_results_table_name,
+                                quote_identifiers=False, # already done in get_temp_table_name
+                                # schema='INBOUND_RAW', # it seems to be ok to provide schema in the table name
+                                table_type="transient"
+                            )
+                            if not success:
+                                raise ValueError(
+                                    f"Failed to write results to table {self._full_results_table_name}"
+                                )
+                            logger.debug(
+                                f"Wrote {nrows} rows and {nchunks} chunks to table {self._full_results_table_name}"
+                            )
+                            # temp tables aren't allowed
+                            # snowflake_df = self._session.create_dataframe(results_df)
+                            # snowflake_df.write.save_as_table(table_name=temp_table,
+                            #     mode='append',
+                            #     column_order='index',
+                            #     #create_temp_table=True
+                            #     )
+                            for stream_name in stream_names:
+                                self._results_exist[stream_name] = True
         else:
             logger.debug("Results dataframe is empty, not applying")
 
@@ -2137,14 +2138,16 @@ def __managed_inbound_processing_worker(
         logger.debug(f"stream returned from queue: {stream}")
         # restore the first argument, was originally the dataframe/generator but now it's the appropriately sized dataframe
         try:
-            logger.debug(f"worker {worker_index} processing stream {stream.stream_name}, invoking plugin class method {method.__name__}")
-            result = method(plugin_class_obj, *(stream, *method_args), **method_kwargs)
-            logger.debug(f"worker {worker_index} completed processing stream {stream.stream_name}")
-            if result is not None and result is False:
-                logger.info(f"worker {worker_index} requested that {stream.stream_name} be not marked as complete")
-            else:
-                logger.info(f"worker {worker_index} marking stream {stream.stream_name} as complete")
-                plugin_class_obj._sync_request.mark_stream_complete(stream.stream_name)
+            with tracer.start_as_current_span("managed_inbound_processing") as managed_inbound_processing_span:
+                logger.debug(f"worker {worker_index} processing stream {stream.stream_name}, invoking plugin class method {method.__name__}")
+                managed_inbound_processing_span.set_attribute("stream_name", stream.stream_name)
+                result = method(plugin_class_obj, *(stream, *method_args), **method_kwargs)
+                logger.debug(f"worker {worker_index} completed processing stream {stream.stream_name}")
+                if result is not None and result is False:
+                    logger.info(f"worker {worker_index} requested that {stream.stream_name} be not marked as complete")
+                else:
+                    logger.info(f"worker {worker_index} marking stream {stream.stream_name} as complete")
+                    plugin_class_obj._sync_request.mark_stream_complete(stream.stream_name)
         except InterruptedWhileWaitingException:
             # If an inbound run is cancelled while waiting for rate limiting, this should mean that
             # the cancellation is handled elsewhere, so we don't need to do anything special here other than stop waiting
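In the worker hunk above, the span also carries the stream name as an attribute, so individual streams can be picked out in a trace backend. A short illustrative sketch of that usage (the function and variable names are hypothetical; only tracer.start_as_current_span and Span.set_attribute come from the diff):

    from opentelemetry import trace

    tracer = trace.get_tracer("omnata_plugin_runtime")

    def process_stream(stream_name: str) -> None:
        # one span per stream; set_attribute records which stream this span covers
        with tracer.start_as_current_span("managed_inbound_processing") as span:
            span.set_attribute("stream_name", stream_name)
            ...  # fetch and apply records for this stream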
omnata_plugin_runtime/plugin_entrypoints.py
@@ -24,7 +24,7 @@ from .configuration import (
     ConnectivityOption
 )
 from .forms import ConnectionMethod, FormInputField, FormOption
-from .logging import OmnataPluginLogHandler, logger
+from .logging import OmnataPluginLogHandler, logger, tracer
 from .omnata_plugin import (
     SnowflakeBillingEvent,
     BillingEventRequest,
@@ -39,7 +39,6 @@ from .rate_limiting import ApiLimits, RateLimitState
 from opentelemetry import trace
 
 IMPORT_DIRECTORY_NAME = "snowflake_import_directory"
-tracer = trace.get_tracer(__name__)
 
 class PluginEntrypoint:
     """
omnata_plugin_runtime-0.8.0a195.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: omnata-plugin-runtime
-Version: 0.8.0a193
+Version: 0.8.0a195
 Summary: Classes and common runtime components for building and running Omnata Plugins
 Author: James Weakley
 Author-email: james.weakley@omnata.com
omnata_plugin_runtime-0.8.0a195.dist-info/RECORD
@@ -0,0 +1,12 @@
+omnata_plugin_runtime/__init__.py,sha256=MS9d1whnfT_B3-ThqZ7l63QeC_8OEKTuaYV5wTwRpBA,1576
+omnata_plugin_runtime/api.py,sha256=tVi4KLL0v5N3yz3Ie0kSyFemryu572gCbtSRfWN6wBU,6523
+omnata_plugin_runtime/configuration.py,sha256=6JmgE4SL3F5cGlDYqt17A1vTFu6nB74yWgEpQ5qV9ho,38380
+omnata_plugin_runtime/forms.py,sha256=ueodN2GIMS5N9fqebpY4uNGJnjEb9HcuaVQVfWH-cGg,19838
+omnata_plugin_runtime/logging.py,sha256=WBuZt8lF9E5oFWM4KYQbE8dDJ_HctJ1pN3BHwU6rcd0,4461
+omnata_plugin_runtime/omnata_plugin.py,sha256=ZMoF5ef9GT0ri8nof_3JaJT4dTQ9oDRf22zw-5UryjI,130600
+omnata_plugin_runtime/plugin_entrypoints.py,sha256=3nqIWOlUdNVkBu7YBqC73Gb5G_xvMUzmlZf838N0JSg,32728
+omnata_plugin_runtime/rate_limiting.py,sha256=6fn_h2vxcHbqqiW-OZ6FKfNYv_XlNvorsrCknVce2PA,25929
+omnata_plugin_runtime-0.8.0a195.dist-info/LICENSE,sha256=rGaMQG3R3F5-JGDp_-rlMKpDIkg5n0SI4kctTk8eZSI,56
+omnata_plugin_runtime-0.8.0a195.dist-info/METADATA,sha256=JW6TXPXIu60cr3BLAydTSegpjml5vFKFJQuRWZbmP8M,2148
+omnata_plugin_runtime-0.8.0a195.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+omnata_plugin_runtime-0.8.0a195.dist-info/RECORD,,
omnata_plugin_runtime-0.8.0a193.dist-info/RECORD
@@ -1,12 +0,0 @@
-omnata_plugin_runtime/__init__.py,sha256=MS9d1whnfT_B3-ThqZ7l63QeC_8OEKTuaYV5wTwRpBA,1576
-omnata_plugin_runtime/api.py,sha256=tVi4KLL0v5N3yz3Ie0kSyFemryu572gCbtSRfWN6wBU,6523
-omnata_plugin_runtime/configuration.py,sha256=uMGMqKYy4XmntX1ROungUwTJXeY2ciczAb_PtRCFZZI,38441
-omnata_plugin_runtime/forms.py,sha256=ueodN2GIMS5N9fqebpY4uNGJnjEb9HcuaVQVfWH-cGg,19838
-omnata_plugin_runtime/logging.py,sha256=-8BqjgIOhmAFlHz0U_n4YPPme_YvjXyFDjtpbkbS9c8,4410
-omnata_plugin_runtime/omnata_plugin.py,sha256=aggjb_CTTjhgqjS8CHPOm4ENU0jNcYoT6LC8yI1IeF4,130048
-omnata_plugin_runtime/plugin_entrypoints.py,sha256=LZHwOsacZmS6JtzNM3OsLvpYyVo_UP0DaCWDWDxrJ0w,32756
-omnata_plugin_runtime/rate_limiting.py,sha256=6fn_h2vxcHbqqiW-OZ6FKfNYv_XlNvorsrCknVce2PA,25929
-omnata_plugin_runtime-0.8.0a193.dist-info/LICENSE,sha256=rGaMQG3R3F5-JGDp_-rlMKpDIkg5n0SI4kctTk8eZSI,56
-omnata_plugin_runtime-0.8.0a193.dist-info/METADATA,sha256=oMhjg3-Ms9WMHfnSk-IMhpUDbdNOvPhy2gVd0OHYTgE,2148
-omnata_plugin_runtime-0.8.0a193.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
-omnata_plugin_runtime-0.8.0a193.dist-info/RECORD,,