omnata-plugin-runtime 0.3.23a68__tar.gz → 0.3.24a70__tar.gz
- {omnata_plugin_runtime-0.3.23a68 → omnata_plugin_runtime-0.3.24a70}/PKG-INFO +1 -1
- {omnata_plugin_runtime-0.3.23a68 → omnata_plugin_runtime-0.3.24a70}/pyproject.toml +1 -1
- {omnata_plugin_runtime-0.3.23a68 → omnata_plugin_runtime-0.3.24a70}/src/omnata_plugin_runtime/omnata_plugin.py +43 -35
- {omnata_plugin_runtime-0.3.23a68 → omnata_plugin_runtime-0.3.24a70}/LICENSE +0 -0
- {omnata_plugin_runtime-0.3.23a68 → omnata_plugin_runtime-0.3.24a70}/README.md +0 -0
- {omnata_plugin_runtime-0.3.23a68 → omnata_plugin_runtime-0.3.24a70}/src/omnata_plugin_runtime/__init__.py +0 -0
- {omnata_plugin_runtime-0.3.23a68 → omnata_plugin_runtime-0.3.24a70}/src/omnata_plugin_runtime/api.py +0 -0
- {omnata_plugin_runtime-0.3.23a68 → omnata_plugin_runtime-0.3.24a70}/src/omnata_plugin_runtime/configuration.py +0 -0
- {omnata_plugin_runtime-0.3.23a68 → omnata_plugin_runtime-0.3.24a70}/src/omnata_plugin_runtime/forms.py +0 -0
- {omnata_plugin_runtime-0.3.23a68 → omnata_plugin_runtime-0.3.24a70}/src/omnata_plugin_runtime/logging.py +0 -0
- {omnata_plugin_runtime-0.3.23a68 → omnata_plugin_runtime-0.3.24a70}/src/omnata_plugin_runtime/plugin_entrypoints.py +0 -0
- {omnata_plugin_runtime-0.3.23a68 → omnata_plugin_runtime-0.3.24a70}/src/omnata_plugin_runtime/rate_limiting.py +0 -0
{omnata_plugin_runtime-0.3.23a68 → omnata_plugin_runtime-0.3.24a70}/pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "omnata-plugin-runtime"
-version = "0.3.23-a68"
+version = "0.3.24-a70"
 description = "Classes and common runtime components for building and running Omnata Plugins"
 authors = ["James Weakley <james.weakley@omnata.com>"]
 readme = "README.md"
{omnata_plugin_runtime-0.3.23a68 → omnata_plugin_runtime-0.3.24a70}/src/omnata_plugin_runtime/omnata_plugin.py

@@ -1261,7 +1261,7 @@ class InboundSyncRequest(SyncRequest):
         )
     ]

-    def get_hash(keys:List[str]) -> str:
+    def get_hash(self, keys:List[str]) -> str:
         """
         Creates a hash from a list of keys.
         The function will join the keys with an underscore and then create a
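The change above adds the missing `self` parameter to `get_hash`, which is defined inside `InboundSyncRequest`. Without `self`, Python passes the instance itself as `keys` when the method is called on an object, so the call fails before any hashing happens. A minimal standalone sketch of the failure mode, using a hypothetical `Demo` class and only the underscore-join step described in the docstring (the real method goes on to hash the joined string):

```python
from typing import List


class Demo:
    # signature as it was before the fix: no `self` parameter
    def get_hash(keys: List[str]) -> str:
        return "_".join(keys)

    # signature after the fix: `self` first, then the list of keys
    def get_hash_fixed(self, keys: List[str]) -> str:
        return "_".join(keys)


d = Demo()
print(d.get_hash_fixed(["account", "region"]))  # prints: account_region
try:
    d.get_hash(["account", "region"])
except TypeError as error:
    # the instance is passed as the first positional argument, so the
    # old one-parameter signature receives two arguments and fails
    print(error)
```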
@@ -1827,26 +1827,35 @@ def managed_outbound_processing(concurrency: int, batch_size: int):
                 raise ValueError(
                     "To use the managed_outbound_processing decorator, you must attach a sync request to the plugin instance (via the _sync_request property)"
                 )
-            # if self._sync_request.api_limits is None:
-            #    raise ValueError('To use the managed_outbound_processing decorator, API constraints must be defined. These can be provided in the response to the connect method')
             logger.info(f"Batch size: {batch_size}. Concurrency: {concurrency}")
[removed lines 1833-1845 were not captured in this extract]
-            )
+
+            dataframe_arg = None
+            if 'dataframe' in method_kwargs:
+                dataframe_arg = method_kwargs['dataframe']
+                del method_kwargs['dataframe']
+                if dataframe_arg.__class__.__name__ != "DataFrame":
+                    raise ValueError(
+                        f"The 'dataframe' named argument to the @managed_outbound_processing must be a DataFrame. Instead, a {dataframe_arg.__class__.__name__} was provided."
+                    )
+
+            elif 'dataframe_generator' in method_kwargs:
+                dataframe_arg = method_kwargs['dataframe_generator']
+                del method_kwargs['dataframe_generator']
+                if not hasattr(dataframe_arg, "__next__"):
+                    raise ValueError(
+                        f"The 'dataframe_generator' named argument to the @managed_outbound_processing must be an iterator function. Instead, a {dataframe_arg.__class__.__name__} was provided."
+                    )
+            # if the dataframe was provided as the first argument, we'll use that
+            if dataframe_arg is None and len(method_args) > 0:
+                dataframe_arg = method_args[0]
+                if dataframe_arg.__class__.__name__ != "DataFrame" and not hasattr(dataframe_arg, "__next__"):
+                    raise ValueError(
+                        f"The first argument to a @managed_outbound_processing method must be a DataFrame or DataFrame generator (from outbound_sync_request.get_records). Instead, a {first_arg.__class__.__name__} was provided. Alternatively, you can provide these via the 'dataframe' or 'dataframe_generator' named arguments."
+                    )
+                method_args = method_args[1:]

             # put the record iterator on the queue, ready for the first task to read it
-            fixed_size_generator = FixedSizeGenerator(
+            fixed_size_generator = FixedSizeGenerator(dataframe_arg, batch_size=batch_size)
             tasks:List[threading.Thread] = []
             logger.info(f"Creating {concurrency} worker(s) for applying records")
             # just in case
@@ -1862,7 +1871,7 @@ def managed_outbound_processing(concurrency: int, batch_size: int):
                     i,
                     fixed_size_generator,
                     self._sync_request._thread_cancellation_token,
-                    method_args
+                    method_args,
                     method_kwargs,
                 ),
             )
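Taken together, the `managed_outbound_processing` changes mean the records no longer have to arrive as the first positional argument of the decorated method: they can also be supplied via the new `dataframe` or `dataframe_generator` named arguments, and the `FixedSizeGenerator` is now built from whichever source was found. A rough usage sketch follows; the decorator and `outbound_sync_request.get_records` are referenced by the diff itself, but the plugin class name, method names and the `sync_outbound` signature are assumptions made for illustration:

```python
import pandas

from omnata_plugin_runtime.omnata_plugin import (
    OmnataPlugin,
    managed_outbound_processing,
)


class ExampleOutboundPlugin(OmnataPlugin):  # hypothetical plugin class
    @managed_outbound_processing(concurrency=5, batch_size=100)
    def upload_records(self, dataframe: pandas.DataFrame):
        # invoked once per batch of records by the decorator's worker threads
        ...

    def sync_outbound(self, parameters, outbound_sync_request):
        # get_records() provides the records to send, as a DataFrame generator
        records = outbound_sync_request.get_records()
        # new in 0.3.24: the generator can be passed as a named argument
        self.upload_records(dataframe_generator=records)
        # passing it as the first positional argument still works:
        # self.upload_records(records)
```

One detail worth noting in the hunk above: the error message in the positional-argument check interpolates `first_arg`, which is not defined in the new code (`dataframe_arg` is the variable in scope), so that particular error path would appear to raise a `NameError` rather than the intended `ValueError`.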
@@ -1987,22 +1996,21 @@ def managed_inbound_processing(concurrency: int):
                 concurrency_to_use = 1 # disable concurrency when running in development mode, it interferes with pyvcr
             else:
                 concurrency_to_use = concurrency
[removed lines 1990-1994 were not captured in this extract]
-            )
[removed lines 1996-2002 were not captured in this extract]
-            )
+            stream_list_arg: List[StoredStreamConfiguration] = None
+            if 'streams' in method_kwargs:
+                stream_list_arg = cast(List[StoredStreamConfiguration],method_kwargs['streams'])
+                del method_kwargs['streams']
+            if stream_list_arg is None and len(method_args) > 0:
+                stream_list_arg = cast(List[StoredStreamConfiguration],method_args[0])
+                if stream_list_arg.__class__.__name__ != "list":
+                    raise ValueError(
+                        f"The first argument to a @managed_inbound_processing method must be a list of StoredStreamConfigurations if the 'streams' named argument is not provided. Instead, a {stream_list_arg.__class__.__name__} was provided."
+                    )
+                method_args = method_args[1:]
+            if stream_list_arg is None:
+                raise ValueError("You must provide a list of StoredStreamConfiguration objects to the method, either as the first argument or as a named argument 'streams'")

-            streams_list
+            streams_list = stream_list_arg
             # create a queue full of all the streams to process
             streams_queue = queue.Queue()
             for stream in streams_list:
@@ -2022,7 +2030,7 @@ def managed_inbound_processing(concurrency: int):
                     i,
                     streams_queue,
                     self._sync_request._thread_cancellation_token,
-                    method_args
+                    method_args,
                     method_kwargs,
                 ),
             )
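The `managed_inbound_processing` changes mirror the outbound ones: the stream list can now be passed via a `streams` named argument as well as positionally, and a clearer error is raised when neither is supplied. Another rough sketch under the same caveats; the decorator and `StoredStreamConfiguration` appear in the diff, while the import paths, plugin class, method names, `sync_inbound` signature and the `inbound_sync_request.streams` attribute are illustrative assumptions:

```python
from typing import List

from omnata_plugin_runtime.configuration import StoredStreamConfiguration
from omnata_plugin_runtime.omnata_plugin import (
    OmnataPlugin,
    managed_inbound_processing,
)


class ExampleInboundPlugin(OmnataPlugin):  # hypothetical plugin class
    @managed_inbound_processing(concurrency=5)
    def fetch_stream(self, stream: StoredStreamConfiguration):
        # invoked once per stream by the decorator's worker threads
        ...

    def sync_inbound(self, parameters, inbound_sync_request):
        # assumed attribute holding the configured streams
        streams: List[StoredStreamConfiguration] = inbound_sync_request.streams
        # new in 0.3.24: the list can be passed as a named argument
        self.fetch_stream(streams=streams)
        # passing it as the first positional argument still works:
        # self.fetch_stream(streams)
```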