omnata-plugin-runtime 0.4.0a88__tar.gz → 0.4.0a90__tar.gz

Sign up to get free protection for your applications and to get access to all the features.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: omnata-plugin-runtime
3
- Version: 0.4.0a88
3
+ Version: 0.4.0a90
4
4
  Summary: Classes and common runtime components for building and running Omnata Plugins
5
5
  Author: James Weakley
6
6
  Author-email: james.weakley@omnata.com
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "omnata-plugin-runtime"
3
- version = "0.4.0-a88"
3
+ version = "0.4.0-a90"
4
4
  description = "Classes and common runtime components for building and running Omnata Plugins"
5
5
  authors = ["James Weakley <james.weakley@omnata.com>"]
6
6
  readme = "README.md"
@@ -690,19 +690,34 @@ class OutboundSyncRequest(SyncRequest):
690
690
  logger.info("applying results to table")
691
691
  # use a random table name with a random string to avoid collisions
692
692
  with self._snowflake_query_lock:
693
- success, nchunks, nrows, _ = write_pandas(
694
- conn=self._session._conn._cursor.connection, # pylint: disable=protected-access
695
- df=self._preprocess_results_dataframe(results_df),
696
- quote_identifiers=False,
697
- table_name=self._full_results_table_name,
698
- auto_create_table=False
699
- )
700
- if not success:
701
- raise ValueError(
702
- f"Failed to write results to table {self._full_results_table_name}"
703
- )
704
- logger.info(
705
- f"Wrote {nrows} rows and {nchunks} chunks to table {self._full_results_table_name}"
693
+ attempts_remaining = 12
694
+ while attempts_remaining > 0:
695
+ try:
696
+ success, nchunks, nrows, _ = write_pandas(
697
+ conn=self._session._conn._cursor.connection, # pylint: disable=protected-access
698
+ df=self._preprocess_results_dataframe(results_df),
699
+ quote_identifiers=False,
700
+ table_name=self._full_results_table_name,
701
+ auto_create_table=False
702
+ )
703
+ if not success:
704
+ raise ValueError(
705
+ f"Failed to write results to table {self._full_results_table_name}"
706
+ )
707
+ logger.info(
708
+ f"Wrote {nrows} rows and {nchunks} chunks to table {self._full_results_table_name}"
709
+ )
710
+ return
711
+ except Exception as e:
712
+ if 'is being committed' not in str(e):
713
+ raise e
714
+ logger.error(
715
+ f"Transaction clash writing results to table {self._full_results_table_name}: {e}"
716
+ )
717
+ attempts_remaining -= 1
718
+ time.sleep(5)
719
+ raise ValueError(
720
+ f"Failed to write results to table {self._full_results_table_name} after 12 attempts"
706
721
  )
707
722
 
708
723
  def __dataframe_wrapper(
@@ -948,8 +963,6 @@ class InboundSyncRequest(SyncRequest):
948
963
  for stream_name in stream_names:
949
964
  self._apply_results[stream_name] = None
950
965
  self._apply_results = {}
951
- # update the inbound stream record counts, so we can see progress
952
- self.apply_progress_updates()
953
966
 
954
967
  # also take care of uploading delete requests
955
968
  if hasattr(self,'_apply_results_criteria_deletes') and self._apply_results_criteria_deletes is not None:
@@ -969,6 +982,12 @@ class InboundSyncRequest(SyncRequest):
969
982
  self._apply_criteria_deletes_dataframe(all_dfs)
970
983
  # clear the delete requests
971
984
  self._apply_results_criteria_deletes = {}
985
+
986
+
987
+ # update the inbound stream record counts, so we can see progress
988
+ # we do this last, because marking a stream as completed will cause the sync engine to process it
989
+ # so we need to make sure all the results are applied first
990
+ self.apply_progress_updates()
972
991
 
973
992
  def apply_progress_updates(self):
974
993
  """
@@ -1308,34 +1327,49 @@ class InboundSyncRequest(SyncRequest):
1308
1327
  """
1309
1328
  if len(results_df) > 0:
1310
1329
  with self._snowflake_query_lock:
1311
- logger.info(
1312
- f"Applying {len(results_df)} results to {self._full_results_table_name}"
1313
- )
1314
- # try setting parquet engine here, since the engine parameter does not seem to make it through to the write_pandas function
1315
- success, nchunks, nrows, _ = write_pandas(
1316
- conn=self._session._conn._cursor.connection, # pylint: disable=protected-access
1317
- df=results_df,
1318
- table_name=self._full_results_table_name,
1319
- quote_identifiers=False, # already done in get_temp_table_name
1320
- # schema='INBOUND_RAW', # it seems to be ok to provide schema in the table name
1321
- table_type="transient"
1322
- )
1323
- if not success:
1324
- raise ValueError(
1325
- f"Failed to write results to table {self._full_results_table_name}"
1326
- )
1327
- logger.info(
1328
- f"Wrote {nrows} rows and {nchunks} chunks to table {self._full_results_table_name}"
1330
+ attempts_remaining = 12
1331
+ while attempts_remaining > 0:
1332
+ try:
1333
+ logger.info(
1334
+ f"Applying {len(results_df)} results to {self._full_results_table_name}"
1335
+ )
1336
+ # try setting parquet engine here, since the engine parameter does not seem to make it through to the write_pandas function
1337
+ success, nchunks, nrows, _ = write_pandas(
1338
+ conn=self._session._conn._cursor.connection, # pylint: disable=protected-access
1339
+ df=results_df,
1340
+ table_name=self._full_results_table_name,
1341
+ quote_identifiers=False, # already done in get_temp_table_name
1342
+ # schema='INBOUND_RAW', # it seems to be ok to provide schema in the table name
1343
+ table_type="transient"
1344
+ )
1345
+ if not success:
1346
+ raise ValueError(
1347
+ f"Failed to write results to table {self._full_results_table_name}"
1348
+ )
1349
+ logger.info(
1350
+ f"Wrote {nrows} rows and {nchunks} chunks to table {self._full_results_table_name}"
1351
+ )
1352
+ # temp tables aren't allowed
1353
+ # snowflake_df = self._session.create_dataframe(results_df)
1354
+ # snowflake_df.write.save_as_table(table_name=temp_table,
1355
+ # mode='append',
1356
+ # column_order='index',
1357
+ # #create_temp_table=True
1358
+ # )
1359
+ for stream_name in stream_names:
1360
+ self._results_exist[stream_name] = True
1361
+ return
1362
+ except Exception as e:
1363
+ if 'is being committed' not in str(e):
1364
+ raise e
1365
+ logger.error(
1366
+ f"Transaction clash writing results to table {self._full_results_table_name}: {e}"
1367
+ )
1368
+ attempts_remaining -= 1
1369
+ time.sleep(5)
1370
+ raise ValueError(
1371
+ f"Failed to write results to table {self._full_results_table_name} after 12 attempts"
1329
1372
  )
1330
- # temp tables aren't allowed
1331
- # snowflake_df = self._session.create_dataframe(results_df)
1332
- # snowflake_df.write.save_as_table(table_name=temp_table,
1333
- # mode='append',
1334
- # column_order='index',
1335
- # #create_temp_table=True
1336
- # )
1337
- for stream_name in stream_names:
1338
- self._results_exist[stream_name] = True
1339
1373
  else:
1340
1374
  logger.info("Results dataframe is empty, not applying")
1341
1375
 
@@ -1353,23 +1387,38 @@ class InboundSyncRequest(SyncRequest):
1353
1387
  """
1354
1388
  if len(results_df) > 0:
1355
1389
  with self._snowflake_query_lock:
1356
- logger.info(
1357
- f"Applying {len(results_df)} criteria deletes to {self._criteria_deletes_table_name}"
1358
- )
1359
- # try setting parquet engine here, since the engine parameter does not seem to make it through to the write_pandas function
1360
- success, nchunks, nrows, _ = write_pandas(
1361
- conn=self._session._conn._cursor.connection, # pylint: disable=protected-access
1362
- df=results_df,
1363
- table_name=self._criteria_deletes_table_name,
1364
- quote_identifiers=False, # already done in get_temp_table_name
1365
- table_type="transient"
1366
- )
1367
- if not success:
1368
- raise ValueError(
1369
- f"Failed to write results to table {self._criteria_deletes_table_name}"
1370
- )
1371
- logger.info(
1372
- f"Wrote {nrows} rows and {nchunks} chunks to table {self._criteria_deletes_table_name}"
1390
+ attempts_remaining = 12
1391
+ while attempts_remaining > 0:
1392
+ try:
1393
+ logger.info(
1394
+ f"Applying {len(results_df)} criteria deletes to {self._criteria_deletes_table_name}"
1395
+ )
1396
+ # try setting parquet engine here, since the engine parameter does not seem to make it through to the write_pandas function
1397
+ success, nchunks, nrows, _ = write_pandas(
1398
+ conn=self._session._conn._cursor.connection, # pylint: disable=protected-access
1399
+ df=results_df,
1400
+ table_name=self._criteria_deletes_table_name,
1401
+ quote_identifiers=False, # already done in get_temp_table_name
1402
+ table_type="transient"
1403
+ )
1404
+ if not success:
1405
+ raise ValueError(
1406
+ f"Failed to write results to table {self._criteria_deletes_table_name}"
1407
+ )
1408
+ logger.info(
1409
+ f"Wrote {nrows} rows and {nchunks} chunks to table {self._criteria_deletes_table_name}"
1410
+ )
1411
+ return
1412
+ except Exception as e:
1413
+ if 'is being committed' not in str(e):
1414
+ raise e
1415
+ logger.error(
1416
+ f"Transaction clash writing results to table {self._criteria_deletes_table_name}: {e}"
1417
+ )
1418
+ attempts_remaining -= 1
1419
+ time.sleep(5)
1420
+ raise ValueError(
1421
+ f"Failed to write results to table {self._criteria_deletes_table_name} after 12 attempts"
1373
1422
  )
1374
1423
  else:
1375
1424
  logger.info("Results dataframe is empty, not applying")