omnata-plugin-runtime 0.4.7a104__py3-none-any.whl → 0.4.8__py3-none-any.whl

@@ -1244,55 +1244,68 @@ class InboundSyncRequest(SyncRequest):
  )
  primary_key_field = None
  records = results_df.to_dict("records")
- if (self._streams_dict[stream_name].stream.source_defined_primary_key is not None):
- primary_key_field = self._streams_dict[stream_name].stream.source_defined_primary_key
+ # this extra bit of source_defined_primary_key logic is just to catch the situation where the plugin accidentally
+ # provides an empty list as the source defined primary key (SFMC plugin did this for a while, so some customers have [] here).
+ # This should be caught upstream during configuration, but we'll check here too.
+ # Note that source defined primary keys override any user choice.
+ primary_key_correctly_defined_by_source = False
+ if stream_obj.source_defined_primary_key is not None:
+ if isinstance(stream_obj.source_defined_primary_key,list):
+ if len(stream_obj.source_defined_primary_key)>0:
+ primary_key_correctly_defined_by_source = True
+ else:
+ primary_key_correctly_defined_by_source = True
+
+ if primary_key_correctly_defined_by_source:
+ primary_key_field = stream_obj.source_defined_primary_key
  elif self._streams_dict[stream_name].primary_key_field is not None:
  primary_key_field = self._streams_dict[stream_name].primary_key_field
  else:
- results_df["APP_IDENTIFIER"] = None
- if primary_key_field is not None:
- if isinstance(primary_key_field,list) and len(primary_key_field) == 1:
- # don't hash it if it's just a single value
- primary_key_field = primary_key_field[0]
- if isinstance(primary_key_field,list):
- primary_key_fields = cast(List[str],primary_key_field)
- primary_key_fields = sorted(primary_key_fields)
- # handle the sitation where the primary key is a list of fields
- # first, check that all records contain all of the primary key fields
- if not all(
- all(
- field in record["RECORD_DATA"]
- for field in primary_key_fields
- )
- for record in records
- ):
- raise ValueError(
- f"Primary key fields '{primary_key_fields}' were not present in all records for stream {stream_name}"
- )
- # hash all of the primary key fields
- results_df["APP_IDENTIFIER"] = results_df["RECORD_DATA"].apply(lambda x: self.get_hash([str(x[field]) for field in primary_key_fields]))
- else:
- # the primary key field could contain a nested field, so we need to check for that
- # we need to check that each record in the results contains the primary key field
- if not all(
- primary_key_field in record["RECORD_DATA"]
- for record in records
- ):
- if "." in primary_key_field:
- primary_key_field = primary_key_field.split(".")
-
- if not all(
- get_nested_value(record["RECORD_DATA"], primary_key_field)
- for record in records
- ):
- raise ValueError(
- f"Primary key field '{primary_key_field}' was not present in all records for stream {stream_name}"
- )
- else:
+ # originally, we did not require primary keys for inbound syncs if they were doing the replace option
+ # when we brought in delete flagging, we began to mandate that primary keys are defined
+ raise ValueError(f"Stream {stream_name} does not have a primary key field defined")
+ if isinstance(primary_key_field,list) and len(primary_key_field) == 1:
+ # don't hash it if it's just a single value
+ primary_key_field = primary_key_field[0]
+ if isinstance(primary_key_field,list):
+ primary_key_fields = cast(List[str],primary_key_field)
+ primary_key_fields = sorted(primary_key_fields)
+ # handle the sitation where the primary key is a list of fields
+ # first, check that all records contain all of the primary key fields
+ if not all(
+ all(
+ field in record["RECORD_DATA"]
+ for field in primary_key_fields
+ )
+ for record in records
+ ):
+ raise ValueError(
+ f"Primary key fields '{primary_key_fields}' were not present in all records for stream {stream_name}"
+ )
+ # hash all of the primary key fields
+ results_df["APP_IDENTIFIER"] = results_df["RECORD_DATA"].apply(lambda x: self.get_hash([str(x[field]) for field in primary_key_fields]))
+ else:
+ # the primary key field could contain a nested field, so we need to check for that
+ # we need to check that each record in the results contains the primary key field
+ if not all(
+ primary_key_field in record["RECORD_DATA"]
+ for record in records
+ ):
+ if "." in primary_key_field:
+ primary_key_field = primary_key_field.split(".")
+
+ if not all(
+ get_nested_value(record["RECORD_DATA"], primary_key_field)
+ for record in records
+ ):
  raise ValueError(
  f"Primary key field '{primary_key_field}' was not present in all records for stream {stream_name}"
  )
- results_df["APP_IDENTIFIER"] = results_df["RECORD_DATA"].apply(lambda x: get_nested_value(dict(x),primary_key_field))
+ else:
+ raise ValueError(
+ f"Primary key field '{primary_key_field}' was not present in all records for stream {stream_name}"
+ )
+ results_df["APP_IDENTIFIER"] = results_df["RECORD_DATA"].apply(lambda x: get_nested_value(dict(x),primary_key_field))
  # ensure APP_IDENTIFIER is a string
  results_df["APP_IDENTIFIER"] = results_df["APP_IDENTIFIER"].apply(str)
  # the timestamps in Snowflake are TIMESTAMP_LTZ, so we upload in string format to ensure the
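The hunk above changes how InboundSyncRequest derives each record's APP_IDENTIFIER: a source-defined primary key overrides the user's choice, but an accidental empty list is treated as "not defined"; if neither source nor user supplies a key, the sync now raises a ValueError instead of leaving APP_IDENTIFIER as None; single-field keys are used directly, composite keys are sorted and hashed, and dotted field names fall back to a nested lookup. The sketch below reproduces that decision order in isolation, under stated assumptions: resolve_primary_key and app_identifier are hypothetical helper names (not part of the omnata-plugin-runtime API), the stream and sync configuration attributes are passed in as plain arguments, and SHA-256 stands in for the runtime's own get_hash implementation.

# Minimal standalone sketch of the 0.4.8 primary key handling (hypothetical helpers,
# not the package's actual API; hashing swapped for SHA-256).
import hashlib
from typing import List, Optional, Union


def resolve_primary_key(
    source_defined_primary_key: Optional[Union[str, List[str]]],
    user_primary_key_field: Optional[Union[str, List[str]]],
    stream_name: str,
) -> Union[str, List[str]]:
    """Pick the primary key for a stream: source-defined wins, but an empty
    list (as some plugins accidentally provided) counts as 'not defined'."""
    source_ok = source_defined_primary_key is not None and (
        not isinstance(source_defined_primary_key, list)
        or len(source_defined_primary_key) > 0
    )
    if source_ok:
        return source_defined_primary_key
    if user_primary_key_field is not None:
        return user_primary_key_field
    # 0.4.8 behaviour: primary keys are now mandatory (needed for delete flagging)
    raise ValueError(f"Stream {stream_name} does not have a primary key field defined")


def app_identifier(record_data: dict, primary_key_field: Union[str, List[str]]) -> str:
    """Build an APP_IDENTIFIER-style value: single fields are used directly,
    composite keys are sorted and hashed, dotted names become nested lookups."""
    if isinstance(primary_key_field, list) and len(primary_key_field) == 1:
        # don't hash it if it's just a single value
        primary_key_field = primary_key_field[0]
    if isinstance(primary_key_field, list):
        parts = [str(record_data[field]) for field in sorted(primary_key_field)]
        return hashlib.sha256("||".join(parts).encode()).hexdigest()
    if primary_key_field in record_data:
        return str(record_data[primary_key_field])
    # fall back to a nested lookup for dotted field names, e.g. "attributes.id"
    value = record_data
    for part in primary_key_field.split("."):
        value = value[part]
    return str(value)


if __name__ == "__main__":
    pk = resolve_primary_key(
        source_defined_primary_key=[],  # empty list is ignored, user choice wins
        user_primary_key_field="attributes.id",
        stream_name="contacts",
    )
    print(pk)  # attributes.id
    print(app_identifier({"attributes": {"id": 42}}, pk))  # 42 (nested lookup)
    print(app_identifier({"first": "a", "second": "b"}, ["second", "first"]))  # sha256 of "a||b"

Sorting the composite key fields before hashing keeps the identifier stable regardless of the order in which the plugin lists them, which matches the sorted(primary_key_fields) call in the diff.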
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: omnata-plugin-runtime
- Version: 0.4.7a104
+ Version: 0.4.8
  Summary: Classes and common runtime components for building and running Omnata Plugins
  Author: James Weakley
  Author-email: james.weakley@omnata.com
@@ -3,10 +3,10 @@ omnata_plugin_runtime/api.py,sha256=W79CsAcl127Dzy-XVS9CzvzsbS3IigVH4QAhFFDkaXg,
  omnata_plugin_runtime/configuration.py,sha256=7cMekoY8CeZAJHpASU6tCMidF55Hzfr7CD74jtebqIY,35742
  omnata_plugin_runtime/forms.py,sha256=pw_aKVsXSz47EP8PFBI3VDwdSN5IjvZxp8JTjO1V130,18421
  omnata_plugin_runtime/logging.py,sha256=bn7eKoNWvtuyTk7RTwBS9UARMtqkiICtgMtzq3KA2V0,3272
- omnata_plugin_runtime/omnata_plugin.py,sha256=b-FCL2D6J0LVrEnZjwvEH37YBX3Hxiy2H1EAKCMNeY0,109206
+ omnata_plugin_runtime/omnata_plugin.py,sha256=uBkVOnV_HZtHZ0h5kxMCzrT6wQz1vtWWvSk856hR9yc,110077
  omnata_plugin_runtime/plugin_entrypoints.py,sha256=JAGEdVcy9QEXv7TO5zt7co64LTP8nqGusOc0sJG9GtU,29149
  omnata_plugin_runtime/rate_limiting.py,sha256=27_sgEkD7kmQlfSF3IaM09Hs8MA5tXuacVUOFR4zwC0,23454
- omnata_plugin_runtime-0.4.7a104.dist-info/LICENSE,sha256=IMF9i4xIpgCADf0U-V1cuf9HBmqWQd3qtI3FSuyW4zE,26526
- omnata_plugin_runtime-0.4.7a104.dist-info/METADATA,sha256=9T3A_FC4NoVQweXGrI9N3TJKl3YgrRAjTreeoUU2240,1642
- omnata_plugin_runtime-0.4.7a104.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- omnata_plugin_runtime-0.4.7a104.dist-info/RECORD,,
+ omnata_plugin_runtime-0.4.8.dist-info/LICENSE,sha256=IMF9i4xIpgCADf0U-V1cuf9HBmqWQd3qtI3FSuyW4zE,26526
+ omnata_plugin_runtime-0.4.8.dist-info/METADATA,sha256=NHP8Zzdu9nlbw4TqE8MErrjP5CBfjTWwVeSBp1wcOZA,1638
+ omnata_plugin_runtime-0.4.8.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ omnata_plugin_runtime-0.4.8.dist-info/RECORD,,