omnata-plugin-runtime 0.9.0a209__py3-none-any.whl → 0.9.1__py3-none-any.whl

--- omnata_plugin_runtime/forms.py
+++ omnata_plugin_runtime/forms.py
@@ -13,7 +13,7 @@ else:
  from typing_extensions import Annotated
  from abc import ABC
  from types import MethodType
- from pydantic import BaseModel, Field, validator # pylint: disable=no-name-in-module
+ from pydantic import BaseModel, Field, field_validator # pylint: disable=no-name-in-module
  from .configuration import (
      SubscriptableBaseModel,
      NgrokTunnelSettings,
@@ -264,7 +264,8 @@ class DynamicFormOptionsDataSource(SubscriptableBaseModel):
      new_option_creator: Optional[NewOptionCreator] = Field(default=None)
      type: Literal["dynamic"] = "dynamic"

-     @validator("source_function", always=True)
+     @field_validator("source_function", mode='after')
+     @classmethod
      def function_name_convertor(cls, v) -> str:
          return v.__name__ if isinstance(v, MethodType) else v

@@ -376,15 +377,18 @@ class NewOptionCreator(SubscriptableBaseModel):
      ]
      allow_create: bool = Field(default=True)

-     @validator("creation_form_function", always=True)
+     @field_validator("creation_form_function", mode='after')
+     @classmethod
      def function_name_convertor(cls, v) -> str:
          return v.__name__ if isinstance(v, MethodType) else v

-     @validator("creation_complete_function", always=True)
+     @field_validator("creation_complete_function", mode='after')
+     @classmethod
      def function_name_convertor_2(cls, v) -> str:
          return v.__name__ if isinstance(v, MethodType) else v

-     @validator("construct_form_option", always=True)
+     @field_validator("construct_form_option", mode='after')
+     @classmethod
      def function_name_convertor_3(cls, v) -> str:
          return v.__name__ if isinstance(v, MethodType) else v

@@ -469,7 +473,8 @@ class NGrokMTLSTunnel(SubscriptableBaseModel):
      post_tunnel_fields_function: Union[
          Callable[[ConnectionConfigurationParameters], List[FormFieldBase]], str
      ]
-     @validator("post_tunnel_fields_function", always=True)
+     @field_validator("post_tunnel_fields_function", mode='after')
+     @classmethod
      def function_name_convertor(cls, v) -> str:
          return v.__name__ if isinstance(v, MethodType) else v

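All four hunks above apply the same pydantic v1 to v2 migration: `@validator(..., always=True)` becomes `@field_validator(..., mode='after')` stacked on `@classmethod`. A minimal sketch of the pattern (the `ExampleOptionsSource` and `Demo` names are hypothetical, not part of the package; note the semantic difference that v1's `always=True` also fired for unset defaults, while v2's `mode='after'` runs after field parsing):

```python
from types import MethodType
from typing import Callable, Union
from pydantic import BaseModel, field_validator

class ExampleOptionsSource(BaseModel):
    # as in forms.py, the field accepts a bound method or a plain function name
    source_function: Union[Callable, str]

    @field_validator("source_function", mode="after")  # v1: @validator("source_function", always=True)
    @classmethod
    def function_name_convertor(cls, v) -> str:
        # collapse bound methods to their name so the model stays serializable
        return v.__name__ if isinstance(v, MethodType) else v

class Demo:
    def fetch_options(self):
        return []

assert ExampleOptionsSource(source_function=Demo().fetch_options).source_function == "fetch_options"
```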
--- /dev/null
+++ omnata_plugin_runtime/json_schema.py
@@ -0,0 +1,598 @@
+ """
+ Models used to represent JSON schemas and Snowflake view definitions.
+ This was originally internal to the Sync Engine, but was moved to the
+ plugin runtime so that it could be used for testing column expressions (formulas, etc).
+ """
+ from typing import Any, Dict, Optional, Literal, List, Union
+ from typing_extensions import Self
+ from pydantic import BaseModel, Field, model_validator, computed_field
+ from jinja2 import Environment
+
+ class JsonSchemaProperty(BaseModel):
+     """
+     The most basic common properties for a JSON schema property, plus the extra ones we use for providing Snowflake-specific information.
+     Used mainly to do partial parsing as we extract fields from within the schema
+     """
+
+     type: Optional[Union[str, List[str]]] = Field(..., description="The type of the property")
+     ref: Optional[str] = Field(
+         None, description="The reference to another schema", alias="$ref"
+     )
+     nullable: bool = Field(
+         True, description="Whether the property is nullable"
+     )
+     description: Optional[str] = Field(
+         None, description="The description of the property"
+     )
+     format: Optional[str] = Field(
+         None, description="The format of the property, e.g. date-time"
+     )
+     properties: Optional[Dict[str, Self]] = Field(
+         None, description="The sub-properties of the property, if the property is an object type"
+     )
+     snowflakeTimestampType: Optional[Literal['TIMESTAMP_TZ', 'TIMESTAMP_NTZ', 'TIMESTAMP_LTZ']] = Field(
+         None, description="The Snowflake timestamp type to use when interpreting a date-time string."
+     )
+     snowflakeTimestampFormat: Optional[str] = Field(
+         None, description="The Snowflake timestamp format to use when interpreting a date-time string."
+     )
+     snowflakePrecision: Optional[int] = Field(
+         None, description="The Snowflake precision to assign to the column."
+     )
+     snowflakeScale: Optional[int] = Field(
+         None, description="The Snowflake scale to assign to the column."
+     )
+     snowflakeColumnExpression: Optional[str] = Field(
+         None, description="""When advanced processing is needed, you can provide a value here. Use {{variant_path}} to interpolate the path to the JSON field.""",
+     )
+     isJoinColumn: Optional[bool] = Field(
+         False, description="Whether this column is sourced from a joined stream"
+     )
+
+     @model_validator(mode='after')
+     def validate(self) -> Self:
+         # If the type is a list, we need to condense it down to a single string
+         if self.type is None:
+             if self.ref is None:
+                 raise ValueError("You must provide either a type or a reference")
+         else:
+             if isinstance(self.type, list):
+                 data_types = [t for t in self.type if t != "null"]
+                 if len(data_types) == 0:
+                     raise ValueError(
+                         f"For a list of types, you must provide at least one non-null type ({self.type})"
+                     )
+                 self.nullable = "null" in self.type
+                 self.type = data_types[0]
+         return self
+
+     @computed_field
+     @property
+     def precision(self) -> Optional[int]:
+         """
+         Returns the precision for this property.
+         """
+         precision = None
+         if self.type == "number" or self.type == "integer":
+             precision = 38
+         if self.snowflakePrecision is not None:
+             precision = self.snowflakePrecision
+         return precision
+
+     @computed_field
+     @property
+     def scale(self) -> Optional[int]:
+         """
+         Returns the scale for this property.
+         """
+         scale = None
+         if self.type == "number":
+             scale = 19
+         if self.type == "integer":
+             scale = 0
+         if self.snowflakeScale is not None:
+             scale = self.snowflakeScale
+         return scale
+
+     @computed_field
+     @property
+     def snowflake_data_type(self) -> str:
+         """
+         Returns the Snowflake data type for this property.
+         """
+         if self.type is not None:
+             if self.type == "string":
+                 if self.format is not None:
+                     if self.format == "date-time":
+                         if self.snowflakeTimestampType is not None:
+                             return self.snowflakeTimestampType
+                         return "TIMESTAMP"  # not sure if we should default to something that may vary according to account parameters
+                     elif self.format == "time":
+                         return "TIME"
+                     elif self.format == "date":
+                         return "DATE"
+                     return "VARCHAR"
+             elif self.type == "number":
+                 return "NUMERIC"
+             elif self.type == "integer":
+                 return "NUMERIC"
+             elif self.type == "boolean":
+                 return "BOOLEAN"
+             if self.type == "object":
+                 return "OBJECT"
+             if self.type == "array":
+                 return "ARRAY"
+             return "VARCHAR"
+         elif self.ref is not None:
+             if self.ref == "WellKnownTypes.json#definitions/Boolean":
+                 return "BOOLEAN"
+             elif self.ref == "WellKnownTypes.json#definitions/Date":
+                 return "DATE"
+             elif self.ref == "WellKnownTypes.json#definitions/TimestampWithTimezone":
+                 return "TIMESTAMP_TZ"
+             elif self.ref == "WellKnownTypes.json#definitions/TimestampWithoutTimezone":
+                 return "TIMESTAMP_NTZ"
+             elif self.ref == "WellKnownTypes.json#definitions/TimeWithTimezone":
+                 return "TIME"
+             elif self.ref == "WellKnownTypes.json#definitions/TimeWithoutTimezone":
+                 return "TIME"
+             elif self.ref == "WellKnownTypes.json#definitions/Integer":
+                 return "NUMERIC"
+             elif self.ref == "WellKnownTypes.json#definitions/Number":
+                 return "NUMERIC"
+             return "VARCHAR"
+
+
+ class SnowflakeViewColumn(BaseModel):
+     """
+     Represents everything needed to express a column in a Snowflake normalized view.
+     The name is the column name, the expression is the SQL expression to use in the view.
+     In other words, the column definition is "expression as name".
+     """
+     name: str
+     expression: str
+     comment: Optional[str] = Field(default=None)
+     is_join_column: Optional[bool] = Field(
+         default=False, description="Whether this column is sourced from a joined stream"
+     )
+
+     def __repr__(self) -> str:
+         return "SnowflakeViewColumn(name=%r, definition=%r, comment=%r)" % (
+             self.name,
+             self.definition(),
+             self.comment,
+         )
+
+     def definition(self) -> str:
+         return f'{self.expression} as "{self.name}"'
+
+     def name_with_comment(self) -> str:
+         """
+         Returns the column name (quoted), along with any comment.
+         The resulting text can be used in a CREATE VIEW statement.
+         """
+         return (
+             f'"{self.name}"'
+             if self.comment is None
+             else f'"{self.name}" COMMENT $${self.comment}$$'
+         )
+
+     @classmethod
+     def from_json_schema_property(cls,
+             column_name: str,
+             comment: str,
+             variant_path: str,
+             json_schema_property: JsonSchemaProperty,
+             column_name_environment: Environment,
+             column_name_expression: str) -> Self:
+         """
+         Takes a JSON schema property (which may be nested via variant_path), along with its final name and comment,
+         and returns a SnowflakeViewColumn object which is ready to use in a select statement.
+         It does this by applying overarching type conversion rules, and evaluating the final column name using Jinja.
+         """
+         jinja_vars = {"column_name": column_name}
+         final_column_name = column_name_environment.from_string(column_name_expression).render(**jinja_vars)
+         expression = f"""RECORD_DATA:{variant_path}"""
+         if json_schema_property.snowflakeColumnExpression:
+             jinja_vars = {"variant_path": expression}
+             expression = column_name_environment.from_string(json_schema_property.snowflakeColumnExpression).render(
+                 **jinja_vars
+             )
+
+         if json_schema_property.precision is not None and json_schema_property.scale is not None:
+             expression = f"{expression}::NUMERIC({json_schema_property.precision},{json_schema_property.scale})"
+         elif json_schema_property.snowflakeTimestampType and json_schema_property.snowflakeTimestampFormat:
+             timestamp_type = json_schema_property.snowflakeTimestampType
+             timestamp_format = json_schema_property.snowflakeTimestampFormat
+             expression = f"""TO_{timestamp_type}({expression}::varchar,'{timestamp_format}')"""
+         else:
+             if not json_schema_property.snowflakeColumnExpression:
+                 expression = f"""{expression}::{json_schema_property.snowflake_data_type}"""
+         return cls(
+             name=final_column_name,
+             expression=expression,
+             comment=comment,
+             is_join_column=json_schema_property.isJoinColumn,
+         )
+
+     @classmethod
+     def order_by_reference(cls, join_columns: List[Self]) -> List[Self]:
+         """
+         In some situations, column expressions may reference the alias of another column.
+         This is allowed in Snowflake, as long as the aliased column is defined before it's used in a later column.
+         So we need to sort the columns so that if the name of the column appears (in quotes) in the expression of another column, it is ordered first.
+         """
+
+         # Collect columns to be moved
+         columns_to_move: List[Self] = []
+         for column in join_columns:
+             for other_column in join_columns:
+                 if f'"{column.name}"' in other_column.expression:
+                     if column not in columns_to_move:
+                         columns_to_move.append(column)
+
+         # Move collected columns to the front
+         for column in columns_to_move:
+             join_columns.remove(column)
+             join_columns.insert(0, column)
+         return join_columns
+
+
+ class SnowflakeViewJoin(BaseModel):
+     """
+     Represents a join in a Snowflake normalized view.
+     """
+
+     left_alias: str = Field(
+         ..., description="The alias to use on the left side of the join"
+     )
+     left_column: str = Field(
+         ..., description="The column to join on from the left side"
+     )
+     join_stream_name: str = Field(
+         ..., description="The name of the stream to join (right side)"
+     )
+     join_stream_alias: str = Field(
+         ...,
+         description="The alias to use for the joined stream, this is used in the column definitions instead of the stream name, and accommodates the possibility of multiple joins to the same stream",
+     )
+     join_stream_column: str = Field(
+         ..., description="The column to join on from the right side"
+     )
+
+     def __repr__(self) -> str:
+         return (
+             "SnowflakeViewJoin(left_alias=%r, left_column=%r, join_stream_name=%r, join_stream_alias=%r, join_stream_column=%r)"
+             % (
+                 self.left_alias,
+                 self.left_column,
+                 self.join_stream_name,
+                 self.join_stream_alias,
+                 self.join_stream_column,
+             )
+         )
+
+     def definition(self) -> str:
+         """
+         Returns the SQL for a single join in a normalized view
+         """
+         # we don't need to fully qualify the table name, because they'll be aliased in CTEs
+         return f"""JOIN "{self.join_stream_name}" as "{self.join_stream_alias}"
+             ON "{self.left_alias}"."{self.left_column}" = "{self.join_stream_alias}"."{self.join_stream_column}" """
+
+
+ class FullyQualifiedTable(BaseModel):
+     """
+     Represents a fully qualified table name in Snowflake, including database, schema, and table name.
+     This is not a template, it's a fully specified object.
+     """
+
+     database_name: Optional[str] = Field(default=None, description="The database name")
+     schema_name: str = Field(..., description="The schema name")
+     table_name: str = Field(..., description="The table name")
+
+     def get_fully_qualified_name(self, table_override: Optional[str] = None) -> str:
+         """
+         If table_override is provided, it will be used instead of the table name
+         """
+         actual_table_name = (
+             self.table_name if table_override is None else table_override
+         )
+         # We try to make this resilient to quoting
+         schema_name = self.schema_name.replace('"', "")
+         table_name = actual_table_name.replace('"', "")
+         if self.database_name is None or self.database_name == "":
+             return f'"{schema_name}"."{table_name}"'
+         database_name = self.database_name.replace('"', "")
+         return f'"{database_name}"."{schema_name}"."{table_name}"'
+
+     def get_fully_qualified_stage_name(self) -> str:
+         """
+         Stage name is derived from the table name
+         """
+         return self.get_fully_qualified_name(table_override=f"{self.table_name}_STAGE")
+
+     def get_fully_qualified_criteria_deletes_table_name(self) -> str:
+         """
+         Deletes table name is derived from the table name
+         """
+         return self.get_fully_qualified_name(
+             table_override=f"{self.table_name}_CRITERIA_DELETES"
+         )
+
+ class SnowflakeViewPart(BaseModel):
+     """
+     Represents a stream within a normalized view.
+     Because a normalized view can be built from multiple streams, this is potentially only part of the view.
+     """
+     stream_name: str = Field(..., description="The name of the stream")
+     raw_table_location: FullyQualifiedTable = Field(
+         ..., description="The location of the raw table that the stream is sourced from"
+     )
+     comment: Optional[str] = Field(
+         None, description="The comment to assign to the view"
+     )
+     columns: List[SnowflakeViewColumn] = Field(
+         ..., description="The columns to include in the view"
+     )
+     joins: List[SnowflakeViewJoin] = Field(
+         ..., description="The joins to include in the view"
+     )
+
+     def direct_columns(self) -> List[SnowflakeViewColumn]:
+         """
+         Returns the columns that are not sourced from joins.
+         """
+         return [c for c in self.columns if not c.is_join_column]
+
+     def join_columns(self) -> List[SnowflakeViewColumn]:
+         """
+         Returns the columns that are sourced from joins.
+         """
+         return SnowflakeViewColumn.order_by_reference([c for c in self.columns if c.is_join_column])
+
+     def comment_clause(self) -> str:
+         """
+         Returns the comment clause for the view definition.
+         """
+         return f"COMMENT = $${self.comment}$$ " if self.comment is not None else ""
+
+     def column_names_with_comments(self) -> List[str]:
+         # the outer view definition has all of the column names and comments, but with the direct columns
+         # first and the join columns last, same as they are ordered in the inner query
+         return [
+             c.name_with_comment() for c in (self.direct_columns() + self.join_columns())
+         ]
+
+     def cte_text(self) -> str:
+         """
+         Returns the CTE text for this view part.
+         """
+         return f""" "{self.stream_name}" as (
+             select {', '.join([c.definition() for c in self.direct_columns()])}
+             from {self.raw_table_location.get_fully_qualified_name()}
+         ) """
+
+ class SnowflakeViewParts(BaseModel):
+     """
+     Represents a set of streams within a normalized view.
+     This is the top level object that represents the whole view.
+     """
+
+     main_part: SnowflakeViewPart = Field(
+         ..., description="The main part of the view, which is the stream that the view is named after"
+     )
+     joined_parts: List[SnowflakeViewPart] = Field(
+         ..., description="The other streams that are joined to the main stream"
+     )
+
+     def view_body(self):
+         """
+         Creates a view definition from the parts
+         """
+         ctes = [self.main_part.cte_text()] + [part.cte_text() for part in self.joined_parts]
+         all_ctes = "\n,".join(ctes)
+         join_columns = self.main_part.join_columns()
+         join_column_clauses = [c.definition() for c in join_columns]
+         # we select * from the original view (in the CTE) and then add any expressions that come from the join columns
+         final_column_clauses = [f'"{self.main_part.stream_name}".*'] + join_column_clauses
+         view_body = f"""with {all_ctes}
+ select {', '.join(final_column_clauses)}
+ from "{self.main_part.stream_name}" """
+         if len(self.main_part.joins) > 0:
+             join_clauses = [join.definition() for join in self.main_part.joins]
+             view_body += "\n" + ("\n".join(join_clauses))
+         return view_body
+
+     @classmethod
+     def generate(cls,
+             raw_stream_locations: Dict[str, FullyQualifiedTable],
+             stream_schemas: Dict[str, Dict],
+             stream_name: str,
+             include_default_columns: bool = True,
+             column_name_environment: Environment = Environment(),
+             column_name_expression: str = "{{column_name}}"
+     ) -> Self:
+         """
+         Returns the building blocks required to create a normalized view from a stream.
+         This includes any joins that are required, via CTEs.
+         """
+         # we start with the view parts for the view we are building
+         main_stream_view_part = normalized_view_part(
+             stream_name=stream_name,
+             raw_table_location=raw_stream_locations[stream_name],
+             include_default_columns=include_default_columns,
+             stream_schema=stream_schemas.get(stream_name),
+             column_name_environment=column_name_environment,
+             column_name_expression=column_name_expression
+         )
+         joined_parts = []
+         for join in main_stream_view_part.joins:
+             if join.join_stream_name not in raw_stream_locations:
+                 raise ValueError(f"Stream {join.join_stream_name} is required as a join for stream {stream_name}, but its location was not provided")
+             if join.join_stream_name not in stream_schemas:
+                 raise ValueError(f"Stream {join.join_stream_name} is required as a join for stream {stream_name}, but its schema was not provided")
+             joined_parts.append(normalized_view_part(
+                 stream_name=join.join_stream_name,
+                 raw_table_location=raw_stream_locations[join.join_stream_name],
+                 include_default_columns=include_default_columns,
+                 stream_schema=stream_schemas[join.join_stream_name],
+                 column_name_environment=column_name_environment,
+                 column_name_expression=column_name_expression
+             ))
+         return cls(main_part=main_stream_view_part, joined_parts=joined_parts)
+
+
+ class JsonSchemaTopLevel(BaseModel):
+     """
+     This model is used as a starting point for parsing a JSON schema.
+     It does not validate the whole thing up-front, as there is some complex recursion as well as external configuration.
+     Instead, it takes the basic properties and then allows for further parsing on demand.
+     """
+     description: Optional[str] = Field(
+         None, description="The description of the schema"
+     )
+     joins: Optional[List[SnowflakeViewJoin]] = Field(
+         None, description="The joins to include in the view"
+     )
+     properties: Optional[Dict[str, Any]] = Field(
+         None, description="The properties of the schema. This is left as a dictionary, and parsed on demand."
+     )
+
+     def build_view_columns(self,
+             column_name_environment: Environment,
+             column_name_expression: str
+     ) -> List[SnowflakeViewColumn]:
+         """
+         Returns a list of column definitions from a json schema
+         """
+         if self.properties is None:
+             return []
+         columns = [
+             self._extract_view_columns(
+                 property_name=property_name,
+                 property_value=property_value,
+                 column_name_environment=column_name_environment,
+                 column_name_expression=column_name_expression,
+             )
+             for property_name, property_value in self.properties.items()
+         ]
+         return [item for sublist in columns for item in sublist]
+
+     def _extract_view_columns(
+             self,
+             property_name: str,
+             property_value: Dict,
+             column_name_environment: Environment,
+             column_name_expression: str,
+             current_field_name_path: List[str] = [],
+             current_comment_path: List[str] = []
+     ) -> List[SnowflakeViewColumn]:
+         """
+         Recursive function which returns a list of column definitions.
+         - property_name is the name of the current property.
+         - property_value is the value of the current property (the JSON-schema node).
+         - current_field_name_path is [] on initial entry, then contains parent path field names as it recurses.
+         - current_comment_path is the same length as above, and contains any "description" values found on the way down
+         """
+         json_property = JsonSchemaProperty.model_validate(property_value)
+         # bit of basic home-grown validation, could probably use a library for this
+         if json_property.type:
+             if json_property.type == "object":
+                 # TODO: make this depth configurable on the sync
+                 if len(current_field_name_path) < 5 and json_property.properties is not None:
+                     children = [
+                         self._extract_view_columns(
+                             property_name=child_property_name,
+                             property_value=child_property_value,
+                             column_name_environment=column_name_environment,
+                             column_name_expression=column_name_expression,
+                             current_field_name_path=current_field_name_path + [property_name],
+                             current_comment_path=current_comment_path + [json_property.description or ""],
+                         )
+                         for child_property_name, child_property_value in json_property.properties.items()
+                     ]
+                     return [item for sublist in children for item in sublist]
+
+         current_field_name_path = current_field_name_path + [property_name]
+         current_comment_path = current_comment_path + [
+             json_property.description or ""
+         ]
+         # remove empty strings from current_comment_path
+         current_comment_path = [c for c in current_comment_path if c]
+
+         return [SnowflakeViewColumn.from_json_schema_property(
+             column_name="_".join(current_field_name_path),
+             comment=" -> ".join(current_comment_path),
+             variant_path=":".join([f'"{p}"' for p in current_field_name_path if p]),
+             json_schema_property=json_property,
+             column_name_environment=column_name_environment,
+             column_name_expression=column_name_expression
+         )]
+
+
+ def normalized_view_part(
+         stream_name: str,
+         raw_table_location: FullyQualifiedTable,
+         include_default_columns: bool,
+         column_name_environment: Environment,
+         column_name_expression: str,
+         stream_schema: Optional[Dict] = None,
+ ) -> SnowflakeViewPart:
+     """
+     Returns an object containing:
+     - A top level comment for the view
+     - A list of SnowflakeViewColumn objects, representing the columns to create in the view
+     - A list of SnowflakeViewJoin objects, representing the joins to create in the view
+     """
+     snowflake_columns: List[SnowflakeViewColumn] = []
+     if include_default_columns:
+         snowflake_columns.append(
+             SnowflakeViewColumn(
+                 name="OMNATA_APP_IDENTIFIER",
+                 expression="APP_IDENTIFIER",
+                 comment="The value of the unique identifier for the record in the source system",
+             )
+         )
+         snowflake_columns.append(
+             SnowflakeViewColumn(
+                 name="OMNATA_RETRIEVE_DATE",
+                 expression="RETRIEVE_DATE",
+                 comment="The date and time the record was retrieved from the source system",
+             )
+         )
+         snowflake_columns.append(
+             SnowflakeViewColumn(
+                 name="OMNATA_RAW_RECORD",
+                 expression="RECORD_DATA",
+                 comment="The raw semi-structured record as retrieved from the source system",
+             )
+         )
+         snowflake_columns.append(
+             SnowflakeViewColumn(
+                 name="OMNATA_IS_DELETED",
+                 expression="IS_DELETED",
+                 comment="A flag to indicate that the record was deleted from the source system",
+             )
+         )
+         snowflake_columns.append(
+             SnowflakeViewColumn(
+                 name="OMNATA_RUN_ID",
+                 expression="RUN_ID",
+                 comment="A flag to indicate which run the record was last processed in",
+             )
+         )
+     json_schema = JsonSchemaTopLevel.model_validate(stream_schema)
+
+     return SnowflakeViewPart(
+         stream_name=stream_name,
+         raw_table_location=raw_table_location,
+         columns=snowflake_columns + json_schema.build_view_columns(
+             column_name_environment=column_name_environment,
+             column_name_expression=column_name_expression
+         ),
+         joins=json_schema.joins or [],
+         comment=json_schema.description
+     )
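For orientation, a minimal usage sketch of the new module. The `orders` stream name, schema, and table location below are invented for illustration; `FullyQualifiedTable` and `SnowflakeViewParts.generate` are as defined in the file above, and the module path matches the RECORD entry:

```python
from omnata_plugin_runtime.json_schema import FullyQualifiedTable, SnowflakeViewParts

# a made-up "orders" stream with three properties of different JSON types
locations = {
    "orders": FullyQualifiedTable(database_name="MY_DB", schema_name="RAW", table_name="ORDERS_RAW")
}
schemas = {
    "orders": {
        "description": "Orders from the source system",
        "properties": {
            "id": {"type": "integer"},                               # -> ::NUMERIC(38,0)
            "amount": {"type": ["number", "null"]},                  # -> ::NUMERIC(38,19), marked nullable
            "placed_at": {"type": "string", "format": "date-time"},  # -> ::TIMESTAMP
        },
    }
}

parts = SnowflakeViewParts.generate(
    raw_stream_locations=locations,
    stream_schemas=schemas,
    stream_name="orders",
)
# a CTE over "MY_DB"."RAW"."ORDERS_RAW" projecting the default OMNATA_* columns
# plus the typed RECORD_DATA:"..." extractions, followed by a select * from it
print(parts.view_body())
```

With no `joins` declared in the schema, `view_body()` emits just the single CTE and the outer `select *`; join columns, when present, are appended after the `*` in the order produced by `order_by_reference`.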
--- omnata_plugin_runtime/omnata_plugin.py
+++ omnata_plugin_runtime/omnata_plugin.py
@@ -8,6 +8,7 @@ from inspect import signature
  import sys
  from types import FunctionType
  from typing import Union
+ from typing_extensions import Self
  if tuple(sys.version_info[:2]) >= (3, 9):
      # Python 3.9 and above
      from typing import Annotated # pylint: disable=ungrouped-imports
@@ -38,7 +39,7 @@ from typing import Any, Callable, Dict, Iterable, List, Literal, Optional, Type,
  import jinja2
  import pandas
  from pydantic_core import to_jsonable_python
- from pydantic import Field, TypeAdapter, ValidationError, create_model, root_validator, BaseModel
+ from pydantic import Field, TypeAdapter, ValidationError, create_model, model_validator, BaseModel
  from dateutil.parser import parse
  from jinja2 import Environment
  from snowflake.connector.pandas_tools import write_pandas
@@ -1577,19 +1578,19 @@ class SnowflakeBillingEvent(BaseModel):
      objects: List[str] = []
      additional_info: Dict[str, Any] = {}

-     @root_validator(pre=True)
-     def validate_datetime_fields(cls, values):
+     @model_validator(mode='after')
+     def validate_datetime_fields(self) -> Self:
          # Handling timestamps, we want to be strict on supplying a timezone
-         timestamp = values.get('timestamp')
+         timestamp = self.timestamp
          if timestamp is not None and isinstance(timestamp, datetime.datetime):
              if timestamp.tzinfo is None or timestamp.tzinfo.utcoffset(timestamp) is None:
                  raise ValueError("timestamp must be timezone aware")

-         start_timestamp = values.get('start_timestamp')
+         start_timestamp = self.start_timestamp
          if start_timestamp is not None and isinstance(start_timestamp, datetime.datetime):
              if start_timestamp.tzinfo is None or start_timestamp.tzinfo.utcoffset(start_timestamp) is None:
                  raise ValueError("start_timestamp must be timezone aware")
-         return values
+         return self

  class DailyBillingEventRequest(BaseModel):
      """
--- omnata_plugin_runtime/rate_limiting.py
+++ omnata_plugin_runtime/rate_limiting.py
@@ -12,8 +12,9 @@ from typing import Any, List, Literal, Optional, Dict, Tuple
  import requests
  import time
  import logging
- from pydantic import Field, root_validator, PrivateAttr, field_serializer
+ from pydantic import Field, model_validator, PrivateAttr, field_serializer
  from pydantic_core import to_jsonable_python
+ from typing_extensions import Self
  from .configuration import SubscriptableBaseModel
  from .logging import logger, tracer
  import pytz
@@ -194,12 +195,12 @@ class RateLimitState(SubscriptableBaseModel):


      # Combined root validator
-     @root_validator(pre=True)
-     def validate_datetime_fields(cls, values):
+     @model_validator(mode='after')
+     def validate_datetime_fields(self) -> Self:
          # Handling wait_until
-         wait_until = values.get('wait_until')
+         wait_until = self.wait_until
          if isinstance(wait_until, int):
-             values['wait_until'] = epoch_milliseconds_to_datetime(wait_until)
+             self.wait_until = epoch_milliseconds_to_datetime(wait_until)
          elif wait_until and isinstance(wait_until, datetime.datetime):
              if wait_until.tzinfo is None:
                  raise ValueError("wait_until must be timezone aware")
@@ -207,16 +208,16 @@ class RateLimitState(SubscriptableBaseModel):
                  raise ValueError("wait_until must be timezone aware and UTC")

          # Handling previous_request_timestamps
-         timestamps = values.get('previous_request_timestamps', [])
+         timestamps = self.previous_request_timestamps or []
          if timestamps and isinstance(timestamps[0], int):
-             values['previous_request_timestamps'] = [epoch_milliseconds_to_datetime(epoch) for epoch in timestamps]
+             self.previous_request_timestamps = [epoch_milliseconds_to_datetime(epoch) for epoch in timestamps]
          elif timestamps and isinstance(timestamps[0], datetime.datetime):
              if timestamps[0].tzinfo is None:
                  raise ValueError("previous_request_timestamps must be timezone aware")
              elif timestamps[0].tzinfo != datetime.timezone.utc:
                  raise ValueError("previous_request_timestamps must be timezone aware and UTC")

-         return values
+         return self

      def merge(self,other:RateLimitState):
          """
--- omnata_plugin_runtime-0.9.0a209.dist-info/METADATA
+++ omnata_plugin_runtime-0.9.1.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: omnata-plugin-runtime
- Version: 0.9.0a209
+ Version: 0.9.1
  Summary: Classes and common runtime components for building and running Omnata Plugins
  Author: James Weakley
  Author-email: james.weakley@omnata.com
--- /dev/null
+++ omnata_plugin_runtime-0.9.1.dist-info/RECORD
@@ -0,0 +1,13 @@
+ omnata_plugin_runtime/__init__.py,sha256=MS9d1whnfT_B3-ThqZ7l63QeC_8OEKTuaYV5wTwRpBA,1576
+ omnata_plugin_runtime/api.py,sha256=baGraSMiD4Yvi3ZWrEv_TKh8Ktd1U8riBdOpe9j0Puw,8202
+ omnata_plugin_runtime/configuration.py,sha256=hHWaK72q45cCQ2R7x9vX2tGifvUDabMrVXBZF4XX0TY,41286
+ omnata_plugin_runtime/forms.py,sha256=9YHJ_T17lT-rwyDaUg_0yj_YMPda4DRCw_wrvf8hE0E,19964
+ omnata_plugin_runtime/json_schema.py,sha256=Q5lGoRRoM_RKd4LzuH2khLTweqFoskgAr1oLGHczR0Y,25807
+ omnata_plugin_runtime/logging.py,sha256=WBuZt8lF9E5oFWM4KYQbE8dDJ_HctJ1pN3BHwU6rcd0,4461
+ omnata_plugin_runtime/omnata_plugin.py,sha256=IDj8EaWZuEKaTPrWm3wzHvdmW4l2WibCZEj9AnyTHLU,131622
+ omnata_plugin_runtime/plugin_entrypoints.py,sha256=iqGl8_nEEnPGKg3Aem4YLSQ6d5xS3ju5gq8MJbx6sCA,31968
+ omnata_plugin_runtime/rate_limiting.py,sha256=qpr5esU4Ks8hMzuMpSR3gLFdor2ZUXYWCjmsQH_K6lQ,25882
+ omnata_plugin_runtime-0.9.1.dist-info/LICENSE,sha256=rGaMQG3R3F5-JGDp_-rlMKpDIkg5n0SI4kctTk8eZSI,56
+ omnata_plugin_runtime-0.9.1.dist-info/METADATA,sha256=chjybXM-VYLKtsE0dCd8GPt4Z5K1t3mLVab168cruaE,2154
+ omnata_plugin_runtime-0.9.1.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+ omnata_plugin_runtime-0.9.1.dist-info/RECORD,,
--- omnata_plugin_runtime-0.9.0a209.dist-info/RECORD
+++ /dev/null
@@ -1,12 +0,0 @@
- omnata_plugin_runtime/__init__.py,sha256=MS9d1whnfT_B3-ThqZ7l63QeC_8OEKTuaYV5wTwRpBA,1576
- omnata_plugin_runtime/api.py,sha256=baGraSMiD4Yvi3ZWrEv_TKh8Ktd1U8riBdOpe9j0Puw,8202
- omnata_plugin_runtime/configuration.py,sha256=hHWaK72q45cCQ2R7x9vX2tGifvUDabMrVXBZF4XX0TY,41286
- omnata_plugin_runtime/forms.py,sha256=ueodN2GIMS5N9fqebpY4uNGJnjEb9HcuaVQVfWH-cGg,19838
- omnata_plugin_runtime/logging.py,sha256=WBuZt8lF9E5oFWM4KYQbE8dDJ_HctJ1pN3BHwU6rcd0,4461
- omnata_plugin_runtime/omnata_plugin.py,sha256=3OPFFYhbxmffAcb5pJA09gV62SLX-1nu2j3mJMy3pis,131600
- omnata_plugin_runtime/plugin_entrypoints.py,sha256=iqGl8_nEEnPGKg3Aem4YLSQ6d5xS3ju5gq8MJbx6sCA,31968
- omnata_plugin_runtime/rate_limiting.py,sha256=eOWVRYWiqPlVeYzmB1exVXfXbrcpmYb7vtTi9B-4zkQ,25868
- omnata_plugin_runtime-0.9.0a209.dist-info/LICENSE,sha256=rGaMQG3R3F5-JGDp_-rlMKpDIkg5n0SI4kctTk8eZSI,56
- omnata_plugin_runtime-0.9.0a209.dist-info/METADATA,sha256=3j72wTua0fHhugze-4pSSFl58MR7ODerhgoz7ZWaB10,2158
- omnata_plugin_runtime-0.9.0a209.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
- omnata_plugin_runtime-0.9.0a209.dist-info/RECORD,,