dagster-snowflake 0.21.5__py3-none-any.whl → 0.25.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dagster-snowflake has been flagged as potentially problematic; see the accompanying advisory for more details.

@@ -1,15 +1,16 @@
1
1
  from dagster._core.libraries import DagsterLibraryRegistry
2
2
 
3
- from .ops import snowflake_op_for_query as snowflake_op_for_query
4
- from .resources import (
3
+ from dagster_snowflake.ops import snowflake_op_for_query as snowflake_op_for_query
4
+ from dagster_snowflake.resources import (
5
5
  SnowflakeConnection as SnowflakeConnection,
6
6
  SnowflakeResource as SnowflakeResource,
7
+ fetch_last_updated_timestamps as fetch_last_updated_timestamps,
7
8
  snowflake_resource as snowflake_resource,
8
9
  )
9
- from .snowflake_io_manager import (
10
+ from dagster_snowflake.snowflake_io_manager import (
10
11
  SnowflakeIOManager as SnowflakeIOManager,
11
12
  build_snowflake_io_manager as build_snowflake_io_manager,
12
13
  )
13
- from .version import __version__
14
+ from dagster_snowflake.version import __version__
14
15
 
15
16
  DagsterLibraryRegistry.register("dagster-snowflake", __version__)
@@ -0,0 +1,7 @@
1
+ # Description: This file contains the Snowflake connection identifiers for the Snowflake partner account.
2
+ # The connection identifiers are used to identify the partner account when connecting to Snowflake.
3
+
4
+ # We use different connection identifiers for different connection code paths to ensure that each is
5
+ # working as expected.
6
+ SNOWFLAKE_PARTNER_CONNECTION_IDENTIFIER = "DagsterLabs_Dagster"
7
+ SNOWFLAKE_PARTNER_CONNECTION_IDENTIFIER_SQLALCHEMY = "DagsterLabs_Dagster_SqlAlchemy"
dagster_snowflake/ops.py CHANGED
@@ -4,6 +4,7 @@ from dagster import (
4
4
  op,
5
5
  )
6
6
  from dagster._core.definitions.input import In
7
+ from dagster._core.storage.tags import COMPUTE_KIND_TAG
7
8
 
8
9
 
9
10
  def _core_create_snowflake_command(dagster_decorator, decorator_name, sql, parameters=None):
@@ -14,7 +15,7 @@ def _core_create_snowflake_command(dagster_decorator, decorator_name, sql, param
14
15
  name=f"snowflake_{decorator_name}",
15
16
  ins={"start": In(Nothing)},
16
17
  required_resource_keys={"snowflake"},
17
- tags={"kind": "sql", "sql": sql},
18
+ tags={COMPUTE_KIND_TAG: "sql", "sql": sql},
18
19
  )
19
20
  def snowflake_fn(context):
20
21
  context.resources.snowflake.execute_query(sql=sql, parameters=parameters)
@@ -0,0 +1 @@
1
+ partial
@@ -1,8 +1,10 @@
1
1
  import base64
2
2
  import sys
3
3
  import warnings
4
+ from collections.abc import Iterator, Mapping, Sequence
4
5
  from contextlib import closing, contextmanager
5
- from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Union
6
+ from datetime import datetime
7
+ from typing import Any, Optional, Union
6
8
 
7
9
  import dagster._check as check
8
10
  from cryptography.hazmat.backends import default_backend
@@ -14,11 +16,15 @@ from dagster import (
14
16
  resource,
15
17
  )
16
18
  from dagster._annotations import public
17
- from dagster._config.pythonic_config.pydantic_compat_layer import compat_model_validator
18
19
  from dagster._core.definitions.resource_definition import dagster_maintained_resource
19
20
  from dagster._core.storage.event_log.sql_event_log import SqlDbConnection
20
21
  from dagster._utils.cached_method import cached_method
21
- from pydantic import Field, validator
22
+ from pydantic import Field, model_validator, validator
23
+
24
+ from dagster_snowflake.constants import (
25
+ SNOWFLAKE_PARTNER_CONNECTION_IDENTIFIER,
26
+ SNOWFLAKE_PARTNER_CONNECTION_IDENTIFIER_SQLALCHEMY,
27
+ )
22
28
 
23
29
  try:
24
30
  import snowflake.connector
@@ -229,7 +235,7 @@ class SnowflakeResource(ConfigurableResource, IAttachDifferentObjectToOpContext)
229
235
  "Indicate alternative database connection engine. Permissible option is "
230
236
  "'sqlalchemy' otherwise defaults to use the Snowflake Connector for Python."
231
237
  ),
232
- is_required=False,
238
+ is_required=False, # type: ignore
233
239
  )
234
240
 
235
241
  cache_column_metadata: Optional[str] = Field(
@@ -253,6 +259,15 @@ class SnowflakeResource(ConfigurableResource, IAttachDifferentObjectToOpContext)
253
259
  default=None,
254
260
  description="Optional parameter to specify the authentication mechanism to use.",
255
261
  )
262
+ additional_snowflake_connection_args: Optional[dict[str, Any]] = Field(
263
+ default=None,
264
+ description=(
265
+ "Additional keyword arguments to pass to the snowflake.connector.connect function. For a full list of"
266
+ " available arguments, see the `Snowflake documentation"
267
+ " <https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-connect>`__."
268
+ " This config will be ignored if using the sqlalchemy connector."
269
+ ),
270
+ )
256
271
 
257
272
  @validator("paramstyle")
258
273
  def validate_paramstyle(cls, v: Optional[str]) -> Optional[str]:
@@ -272,7 +287,7 @@ class SnowflakeResource(ConfigurableResource, IAttachDifferentObjectToOpContext)
272
287
  )
273
288
  return v
274
289
 
275
- @compat_model_validator(mode="before")
290
+ @model_validator(mode="before")
276
291
  def validate_authentication(cls, values):
277
292
  auths_set = 0
278
293
  auths_set += 1 if values.get("password") is not None else 0
@@ -332,12 +347,16 @@ class SnowflakeResource(ConfigurableResource, IAttachDifferentObjectToOpContext)
332
347
  ):
333
348
  conn_args["private_key"] = self._snowflake_private_key(self._resolved_config_dict)
334
349
 
350
+ conn_args["application"] = SNOWFLAKE_PARTNER_CONNECTION_IDENTIFIER
351
+
352
+ if self._resolved_config_dict.get("additional_snowflake_connection_args") is not None:
353
+ conn_args.update(self._resolved_config_dict["additional_snowflake_connection_args"])
335
354
  return conn_args
336
355
 
337
356
  @property
338
357
  @cached_method
339
358
  def _sqlalchemy_connection_args(self) -> Mapping[str, Any]:
340
- conn_args: Dict[str, Any] = {
359
+ conn_args: dict[str, Any] = {
341
360
  k: self._resolved_config_dict.get(k)
342
361
  for k in (
343
362
  "account",
@@ -352,6 +371,7 @@ class SnowflakeResource(ConfigurableResource, IAttachDifferentObjectToOpContext)
352
371
  )
353
372
  if self._resolved_config_dict.get(k) is not None
354
373
  }
374
+ conn_args["application"] = SNOWFLAKE_PARTNER_CONNECTION_IDENTIFIER_SQLALCHEMY
355
375
 
356
376
  return conn_args
357
377
 
@@ -559,9 +579,6 @@ class SnowflakeConnection:
559
579
 
560
580
  with self.get_connection() as conn:
561
581
  with closing(conn.cursor()) as cursor:
562
- if sys.version_info[0] < 3:
563
- sql = sql.encode("utf-8")
564
-
565
582
  self.log.info("Executing query: " + sql)
566
583
  parameters = dict(parameters) if isinstance(parameters, Mapping) else parameters
567
584
  cursor.execute(sql, parameters)
@@ -612,7 +629,7 @@ class SnowflakeConnection:
612
629
  if not fetch_results and use_pandas_result:
613
630
  check.failed("If use_pandas_result is True, fetch_results must also be True.")
614
631
 
615
- results: List[Any] = []
632
+ results: list[Any] = []
616
633
  with self.get_connection() as conn:
617
634
  with closing(conn.cursor()) as cursor:
618
635
  for raw_sql in sql_queries:
@@ -713,3 +730,63 @@ def snowflake_resource(context) -> SnowflakeConnection:
713
730
  return SnowflakeConnection(
714
731
  config=context, log=context.log, snowflake_connection_resource=snowflake_resource
715
732
  )
733
+
734
+
735
+ def fetch_last_updated_timestamps(
736
+ *,
737
+ snowflake_connection: Union[SqlDbConnection, snowflake.connector.SnowflakeConnection],
738
+ schema: str,
739
+ tables: Sequence[str],
740
+ database: Optional[str] = None,
741
+ ignore_missing_tables: Optional[bool] = False,
742
+ ) -> Mapping[str, datetime]:
743
+ """Fetch the last updated times of a list of tables in Snowflake.
744
+
745
+ If the underlying query to fetch the last updated time returns no results, a ValueError will be raised.
746
+
747
+ Args:
748
+ snowflake_connection (Union[SqlDbConnection, SnowflakeConnection]): A connection to Snowflake.
749
+ Accepts either a SnowflakeConnection or a sqlalchemy connection object,
750
+ which are the two types of connections emittable from the snowflake resource.
751
+ schema (str): The schema of the tables to fetch the last updated time for.
752
+ tables (Sequence[str]): A list of table names to fetch the last updated time for.
753
+ database (Optional[str]): The database of the table. Only required if the connection
754
+ has not been set with a database.
755
+ ignore_missing_tables (Optional[bool]): If True, tables not found in Snowflake
756
+ will be excluded from the result.
757
+
758
+ Returns:
759
+ Mapping[str, datetime]: A dictionary of table names to their last updated time in UTC.
760
+ """
761
+ check.invariant(len(tables) > 0, "Must provide at least one table name to query upon.")
762
+ # Table names in snowflake's information schema are stored in uppercase
763
+ uppercase_tables = [table.upper() for table in tables]
764
+ tables_str = ", ".join([f"'{table_name}'" for table_name in uppercase_tables])
765
+ fully_qualified_table_name = (
766
+ f"{database}.information_schema.tables" if database else "information_schema.tables"
767
+ )
768
+
769
+ query = f"""
770
+ SELECT table_name, CONVERT_TIMEZONE('UTC', last_altered) AS last_altered
771
+ FROM {fully_qualified_table_name}
772
+ WHERE table_schema = '{schema}' AND table_name IN ({tables_str});
773
+ """
774
+ result = snowflake_connection.cursor().execute(query)
775
+ if not result:
776
+ raise ValueError("No results returned from Snowflake update time query.")
777
+
778
+ result_mapping = {table_name: last_altered for table_name, last_altered in result}
779
+ result_correct_case = {}
780
+ for table_name in tables:
781
+ if table_name.upper() not in result_mapping:
782
+ if ignore_missing_tables:
783
+ continue
784
+ raise ValueError(f"Table {table_name} could not be found.")
785
+ last_altered = result_mapping[table_name.upper()]
786
+ check.invariant(
787
+ isinstance(last_altered, datetime),
788
+ "Expected last_altered to be a datetime, but it was not.",
789
+ )
790
+ result_correct_case[table_name] = last_altered
791
+
792
+ return result_correct_case
@@ -1,11 +1,10 @@
1
1
  from abc import abstractmethod
2
+ from collections.abc import Sequence
2
3
  from contextlib import contextmanager
3
- from typing import Optional, Sequence, Type, cast
4
+ from typing import Any, Optional, cast
4
5
 
5
6
  from dagster import IOManagerDefinition, OutputContext, io_manager
6
- from dagster._config.pythonic_config import (
7
- ConfigurableIOManagerFactory,
8
- )
7
+ from dagster._config.pythonic_config import ConfigurableIOManagerFactory
9
8
  from dagster._core.definitions.time_window_partitions import TimeWindow
10
9
  from dagster._core.storage.db_io_manager import (
11
10
  DbClient,
@@ -16,15 +15,15 @@ from dagster._core.storage.db_io_manager import (
16
15
  )
17
16
  from dagster._core.storage.io_manager import dagster_maintained_io_manager
18
17
  from pydantic import Field
19
- from sqlalchemy.exc import ProgrammingError
18
+ from snowflake.connector.errors import ProgrammingError
20
19
 
21
- from .resources import SnowflakeResource
20
+ from dagster_snowflake.resources import SnowflakeResource
22
21
 
23
22
  SNOWFLAKE_DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
24
23
 
25
24
 
26
25
  def build_snowflake_io_manager(
27
- type_handlers: Sequence[DbTypeHandler], default_load_type: Optional[Type] = None
26
+ type_handlers: Sequence[DbTypeHandler], default_load_type: Optional[type] = None
28
27
  ) -> IOManagerDefinition:
29
28
  """Builds an IO manager definition that reads inputs from and writes outputs to Snowflake.
30
29
 
@@ -46,15 +45,22 @@ def build_snowflake_io_manager(
46
45
  from dagster import Definitions
47
46
 
48
47
  @asset(
49
- key_prefix=["my_schema"] # will be used as the schema in snowflake
48
+ key_prefix=["my_prefix"]
49
+ metadata={"schema": "my_schema"} # will be used as the schema in snowflake
50
50
  )
51
51
  def my_table() -> pd.DataFrame: # the name of the asset will be the table name
52
52
  ...
53
53
 
54
+ @asset(
55
+ key_prefix=["my_schema"] # will be used as the schema in snowflake
56
+ )
57
+ def my_second_table() -> pd.DataFrame: # the name of the asset will be the table name
58
+ ...
59
+
54
60
  snowflake_io_manager = build_snowflake_io_manager([SnowflakePandasTypeHandler(), SnowflakePySparkTypeHandler()])
55
61
 
56
62
  defs = Definitions(
57
- assets=[my_table],
63
+ assets=[my_table, my_second_table],
58
64
  resources={
59
65
  "io_manager": snowflake_io_manager.configured({
60
66
  "database": "my_database",
@@ -64,12 +70,38 @@ def build_snowflake_io_manager(
64
70
  }
65
71
  )
66
72
 
67
- If you do not provide a schema, Dagster will determine a schema based on the assets and ops using
68
- the IO Manager. For assets, the schema will be determined from the asset key,
69
- as shown in the above example. The final prefix before the asset name will be used as the schema. For example,
70
- if the asset ``my_table`` had the key prefix ``["snowflake", "my_schema"]``, the schema ``my_schema`` will be
71
- used. For ops, the schema can be specified by including a ``schema`` entry in output metadata. If ``schema`` is not provided
72
- via config or on the asset/op, ``public`` will be used for the schema.
73
+ You can set a default schema to store the assets using the ``schema`` configuration value of the Snowflake I/O
74
+ Manager. This schema will be used if no other schema is specified directly on an asset or op.
75
+
76
+ .. code-block:: python
77
+
78
+ defs = Definitions(
79
+ assets=[my_table]
80
+ resources={"io_manager" snowflake_io_manager.configured(
81
+ {"database": "my_database", "schema": "my_schema", ...} # will be used as the schema
82
+ )}
83
+ )
84
+
85
+
86
+ On individual assets, you can also specify the schema where they should be stored using metadata or
87
+ by adding a ``key_prefix`` to the asset key. If both ``key_prefix`` and metadata are defined, the metadata will
88
+ take precedence.
89
+
90
+ .. code-block:: python
91
+
92
+ @asset(
93
+ key_prefix=["my_schema"] # will be used as the schema in snowflake
94
+ )
95
+ def my_table() -> pd.DataFrame:
96
+ ...
97
+
98
+ @asset(
99
+ metadata={"schema": "my_schema"} # will be used as the schema in snowflake
100
+ )
101
+ def my_other_table() -> pd.DataFrame:
102
+ ...
103
+
104
+ For ops, the schema can be specified by including a "schema" entry in output metadata.
73
105
 
74
106
  .. code-block:: python
75
107
 
@@ -77,9 +109,10 @@ def build_snowflake_io_manager(
77
109
  out={"my_table": Out(metadata={"schema": "my_schema"})}
78
110
  )
79
111
  def make_my_table() -> pd.DataFrame:
80
- # the returned value will be stored at my_schema.my_table
81
112
  ...
82
113
 
114
+ If none of these is provided, the schema will default to "public".
115
+
83
116
  To only use specific columns of a table as input to a downstream op or asset, add the metadata ``columns`` to the
84
117
  In or AssetIn.
85
118
 
@@ -134,16 +167,42 @@ class SnowflakeIOManager(ConfigurableIOManagerFactory):
134
167
  defs = Definitions(
135
168
  assets=[my_table],
136
169
  resources={
137
- "io_manager": MySnowflakeIOManager(database="MY_DATABASE", account=EnvVar("SNOWFLAKE_ACCOUNT"), ...)
170
+ "io_manager": MySnowflakeIOManager(database="my_database", account=EnvVar("SNOWFLAKE_ACCOUNT"), ...)
138
171
  }
139
172
  )
140
173
 
141
- If you do not provide a schema, Dagster will determine a schema based on the assets and ops using
142
- the IO Manager. For assets, the schema will be determined from the asset key,
143
- as shown in the above example. The final prefix before the asset name will be used as the schema. For example,
144
- if the asset ``my_table`` had the key prefix ``["snowflake", "my_schema"]``, the schema ``my_schema`` will be
145
- used. For ops, the schema can be specified by including a ``schema`` entry in output metadata. If ``schema`` is not provided
146
- via config or on the asset/op, ``public`` will be used for the schema.
174
+ You can set a default schema to store the assets using the ``schema`` configuration value of the Snowflake I/O
175
+ Manager. This schema will be used if no other schema is specified directly on an asset or op.
176
+
177
+ .. code-block:: python
178
+
179
+ defs = Definitions(
180
+ assets=[my_table]
181
+ resources={
182
+ "io_manager" MySnowflakeIOManager(database="my_database", schema="my_schema", ...)
183
+ }
184
+ )
185
+
186
+
187
+ On individual assets, you an also specify the schema where they should be stored using metadata or
188
+ by adding a ``key_prefix`` to the asset key. If both ``key_prefix`` and metadata are defined, the metadata will
189
+ take precedence.
190
+
191
+ .. code-block:: python
192
+
193
+ @asset(
194
+ key_prefix=["my_schema"] # will be used as the schema in snowflake
195
+ )
196
+ def my_table() -> pd.DataFrame:
197
+ ...
198
+
199
+ @asset(
200
+ metadata={"schema": "my_schema"} # will be used as the schema in snowflake
201
+ )
202
+ def my_other_table() -> pd.DataFrame:
203
+ ...
204
+
205
+ For ops, the schema can be specified by including a "schema" entry in output metadata.
147
206
 
148
207
  .. code-block:: python
149
208
 
@@ -151,9 +210,10 @@ class SnowflakeIOManager(ConfigurableIOManagerFactory):
151
210
  out={"my_table": Out(metadata={"schema": "my_schema"})}
152
211
  )
153
212
  def make_my_table() -> pd.DataFrame:
154
- # the returned value will be stored at my_schema.my_table
155
213
  ...
156
214
 
215
+ If none of these is provided, the schema will default to "public".
216
+
157
217
  To only use specific columns of a table as input to a downstream op or asset, add the metadata ``columns`` to the
158
218
  In or AssetIn.
159
219
 
@@ -220,6 +280,15 @@ class SnowflakeIOManager(ConfigurableIOManagerFactory):
220
280
  default=None,
221
281
  description="Optional parameter to specify the authentication mechanism to use.",
222
282
  )
283
+ additional_snowflake_connection_args: Optional[dict[str, Any]] = Field(
284
+ default=None,
285
+ description=(
286
+ "Additional keyword arguments to pass to the snowflake.connector.connect function. For a full list of"
287
+ " available arguments, see the `Snowflake documentation"
288
+ " <https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-connect>`__."
289
+ " This config will be ignored if using the sqlalchemy connector."
290
+ ),
291
+ )
223
292
 
224
293
  @staticmethod
225
294
  @abstractmethod
@@ -241,7 +310,7 @@ class SnowflakeIOManager(ConfigurableIOManagerFactory):
241
310
  ...
242
311
 
243
312
  @staticmethod
244
- def default_load_type() -> Optional[Type]:
313
+ def default_load_type() -> Optional[type]:
245
314
  """If an asset or op is not annotated with an return type, default_load_type will be used to
246
315
  determine which TypeHandler to use to store and load the output.
247
316
 
@@ -288,26 +357,31 @@ class SnowflakeDbClient(DbClient):
288
357
  if context.resource_config
289
358
  else {}
290
359
  )
291
- with SnowflakeResource(
292
- schema=table_slice.schema, connector="sqlalchemy", **no_schema_config
293
- ).get_connection(raw_conn=False) as conn:
360
+ with SnowflakeResource(schema=table_slice.schema, **no_schema_config).get_connection( # pyright: ignore[reportArgumentType]
361
+ raw_conn=False
362
+ ) as conn:
294
363
  yield conn
295
364
 
296
365
  @staticmethod
297
366
  def ensure_schema_exists(context: OutputContext, table_slice: TableSlice, connection) -> None:
298
- schemas = connection.execute(
299
- f"show schemas like '{table_slice.schema}' in database {table_slice.database}"
300
- ).fetchall()
367
+ schemas = (
368
+ connection.cursor()
369
+ .execute(f"show schemas like '{table_slice.schema}' in database {table_slice.database}")
370
+ .fetchall()
371
+ )
301
372
  if len(schemas) == 0:
302
- connection.execute(f"create schema {table_slice.schema};")
373
+ connection.cursor().execute(f"create schema {table_slice.schema};")
303
374
 
304
375
  @staticmethod
305
376
  def delete_table_slice(context: OutputContext, table_slice: TableSlice, connection) -> None:
306
377
  try:
307
- connection.execute(_get_cleanup_statement(table_slice))
308
- except ProgrammingError:
309
- # table doesn't exist yet, so ignore the error
310
- pass
378
+ connection.cursor().execute(_get_cleanup_statement(table_slice))
379
+ except ProgrammingError as e:
380
+ if "does not exist" in e.msg: # type: ignore
381
+ # table doesn't exist yet, so ignore the error
382
+ return
383
+ else:
384
+ raise
311
385
 
312
386
  @staticmethod
313
387
  def get_select_statement(table_slice: TableSlice) -> str:
@@ -1 +1 @@
1
- __version__ = "0.21.5"
1
+ __version__ = "0.25.9"
@@ -1,23 +1,25 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dagster-snowflake
3
- Version: 0.21.5
3
+ Version: 0.25.9
4
4
  Summary: Package for Snowflake Dagster framework components.
5
5
  Home-page: https://github.com/dagster-io/dagster/tree/master/python_modules/libraries/dagster-snowflake
6
6
  Author: Dagster Labs
7
7
  Author-email: hello@dagsterlabs.com
8
8
  License: Apache-2.0
9
- Classifier: Programming Language :: Python :: 3.8
10
9
  Classifier: Programming Language :: Python :: 3.9
11
10
  Classifier: Programming Language :: Python :: 3.10
12
11
  Classifier: Programming Language :: Python :: 3.11
12
+ Classifier: Programming Language :: Python :: 3.12
13
13
  Classifier: License :: OSI Approved :: Apache Software License
14
14
  Classifier: Operating System :: OS Independent
15
+ Requires-Python: >=3.9,<3.13
15
16
  License-File: LICENSE
16
- Requires-Dist: dagster ==1.5.5
17
- Requires-Dist: snowflake-connector-python >=2.1.0
17
+ Requires-Dist: dagster ==1.9.9
18
+ Requires-Dist: snowflake-connector-python >=3.4.0
19
+ Requires-Dist: pyOpenSSL >=22.1.0
18
20
  Provides-Extra: pandas
19
21
  Requires-Dist: pandas ; extra == 'pandas'
20
- Requires-Dist: snowflake-connector-python[pandas] >=2.1.0 ; extra == 'pandas'
22
+ Requires-Dist: snowflake-connector-python[pandas] >=3.4.0 ; extra == 'pandas'
21
23
  Provides-Extra: snowflake.sqlalchemy
22
24
  Requires-Dist: sqlalchemy !=1.4.42 ; extra == 'snowflake.sqlalchemy'
23
25
  Requires-Dist: snowflake-sqlalchemy ; extra == 'snowflake.sqlalchemy'
@@ -0,0 +1,12 @@
1
+ dagster_snowflake/__init__.py,sha256=MeLU-ghhmyMk82I2b3qCz9h7mxgb2eMxXfkqGBjI29k,673
2
+ dagster_snowflake/constants.py,sha256=26PGL1eFncm0WfbgZjA7jaznfaRx75vVJnqbieVYeco,481
3
+ dagster_snowflake/ops.py,sha256=L_MP28fLm7_hrJmzMoDocLwyvVnkpy1LVwUSLIArKWc,2225
4
+ dagster_snowflake/py.typed,sha256=la67KBlbjXN-_-DfGNcdOcjYumVpKG_Tkw-8n5dnGB4,8
5
+ dagster_snowflake/resources.py,sha256=MTZrXijDXsR-f8qUCRARfHv97DWWtTtCt1lyUEhXI7o,32008
6
+ dagster_snowflake/snowflake_io_manager.py,sha256=Q6Yl4H8Ab8Twh-Iq-VLqtDqc-XJfCkosL4KKdd-azoo,17778
7
+ dagster_snowflake/version.py,sha256=NsKiCCQq5j7wW1paL-Bw27h63w_P0r0bIHvsX9TsjGY,23
8
+ dagster_snowflake-0.25.9.dist-info/LICENSE,sha256=TMatHW4_G9ldRdodEAp-l2Xa2WvsdeOh60E3v1R2jis,11349
9
+ dagster_snowflake-0.25.9.dist-info/METADATA,sha256=DPdMGYTjUzX7wUVSIRQBXw-LrlXs7enOq5dloY9g5Xs,1103
10
+ dagster_snowflake-0.25.9.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
11
+ dagster_snowflake-0.25.9.dist-info/top_level.txt,sha256=uECYCiluOxLQ996SCUPBBwdK0CTyz45FjWqf7WDqMMc,18
12
+ dagster_snowflake-0.25.9.dist-info/RECORD,,
@@ -1,10 +0,0 @@
1
- dagster_snowflake/__init__.py,sha256=svCkKOLfCh3_1DBoTtrEa8Hu7puNB4c7JX4FvIGbUf0,537
2
- dagster_snowflake/ops.py,sha256=cxCMvbzsELa62ax3AWodKSpmad3Kb6NLcZlj6SW2KVg,2159
3
- dagster_snowflake/resources.py,sha256=ATW__nAcep_t7muqylM6Ux5jxVCgSM9uz7tpjpLx-jU,28337
4
- dagster_snowflake/snowflake_io_manager.py,sha256=C5sHxtsHpuKsRWdbsCv9hVmZ7mvProSCgwoQ1UeCy6Y,15478
5
- dagster_snowflake/version.py,sha256=yq-fGfdU069XbMVu8cXj7x9Ga-YNsSY_1SmDswa-WEI,23
6
- dagster_snowflake-0.21.5.dist-info/LICENSE,sha256=TMatHW4_G9ldRdodEAp-l2Xa2WvsdeOh60E3v1R2jis,11349
7
- dagster_snowflake-0.21.5.dist-info/METADATA,sha256=Fw_rmHZNz0yVkFZUIChtL7IBSybN6rdtHJdyCE9V-Gg,1039
8
- dagster_snowflake-0.21.5.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
9
- dagster_snowflake-0.21.5.dist-info/top_level.txt,sha256=uECYCiluOxLQ996SCUPBBwdK0CTyz45FjWqf7WDqMMc,18
10
- dagster_snowflake-0.21.5.dist-info/RECORD,,