dagster-snowflake 0.21.5__py3-none-any.whl → 0.25.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dagster-snowflake might be problematic. Click here for more details.

@@ -1,15 +1,16 @@
1
1
  from dagster._core.libraries import DagsterLibraryRegistry
2
2
 
3
- from .ops import snowflake_op_for_query as snowflake_op_for_query
4
- from .resources import (
3
+ from dagster_snowflake.ops import snowflake_op_for_query as snowflake_op_for_query
4
+ from dagster_snowflake.resources import (
5
5
  SnowflakeConnection as SnowflakeConnection,
6
6
  SnowflakeResource as SnowflakeResource,
7
+ fetch_last_updated_timestamps as fetch_last_updated_timestamps,
7
8
  snowflake_resource as snowflake_resource,
8
9
  )
9
- from .snowflake_io_manager import (
10
+ from dagster_snowflake.snowflake_io_manager import (
10
11
  SnowflakeIOManager as SnowflakeIOManager,
11
12
  build_snowflake_io_manager as build_snowflake_io_manager,
12
13
  )
13
- from .version import __version__
14
+ from dagster_snowflake.version import __version__
14
15
 
15
16
  DagsterLibraryRegistry.register("dagster-snowflake", __version__)
@@ -0,0 +1,7 @@
1
+ # Description: This file contains the Snowflake connection identifiers for the Snowflake partner account.
2
+ # The connection identifiers are used to identify the partner account when connecting to Snowflake.
3
+
4
+ # We use different connection identifiers for different connection code paths to ensure that each is
5
+ # working as expected.
6
+ SNOWFLAKE_PARTNER_CONNECTION_IDENTIFIER = "DagsterLabs_Dagster"
7
+ SNOWFLAKE_PARTNER_CONNECTION_IDENTIFIER_SQLALCHEMY = "DagsterLabs_Dagster_SqlAlchemy"
dagster_snowflake/ops.py CHANGED
@@ -4,6 +4,7 @@ from dagster import (
4
4
  op,
5
5
  )
6
6
  from dagster._core.definitions.input import In
7
+ from dagster._core.storage.tags import COMPUTE_KIND_TAG
7
8
 
8
9
 
9
10
  def _core_create_snowflake_command(dagster_decorator, decorator_name, sql, parameters=None):
@@ -14,7 +15,7 @@ def _core_create_snowflake_command(dagster_decorator, decorator_name, sql, param
14
15
  name=f"snowflake_{decorator_name}",
15
16
  ins={"start": In(Nothing)},
16
17
  required_resource_keys={"snowflake"},
17
- tags={"kind": "sql", "sql": sql},
18
+ tags={COMPUTE_KIND_TAG: "sql", "sql": sql},
18
19
  )
19
20
  def snowflake_fn(context):
20
21
  context.resources.snowflake.execute_query(sql=sql, parameters=parameters)
@@ -0,0 +1 @@
1
+ partial
@@ -2,6 +2,7 @@ import base64
2
2
  import sys
3
3
  import warnings
4
4
  from contextlib import closing, contextmanager
5
+ from datetime import datetime
5
6
  from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Union
6
7
 
7
8
  import dagster._check as check
@@ -14,11 +15,15 @@ from dagster import (
14
15
  resource,
15
16
  )
16
17
  from dagster._annotations import public
17
- from dagster._config.pythonic_config.pydantic_compat_layer import compat_model_validator
18
18
  from dagster._core.definitions.resource_definition import dagster_maintained_resource
19
19
  from dagster._core.storage.event_log.sql_event_log import SqlDbConnection
20
20
  from dagster._utils.cached_method import cached_method
21
- from pydantic import Field, validator
21
+ from pydantic import Field, model_validator, validator
22
+
23
+ from dagster_snowflake.constants import (
24
+ SNOWFLAKE_PARTNER_CONNECTION_IDENTIFIER,
25
+ SNOWFLAKE_PARTNER_CONNECTION_IDENTIFIER_SQLALCHEMY,
26
+ )
22
27
 
23
28
  try:
24
29
  import snowflake.connector
@@ -229,7 +234,7 @@ class SnowflakeResource(ConfigurableResource, IAttachDifferentObjectToOpContext)
229
234
  "Indicate alternative database connection engine. Permissible option is "
230
235
  "'sqlalchemy' otherwise defaults to use the Snowflake Connector for Python."
231
236
  ),
232
- is_required=False,
237
+ is_required=False, # type: ignore
233
238
  )
234
239
 
235
240
  cache_column_metadata: Optional[str] = Field(
@@ -253,6 +258,15 @@ class SnowflakeResource(ConfigurableResource, IAttachDifferentObjectToOpContext)
253
258
  default=None,
254
259
  description="Optional parameter to specify the authentication mechanism to use.",
255
260
  )
261
+ additional_snowflake_connection_args: Optional[Dict[str, Any]] = Field(
262
+ default=None,
263
+ description=(
264
+ "Additional keyword arguments to pass to the snowflake.connector.connect function. For a full list of"
265
+ " available arguments, see the `Snowflake documentation"
266
+ " <https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-connect>`__."
267
+ " This config will be ignored if using the sqlalchemy connector."
268
+ ),
269
+ )
256
270
 
257
271
  @validator("paramstyle")
258
272
  def validate_paramstyle(cls, v: Optional[str]) -> Optional[str]:
@@ -272,7 +286,7 @@ class SnowflakeResource(ConfigurableResource, IAttachDifferentObjectToOpContext)
272
286
  )
273
287
  return v
274
288
 
275
- @compat_model_validator(mode="before")
289
+ @model_validator(mode="before")
276
290
  def validate_authentication(cls, values):
277
291
  auths_set = 0
278
292
  auths_set += 1 if values.get("password") is not None else 0
@@ -332,6 +346,10 @@ class SnowflakeResource(ConfigurableResource, IAttachDifferentObjectToOpContext)
332
346
  ):
333
347
  conn_args["private_key"] = self._snowflake_private_key(self._resolved_config_dict)
334
348
 
349
+ conn_args["application"] = SNOWFLAKE_PARTNER_CONNECTION_IDENTIFIER
350
+
351
+ if self._resolved_config_dict.get("additional_snowflake_connection_args") is not None:
352
+ conn_args.update(self._resolved_config_dict["additional_snowflake_connection_args"])
335
353
  return conn_args
336
354
 
337
355
  @property
@@ -352,6 +370,7 @@ class SnowflakeResource(ConfigurableResource, IAttachDifferentObjectToOpContext)
352
370
  )
353
371
  if self._resolved_config_dict.get(k) is not None
354
372
  }
373
+ conn_args["application"] = SNOWFLAKE_PARTNER_CONNECTION_IDENTIFIER_SQLALCHEMY
355
374
 
356
375
  return conn_args
357
376
 
@@ -713,3 +732,63 @@ def snowflake_resource(context) -> SnowflakeConnection:
713
732
  return SnowflakeConnection(
714
733
  config=context, log=context.log, snowflake_connection_resource=snowflake_resource
715
734
  )
735
+
736
+
737
+ def fetch_last_updated_timestamps(
738
+ *,
739
+ snowflake_connection: Union[SqlDbConnection, snowflake.connector.SnowflakeConnection],
740
+ schema: str,
741
+ tables: Sequence[str],
742
+ database: Optional[str] = None,
743
+ ignore_missing_tables: Optional[bool] = False,
744
+ ) -> Mapping[str, datetime]:
745
+ """Fetch the last updated times of a list of tables in Snowflake.
746
+
747
+ If the underlying query to fetch the last updated time returns no results, a ValueError will be raised.
748
+
749
+ Args:
750
+ snowflake_connection (Union[SqlDbConnection, SnowflakeConnection]): A connection to Snowflake.
751
+ Accepts either a SnowflakeConnection or a sqlalchemy connection object,
752
+ which are the two types of connections emittable from the snowflake resource.
753
+ schema (str): The schema of the tables to fetch the last updated time for.
754
+ tables (Sequence[str]): A list of table names to fetch the last updated time for.
755
+ database (Optional[str]): The database of the table. Only required if the connection
756
+ has not been set with a database.
757
+ ignore_missing_tables (Optional[bool]): If True, tables not found in Snowflake
758
+ will be excluded from the result.
759
+
760
+ Returns:
761
+ Mapping[str, datetime]: A dictionary of table names to their last updated time in UTC.
762
+ """
763
+ check.invariant(len(tables) > 0, "Must provide at least one table name to query upon.")
764
+ # Table names in snowflake's information schema are stored in uppercase
765
+ uppercase_tables = [table.upper() for table in tables]
766
+ tables_str = ", ".join([f"'{table_name}'" for table_name in uppercase_tables])
767
+ fully_qualified_table_name = (
768
+ f"{database}.information_schema.tables" if database else "information_schema.tables"
769
+ )
770
+
771
+ query = f"""
772
+ SELECT table_name, CONVERT_TIMEZONE('UTC', last_altered) AS last_altered
773
+ FROM {fully_qualified_table_name}
774
+ WHERE table_schema = '{schema}' AND table_name IN ({tables_str});
775
+ """
776
+ result = snowflake_connection.cursor().execute(query)
777
+ if not result:
778
+ raise ValueError("No results returned from Snowflake update time query.")
779
+
780
+ result_mapping = {table_name: last_altered for table_name, last_altered in result}
781
+ result_correct_case = {}
782
+ for table_name in tables:
783
+ if table_name.upper() not in result_mapping:
784
+ if ignore_missing_tables:
785
+ continue
786
+ raise ValueError(f"Table {table_name} could not be found.")
787
+ last_altered = result_mapping[table_name.upper()]
788
+ check.invariant(
789
+ isinstance(last_altered, datetime),
790
+ "Expected last_altered to be a datetime, but it was not.",
791
+ )
792
+ result_correct_case[table_name] = last_altered
793
+
794
+ return result_correct_case
@@ -1,11 +1,9 @@
1
1
  from abc import abstractmethod
2
2
  from contextlib import contextmanager
3
- from typing import Optional, Sequence, Type, cast
3
+ from typing import Any, Dict, Optional, Sequence, Type, cast
4
4
 
5
5
  from dagster import IOManagerDefinition, OutputContext, io_manager
6
- from dagster._config.pythonic_config import (
7
- ConfigurableIOManagerFactory,
8
- )
6
+ from dagster._config.pythonic_config import ConfigurableIOManagerFactory
9
7
  from dagster._core.definitions.time_window_partitions import TimeWindow
10
8
  from dagster._core.storage.db_io_manager import (
11
9
  DbClient,
@@ -16,9 +14,9 @@ from dagster._core.storage.db_io_manager import (
16
14
  )
17
15
  from dagster._core.storage.io_manager import dagster_maintained_io_manager
18
16
  from pydantic import Field
19
- from sqlalchemy.exc import ProgrammingError
17
+ from snowflake.connector.errors import ProgrammingError
20
18
 
21
- from .resources import SnowflakeResource
19
+ from dagster_snowflake.resources import SnowflakeResource
22
20
 
23
21
  SNOWFLAKE_DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
24
22
 
@@ -46,15 +44,22 @@ def build_snowflake_io_manager(
46
44
  from dagster import Definitions
47
45
 
48
46
  @asset(
49
- key_prefix=["my_schema"] # will be used as the schema in snowflake
47
+ key_prefix=["my_prefix"]
48
+ metadata={"schema": "my_schema"} # will be used as the schema in snowflake
50
49
  )
51
50
  def my_table() -> pd.DataFrame: # the name of the asset will be the table name
52
51
  ...
53
52
 
53
+ @asset(
54
+ key_prefix=["my_schema"] # will be used as the schema in snowflake
55
+ )
56
+ def my_second_table() -> pd.DataFrame: # the name of the asset will be the table name
57
+ ...
58
+
54
59
  snowflake_io_manager = build_snowflake_io_manager([SnowflakePandasTypeHandler(), SnowflakePySparkTypeHandler()])
55
60
 
56
61
  defs = Definitions(
57
- assets=[my_table],
62
+ assets=[my_table, my_second_table],
58
63
  resources={
59
64
  "io_manager": snowflake_io_manager.configured({
60
65
  "database": "my_database",
@@ -64,12 +69,38 @@ def build_snowflake_io_manager(
64
69
  }
65
70
  )
66
71
 
67
- If you do not provide a schema, Dagster will determine a schema based on the assets and ops using
68
- the IO Manager. For assets, the schema will be determined from the asset key,
69
- as shown in the above example. The final prefix before the asset name will be used as the schema. For example,
70
- if the asset ``my_table`` had the key prefix ``["snowflake", "my_schema"]``, the schema ``my_schema`` will be
71
- used. For ops, the schema can be specified by including a ``schema`` entry in output metadata. If ``schema`` is not provided
72
- via config or on the asset/op, ``public`` will be used for the schema.
72
+ You can set a default schema to store the assets using the ``schema`` configuration value of the Snowflake I/O
73
+ Manager. This schema will be used if no other schema is specified directly on an asset or op.
74
+
75
+ .. code-block:: python
76
+
77
+ defs = Definitions(
78
+ assets=[my_table]
79
+ resources={"io_manager" snowflake_io_manager.configured(
80
+ {"database": "my_database", "schema": "my_schema", ...} # will be used as the schema
81
+ )}
82
+ )
83
+
84
+
85
 + On individual assets, you can also specify the schema where they should be stored using metadata or
86
+ by adding a ``key_prefix`` to the asset key. If both ``key_prefix`` and metadata are defined, the metadata will
87
+ take precedence.
88
+
89
+ .. code-block:: python
90
+
91
+ @asset(
92
+ key_prefix=["my_schema"] # will be used as the schema in snowflake
93
+ )
94
+ def my_table() -> pd.DataFrame:
95
+ ...
96
+
97
+ @asset(
98
+ metadata={"schema": "my_schema"} # will be used as the schema in snowflake
99
+ )
100
+ def my_other_table() -> pd.DataFrame:
101
+ ...
102
+
103
+ For ops, the schema can be specified by including a "schema" entry in output metadata.
73
104
 
74
105
  .. code-block:: python
75
106
 
@@ -77,9 +108,10 @@ def build_snowflake_io_manager(
77
108
  out={"my_table": Out(metadata={"schema": "my_schema"})}
78
109
  )
79
110
  def make_my_table() -> pd.DataFrame:
80
- # the returned value will be stored at my_schema.my_table
81
111
  ...
82
112
 
113
+ If none of these is provided, the schema will default to "public".
114
+
83
115
  To only use specific columns of a table as input to a downstream op or asset, add the metadata ``columns`` to the
84
116
  In or AssetIn.
85
117
 
@@ -134,16 +166,42 @@ class SnowflakeIOManager(ConfigurableIOManagerFactory):
134
166
  defs = Definitions(
135
167
  assets=[my_table],
136
168
  resources={
137
- "io_manager": MySnowflakeIOManager(database="MY_DATABASE", account=EnvVar("SNOWFLAKE_ACCOUNT"), ...)
169
+ "io_manager": MySnowflakeIOManager(database="my_database", account=EnvVar("SNOWFLAKE_ACCOUNT"), ...)
138
170
  }
139
171
  )
140
172
 
141
- If you do not provide a schema, Dagster will determine a schema based on the assets and ops using
142
- the IO Manager. For assets, the schema will be determined from the asset key,
143
- as shown in the above example. The final prefix before the asset name will be used as the schema. For example,
144
- if the asset ``my_table`` had the key prefix ``["snowflake", "my_schema"]``, the schema ``my_schema`` will be
145
- used. For ops, the schema can be specified by including a ``schema`` entry in output metadata. If ``schema`` is not provided
146
- via config or on the asset/op, ``public`` will be used for the schema.
173
+ You can set a default schema to store the assets using the ``schema`` configuration value of the Snowflake I/O
174
+ Manager. This schema will be used if no other schema is specified directly on an asset or op.
175
+
176
+ .. code-block:: python
177
+
178
+ defs = Definitions(
179
+ assets=[my_table]
180
+ resources={
181
+ "io_manager" MySnowflakeIOManager(database="my_database", schema="my_schema", ...)
182
+ }
183
+ )
184
+
185
+
186
 + On individual assets, you can also specify the schema where they should be stored using metadata or
187
+ by adding a ``key_prefix`` to the asset key. If both ``key_prefix`` and metadata are defined, the metadata will
188
+ take precedence.
189
+
190
+ .. code-block:: python
191
+
192
+ @asset(
193
+ key_prefix=["my_schema"] # will be used as the schema in snowflake
194
+ )
195
+ def my_table() -> pd.DataFrame:
196
+ ...
197
+
198
+ @asset(
199
+ metadata={"schema": "my_schema"} # will be used as the schema in snowflake
200
+ )
201
+ def my_other_table() -> pd.DataFrame:
202
+ ...
203
+
204
+ For ops, the schema can be specified by including a "schema" entry in output metadata.
147
205
 
148
206
  .. code-block:: python
149
207
 
@@ -151,9 +209,10 @@ class SnowflakeIOManager(ConfigurableIOManagerFactory):
151
209
  out={"my_table": Out(metadata={"schema": "my_schema"})}
152
210
  )
153
211
  def make_my_table() -> pd.DataFrame:
154
- # the returned value will be stored at my_schema.my_table
155
212
  ...
156
213
 
214
+ If none of these is provided, the schema will default to "public".
215
+
157
216
  To only use specific columns of a table as input to a downstream op or asset, add the metadata ``columns`` to the
158
217
  In or AssetIn.
159
218
 
@@ -220,6 +279,15 @@ class SnowflakeIOManager(ConfigurableIOManagerFactory):
220
279
  default=None,
221
280
  description="Optional parameter to specify the authentication mechanism to use.",
222
281
  )
282
+ additional_snowflake_connection_args: Optional[Dict[str, Any]] = Field(
283
+ default=None,
284
+ description=(
285
+ "Additional keyword arguments to pass to the snowflake.connector.connect function. For a full list of"
286
+ " available arguments, see the `Snowflake documentation"
287
+ " <https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-connect>`__."
288
+ " This config will be ignored if using the sqlalchemy connector."
289
+ ),
290
+ )
223
291
 
224
292
  @staticmethod
225
293
  @abstractmethod
@@ -288,26 +356,31 @@ class SnowflakeDbClient(DbClient):
288
356
  if context.resource_config
289
357
  else {}
290
358
  )
291
- with SnowflakeResource(
292
- schema=table_slice.schema, connector="sqlalchemy", **no_schema_config
293
- ).get_connection(raw_conn=False) as conn:
359
+ with SnowflakeResource(schema=table_slice.schema, **no_schema_config).get_connection( # pyright: ignore[reportArgumentType]
360
+ raw_conn=False
361
+ ) as conn:
294
362
  yield conn
295
363
 
296
364
  @staticmethod
297
365
  def ensure_schema_exists(context: OutputContext, table_slice: TableSlice, connection) -> None:
298
- schemas = connection.execute(
299
- f"show schemas like '{table_slice.schema}' in database {table_slice.database}"
300
- ).fetchall()
366
+ schemas = (
367
+ connection.cursor()
368
+ .execute(f"show schemas like '{table_slice.schema}' in database {table_slice.database}")
369
+ .fetchall()
370
+ )
301
371
  if len(schemas) == 0:
302
- connection.execute(f"create schema {table_slice.schema};")
372
+ connection.cursor().execute(f"create schema {table_slice.schema};")
303
373
 
304
374
  @staticmethod
305
375
  def delete_table_slice(context: OutputContext, table_slice: TableSlice, connection) -> None:
306
376
  try:
307
- connection.execute(_get_cleanup_statement(table_slice))
308
- except ProgrammingError:
309
- # table doesn't exist yet, so ignore the error
310
- pass
377
+ connection.cursor().execute(_get_cleanup_statement(table_slice))
378
+ except ProgrammingError as e:
379
+ if "does not exist" in e.msg: # type: ignore
380
+ # table doesn't exist yet, so ignore the error
381
+ return
382
+ else:
383
+ raise
311
384
 
312
385
  @staticmethod
313
386
  def get_select_statement(table_slice: TableSlice) -> str:
@@ -1 +1 @@
1
- __version__ = "0.21.5"
1
+ __version__ = "0.25.8"
@@ -1,23 +1,25 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dagster-snowflake
3
- Version: 0.21.5
3
+ Version: 0.25.8
4
4
  Summary: Package for Snowflake Dagster framework components.
5
5
  Home-page: https://github.com/dagster-io/dagster/tree/master/python_modules/libraries/dagster-snowflake
6
6
  Author: Dagster Labs
7
7
  Author-email: hello@dagsterlabs.com
8
8
  License: Apache-2.0
9
- Classifier: Programming Language :: Python :: 3.8
10
9
  Classifier: Programming Language :: Python :: 3.9
11
10
  Classifier: Programming Language :: Python :: 3.10
12
11
  Classifier: Programming Language :: Python :: 3.11
12
+ Classifier: Programming Language :: Python :: 3.12
13
13
  Classifier: License :: OSI Approved :: Apache Software License
14
14
  Classifier: Operating System :: OS Independent
15
+ Requires-Python: >=3.9,<3.13
15
16
  License-File: LICENSE
16
- Requires-Dist: dagster ==1.5.5
17
- Requires-Dist: snowflake-connector-python >=2.1.0
17
+ Requires-Dist: dagster ==1.9.8
18
+ Requires-Dist: snowflake-connector-python >=3.4.0
19
+ Requires-Dist: pyOpenSSL >=22.1.0
18
20
  Provides-Extra: pandas
19
21
  Requires-Dist: pandas ; extra == 'pandas'
20
- Requires-Dist: snowflake-connector-python[pandas] >=2.1.0 ; extra == 'pandas'
22
+ Requires-Dist: snowflake-connector-python[pandas] >=3.4.0 ; extra == 'pandas'
21
23
  Provides-Extra: snowflake.sqlalchemy
22
24
  Requires-Dist: sqlalchemy !=1.4.42 ; extra == 'snowflake.sqlalchemy'
23
25
  Requires-Dist: snowflake-sqlalchemy ; extra == 'snowflake.sqlalchemy'
@@ -0,0 +1,12 @@
1
+ dagster_snowflake/__init__.py,sha256=MeLU-ghhmyMk82I2b3qCz9h7mxgb2eMxXfkqGBjI29k,673
2
+ dagster_snowflake/constants.py,sha256=26PGL1eFncm0WfbgZjA7jaznfaRx75vVJnqbieVYeco,481
3
+ dagster_snowflake/ops.py,sha256=L_MP28fLm7_hrJmzMoDocLwyvVnkpy1LVwUSLIArKWc,2225
4
+ dagster_snowflake/py.typed,sha256=la67KBlbjXN-_-DfGNcdOcjYumVpKG_Tkw-8n5dnGB4,8
5
+ dagster_snowflake/resources.py,sha256=_ERXAGne5q_Z0JG92fBvPOnAySUtWiqJmjU1qYddZwI,32084
6
+ dagster_snowflake/snowflake_io_manager.py,sha256=d_fFr7Fk95uNQ6PNvbxCYC6dgH2yyzJBAx_KVC8HtTQ,17763
7
+ dagster_snowflake/version.py,sha256=bkf--LdvzFbtaf_d7GSQn5A2t-yDZ-SX33wKaaeqHsY,23
8
+ dagster_snowflake-0.25.8.dist-info/LICENSE,sha256=TMatHW4_G9ldRdodEAp-l2Xa2WvsdeOh60E3v1R2jis,11349
9
+ dagster_snowflake-0.25.8.dist-info/METADATA,sha256=pUY1pTPFxlJFwf1fYWvBztWMjEUJeJmHJ6UM7qEH8gY,1103
10
+ dagster_snowflake-0.25.8.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
11
+ dagster_snowflake-0.25.8.dist-info/top_level.txt,sha256=uECYCiluOxLQ996SCUPBBwdK0CTyz45FjWqf7WDqMMc,18
12
+ dagster_snowflake-0.25.8.dist-info/RECORD,,
@@ -1,10 +0,0 @@
1
- dagster_snowflake/__init__.py,sha256=svCkKOLfCh3_1DBoTtrEa8Hu7puNB4c7JX4FvIGbUf0,537
2
- dagster_snowflake/ops.py,sha256=cxCMvbzsELa62ax3AWodKSpmad3Kb6NLcZlj6SW2KVg,2159
3
- dagster_snowflake/resources.py,sha256=ATW__nAcep_t7muqylM6Ux5jxVCgSM9uz7tpjpLx-jU,28337
4
- dagster_snowflake/snowflake_io_manager.py,sha256=C5sHxtsHpuKsRWdbsCv9hVmZ7mvProSCgwoQ1UeCy6Y,15478
5
- dagster_snowflake/version.py,sha256=yq-fGfdU069XbMVu8cXj7x9Ga-YNsSY_1SmDswa-WEI,23
6
- dagster_snowflake-0.21.5.dist-info/LICENSE,sha256=TMatHW4_G9ldRdodEAp-l2Xa2WvsdeOh60E3v1R2jis,11349
7
- dagster_snowflake-0.21.5.dist-info/METADATA,sha256=Fw_rmHZNz0yVkFZUIChtL7IBSybN6rdtHJdyCE9V-Gg,1039
8
- dagster_snowflake-0.21.5.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
9
- dagster_snowflake-0.21.5.dist-info/top_level.txt,sha256=uECYCiluOxLQ996SCUPBBwdK0CTyz45FjWqf7WDqMMc,18
10
- dagster_snowflake-0.21.5.dist-info/RECORD,,