dagster-snowflake 0.13.3rc0__py3-none-any.whl → 0.28.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,436 @@
+ from abc import abstractmethod
+ from collections.abc import Sequence
+ from contextlib import contextmanager
+ from typing import Any, Optional, cast
+
+ from dagster import IOManagerDefinition, OutputContext, io_manager
+ from dagster._config.pythonic_config import ConfigurableIOManagerFactory
+ from dagster._core.definitions.partitions.utils import TimeWindow
+ from dagster._core.storage.db_io_manager import (
+     DbClient,
+     DbIOManager,
+     DbTypeHandler,
+     TablePartitionDimension,
+     TableSlice,
+ )
+ from dagster._core.storage.io_manager import dagster_maintained_io_manager
+ from pydantic import Field
+
+ from dagster_snowflake.resources import SnowflakeResource
+
+ SNOWFLAKE_DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
+
+
+ def build_snowflake_io_manager(
+     type_handlers: Sequence[DbTypeHandler], default_load_type: Optional[type] = None
+ ) -> IOManagerDefinition:
+     """Builds an IO manager definition that reads inputs from and writes outputs to Snowflake.
+
+     Args:
+         type_handlers (Sequence[DbTypeHandler]): Each handler defines how to translate between
+             slices of Snowflake tables and an in-memory type - e.g. a Pandas DataFrame. If only
+             one DbTypeHandler is provided, it will be used as the default_load_type.
+         default_load_type (Type): When an input has no type annotation, load it as this type.
+
+     Returns:
+         IOManagerDefinition
+
+     Examples:
+         .. code-block:: python
+
+             from dagster_snowflake import build_snowflake_io_manager
+             from dagster_snowflake_pandas import SnowflakePandasTypeHandler
+             from dagster_snowflake_pyspark import SnowflakePySparkTypeHandler
+             from dagster import Definitions
+
+             @asset(
+                 key_prefix=["my_prefix"],
+                 metadata={"schema": "my_schema"}  # will be used as the schema in snowflake
+             )
+             def my_table() -> pd.DataFrame:  # the name of the asset will be the table name
+                 ...
+
+             @asset(
+                 key_prefix=["my_schema"]  # will be used as the schema in snowflake
+             )
+             def my_second_table() -> pd.DataFrame:  # the name of the asset will be the table name
+                 ...
+
+             snowflake_io_manager = build_snowflake_io_manager([SnowflakePandasTypeHandler(), SnowflakePySparkTypeHandler()])
+
+             Definitions(
+                 assets=[my_table, my_second_table],
+                 resources={
+                     "io_manager": snowflake_io_manager.configured({
+                         "database": "my_database",
+                         "account": {"env": "SNOWFLAKE_ACCOUNT"},
+                         ...
+                     })
+                 }
+             )
+
+         You can set a default schema to store the assets using the ``schema`` configuration value of the Snowflake I/O
+         Manager. This schema will be used if no other schema is specified directly on an asset or op.
+
+         .. code-block:: python
+
+             Definitions(
+                 assets=[my_table],
+                 resources={"io_manager": snowflake_io_manager.configured(
+                     {"database": "my_database", "schema": "my_schema", ...}  # will be used as the schema
+                 )}
+             )
+
+
+         On individual assets, you can also specify the schema where they should be stored using metadata or
+         by adding a ``key_prefix`` to the asset key. If both ``key_prefix`` and metadata are defined, the metadata will
+         take precedence.
+
+         .. code-block:: python
+
+             @asset(
+                 key_prefix=["my_schema"]  # will be used as the schema in snowflake
+             )
+             def my_table() -> pd.DataFrame:
+                 ...
+
+             @asset(
+                 metadata={"schema": "my_schema"}  # will be used as the schema in snowflake
+             )
+             def my_other_table() -> pd.DataFrame:
+                 ...
+
+         For ops, the schema can be specified by including a "schema" entry in output metadata.
+
+         .. code-block:: python
+
+             @op(
+                 out={"my_table": Out(metadata={"schema": "my_schema"})}
+             )
+             def make_my_table() -> pd.DataFrame:
+                 ...
+
+         If none of these is provided, the schema will default to "public".
+
+         To only use specific columns of a table as input to a downstream op or asset, add the metadata ``columns`` to the
+         In or AssetIn.
+
+         .. code-block:: python
+
+             @asset(
+                 ins={"my_table": AssetIn("my_table", metadata={"columns": ["a"]})}
+             )
+             def my_table_a(my_table: pd.DataFrame) -> pd.DataFrame:
+                 # my_table will just contain the data from column "a"
+                 ...
+
+     """
+
+     @dagster_maintained_io_manager
+     @io_manager(config_schema=SnowflakeIOManager.to_config_schema())
+     def snowflake_io_manager(init_context):
+         return DbIOManager(
+             type_handlers=type_handlers,
+             db_client=SnowflakeDbClient(),
+             io_manager_name="SnowflakeIOManager",
+             database=init_context.resource_config["database"],
+             schema=init_context.resource_config.get("schema"),
+             default_load_type=default_load_type,
+         )
+
+     return snowflake_io_manager
+
+
+ class SnowflakeIOManager(ConfigurableIOManagerFactory):
+     """Base class for an IO manager definition that reads inputs from and writes outputs to Snowflake.
+
+     Examples:
+         .. code-block:: python
+
+             from dagster_snowflake import SnowflakeIOManager
+             from dagster_snowflake_pandas import SnowflakePandasTypeHandler
+             from dagster_snowflake_pyspark import SnowflakePySparkTypeHandler
+             from dagster import Definitions, EnvVar
+
+             class MySnowflakeIOManager(SnowflakeIOManager):
+                 @staticmethod
+                 def type_handlers() -> Sequence[DbTypeHandler]:
+                     return [SnowflakePandasTypeHandler(), SnowflakePySparkTypeHandler()]
+
+             @asset(
+                 key_prefix=["my_schema"]  # will be used as the schema in snowflake
+             )
+             def my_table() -> pd.DataFrame:  # the name of the asset will be the table name
+                 ...
+
+             defs = Definitions(
+                 assets=[my_table],
+                 resources={
+                     "io_manager": MySnowflakeIOManager(database="my_database", account=EnvVar("SNOWFLAKE_ACCOUNT"), ...)
+                 }
+             )
+
+         You can set a default schema to store the assets using the ``schema`` configuration value of the Snowflake I/O
+         Manager. This schema will be used if no other schema is specified directly on an asset or op.
+
+         .. code-block:: python
+
+             defs = Definitions(
+                 assets=[my_table],
+                 resources={
+                     "io_manager": MySnowflakeIOManager(database="my_database", schema="my_schema", ...)
+                 }
+             )
+
+
+         On individual assets, you can also specify the schema where they should be stored using metadata or
+         by adding a ``key_prefix`` to the asset key. If both ``key_prefix`` and metadata are defined, the metadata will
+         take precedence.
+
+         .. code-block:: python
+
+             @asset(
+                 key_prefix=["my_schema"]  # will be used as the schema in snowflake
+             )
+             def my_table() -> pd.DataFrame:
+                 ...
+
+             @asset(
+                 metadata={"schema": "my_schema"}  # will be used as the schema in snowflake
+             )
+             def my_other_table() -> pd.DataFrame:
+                 ...
+
+         For ops, the schema can be specified by including a "schema" entry in output metadata.
+
+         .. code-block:: python
+
+             @op(
+                 out={"my_table": Out(metadata={"schema": "my_schema"})}
+             )
+             def make_my_table() -> pd.DataFrame:
+                 ...
+
+         If none of these is provided, the schema will default to "public".
+
+         To only use specific columns of a table as input to a downstream op or asset, add the metadata ``columns`` to the
+         In or AssetIn.
+
+         .. code-block:: python
+
+             @asset(
+                 ins={"my_table": AssetIn("my_table", metadata={"columns": ["a"]})}
+             )
+             def my_table_a(my_table: pd.DataFrame) -> pd.DataFrame:
+                 # my_table will just contain the data from column "a"
+                 ...
+
+     """
+
+     database: str = Field(description="Name of the database to use.")
+     account: str = Field(
+         description=(
+             "Your Snowflake account name. For more details, see the `Snowflake documentation."
+             " <https://docs.snowflake.com/developer-guide/python-connector/python-connector-api>`__"
+         ),
+     )
+     user: str = Field(description="User login name.")
+     schema_: Optional[str] = Field(
+         default=None, alias="schema", description="Name of the schema to use."
+     )  # schema is a reserved word for pydantic
+     password: Optional[str] = Field(default=None, description="User password.")
+     warehouse: Optional[str] = Field(default=None, description="Name of the warehouse to use.")
+     role: Optional[str] = Field(default=None, description="Name of the role to use.")
+     private_key: Optional[str] = Field(
+         default=None,
+         description=(
+             "Raw private key to use. See the `Snowflake documentation"
+             " <https://docs.snowflake.com/en/user-guide/key-pair-auth.html>`__ for details. To"
+             " avoid issues with newlines in the keys, you can base64 encode the key. You can"
+             " retrieve the base64 encoded key with this shell command: cat rsa_key.p8 | base64"
+         ),
+     )
+     private_key_path: Optional[str] = Field(
+         default=None,
+         description=(
+             "Path to the private key. See the `Snowflake documentation"
+             " <https://docs.snowflake.com/en/user-guide/key-pair-auth.html>`__ for details."
+         ),
+     )
+     private_key_password: Optional[str] = Field(
+         default=None,
+         description=(
+             "The password of the private key. See the `Snowflake documentation"
+             " <https://docs.snowflake.com/en/user-guide/key-pair-auth.html>`__ for details."
+             " Required for both private_key and private_key_path if the private key is encrypted."
+             " For unencrypted keys, this config can be omitted or set to None."
+         ),
+     )
+     store_timestamps_as_strings: bool = Field(
+         default=False,
+         description=(
+             "If using Pandas DataFrames, whether to convert time data to strings. If True, time"
+             " data will be converted to strings when storing the DataFrame and converted back to"
+             " time data when loading the DataFrame. If False, time data without a timezone will be"
+             " set to UTC timezone to avoid a Snowflake bug. Defaults to False."
+         ),
+     )
+     authenticator: Optional[str] = Field(
+         default=None,
+         description="Optional parameter to specify the authentication mechanism to use.",
+     )
+     additional_snowflake_connection_args: Optional[dict[str, Any]] = Field(
+         default=None,
+         description=(
+             "Additional keyword arguments to pass to the snowflake.connector.connect function. For a full list of"
+             " available arguments, see the `Snowflake documentation"
+             " <https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-connect>`__."
+             " This config will be ignored if using the sqlalchemy connector."
+         ),
+     )
+
+     @staticmethod
+     @abstractmethod
+     def type_handlers() -> Sequence[DbTypeHandler]:
+         """type_handlers should return a list of the TypeHandlers that the I/O manager can use.
+
+         .. code-block:: python
+
+             from dagster_snowflake import SnowflakeIOManager
+             from dagster_snowflake_pandas import SnowflakePandasTypeHandler
+             from dagster_snowflake_pyspark import SnowflakePySparkTypeHandler
+             from dagster import Definitions, EnvVar
+
+             class MySnowflakeIOManager(SnowflakeIOManager):
+                 @staticmethod
+                 def type_handlers() -> Sequence[DbTypeHandler]:
+                     return [SnowflakePandasTypeHandler(), SnowflakePySparkTypeHandler()]
+         """
+         ...
+
+     @staticmethod
+     def default_load_type() -> Optional[type]:
+         """If an asset or op is not annotated with a return type, default_load_type will be used to
+         determine which TypeHandler to use to store and load the output.
+
+         If left unimplemented, default_load_type will return None. In that case, if there is only
+         one TypeHandler, the I/O manager will default to loading unannotated outputs with that
+         TypeHandler.
+
+         .. code-block:: python
+
+             from dagster_snowflake import SnowflakeIOManager
+             from dagster_snowflake_pandas import SnowflakePandasTypeHandler
+             from dagster_snowflake_pyspark import SnowflakePySparkTypeHandler
+             from dagster import Definitions, EnvVar
+             import pandas as pd
+
+             class MySnowflakeIOManager(SnowflakeIOManager):
+                 @staticmethod
+                 def type_handlers() -> Sequence[DbTypeHandler]:
+                     return [SnowflakePandasTypeHandler(), SnowflakePySparkTypeHandler()]
+
+                 @staticmethod
+                 def default_load_type() -> Optional[Type]:
+                     return pd.DataFrame
+         """
+         return None
+
+     def create_io_manager(self, context) -> DbIOManager:
+         return DbIOManager(
+             db_client=SnowflakeDbClient(),
+             io_manager_name="SnowflakeIOManager",
+             database=self.database,
+             schema=self.schema_,
+             type_handlers=self.type_handlers(),
+             default_load_type=self.default_load_type(),
+         )
+
+
+ class SnowflakeDbClient(DbClient):
+     @staticmethod
+     @contextmanager
+     def connect(context, table_slice):
+         no_schema_config = (
+             {k: v for k, v in context.resource_config.items() if k != "schema"}
+             if context.resource_config
+             else {}
+         )
+         with SnowflakeResource(schema=table_slice.schema, **no_schema_config).get_connection(  # pyright: ignore[reportArgumentType]
+             raw_conn=False
+         ) as conn:
+             yield conn
+
+     @staticmethod
+     def ensure_schema_exists(context: OutputContext, table_slice: TableSlice, connection) -> None:
+         with connection.cursor() as cursor:
+             cursor.execute(
+                 f"show schemas like '{table_slice.schema}' in database {table_slice.database}"
+             )
+             schemas = cursor.fetchall()
+
+         if len(schemas) == 0:
+             with connection.cursor() as cursor:
+                 cursor.execute(f"create schema {table_slice.schema};")
+
+     @staticmethod
+     def delete_table_slice(context: OutputContext, table_slice: TableSlice, connection) -> None:
+         try:
+             connection.cursor().execute(_get_cleanup_statement(table_slice))
+         except Exception as e:
+             if "does not exist or not authorized" in str(e):
+                 # table doesn't exist yet, so ignore the error
+                 return
+             else:
+                 raise
+
+     @staticmethod
+     def get_select_statement(table_slice: TableSlice) -> str:
+         col_str = ", ".join(table_slice.columns) if table_slice.columns else "*"
+         if table_slice.partition_dimensions:
+             query = (
+                 f"SELECT {col_str} FROM"
+                 f" {table_slice.database}.{table_slice.schema}.{table_slice.table} WHERE\n"
+             )
+             return query + _partition_where_clause(table_slice.partition_dimensions)
+         else:
+             return f"""SELECT {col_str} FROM {table_slice.database}.{table_slice.schema}.{table_slice.table}"""
+
+
+ def _get_cleanup_statement(table_slice: TableSlice) -> str:
+     """Returns a SQL statement that deletes data in the given table to make way for the output data
+     being written.
+     """
+     if table_slice.partition_dimensions:
+         query = (
+             f"DELETE FROM {table_slice.database}.{table_slice.schema}.{table_slice.table} WHERE\n"
+         )
+         return query + _partition_where_clause(table_slice.partition_dimensions)
+     else:
+         return f"DELETE FROM {table_slice.database}.{table_slice.schema}.{table_slice.table}"
+
+
+ def _partition_where_clause(partition_dimensions: Sequence[TablePartitionDimension]) -> str:
+     return " AND\n".join(
+         (
+             _time_window_where_clause(partition_dimension)
+             if isinstance(partition_dimension.partitions, TimeWindow)
+             else _static_where_clause(partition_dimension)
+         )
+         for partition_dimension in partition_dimensions
+     )
+
+
+ def _time_window_where_clause(table_partition: TablePartitionDimension) -> str:
+     partition = cast("TimeWindow", table_partition.partitions)
+     start_dt, end_dt = partition
+     start_dt_str = start_dt.strftime(SNOWFLAKE_DATETIME_FORMAT)
+     end_dt_str = end_dt.strftime(SNOWFLAKE_DATETIME_FORMAT)
+     # Snowflake BETWEEN is inclusive: start <= partition expr <= end. We don't want to remove the next partition, so we
+     # instead write this as start <= partition expr < end.
+     return f"""{table_partition.partition_expr} >= '{start_dt_str}' AND {table_partition.partition_expr} < '{end_dt_str}'"""
+
+
+ def _static_where_clause(table_partition: TablePartitionDimension) -> str:
+     partitions = ", ".join(f"'{partition}'" for partition in table_partition.partitions)
+     return f"""{table_partition.partition_expr} in ({partitions})"""
@@ -1 +1 @@
- __version__ = "0.13.3rc0"
+ __version__ = "0.28.1"
@@ -0,0 +1,36 @@
+ Metadata-Version: 2.4
+ Name: dagster-snowflake
+ Version: 0.28.1
+ Summary: Package for Snowflake Dagster framework components.
+ Home-page: https://github.com/dagster-io/dagster/tree/master/python_modules/libraries/dagster-snowflake
+ Author: Dagster Labs
+ Author-email: hello@dagsterlabs.com
+ License: Apache-2.0
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
+ Classifier: License :: OSI Approved :: Apache Software License
+ Classifier: Operating System :: OS Independent
+ Requires-Python: >=3.9,<3.14
+ License-File: LICENSE
+ Requires-Dist: dagster==1.12.1
+ Requires-Dist: snowflake-connector-python>=3.4.0
+ Requires-Dist: pyOpenSSL>=22.1.0
+ Provides-Extra: snowflake-sqlalchemy
+ Requires-Dist: sqlalchemy!=1.4.42; extra == "snowflake-sqlalchemy"
+ Requires-Dist: snowflake-sqlalchemy; extra == "snowflake-sqlalchemy"
+ Provides-Extra: pandas
+ Requires-Dist: pandas; extra == "pandas"
+ Requires-Dist: snowflake-connector-python[pandas]>=3.4.0; extra == "pandas"
+ Dynamic: author
+ Dynamic: author-email
+ Dynamic: classifier
+ Dynamic: home-page
+ Dynamic: license
+ Dynamic: license-file
+ Dynamic: provides-extra
+ Dynamic: requires-dist
+ Dynamic: requires-python
+ Dynamic: summary
@@ -0,0 +1,16 @@
+ dagster_snowflake/__init__.py,sha256=aDqda8Dff_jKm6tjyvOX2BqiXYNIVVMmPbL2h7pZKec,785
+ dagster_snowflake/constants.py,sha256=26PGL1eFncm0WfbgZjA7jaznfaRx75vVJnqbieVYeco,481
+ dagster_snowflake/ops.py,sha256=L_MP28fLm7_hrJmzMoDocLwyvVnkpy1LVwUSLIArKWc,2225
+ dagster_snowflake/py.typed,sha256=la67KBlbjXN-_-DfGNcdOcjYumVpKG_Tkw-8n5dnGB4,8
+ dagster_snowflake/resources.py,sha256=1s-vLJ3d9xGIXSgF04NqFkuggf_J4BXdjdXxSwWkghk,36042
+ dagster_snowflake/snowflake_io_manager.py,sha256=E40E-09u4BMu-Rt5vFfjG8z4Y2N7uaebXVxbOfQqgcg,17682
+ dagster_snowflake/version.py,sha256=ZRQKbgDaGz_yuLk-cUKuk6ZBKCSRKZC8nQd041NRNXk,23
+ dagster_snowflake/components/__init__.py,sha256=RlBmjimRJNlrlkZKMrZftBjco1wmD4cOFyomfFyooks,145
+ dagster_snowflake/components/sql_component/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ dagster_snowflake/components/sql_component/component.py,sha256=jSQK0odxTY3KQ5BDhCAp-gJbfGW81-lVs7kcCXWKiiM,2141
+ dagster_snowflake-0.28.1.dist-info/licenses/LICENSE,sha256=4lsMW-RCvfVD4_F57wrmpe3vX1xwUk_OAKKmV_XT7Z0,11348
+ dagster_snowflake-0.28.1.dist-info/METADATA,sha256=maVAgut9C8nr48ST1zOZPuw3QGSMyhWPzqG670BBYy0,1350
+ dagster_snowflake-0.28.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ dagster_snowflake-0.28.1.dist-info/entry_points.txt,sha256=Ddljs88OCl4BNyhSyE8lIAe54bIqEms0cOkR9vUAqsM,72
+ dagster_snowflake-0.28.1.dist-info/top_level.txt,sha256=uECYCiluOxLQ996SCUPBBwdK0CTyz45FjWqf7WDqMMc,18
+ dagster_snowflake-0.28.1.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.33.6)
+ Generator: setuptools (80.9.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
 
@@ -0,0 +1,2 @@
+ [dagster_dg_cli.registry_modules]
+ dagster_snowflake = dagster_snowflake
@@ -186,7 +186,7 @@
  same "printed page" as the copyright notice for easier
  identification within third-party archives.
 
- Copyright {yyyy} {name of copyright owner}
+ Copyright 2025 Dagster Labs, Inc.
 
  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
@@ -0,0 +1 @@
+ dagster_snowflake
@@ -1,156 +0,0 @@
- from dagster import Bool, Field, IntSource, StringSource
-
-
- def define_snowflake_config():
-     """Snowflake configuration.
-
-     See the Snowflake documentation for reference:
-     https://docs.snowflake.net/manuals/user-guide/python-connector-api.html
-     """
-
-     account = Field(
-         StringSource,
-         description="Your Snowflake account name. For more details, see https://bit.ly/2FBL320.",
-         is_required=False,
-     )
-
-     user = Field(StringSource, description="User login name.", is_required=True)
-
-     password = Field(StringSource, description="User password.", is_required=True)
-
-     database = Field(
-         StringSource,
-         description="""Name of the default database to use. After login, you can use USE DATABASE
-         to change the database.""",
-         is_required=False,
-     )
-
-     schema = Field(
-         StringSource,
-         description="""Name of the default schema to use. After login, you can use USE SCHEMA to
-         change the schema.""",
-         is_required=False,
-     )
-
-     role = Field(
-         StringSource,
-         description="""Name of the default role to use. After login, you can use USE ROLE to change
-         the role.""",
-         is_required=False,
-     )
-
-     warehouse = Field(
-         StringSource,
-         description="""Name of the default warehouse to use. After login, you can use USE WAREHOUSE
-         to change the role.""",
-         is_required=False,
-     )
-
-     autocommit = Field(
-         Bool,
-         description="""None by default, which honors the Snowflake parameter AUTOCOMMIT. Set to True
-         or False to enable or disable autocommit mode in the session, respectively.""",
-         is_required=False,
-     )
-
-     client_prefetch_threads = Field(
-         IntSource,
-         description="""Number of threads used to download the results sets (4 by default).
-         Increasing the value improves fetch performance but requires more memory.""",
-         is_required=False,
-     )
-
-     client_session_keep_alive = Field(
-         StringSource,
-         description="""False by default. Set this to True to keep the session active indefinitely,
-         even if there is no activity from the user. Make certain to call the close method to
-         terminate the thread properly or the process may hang.""",
-         is_required=False,
-     )
-
-     login_timeout = Field(
-         IntSource,
-         description="""Timeout in seconds for login. By default, 60 seconds. The login request gives
-         up after the timeout length if the HTTP response is "success".""",
-         is_required=False,
-     )
-
-     network_timeout = Field(
-         IntSource,
-         description='''Timeout in seconds for all other operations. By default, none/infinite. A
-         general request gives up after the timeout length if the HTTP response is not "success"''',
-         is_required=False,
-     )
-
-     ocsp_response_cache_filename = Field(
-         StringSource,
-         description="""URI for the OCSP response cache file.
-         By default, the OCSP response cache file is created in the cache directory.""",
-         is_required=False,
-     )
-
-     validate_default_parameters = Field(
-         Bool,
-         description="""False by default. Raise an exception if either one of specified database,
-         schema or warehouse doesn't exists if True.""",
-         is_required=False,
-     )
-
-     paramstyle = Field(
-         # TODO should validate only against permissible values for this
-         StringSource,
-         description="""pyformat by default for client side binding. Specify qmark or numeric to
-         change bind variable formats for server side binding.""",
-         is_required=False,
-     )
-
-     timezone = Field(
-         StringSource,
-         description="""None by default, which honors the Snowflake parameter TIMEZONE. Set to a
-         valid time zone (e.g. America/Los_Angeles) to set the session time zone.""",
-         is_required=False,
-     )
-
-     connector = Field(
-         StringSource,
-         description="""Indicate alternative database connection engine. Permissible option is
-         'sqlalchemy' otherwise defaults to use the Snowflake Connector for Python.""",
-         is_required=False,
-     )
-
-     cache_column_metadata = Field(
-         StringSource,
-         description="""Optional parameter when connector is set to sqlalchemy. Snowflake SQLAlchemy
-         takes a flag cache_column_metadata=True such that all of column metadata for all tables are
-         cached""",
-         is_required=False,
-     )
-
-     numpy = Field(
-         StringSource,
-         description="""Optional parameter when connector is set to sqlalchemy. To enable fetching
-         NumPy data types, add numpy=True to the connection parameters.""",
-         is_required=False,
-     )
-
-     return {
-         "account": account,
-         "user": user,
-         "password": password,
-         "database": database,
-         "schema": schema,
-         "role": role,
-         "warehouse": warehouse,
-         "autocommit": autocommit,
-         "client_prefetch_threads": client_prefetch_threads,
-         "client_session_keep_alive": client_session_keep_alive,
-         "login_timeout": login_timeout,
-         "network_timeout": network_timeout,
-         "ocsp_response_cache_filename": ocsp_response_cache_filename,
-         "validate_default_parameters": validate_default_parameters,
-         "paramstyle": paramstyle,
-         "timezone": timezone,
-         "connector": connector,
-         "cache_column_metadata": cache_column_metadata,
-         "numpy": numpy,
-     }