fakesnow 0.9.21__py3-none-any.whl → 0.9.22__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
fakesnow/fakes.py CHANGED
@@ -114,7 +114,6 @@ class FakeSnowflakeCursor:
     def description(self) -> list[ResultMetadata]:
         # use a separate cursor to avoid consuming the result set on this cursor
         with self._conn.cursor() as cur:
-            # self._duck_conn.execute(sql, params)
             expression = sqlglot.parse_one(f"DESCRIBE {self._last_sql}", read="duckdb")
             cur._execute(expression, self._last_params)  # noqa: SLF001
             meta = FakeSnowflakeCursor._describe_as_result_metadata(cur.fetchall())
@@ -235,12 +234,10 @@ class FakeSnowflakeCursor:
         if transformed.find(exp.Select) and (seed := transformed.args.get("seed")):
             sql = f"SELECT setseed({seed}); {sql}"

-        if (fs_debug := os.environ.get("FAKESNOW_DEBUG")) and fs_debug != "snowflake":
-            print(f"{sql};{params=}" if params else f"{sql};", file=sys.stderr)
-
         result_sql = None

         try:
+            self._log_sql(sql, params)
             self._duck_conn.execute(sql, params)
         except duckdb.BinderException as e:
             msg = e.args[0]
@@ -287,9 +284,9 @@ class FakeSnowflakeCursor:
             (affected_count,) = self._duck_conn.fetchall()[0]
             result_sql = SQL_DELETED_ROWS.substitute(count=affected_count)

-        elif cmd == "DESCRIBE TABLE":
-            # DESCRIBE TABLE has already been run above to detect and error if the table exists
-            # We now rerun DESCRIBE TABLE but transformed with columns to match Snowflake
+        elif cmd in ("DESCRIBE TABLE", "DESCRIBE VIEW"):
+            # DESCRIBE TABLE/VIEW has already been run above to detect and error if the table exists
+            # We now rerun DESCRIBE TABLE/VIEW but transformed with columns to match Snowflake
             result_sql = transformed.transform(
                 lambda e: transforms.describe_table(e, self._conn.database, self._conn.schema)
             ).sql(dialect="duckdb")
@@ -337,6 +334,7 @@ class FakeSnowflakeCursor:
             self._duck_conn.execute(info_schema.insert_text_lengths_sql(catalog, schema, table.name, text_lengths))

         if result_sql:
+            self._log_sql(result_sql, params)
             self._duck_conn.execute(result_sql)

         self._arrow_table = self._duck_conn.fetch_arrow_table()
@@ -347,6 +345,10 @@ class FakeSnowflakeCursor:

         return self

+    def _log_sql(self, sql: str, params: Sequence[Any] | dict[Any, Any] | None = None) -> None:
+        if (fs_debug := os.environ.get("FAKESNOW_DEBUG")) and fs_debug != "snowflake":
+            print(f"{sql};{params=}" if params else f"{sql};", file=sys.stderr)
+
     def executemany(
         self,
         command: str,
@@ -389,12 +391,13 @@ class FakeSnowflakeCursor:
         if self._arrow_table is None:
             # mimic snowflake python connector error type
             raise TypeError("No open result set")
+        tslice = self._arrow_table.slice(offset=self._arrow_table_fetch_index or 0, length=size).to_pylist()
+
         if self._arrow_table_fetch_index is None:
-            self._arrow_table_fetch_index = 0
+            self._arrow_table_fetch_index = size
         else:
             self._arrow_table_fetch_index += size

-        tslice = self._arrow_table.slice(offset=self._arrow_table_fetch_index, length=size).to_pylist()
         return tslice if self._use_dict_result else [tuple(d.values()) for d in tslice]

     def get_result_batches(self) -> list[ResultBatch] | None:
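The `FAKESNOW_DEBUG` logging is now factored into `_log_sql`, so the follow-up `result_sql` statement is echoed as well as the primary statement. A minimal sketch of observing that debug output, assuming fakesnow's usual `patch()` entry point; the env var value and query are illustrative:

```python
import os

# Any value other than "snowflake" makes _log_sql print the executed DuckDB SQL to stderr.
os.environ["FAKESNOW_DEBUG"] = "duckdb"

import fakesnow
import snowflake.connector

with fakesnow.patch():
    conn = snowflake.connector.connect()
    # Each statement (and any follow-up result_sql) is printed as "<sql>;" plus params when present.
    conn.cursor().execute("SELECT 1")
```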
fakesnow/info_schema.py CHANGED
@@ -78,6 +78,7 @@ LEFT JOIN duckdb_columns ddb_columns
 """
 )

+
 # replicates https://docs.snowflake.com/sql-reference/info-schema/databases
 SQL_CREATE_INFORMATION_SCHEMA_DATABASES_VIEW = Template(
     """
fakesnow/server.py CHANGED
@@ -103,6 +103,7 @@ routes = [
         query_request,
         methods=["POST"],
     ),
+    Route("/queries/v1/abort-request", lambda _: JSONResponse({"success": True}), methods=["POST"]),
 ]

 app = Starlette(debug=True, routes=routes)
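The new route acknowledges the connector's query-abort call instead of returning a 404. A small sketch of exercising it with Starlette's `TestClient` (which requires `httpx`), assuming the module-level `app` shown above is importable from `fakesnow.server`:

```python
from starlette.testclient import TestClient

from fakesnow.server import app

client = TestClient(app)

# The endpoint ignores the request body and always reports success.
response = client.post("/queries/v1/abort-request")
assert response.status_code == 200
assert response.json() == {"success": True}
```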
fakesnow/transforms.py CHANGED
@@ -159,22 +159,41 @@ SELECT
     column_default AS "default",
     'N' AS "primary key",
     'N' AS "unique key",
-    NULL AS "check",
-    NULL AS "expression",
-    NULL AS "comment",
-    NULL AS "policy name",
-    NULL AS "privacy domain",
+    NULL::VARCHAR AS "check",
+    NULL::VARCHAR AS "expression",
+    NULL::VARCHAR AS "comment",
+    NULL::VARCHAR AS "policy name",
+    NULL::JSON AS "privacy domain",
 FROM information_schema._fs_columns_snowflake
 WHERE table_catalog = '${catalog}' AND table_schema = '${schema}' AND table_name = '${table}'
 ORDER BY ordinal_position
 """
 )

+SQL_DESCRIBE_INFO_SCHEMA = Template(
+    """
+SELECT
+    column_name AS "name",
+    column_type as "type",
+    'COLUMN' AS "kind",
+    CASE WHEN "null" = 'YES' THEN 'Y' ELSE 'N' END AS "null?",
+    NULL::VARCHAR AS "default",
+    'N' AS "primary key",
+    'N' AS "unique key",
+    NULL::VARCHAR AS "check",
+    NULL::VARCHAR AS "expression",
+    NULL::VARCHAR AS "comment",
+    NULL::VARCHAR AS "policy name",
+    NULL::JSON AS "privacy domain",
+FROM (DESCRIBE information_schema.${view})
+"""
+)
+

 def describe_table(
     expression: exp.Expression, current_database: str | None = None, current_schema: str | None = None
 ) -> exp.Expression:
-    """Redirect to the information_schema._fs_describe_table to match snowflake.
+    """Redirect to the information_schema._fs_columns_snowflake to match snowflake.

     See https://docs.snowflake.com/en/sql-reference/sql/desc-table
     """
@@ -183,12 +202,16 @@ def describe_table(
         isinstance(expression, exp.Describe)
         and (kind := expression.args.get("kind"))
         and isinstance(kind, str)
-        and kind.upper() == "TABLE"
+        and kind.upper() in ("TABLE", "VIEW")
         and (table := expression.find(exp.Table))
     ):
         catalog = table.catalog or current_database
         schema = table.db or current_schema

+        if schema and schema.upper() == "INFORMATION_SCHEMA":
+            # information schema views don't exist in _fs_columns_snowflake
+            return sqlglot.parse_one(SQL_DESCRIBE_INFO_SCHEMA.substitute(view=table.name), read="duckdb")
+
         return sqlglot.parse_one(
             SQL_DESCRIBE_TABLE.substitute(catalog=catalog, schema=schema, table=table.name),
             read="duckdb",
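With these changes `describe_table` also redirects `DESCRIBE VIEW`, and a `DESCRIBE` against an `information_schema` view falls back to DuckDB's own `DESCRIBE` via `SQL_DESCRIBE_INFO_SCHEMA`. A rough usage sketch, assuming fakesnow's `patch()` entry point and its auto-creation of the connected database and schema; the object names are illustrative only:

```python
import fakesnow
import snowflake.connector

with fakesnow.patch():
    conn = snowflake.connector.connect(database="db1", schema="schema1")
    cur = conn.cursor()
    cur.execute("CREATE TABLE t (id INT, name VARCHAR)")
    cur.execute("CREATE VIEW v AS SELECT * FROM t")

    # Previously only DESCRIBE TABLE was redirected; DESCRIBE VIEW should now work too.
    print(cur.execute("DESCRIBE VIEW v").fetchall())

    # A view under information_schema is described via the new SQL_DESCRIBE_INFO_SCHEMA path.
    print(cur.execute("DESCRIBE VIEW information_schema.databases").fetchall())
```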
{fakesnow-0.9.21.dist-info → fakesnow-0.9.22.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fakesnow
-Version: 0.9.21
+Version: 0.9.22
 Summary: Fake Snowflake Connector for Python. Run, mock and test Snowflake DB locally.
 License: Apache License
         Version 2.0, January 2004
@@ -210,28 +210,28 @@ Classifier: License :: OSI Approved :: MIT License
 Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: duckdb ~=1.0.0
+Requires-Dist: duckdb~=1.0.0
 Requires-Dist: pyarrow
 Requires-Dist: snowflake-connector-python
-Requires-Dist: sqlglot ~=25.5.1
+Requires-Dist: sqlglot~=25.9.0
 Provides-Extra: dev
-Requires-Dist: build ~=1.0 ; extra == 'dev'
-Requires-Dist: pandas-stubs ; extra == 'dev'
-Requires-Dist: snowflake-connector-python[pandas,secure-local-storage] ; extra == 'dev'
-Requires-Dist: pre-commit ~=3.4 ; extra == 'dev'
-Requires-Dist: pyarrow-stubs ; extra == 'dev'
-Requires-Dist: pytest ~=8.0 ; extra == 'dev'
-Requires-Dist: pytest-asyncio ; extra == 'dev'
-Requires-Dist: ruff ~=0.5.1 ; extra == 'dev'
-Requires-Dist: twine ~=5.0 ; extra == 'dev'
-Requires-Dist: snowflake-sqlalchemy ~=1.5.0 ; extra == 'dev'
+Requires-Dist: build~=1.0; extra == "dev"
+Requires-Dist: pandas-stubs; extra == "dev"
+Requires-Dist: snowflake-connector-python[pandas,secure-local-storage]; extra == "dev"
+Requires-Dist: pre-commit~=3.4; extra == "dev"
+Requires-Dist: pyarrow-stubs; extra == "dev"
+Requires-Dist: pytest~=8.0; extra == "dev"
+Requires-Dist: pytest-asyncio; extra == "dev"
+Requires-Dist: ruff~=0.5.1; extra == "dev"
+Requires-Dist: twine~=5.0; extra == "dev"
+Requires-Dist: snowflake-sqlalchemy~=1.5.0; extra == "dev"
 Provides-Extra: notebook
-Requires-Dist: duckdb-engine ; extra == 'notebook'
-Requires-Dist: ipykernel ; extra == 'notebook'
-Requires-Dist: jupysql ; extra == 'notebook'
+Requires-Dist: duckdb-engine; extra == "notebook"
+Requires-Dist: ipykernel; extra == "notebook"
+Requires-Dist: jupysql; extra == "notebook"
 Provides-Extra: server
-Requires-Dist: starlette ; extra == 'server'
-Requires-Dist: uvicorn ; extra == 'server'
+Requires-Dist: starlette; extra == "server"
+Requires-Dist: uvicorn; extra == "server"

 # fakesnow ❄️

{fakesnow-0.9.21.dist-info → fakesnow-0.9.22.dist-info}/RECORD RENAMED
@@ -4,18 +4,18 @@ fakesnow/arrow.py,sha256=1ypCsf-r2Ven6CuSm-bTLoeq1G31kBD6JnaLvDxpwhU,1218
 fakesnow/checks.py,sha256=-QMvdcrRbhN60rnzxLBJ0IkUBWyLR8gGGKKmCS0w9mA,2383
 fakesnow/cli.py,sha256=9qfI-Ssr6mo8UmIlXkUAOz2z2YPBgDsrEVaZv9FjGFs,2201
 fakesnow/expr.py,sha256=CAxuYIUkwI339DQIBzvFF0F-m1tcVGKEPA5rDTzmH9A,892
-fakesnow/fakes.py,sha256=wLSjKrNI8wxe3MuUAa97jpUHd5vZTzvrlF1-Hf0FC0M,31208
+fakesnow/fakes.py,sha256=8roPAjUiVxSZDhxnpsP85sueSa3abZhyoDwM8awZZBY,31376
 fakesnow/fixtures.py,sha256=G-NkVeruSQAJ7fvSS2fR2oysUn0Yra1pohHlOvacKEk,455
-fakesnow/info_schema.py,sha256=LjS_-8YXBtCSvkdU5uL0aJdFcZEsBa6o5zf-Q_aV9i0,6302
+fakesnow/info_schema.py,sha256=DObVOrhzppAFHsdtj4YI9oRISn9SkJUG6ONjVleQQ_Y,6303
 fakesnow/instance.py,sha256=3cJvPRuFy19dMKXbtBLl6imzO48pEw8uTYhZyFDuwhk,3133
 fakesnow/macros.py,sha256=pX1YJDnQOkFJSHYUjQ6ErEkYIKvFI6Ncz_au0vv1csA,265
 fakesnow/py.typed,sha256=B-DLSjYBi7pkKjwxCSdpVj2J02wgfJr-E7B1wOUyxYU,80
-fakesnow/server.py,sha256=HwCAZ5AhU4nRbFGIqDBs2rdwoK70dYZDyw1XfE1cHqU,3082
-fakesnow/transforms.py,sha256=GCpczoFdvnffQMvSG59PNiiTVTld9kROnnR5dWNXQvY,53624
+fakesnow/server.py,sha256=cTuMzbYL3etm61wZJ7bcnWpcSNoCSTk31gAnl0Kxi20,3183
+fakesnow/transforms.py,sha256=ellcY5OBc7mqgL9ChNolrqcCLWXF9RH21Jt88FcFl-I,54419
 fakesnow/variables.py,sha256=WXyPnkeNwD08gy52yF66CVe2twiYC50tztNfgXV4q1k,3032
-fakesnow-0.9.21.dist-info/LICENSE,sha256=kW-7NWIyaRMQiDpryfSmF2DObDZHGR1cJZ39s6B1Svg,11344
-fakesnow-0.9.21.dist-info/METADATA,sha256=6EzLZbGBTS0VEYCHdG4sNNZT-w5SiespKcitL_XEtM4,18043
-fakesnow-0.9.21.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-fakesnow-0.9.21.dist-info/entry_points.txt,sha256=2riAUgu928ZIHawtO8EsfrMEJhi-EH-z_Vq7Q44xKPM,47
-fakesnow-0.9.21.dist-info/top_level.txt,sha256=500evXI1IFX9so82cizGIEMHAb_dJNPaZvd2H9dcKTA,24
-fakesnow-0.9.21.dist-info/RECORD,,
+fakesnow-0.9.22.dist-info/LICENSE,sha256=kW-7NWIyaRMQiDpryfSmF2DObDZHGR1cJZ39s6B1Svg,11344
+fakesnow-0.9.22.dist-info/METADATA,sha256=Uu-JhX3mgGrgAP3jgLCP3b8YjRhWTh4qXwx-izVDZPM,18020
+fakesnow-0.9.22.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
+fakesnow-0.9.22.dist-info/entry_points.txt,sha256=2riAUgu928ZIHawtO8EsfrMEJhi-EH-z_Vq7Q44xKPM,47
+fakesnow-0.9.22.dist-info/top_level.txt,sha256=500evXI1IFX9so82cizGIEMHAb_dJNPaZvd2H9dcKTA,24
+fakesnow-0.9.22.dist-info/RECORD,,
{fakesnow-0.9.21.dist-info → fakesnow-0.9.22.dist-info}/WHEEL RENAMED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.43.0)
+Generator: bdist_wheel (0.44.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
