sqlframe 1.12.0__py3-none-any.whl → 1.13.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlframe/_version.py +2 -2
- sqlframe/base/function_alternatives.py +12 -11
- sqlframe/base/functions.py +24 -15
- sqlframe/base/session.py +4 -1
- sqlframe/base/util.py +24 -1
- sqlframe/bigquery/functions.py +10 -17
- sqlframe/bigquery/functions.pyi +0 -1
- sqlframe/bigquery/session.py +0 -1
- sqlframe/duckdb/functions.py +0 -1
- sqlframe/duckdb/session.py +0 -2
- sqlframe/postgres/session.py +0 -2
- sqlframe/snowflake/functions.pyi +0 -1
- sqlframe/snowflake/session.py +0 -2
- {sqlframe-1.12.0.dist-info → sqlframe-1.13.0.dist-info}/METADATA +2 -2
- {sqlframe-1.12.0.dist-info → sqlframe-1.13.0.dist-info}/RECORD +18 -18
- {sqlframe-1.12.0.dist-info → sqlframe-1.13.0.dist-info}/LICENSE +0 -0
- {sqlframe-1.12.0.dist-info → sqlframe-1.13.0.dist-info}/WHEEL +0 -0
- {sqlframe-1.12.0.dist-info → sqlframe-1.13.0.dist-info}/top_level.txt +0 -0
sqlframe/_version.py
CHANGED
sqlframe/base/function_alternatives.py
CHANGED
@@ -6,11 +6,16 @@ import re
 import typing as t
 
 from sqlglot import exp as expression
+from sqlglot.dialects.dialect import build_formatted_time
 from sqlglot.helper import ensure_list
 from sqlglot.helper import flatten as _flatten
 
 from sqlframe.base.column import Column
-from sqlframe.base.util import
+from sqlframe.base.util import (
+    format_time_from_spark,
+    get_func_from_session,
+    spark_default_time_format,
+)
 
 if t.TYPE_CHECKING:
     from sqlframe.base._typing import ColumnOrLiteral, ColumnOrName
@@ -715,14 +720,10 @@ def months_between_cast_as_date_cast_roundoff(
 
 
 def from_unixtime_from_timestamp(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
-    from sqlframe.base.session import _BaseSession
-
-    session: _BaseSession = _BaseSession()
     lit = get_func_from_session("lit")
     col_func = get_func_from_session("col")
 
-
-    format = session.DEFAULT_TIME_FORMAT
+    format = lit(format or spark_default_time_format())
     return Column.invoke_expression_over_column(
         Column(
             expression.Anonymous(
@@ -731,7 +732,7 @@ def from_unixtime_from_timestamp(col: ColumnOrName, format: t.Optional[str] = No
             )
         ),
         expression.TimeToStr,
-        format=
+        format=format_time_from_spark(format),  # type: ignore
     )
 
 
@@ -1511,10 +1512,10 @@ def to_unix_timestamp_include_default_format(
     format: t.Optional[ColumnOrName] = None,
 ) -> Column:
     from sqlframe.base.functions import to_unix_timestamp
-
-    lit = get_func_from_session("lit")
+    from sqlframe.base.session import _BaseSession
 
     if not format:
-        format =
-
+        format = _BaseSession().output_dialect.TIME_FORMAT
+    else:
+        format = format_time_from_spark(format)
     return to_unix_timestamp(timestamp, format)
sqlframe/base/functions.py
CHANGED
@@ -6,12 +6,14 @@ import decimal
 import logging
 import typing as t
 
+from sqlglot import Dialect
 from sqlglot import exp as expression
 from sqlglot.helper import ensure_list
 from sqlglot.helper import flatten as _flatten
 
 from sqlframe.base.column import Column
 from sqlframe.base.decorators import func_metadata as meta
+from sqlframe.base.util import format_time_from_spark, spark_default_time_format
 
 if t.TYPE_CHECKING:
     from pyspark.sql.session import SparkContext
@@ -695,7 +697,7 @@ def date_format(col: ColumnOrName, format: str) -> Column:
     return Column.invoke_expression_over_column(
         Column(expression.TimeStrToTime(this=Column.ensure_col(col).expression)),
         expression.TimeToStr,
-        format=
+        format=format_time_from_spark(format),
     )
 
 
@@ -875,17 +877,21 @@ def months_between(
 
 @meta()
 def to_date(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
+    format = lit(format or spark_default_time_format())
     if format is not None:
         return Column.invoke_expression_over_column(
-            col, expression.TsOrDsToDate, format=
+            col, expression.TsOrDsToDate, format=format_time_from_spark(format)
         )
     return Column.invoke_expression_over_column(col, expression.TsOrDsToDate)
 
 
 @meta()
 def to_timestamp(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
+    format = lit(format or spark_default_time_format())
     if format is not None:
-        return Column.invoke_expression_over_column(
+        return Column.invoke_expression_over_column(
+            col, expression.StrToTime, format=format_time_from_spark(format)
+        )
 
     return Column.ensure_col(col).cast("timestamp")
 
@@ -916,23 +922,23 @@ def last_day(col: ColumnOrName) -> Column:
 
 @meta()
 def from_unixtime(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
-
-
-
-
-
+    format = lit(format or spark_default_time_format())
+    return Column.invoke_expression_over_column(
+        col,
+        expression.UnixToStr,
+        format=format_time_from_spark(format),  # type: ignore
+    )
 
 
 @meta()
 def unix_timestamp(
     timestamp: t.Optional[ColumnOrName] = None, format: t.Optional[str] = None
 ) -> Column:
-
-
-    if format is None:
-        format = _BaseSession().DEFAULT_TIME_FORMAT
+    format = lit(format or spark_default_time_format())
     return Column.invoke_expression_over_column(
-        timestamp,
+        timestamp,
+        expression.StrToUnix,
+        format=format_time_from_spark(format),  # type: ignore
     ).cast("bigint")
 
 
@@ -5106,8 +5112,11 @@ def to_unix_timestamp(
     [Row(r=None)]
     >>> spark.conf.unset("spark.sql.session.timeZone")
     """
+    format = lit(spark_default_time_format()) if format is None else format
     if format is not None:
-        return Column.invoke_expression_over_column(
+        return Column.invoke_expression_over_column(
+            timestamp, expression.StrToUnix, format=format_time_from_spark(format)
+        )
     else:
         return Column.invoke_expression_over_column(timestamp, expression.StrToUnix)
 
@@ -5324,7 +5333,7 @@ def ucase(str: ColumnOrName) -> Column:
     return Column.invoke_expression_over_column(str, expression.Upper)
 
 
-@meta()
+@meta(unsupported_engines=["bigquery", "snowflake"])
 def unix_date(col: ColumnOrName) -> Column:
     """Returns the number of days since 1970-01-01.
 
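Note (not part of the diff): the net effect of the base/functions.py changes is that the date/time helpers no longer read the per-session `DEFAULT_TIME_FORMAT` constant and instead fall back to Spark's default format, translated into the output dialect via sqlglot. A minimal sketch of how that surfaces to a user, assuming a local DuckDB-backed session; the sample data and column name are invented:

```python
# Illustrative sketch only. Assumes sqlframe's DuckDB engine with an
# in-memory connection; data and column name are made up.
from sqlframe.duckdb import DuckDBSession
from sqlframe.duckdb import functions as F

session = DuckDBSession()
df = session.createDataFrame([(1262304000,)], ["ts"])

# With no explicit format, from_unixtime()/unix_timestamp() now derive their
# default from Spark's time format ("yyyy-MM-dd HH:mm:ss") translated for the
# engine, rather than from the removed DEFAULT_TIME_FORMAT class attribute.
df.select(F.from_unixtime("ts").alias("dt")).show()
```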
sqlframe/base/session.py
CHANGED
@@ -72,7 +72,6 @@ class _BaseSession(t.Generic[CATALOG, READER, WRITER, DF, CONN]):
     _df: t.Type[DF]
 
     SANITIZE_COLUMN_NAMES = False
-    DEFAULT_TIME_FORMAT = "yyyy-MM-dd HH:mm:ss"
 
     def __init__(
         self,
@@ -114,6 +113,10 @@ class _BaseSession(t.Generic[CATALOG, READER, WRITER, DF, CONN]):
     def _cur(self) -> DBAPICursorWithPandas:
         return self._conn.cursor()
 
+    @property
+    def default_time_format(self) -> str:
+        return self.output_dialect.TIME_FORMAT.strip("'")
+
     def _sanitize_column_name(self, name: str) -> str:
         if self.SANITIZE_COLUMN_NAMES:
             return name.replace("(", "_").replace(")", "_")
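Note (illustration, not part of the diff): the new `default_time_format` property resolves the format from the session's output dialect instead of a hard-coded constant. A rough way to see what it would strip the quotes from, using sqlglot directly; the dialect names are just examples:

```python
# Sketch of the dialect-level TIME_FORMAT values that
# _BaseSession.default_time_format is built on.
from sqlglot import Dialect

for name in ("spark", "duckdb", "postgres", "bigquery"):
    print(name, Dialect[name].TIME_FORMAT.strip("'"))
```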
sqlframe/base/util.py
CHANGED
@@ -13,7 +13,12 @@ if t.TYPE_CHECKING:
     from pyspark.sql.dataframe import SparkSession as PySparkSession
 
     from sqlframe.base import types
-    from sqlframe.base._typing import
+    from sqlframe.base._typing import (
+        ColumnOrLiteral,
+        OptionalPrimitiveType,
+        SchemaInput,
+    )
+    from sqlframe.base.column import Column
     from sqlframe.base.session import _BaseSession
     from sqlframe.base.types import StructType
 
@@ -342,3 +347,21 @@ def sqlglot_to_spark(sqlglot_dtype: exp.DataType) -> types.DataType:
         ]
     )
     raise NotImplementedError(f"Unsupported data type: {sqlglot_dtype}")
+
+
+def format_time_from_spark(value: ColumnOrLiteral) -> Column:
+    from sqlframe.base.column import Column
+    from sqlframe.base.session import _BaseSession
+
+    lit = get_func_from_session("lit")
+    value = lit(value) if not isinstance(value, Column) else value
+    formatted_time = Dialect["spark"].format_time(value.expression)
+    return Column(
+        _BaseSession()
+        .output_dialect.generator()
+        .format_time(exp.StrToTime(this=exp.Null(), format=formatted_time))
+    )
+
+
+def spark_default_time_format() -> str:
+    return Dialect["spark"].TIME_FORMAT.strip("'")
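Note (illustration, not part of the diff): the two helpers added to base/util.py perform the Spark-to-output-dialect format translation. Below is a standalone approximation of the same sqlglot calls, with DuckDB standing in for the session's output dialect and an example format string:

```python
# Hedged sketch mirroring format_time_from_spark(); not the library's API,
# just the underlying sqlglot calls it composes.
from sqlglot import Dialect, exp

spark_format = exp.Literal.string("yyyy-MM-dd HH:mm:ss")

# Map Spark's format tokens to sqlglot's internal (strftime-like) tokens...
internal_format = Dialect["spark"].format_time(spark_format)

# ...then render them in the target dialect's own tokens.
duckdb_format = (
    Dialect["duckdb"]()
    .generator()
    .format_time(exp.StrToTime(this=exp.Null(), format=internal_format))
)
print(duckdb_format)  # expected to be something like '%Y-%m-%d %H:%M:%S'
```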
sqlframe/bigquery/functions.py
CHANGED
@@ -7,7 +7,11 @@ import typing as t
 from sqlglot import exp as sqlglot_expression
 
 import sqlframe.base.functions
-from sqlframe.base.util import
+from sqlframe.base.util import (
+    format_time_from_spark,
+    get_func_from_session,
+    spark_default_time_format,
+)
 from sqlframe.bigquery.column import Column
 
 if t.TYPE_CHECKING:
@@ -148,23 +152,15 @@ def from_unixtime(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
 
     session: _BaseSession = _BaseSession()
     lit = get_func_from_session("lit")
-    to_timestamp = get_func_from_session("to_timestamp")
 
     expressions = [Column.ensure_col(col).expression]
-    if format is not None:
-        expressions.append(lit(format).expression)
     return Column(
         sqlglot_expression.Anonymous(
             this="FORMAT_TIMESTAMP",
             expressions=[
-                lit(session.
-
-
-                sqlglot_expression.Anonymous(
-                    this="TIMESTAMP_SECONDS", expressions=expressions
-                )
-                ),
-                format,
+                lit(session.default_time_format).expression,
+                Column(
+                    sqlglot_expression.Anonymous(this="TIMESTAMP_SECONDS", expressions=expressions)
                 ).expression,
             ],
         )
@@ -174,12 +170,9 @@ def from_unixtime(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
 def unix_timestamp(
     timestamp: t.Optional[ColumnOrName] = None, format: t.Optional[str] = None
 ) -> Column:
-    from sqlframe.base.session import _BaseSession
-
     lit = get_func_from_session("lit")
 
-
-        format = _BaseSession().DEFAULT_TIME_FORMAT
+    format = lit(format or spark_default_time_format())
     return Column(
         sqlglot_expression.Anonymous(
             this="UNIX_SECONDS",
@@ -187,7 +180,7 @@ def unix_timestamp(
             sqlglot_expression.Anonymous(
                 this="PARSE_TIMESTAMP",
                 expressions=[
-
+                    format_time_from_spark(format).expression,
                     Column.ensure_col(timestamp).expression,
                     lit("UTC").expression,
                 ],
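Note (illustration, not part of the diff): roughly the `FORMAT_TIMESTAMP(TIMESTAMP_SECONDS(...))` call that the rewritten BigQuery `from_unixtime` builds when no format is passed, assembled by hand with sqlglot. The column name and the format literal are assumptions for the example:

```python
# Sketch only: hand-built equivalent of the expression the new BigQuery
# from_unixtime constructs; "ts" and the format string are illustrative.
from sqlglot import exp

node = exp.Anonymous(
    this="FORMAT_TIMESTAMP",
    expressions=[
        exp.Literal.string("%Y-%m-%d %H:%M:%S"),  # stand-in for session.default_time_format
        exp.Anonymous(this="TIMESTAMP_SECONDS", expressions=[exp.column("ts")]),
    ],
)
print(node.sql(dialect="bigquery"))
# FORMAT_TIMESTAMP('%Y-%m-%d %H:%M:%S', TIMESTAMP_SECONDS(ts))
```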
sqlframe/bigquery/functions.pyi
CHANGED
@@ -267,7 +267,6 @@ from sqlframe.base.functions import trunc as trunc
 from sqlframe.base.functions import ucase as ucase
 from sqlframe.base.functions import unbase64 as unbase64
 from sqlframe.base.functions import unhex as unhex
-from sqlframe.base.functions import unix_date as unix_date
 from sqlframe.base.functions import upper as upper
 from sqlframe.base.functions import user as user
 from sqlframe.base.functions import var_pop as var_pop
sqlframe/bigquery/session.py
CHANGED
sqlframe/duckdb/functions.py
CHANGED
sqlframe/duckdb/session.py
CHANGED
@@ -33,8 +33,6 @@ class DuckDBSession(
     _writer = DuckDBDataFrameWriter
     _df = DuckDBDataFrame
 
-    DEFAULT_TIME_FORMAT = "%Y-%m-%d %H:%M:%S"
-
     def __init__(self, conn: t.Optional[DuckDBPyConnection] = None, *args, **kwargs):
         import duckdb
         from duckdb.typing import VARCHAR
sqlframe/postgres/session.py
CHANGED
@@ -34,8 +34,6 @@ class PostgresSession(
     _writer = PostgresDataFrameWriter
     _df = PostgresDataFrame
 
-    DEFAULT_TIME_FORMAT = "yyyy-MM-dd HH:MI:SS"
-
     def __init__(self, conn: t.Optional[psycopg2_connection] = None):
         if not hasattr(self, "_conn"):
             super().__init__(conn)
sqlframe/snowflake/functions.pyi
CHANGED
sqlframe/snowflake/session.py
CHANGED
{sqlframe-1.12.0.dist-info → sqlframe-1.13.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: sqlframe
-Version: 1.12.0
+Version: 1.13.0
 Summary: Turning PySpark Into a Universal DataFrame API
 Home-page: https://github.com/eakmanrq/sqlframe
 Author: Ryan Eakman
@@ -35,7 +35,7 @@ Requires-Dist: pyspark (<3.6,>=2) ; extra == 'dev'
 Requires-Dist: pytest-postgresql (<7,>=6) ; extra == 'dev'
 Requires-Dist: pytest-xdist (<3.7,>=3.6) ; extra == 'dev'
 Requires-Dist: pytest (<8.3,>=8.2.0) ; extra == 'dev'
-Requires-Dist: ruff (<0.
+Requires-Dist: ruff (<0.6,>=0.4.4) ; extra == 'dev'
 Requires-Dist: types-psycopg2 (<3,>=2.9) ; extra == 'dev'
 Requires-Dist: pre-commit (>=3.5) ; (python_version == "3.8") and extra == 'dev'
 Requires-Dist: pre-commit (<3.8,>=3.7) ; (python_version >= "3.9") and extra == 'dev'
{sqlframe-1.12.0.dist-info → sqlframe-1.13.0.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 sqlframe/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sqlframe/_version.py,sha256=
+sqlframe/_version.py,sha256=AUQ_T8MbwYZ2XSvJQzslbWqqnepX9elnqGgL_AdONT8,413
 sqlframe/base/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sqlframe/base/_typing.py,sha256=DuTay8-o9W-pw3RPZCgLunKNJLS9PkaV11G_pxXp9NY,1256
 sqlframe/base/catalog.py,sha256=ATDGirouUjal05P4ymL-wIi8rgjg_8w4PoACamiO64A,37245
@@ -7,16 +7,16 @@ sqlframe/base/column.py,sha256=y41rFV7y_seTNkAK3SSqnggGi2otXt0ejKzsMyHCYT4,17515
 sqlframe/base/dataframe.py,sha256=75ZM9r52fufFmVShtntcDUr6dZ1stX9HDmXLuDrYTAU,71004
 sqlframe/base/decorators.py,sha256=I5osMgx9BuCgbtp4jVM2DNwYJVLzCv-OtTedhQEik0g,1882
 sqlframe/base/exceptions.py,sha256=9Uwvqn2eAkDpqm4BrRgbL61qM-GMCbJEMAW8otxO46s,370
-sqlframe/base/function_alternatives.py,sha256=
-sqlframe/base/functions.py,sha256=
+sqlframe/base/function_alternatives.py,sha256=Ac9RKOANX8wV2ou8M7XaxK5WMbKtUMqUWYdEWnNCvUA,46055
+sqlframe/base/functions.py,sha256=yqRtaSk0xcSLbN4YeFum8ABMhOEgAfMK_SzzwEQTLt8,188059
 sqlframe/base/group.py,sha256=TES9CleVmH3x-0X-tqmuUKfCKSWjH5vg1aU3R6dDmFc,4059
 sqlframe/base/normalize.py,sha256=nXAJ5CwxVf4DV0GsH-q1w0p8gmjSMlv96k_ez1eVul8,3880
 sqlframe/base/operations.py,sha256=-AhNuEzcV7ZExoP1oY3blaKip-joQyJeQVvfBTs_2g4,3456
 sqlframe/base/readerwriter.py,sha256=5NPQMiOrw6I54U243R_6-ynnWYsNksgqwRpPp4IFjIw,25288
-sqlframe/base/session.py,sha256=
+sqlframe/base/session.py,sha256=gg0OX6MK6sV4t91aS7GtyYGXhefXnm33433XDw5GpLY,22068
 sqlframe/base/transforms.py,sha256=y0j3SGDz3XCmNGrvassk1S-owllUWfkHyMgZlY6SFO4,467
 sqlframe/base/types.py,sha256=iBNk9bpFtb2NBIogYS8i7OlQZMRvpR6XxqzBebsjQDU,12280
-sqlframe/base/util.py,sha256=
+sqlframe/base/util.py,sha256=Bw1J2w52pqsUnpfI4EBoZkdUfRQvcFYXmCCaBfxEI8A,12437
 sqlframe/base/window.py,sha256=8hOv-ignPPIsZA9FzvYzcLE9J_glalVaYjIAUdRUX3o,4943
 sqlframe/base/mixins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sqlframe/base/mixins/catalog_mixins.py,sha256=NhuPGxIqPjyuC_V_NALN1sn9v9h0-xwFOlJyJgsvyek,14212
@@ -26,22 +26,22 @@ sqlframe/bigquery/__init__.py,sha256=i2NsMbiXOj2xphCtPuNk6cVw4iYeq5_B1I9dVI9aGAk
 sqlframe/bigquery/catalog.py,sha256=h3aQAQAJg6MMvFpP8Ku0S4pcx30n5qYrqHhWSomxb6A,9319
 sqlframe/bigquery/column.py,sha256=E1tUa62Y5HajkhgFuebU9zohrGyieudcHzTT8gfalio,40
 sqlframe/bigquery/dataframe.py,sha256=Y2uy4FEYw0KxIHgnaA9uMwdIzxJzTlD_NSzIe7P7kxA,2405
-sqlframe/bigquery/functions.py,sha256=
-sqlframe/bigquery/functions.pyi,sha256=
+sqlframe/bigquery/functions.py,sha256=0mZUljM5F6EevpyIVYlKhY-nfDijaIOT4CQMgvMfLLg,11108
+sqlframe/bigquery/functions.pyi,sha256=BCYqHpZzv4KWVtTuiC9wCSzXdxeHsz9gwkEvKzwHnoY,13583
 sqlframe/bigquery/group.py,sha256=UVBNBRTo8OqS-_cS5YwvTeJYgYxeG-d6R3kfyHmlFqw,391
 sqlframe/bigquery/readwriter.py,sha256=WAD3ZMwkkjOpvPPoZXfaLLNM6tRTeUvdEj-hQZAzXeo,870
-sqlframe/bigquery/session.py,sha256=
+sqlframe/bigquery/session.py,sha256=rOFAtCGWXjtM5Vc5_XiDlJv7H8K_Ga9h2M97if3F0mM,2663
 sqlframe/bigquery/types.py,sha256=KwNyuXIo-2xVVd4bZED3YrQOobKCtemlxGrJL7DrTC8,34
 sqlframe/bigquery/window.py,sha256=6GKPzuxeSapJakBaKBeT9VpED1ACdjggDv9JRILDyV0,35
 sqlframe/duckdb/__init__.py,sha256=t85TA3ufZtL1weQNFmEs8itCSwbJFtw03-p0GT4XGf8,669
 sqlframe/duckdb/catalog.py,sha256=rt3XuP3m4DbhuibOFyvx_95F2zZa6uDwCI_TmcvKy1A,3895
 sqlframe/duckdb/column.py,sha256=wkEPcp3xVsH5nC3kpacXqNkRv9htPtBgt-0uFRxIRNs,56
 sqlframe/duckdb/dataframe.py,sha256=WmBrrmrfxDpYuikSMFqacgV2Jawkx4sSYE-_mwnL4Jw,1225
-sqlframe/duckdb/functions.py,sha256=
+sqlframe/duckdb/functions.py,sha256=p8YXg5Y7KTRRm1zip5xSCM9MHMI7PTFIoE8gSzfDbTg,1604
 sqlframe/duckdb/functions.pyi,sha256=nU-6a2cfLDkuMCdYrNRLfa6-i8Aa0CxQQ1nLT6roIdI,5813
 sqlframe/duckdb/group.py,sha256=IkhbW42Ng1U5YT3FkIdiB4zBqRkW4QyTb-1detY1e_4,383
 sqlframe/duckdb/readwriter.py,sha256=6xiyE3JKzY9ieKqvbAOBlifiHE6NpYISHul3Idlmoa0,4542
-sqlframe/duckdb/session.py,sha256=
+sqlframe/duckdb/session.py,sha256=pk1b-eR2RPr7SJYftnHKfZj5EXuN-D5xanUHLoWNMYU,2100
 sqlframe/duckdb/types.py,sha256=KwNyuXIo-2xVVd4bZED3YrQOobKCtemlxGrJL7DrTC8,34
 sqlframe/duckdb/window.py,sha256=6GKPzuxeSapJakBaKBeT9VpED1ACdjggDv9JRILDyV0,35
 sqlframe/postgres/__init__.py,sha256=Sz_MtgV_oh_QhfZTC7iKM07ICUmNcJEDV0kEkSW9ZKU,712
@@ -52,7 +52,7 @@ sqlframe/postgres/functions.py,sha256=b9ccP5vY8EDZXkJbhE_LjAlH50_6wcUF9VbzPrarie
 sqlframe/postgres/functions.pyi,sha256=um-qE2g9iPs0-53vJ46lArbfvDqAbFIwrxLJgcrPM_8,5536
 sqlframe/postgres/group.py,sha256=KUXeSFKWTSH9yCRJAhW85OvjZaG6Zr4In9LR_ie3yGU,391
 sqlframe/postgres/readwriter.py,sha256=L1e3yKXzFVNR_W5s1DHaWol7G8x7l4jcZ5sLGualyMk,870
-sqlframe/postgres/session.py,sha256=
+sqlframe/postgres/session.py,sha256=8w9mtKB6rFQyC5pPrxVcyvN-HLH2sX5GFsmp5ULFbGU,1887
 sqlframe/postgres/types.py,sha256=KwNyuXIo-2xVVd4bZED3YrQOobKCtemlxGrJL7DrTC8,34
 sqlframe/postgres/window.py,sha256=6GKPzuxeSapJakBaKBeT9VpED1ACdjggDv9JRILDyV0,35
 sqlframe/redshift/__init__.py,sha256=jamKYQtQaKjjXnQ01QGPHvatbrZSw9sWno_VOUGSz6I,712
@@ -70,10 +70,10 @@ sqlframe/snowflake/catalog.py,sha256=uDjBgDdCyxaDkGNX_8tb-lol7MwwazcClUBAZsOSj70
 sqlframe/snowflake/column.py,sha256=E1tUa62Y5HajkhgFuebU9zohrGyieudcHzTT8gfalio,40
 sqlframe/snowflake/dataframe.py,sha256=jUyQNCe3K6SH4PtmrR67YN0SLqkHakMxLiB261fDgkc,1862
 sqlframe/snowflake/functions.py,sha256=cIO56ZsOpjg6ICLjTh-osG1h1UjjEtM39_ieMiWkmyI,2466
-sqlframe/snowflake/functions.pyi,sha256=
+sqlframe/snowflake/functions.pyi,sha256=wqUPXuZxjRY0rPY2BRAb7XXkWYP1DyuDzvlriRySdSw,6185
 sqlframe/snowflake/group.py,sha256=pPP1l2RRo_LgkXrji8a87n2PKo-63ZRPT-WUtvVcBME,395
 sqlframe/snowflake/readwriter.py,sha256=yhRc2HcMq6PwV3ghZWC-q-qaE7LE4aEjZEXCip4OOlQ,884
-sqlframe/snowflake/session.py,sha256=
+sqlframe/snowflake/session.py,sha256=_EBXr6-GQAksAYqAE-Dv3HJMnrAEGINH0IzCDoIswJE,3235
 sqlframe/snowflake/types.py,sha256=KwNyuXIo-2xVVd4bZED3YrQOobKCtemlxGrJL7DrTC8,34
 sqlframe/snowflake/window.py,sha256=6GKPzuxeSapJakBaKBeT9VpED1ACdjggDv9JRILDyV0,35
 sqlframe/spark/__init__.py,sha256=WhYQAZMJN1EMNAVGUH7BEinxNdYtXOrrr-6HUniJOyI,649
@@ -99,8 +99,8 @@ sqlframe/standalone/types.py,sha256=KwNyuXIo-2xVVd4bZED3YrQOobKCtemlxGrJL7DrTC8,
 sqlframe/standalone/window.py,sha256=6GKPzuxeSapJakBaKBeT9VpED1ACdjggDv9JRILDyV0,35
 sqlframe/testing/__init__.py,sha256=VVCosQhitU74A3NnE52O4mNtGZONapuEXcc20QmSlnQ,132
 sqlframe/testing/utils.py,sha256=9DDYVuocO7tygee3RaajuJNZ24sJwf_LY556kKg7kTw,13011
-sqlframe-1.
-sqlframe-1.
-sqlframe-1.
-sqlframe-1.
-sqlframe-1.
+sqlframe-1.13.0.dist-info/LICENSE,sha256=VZu79YgW780qxaFJMr0t5ZgbOYEh04xWoxaWOaqIGWk,1068
+sqlframe-1.13.0.dist-info/METADATA,sha256=xN-IU6RbUKjayk3rEK83h0b750b2HqzEPcDPjRhKhjw,7497
+sqlframe-1.13.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+sqlframe-1.13.0.dist-info/top_level.txt,sha256=T0_RpoygaZSF6heeWwIDQgaP0varUdSK1pzjeJZRjM8,9
+sqlframe-1.13.0.dist-info/RECORD,,
{sqlframe-1.12.0.dist-info → sqlframe-1.13.0.dist-info}/LICENSE
File without changes
{sqlframe-1.12.0.dist-info → sqlframe-1.13.0.dist-info}/WHEEL
File without changes
{sqlframe-1.12.0.dist-info → sqlframe-1.13.0.dist-info}/top_level.txt
File without changes