sqlframe 1.13.0__py3-none-any.whl → 1.14.0__py3-none-any.whl
This diff compares the contents of the two publicly released package versions as they appear in their public registry; it is provided for informational purposes only.
- sqlframe/_version.py +2 -2
- sqlframe/base/function_alternatives.py +104 -0
- sqlframe/base/functions.py +27 -10
- sqlframe/base/util.py +4 -0
- sqlframe/bigquery/functions.py +3 -0
- sqlframe/duckdb/functions.py +4 -0
- sqlframe/postgres/functions.py +3 -0
- sqlframe/postgres/session.py +8 -0
- sqlframe/snowflake/functions.py +2 -0
- sqlframe/spark/functions.py +1 -0
- {sqlframe-1.13.0.dist-info → sqlframe-1.14.0.dist-info}/METADATA +2 -2
- {sqlframe-1.13.0.dist-info → sqlframe-1.14.0.dist-info}/RECORD +15 -15
- {sqlframe-1.13.0.dist-info → sqlframe-1.14.0.dist-info}/LICENSE +0 -0
- {sqlframe-1.13.0.dist-info → sqlframe-1.14.0.dist-info}/WHEEL +0 -0
- {sqlframe-1.13.0.dist-info → sqlframe-1.14.0.dist-info}/top_level.txt +0 -0
sqlframe/_version.py
CHANGED

sqlframe/base/function_alternatives.py
CHANGED
@@ -561,6 +561,14 @@ def to_date_from_timestamp(col: ColumnOrName, format: t.Optional[str] = None) ->
     return to_date(to_timestamp(col, format))
 
 
+def to_date_time_format(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
+    from sqlframe.base.functions import to_date
+
+    lit = get_func_from_session("lit")
+    format = lit(format or spark_default_time_format())
+    return to_date(col, format=format)
+
+
 def last_day_with_cast(col: ColumnOrName) -> Column:
     from sqlframe.base.functions import last_day
 
@@ -1519,3 +1527,99 @@ def to_unix_timestamp_include_default_format(
     else:
         format = format_time_from_spark(format)
     return to_unix_timestamp(timestamp, format)
+
+
+def array_append_list_append(col: ColumnOrName, value: ColumnOrLiteral) -> Column:
+    lit = get_func_from_session("lit")
+    value = value if isinstance(value, Column) else lit(value)
+    return Column.invoke_anonymous_function(col, "LIST_APPEND", value)
+
+
+def array_append_using_array_cat(col: ColumnOrName, value: ColumnOrLiteral) -> Column:
+    lit = get_func_from_session("lit")
+    array = get_func_from_session("array")
+    value = value if isinstance(value, Column) else lit(value)
+    return Column.invoke_anonymous_function(col, "ARRAY_CONCAT", array(value))
+
+
+def day_with_try_to_timestamp(col: ColumnOrName) -> Column:
+    from sqlframe.base.functions import day
+
+    try_to_timestamp = get_func_from_session("try_to_timestamp")
+    to_date = get_func_from_session("to_date")
+    when = get_func_from_session("when")
+    _is_string = get_func_from_session("_is_string")
+    coalesce = get_func_from_session("coalesce")
+    return day(
+        when(
+            _is_string(col),
+            coalesce(try_to_timestamp(col), to_date(col)),
+        ).otherwise(col)
+    )
+
+
+def try_to_timestamp_strptime(col: ColumnOrName, format: t.Optional[ColumnOrName] = None) -> Column:
+    lit = get_func_from_session("lit")
+
+    format = lit(format or spark_default_time_format())
+    return Column.invoke_anonymous_function(col, "TRY_STRPTIME", format_time_from_spark(format))  # type: ignore
+
+
+def try_to_timestamp_safe(col: ColumnOrName, format: t.Optional[ColumnOrName] = None) -> Column:
+    lit = get_func_from_session("lit")
+
+    format = lit(format or spark_default_time_format())
+    return Column.invoke_anonymous_function(
+        format_time_from_spark(format),  # type: ignore
+        "SAFE.PARSE_TIMESTAMP",
+        col,  # type: ignore
+    )
+
+
+def try_to_timestamp_pgtemp(col: ColumnOrName, format: t.Optional[ColumnOrName] = None) -> Column:
+    lit = get_func_from_session("lit")
+
+    format = lit(format or spark_default_time_format())
+    return Column.invoke_anonymous_function(
+        col,
+        "pg_temp.TRY_TO_TIMESTAMP",
+        format_time_from_spark(format),  # type: ignore
+    )
+
+
+def typeof_pg_typeof(col: ColumnOrName) -> Column:
+    return Column.invoke_anonymous_function(col, "pg_typeof").cast("regtype").cast("text")
+
+
+def typeof_from_variant(col: ColumnOrName) -> Column:
+    col = Column.invoke_anonymous_function(col, "TO_VARIANT")
+    return Column.invoke_anonymous_function(col, "TYPEOF")
+
+
+def _is_string_using_typeof_varchar(col: ColumnOrName) -> Column:
+    typeof = get_func_from_session("typeof")
+    lit = get_func_from_session("lit")
+    return lit(typeof(col) == lit("VARCHAR"))
+
+
+def _is_string_using_typeof_char_varying(col: ColumnOrName) -> Column:
+    typeof = get_func_from_session("typeof")
+    lit = get_func_from_session("lit")
+    return lit(
+        (typeof(col) == lit("text"))
+        | (typeof(col) == lit("character varying"))
+        | (typeof(col) == lit("unknown"))
+        | (typeof(col) == lit("text"))
+    )
+
+
+def _is_string_using_typeof_string(col: ColumnOrName) -> Column:
+    typeof = get_func_from_session("typeof")
+    lit = get_func_from_session("lit")
+    return lit(typeof(col) == lit("STRING"))
+
+
+def _is_string_using_typeof_string_lcase(col: ColumnOrName) -> Column:
+    typeof = get_func_from_session("typeof")
+    lit = get_func_from_session("lit")
+    return lit(typeof(col) == lit("string"))
sqlframe/base/functions.py
CHANGED
@@ -13,7 +13,12 @@ from sqlglot.helper import flatten as _flatten
 
 from sqlframe.base.column import Column
 from sqlframe.base.decorators import func_metadata as meta
-from sqlframe.base.util import
+from sqlframe.base.util import (
+    format_time_from_spark,
+    get_func_from_session,
+    spark_default_date_format,
+    spark_default_time_format,
+)
 
 if t.TYPE_CHECKING:
     from pyspark.sql.session import SparkContext
@@ -877,7 +882,7 @@ def months_between(
 
 @meta()
 def to_date(col: ColumnOrName, format: t.Optional[str] = None) -> Column:
-    format = lit(format or
+    format = lit(format or spark_default_date_format())
     if format is not None:
         return Column.invoke_expression_over_column(
             col, expression.TsOrDsToDate, format=format_time_from_spark(format)
@@ -1295,7 +1300,7 @@ def array_agg(col: ColumnOrName) -> Column:
     return Column.invoke_expression_over_column(col, expression.ArrayAgg)
 
 
-@meta(
+@meta()
 def array_append(col: ColumnOrName, value: ColumnOrLiteral) -> Column:
     value = value if isinstance(value, Column) else lit(value)
     return Column.invoke_anonymous_function(col, "ARRAY_APPEND", value)
@@ -1743,7 +1748,7 @@ def map_zip_with(
     return Column.invoke_anonymous_function(col1, "MAP_ZIP_WITH", col2, Column(f_expression))
 
 
-@meta(
+@meta()
 def typeof(col: ColumnOrName) -> Column:
     return Column.invoke_anonymous_function(col, "TYPEOF")
 
@@ -2162,7 +2167,7 @@ def datepart(field: ColumnOrName, source: ColumnOrName) -> Column:
     return Column.invoke_anonymous_function(field, "datepart", source)
 
 
-@meta(unsupported_engines="
+@meta(unsupported_engines=["bigquery", "postgres", "snowflake"])
 def day(col: ColumnOrName) -> Column:
     return Column.invoke_expression_over_column(col, expression.Day)
 
@@ -5277,7 +5282,7 @@ def try_element_at(col: ColumnOrName, extraction: ColumnOrName) -> Column:
     )
 
 
-@meta(
+@meta()
 def try_to_timestamp(col: ColumnOrName, format: t.Optional[ColumnOrName] = None) -> Column:
     """
     Parses the `col` with the `format` to a timestamp. The function always
@@ -5302,10 +5307,8 @@ def try_to_timestamp(col: ColumnOrName, format: t.Optional[ColumnOrName] = None)
     >>> df.select(try_to_timestamp(df.t, lit('yyyy-MM-dd HH:mm:ss')).alias('dt')).collect()
     [Row(dt=datetime.datetime(1997, 2, 28, 10, 30))]
     """
-
-
-    else:
-        return Column.invoke_anonymous_function(col, "try_to_timestamp")
+    format = lit(format or spark_default_time_format())
+    return Column.invoke_anonymous_function(col, "try_to_timestamp", format_time_from_spark(format))  # type: ignore
 
 
 @meta()
@@ -5797,6 +5800,20 @@ def years(col: ColumnOrName) -> Column:
     return Column.invoke_anonymous_function(col, "years")
 
 
+# SQLFrame specific
+@meta()
+def _is_string(col: ColumnOrName) -> Column:
+    col = Column.invoke_anonymous_function(col, "TO_VARIANT")
+    return Column.invoke_anonymous_function(col, "IS_VARCHAR")
+
+
+@meta()
+def _is_date(col: ColumnOrName) -> Column:
+    typeof = get_func_from_session("typeof")
+    upper = get_func_from_session("upper")
+    return lit(upper(typeof(col)) == lit("DATE"))
+
+
 @meta()
 def _lambda_quoted(value: str) -> t.Optional[bool]:
     return False if value == "_" else None
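
The last hunk above changes try_to_timestamp so that a missing format now falls back to the Spark default time format before dispatch. A short usage sketch mirroring the docstring example in that hunk (session and import names assumed, as in the earlier sketch):

# Sketch only: mirrors the docstring shown in the hunk above.
from sqlframe.duckdb import DuckDBSession
from sqlframe.duckdb import functions as F

session = DuckDBSession()
df = session.createDataFrame([{"t": "1997-02-28 10:30:00"}])

# No explicit format: the Spark default time format is injected.
df.select(F.try_to_timestamp(df.t).alias("dt")).collect()

# Explicit Spark-style format, as in the docstring example.
df.select(F.try_to_timestamp(df.t, F.lit("yyyy-MM-dd HH:mm:ss")).alias("dt")).collect()
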
sqlframe/base/util.py
CHANGED
@@ -365,3 +365,7 @@ def format_time_from_spark(value: ColumnOrLiteral) -> Column:
 
 def spark_default_time_format() -> str:
     return Dialect["spark"].TIME_FORMAT.strip("'")
+
+
+def spark_default_date_format() -> str:
+    return Dialect["spark"].DATE_FORMAT.strip("'")
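
For reference, a minimal sketch of the two helpers; the exact strings depend on the installed sqlglot release, so the commented values are indicative only:

# Indicative only: sqlglot keeps dialect format literals quoted, and these
# helpers strip the quotes before the format is handed to lit().
from sqlframe.base.util import spark_default_date_format, spark_default_time_format

print(spark_default_time_format())  # expected: yyyy-MM-dd HH:mm:ss
print(spark_default_date_format())  # expected: yyyy-MM-dd
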
sqlframe/bigquery/functions.py
CHANGED
@@ -72,6 +72,9 @@ from sqlframe.base.function_alternatives import ( # noqa
     array_union_using_array_concat as array_union,
     sequence_from_generate_array as sequence,
     position_as_strpos as position,
+    try_to_timestamp_safe as try_to_timestamp,
+    _is_string_using_typeof_string as _is_string,
+    array_append_using_array_cat as array_append,
 )
 
 
sqlframe/duckdb/functions.py
CHANGED
@@ -46,4 +46,8 @@ from sqlframe.base.function_alternatives import ( # noqa
     array_max_from_sort as array_max,
     sequence_from_generate_series as sequence,
     try_element_at_zero_based as try_element_at,
+    day_with_try_to_timestamp as day,
+    try_to_timestamp_strptime as try_to_timestamp,
+    _is_string_using_typeof_varchar as _is_string,
+    array_append_list_append as array_append,
 )
sqlframe/postgres/functions.py
CHANGED
@@ -64,4 +64,7 @@ from sqlframe.base.function_alternatives import ( # noqa
     right_cast_len as right,
     position_cast_start as position,
     try_element_at_zero_based as try_element_at,
+    try_to_timestamp_pgtemp as try_to_timestamp,
+    typeof_pg_typeof as typeof,
+    _is_string_using_typeof_char_varying as _is_string,
 )
sqlframe/postgres/session.py
CHANGED
@@ -38,6 +38,14 @@ class PostgresSession(
         if not hasattr(self, "_conn"):
             super().__init__(conn)
             self._execute("CREATE EXTENSION IF NOT EXISTS fuzzystrmatch")
+            self._execute("""CREATE OR REPLACE FUNCTION pg_temp.try_to_timestamp(input_text TEXT, format TEXT)
+            RETURNS TIMESTAMP AS $$
+            BEGIN
+                RETURN TO_TIMESTAMP(input_text, format);
+            EXCEPTION WHEN OTHERS THEN
+                RETURN NULL;
+            END;
+            $$ LANGUAGE plpgsql;""")
 
     def _fetch_rows(
         self, sql: t.Union[str, exp.Expression], *, quote_identifiers: bool = True
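
The session now registers a pg_temp.try_to_timestamp helper so the try_to_timestamp_pgtemp alternative has something to call. A conceptual Python mirror of its semantics, illustrative only: the real helper takes Postgres TO_TIMESTAMP format tokens, not strptime patterns.

# Conceptual mirror of the plpgsql helper above: parse if possible, return
# None (NULL) instead of raising. Uses strptime patterns purely for illustration.
from datetime import datetime
from typing import Optional

def try_to_timestamp(input_text: str, fmt: str) -> Optional[datetime]:
    try:
        return datetime.strptime(input_text, fmt)
    except ValueError:
        return None

print(try_to_timestamp("2024-01-15 10:30:00", "%Y-%m-%d %H:%M:%S"))  # datetime(2024, 1, 15, 10, 30)
print(try_to_timestamp("not a timestamp", "%Y-%m-%d %H:%M:%S"))      # None
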
sqlframe/snowflake/functions.py
CHANGED
@@ -63,4 +63,6 @@ from sqlframe.base.function_alternatives import ( # noqa
     map_concat_using_map_cat as map_concat,
     sequence_from_array_generate_range as sequence,
     to_number_using_to_double as to_number,
+    typeof_from_variant as typeof,
+    to_date_time_format as to_date,
 )
sqlframe/spark/functions.py
CHANGED

{sqlframe-1.13.0.dist-info → sqlframe-1.14.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: sqlframe
-Version: 1.
+Version: 1.14.0
 Summary: Turning PySpark Into a Universal DataFrame API
 Home-page: https://github.com/eakmanrq/sqlframe
 Author: Ryan Eakman
@@ -18,7 +18,7 @@ Requires-Python: >=3.8
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: prettytable (<3.11.0)
-Requires-Dist: sqlglot (<25.
+Requires-Dist: sqlglot (<25.5,>=24.0.0)
 Requires-Dist: typing-extensions (<5,>=4.8)
 Provides-Extra: bigquery
 Requires-Dist: google-cloud-bigquery-storage (<3,>=2) ; extra == 'bigquery'

{sqlframe-1.13.0.dist-info → sqlframe-1.14.0.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 sqlframe/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-sqlframe/_version.py,sha256=
+sqlframe/_version.py,sha256=P7Qh1JtG4Is-HJ-aT0XbCm84DoRrTTlY2cMmIjRTLGQ,413
 sqlframe/base/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sqlframe/base/_typing.py,sha256=DuTay8-o9W-pw3RPZCgLunKNJLS9PkaV11G_pxXp9NY,1256
 sqlframe/base/catalog.py,sha256=ATDGirouUjal05P4ymL-wIi8rgjg_8w4PoACamiO64A,37245
@@ -7,8 +7,8 @@ sqlframe/base/column.py,sha256=y41rFV7y_seTNkAK3SSqnggGi2otXt0ejKzsMyHCYT4,17515
 sqlframe/base/dataframe.py,sha256=75ZM9r52fufFmVShtntcDUr6dZ1stX9HDmXLuDrYTAU,71004
 sqlframe/base/decorators.py,sha256=I5osMgx9BuCgbtp4jVM2DNwYJVLzCv-OtTedhQEik0g,1882
 sqlframe/base/exceptions.py,sha256=9Uwvqn2eAkDpqm4BrRgbL61qM-GMCbJEMAW8otxO46s,370
-sqlframe/base/function_alternatives.py,sha256=
-sqlframe/base/functions.py,sha256=
+sqlframe/base/function_alternatives.py,sha256=B4UkHuUD1COc4xRm20pNtDloyMU6mhhesEZ3yWwaBJE,49702
+sqlframe/base/functions.py,sha256=L_I028dDt2th9DeKIheidMLY8jjFICvze4Gw7F62NUk,188446
 sqlframe/base/group.py,sha256=TES9CleVmH3x-0X-tqmuUKfCKSWjH5vg1aU3R6dDmFc,4059
 sqlframe/base/normalize.py,sha256=nXAJ5CwxVf4DV0GsH-q1w0p8gmjSMlv96k_ez1eVul8,3880
 sqlframe/base/operations.py,sha256=-AhNuEzcV7ZExoP1oY3blaKip-joQyJeQVvfBTs_2g4,3456
@@ -16,7 +16,7 @@ sqlframe/base/readerwriter.py,sha256=5NPQMiOrw6I54U243R_6-ynnWYsNksgqwRpPp4IFjIw
 sqlframe/base/session.py,sha256=gg0OX6MK6sV4t91aS7GtyYGXhefXnm33433XDw5GpLY,22068
 sqlframe/base/transforms.py,sha256=y0j3SGDz3XCmNGrvassk1S-owllUWfkHyMgZlY6SFO4,467
 sqlframe/base/types.py,sha256=iBNk9bpFtb2NBIogYS8i7OlQZMRvpR6XxqzBebsjQDU,12280
-sqlframe/base/util.py,sha256=
+sqlframe/base/util.py,sha256=l6zu-3SzE2e0-iDMH0GD55gsxYx362tVc0QA6eLPvYk,12530
 sqlframe/base/window.py,sha256=8hOv-ignPPIsZA9FzvYzcLE9J_glalVaYjIAUdRUX3o,4943
 sqlframe/base/mixins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sqlframe/base/mixins/catalog_mixins.py,sha256=NhuPGxIqPjyuC_V_NALN1sn9v9h0-xwFOlJyJgsvyek,14212
@@ -26,7 +26,7 @@ sqlframe/bigquery/__init__.py,sha256=i2NsMbiXOj2xphCtPuNk6cVw4iYeq5_B1I9dVI9aGAk
 sqlframe/bigquery/catalog.py,sha256=h3aQAQAJg6MMvFpP8Ku0S4pcx30n5qYrqHhWSomxb6A,9319
 sqlframe/bigquery/column.py,sha256=E1tUa62Y5HajkhgFuebU9zohrGyieudcHzTT8gfalio,40
 sqlframe/bigquery/dataframe.py,sha256=Y2uy4FEYw0KxIHgnaA9uMwdIzxJzTlD_NSzIe7P7kxA,2405
-sqlframe/bigquery/functions.py,sha256=
+sqlframe/bigquery/functions.py,sha256=Suy4KX75pYIpYrXgN1Af0NLf73ahuS7PmhNY2FkQhnk,11255
 sqlframe/bigquery/functions.pyi,sha256=BCYqHpZzv4KWVtTuiC9wCSzXdxeHsz9gwkEvKzwHnoY,13583
 sqlframe/bigquery/group.py,sha256=UVBNBRTo8OqS-_cS5YwvTeJYgYxeG-d6R3kfyHmlFqw,391
 sqlframe/bigquery/readwriter.py,sha256=WAD3ZMwkkjOpvPPoZXfaLLNM6tRTeUvdEj-hQZAzXeo,870
@@ -37,7 +37,7 @@ sqlframe/duckdb/__init__.py,sha256=t85TA3ufZtL1weQNFmEs8itCSwbJFtw03-p0GT4XGf8,6
 sqlframe/duckdb/catalog.py,sha256=rt3XuP3m4DbhuibOFyvx_95F2zZa6uDwCI_TmcvKy1A,3895
 sqlframe/duckdb/column.py,sha256=wkEPcp3xVsH5nC3kpacXqNkRv9htPtBgt-0uFRxIRNs,56
 sqlframe/duckdb/dataframe.py,sha256=WmBrrmrfxDpYuikSMFqacgV2Jawkx4sSYE-_mwnL4Jw,1225
-sqlframe/duckdb/functions.py,sha256=
+sqlframe/duckdb/functions.py,sha256=RXwfP3Ls9OYpNNHT5f0yjKxxaDqrqSAj2qCD1esMW-U,1790
 sqlframe/duckdb/functions.pyi,sha256=nU-6a2cfLDkuMCdYrNRLfa6-i8Aa0CxQQ1nLT6roIdI,5813
 sqlframe/duckdb/group.py,sha256=IkhbW42Ng1U5YT3FkIdiB4zBqRkW4QyTb-1detY1e_4,383
 sqlframe/duckdb/readwriter.py,sha256=6xiyE3JKzY9ieKqvbAOBlifiHE6NpYISHul3Idlmoa0,4542
@@ -48,11 +48,11 @@ sqlframe/postgres/__init__.py,sha256=Sz_MtgV_oh_QhfZTC7iKM07ICUmNcJEDV0kEkSW9ZKU
 sqlframe/postgres/catalog.py,sha256=uGMKo4RXOU6fA4IjcfebukEI18QswVk3cnB_G7S6_Fw,8130
 sqlframe/postgres/column.py,sha256=E1tUa62Y5HajkhgFuebU9zohrGyieudcHzTT8gfalio,40
 sqlframe/postgres/dataframe.py,sha256=f-w6UHxZtmeZ5oMbaqJaZ8FrYeOhzyveNlZOK57ke0k,1289
-sqlframe/postgres/functions.py,sha256=
+sqlframe/postgres/functions.py,sha256=HV9wLwrFtlwRJlTW4ipEx4XerFLREeGdKnBbqYe70WE,2511
 sqlframe/postgres/functions.pyi,sha256=um-qE2g9iPs0-53vJ46lArbfvDqAbFIwrxLJgcrPM_8,5536
 sqlframe/postgres/group.py,sha256=KUXeSFKWTSH9yCRJAhW85OvjZaG6Zr4In9LR_ie3yGU,391
 sqlframe/postgres/readwriter.py,sha256=L1e3yKXzFVNR_W5s1DHaWol7G8x7l4jcZ5sLGualyMk,870
-sqlframe/postgres/session.py,sha256=
+sqlframe/postgres/session.py,sha256=YuWvzuPhhCt9bSN_ZCUp9WYWx4XW7_PGbMdTGRuqGzE,2147
 sqlframe/postgres/types.py,sha256=KwNyuXIo-2xVVd4bZED3YrQOobKCtemlxGrJL7DrTC8,34
 sqlframe/postgres/window.py,sha256=6GKPzuxeSapJakBaKBeT9VpED1ACdjggDv9JRILDyV0,35
 sqlframe/redshift/__init__.py,sha256=jamKYQtQaKjjXnQ01QGPHvatbrZSw9sWno_VOUGSz6I,712
@@ -69,7 +69,7 @@ sqlframe/snowflake/__init__.py,sha256=nuQ3cuHjDpW4ELZfbd2qOYmtXmcYl7MtsrdOrRdozo
 sqlframe/snowflake/catalog.py,sha256=uDjBgDdCyxaDkGNX_8tb-lol7MwwazcClUBAZsOSj70,5014
 sqlframe/snowflake/column.py,sha256=E1tUa62Y5HajkhgFuebU9zohrGyieudcHzTT8gfalio,40
 sqlframe/snowflake/dataframe.py,sha256=jUyQNCe3K6SH4PtmrR67YN0SLqkHakMxLiB261fDgkc,1862
-sqlframe/snowflake/functions.py,sha256=
+sqlframe/snowflake/functions.py,sha256=lyKIiChgfr97EaxI4tAapaFSToUgT2RqyqfiHl91ZNo,2537
 sqlframe/snowflake/functions.pyi,sha256=wqUPXuZxjRY0rPY2BRAb7XXkWYP1DyuDzvlriRySdSw,6185
 sqlframe/snowflake/group.py,sha256=pPP1l2RRo_LgkXrji8a87n2PKo-63ZRPT-WUtvVcBME,395
 sqlframe/snowflake/readwriter.py,sha256=yhRc2HcMq6PwV3ghZWC-q-qaE7LE4aEjZEXCip4OOlQ,884
@@ -80,7 +80,7 @@ sqlframe/spark/__init__.py,sha256=WhYQAZMJN1EMNAVGUH7BEinxNdYtXOrrr-6HUniJOyI,64
 sqlframe/spark/catalog.py,sha256=rIX5DtPnINbcPZRUe4Z1bOpkJoNRlrO9qWkUeTQClNc,32612
 sqlframe/spark/column.py,sha256=E1tUa62Y5HajkhgFuebU9zohrGyieudcHzTT8gfalio,40
 sqlframe/spark/dataframe.py,sha256=_TD-h7oz0-i80r90v17UoLDoIzcGNchU2SL13ujOOic,1779
-sqlframe/spark/functions.py,sha256=
+sqlframe/spark/functions.py,sha256=AQfqfvaojJzCuo9DyDklz0JYZPhn_3dzWvztsklBO0o,586
 sqlframe/spark/functions.pyi,sha256=bjz6s8E6OB0c4KfTTsls7rhb_R9mIYvkaeaXefMziqM,11617
 sqlframe/spark/group.py,sha256=MrvV_v-YkBc6T1zz882WrEqtWjlooWIyHBCmTQg3fCA,379
 sqlframe/spark/readwriter.py,sha256=w68EImTcGJv64X7pc1tk5tDjDxb1nAnn-MiIaaN9Dc8,812
@@ -99,8 +99,8 @@ sqlframe/standalone/types.py,sha256=KwNyuXIo-2xVVd4bZED3YrQOobKCtemlxGrJL7DrTC8,
 sqlframe/standalone/window.py,sha256=6GKPzuxeSapJakBaKBeT9VpED1ACdjggDv9JRILDyV0,35
 sqlframe/testing/__init__.py,sha256=VVCosQhitU74A3NnE52O4mNtGZONapuEXcc20QmSlnQ,132
 sqlframe/testing/utils.py,sha256=9DDYVuocO7tygee3RaajuJNZ24sJwf_LY556kKg7kTw,13011
-sqlframe-1.
-sqlframe-1.
-sqlframe-1.
-sqlframe-1.
-sqlframe-1.
+sqlframe-1.14.0.dist-info/LICENSE,sha256=VZu79YgW780qxaFJMr0t5ZgbOYEh04xWoxaWOaqIGWk,1068
+sqlframe-1.14.0.dist-info/METADATA,sha256=Bm9d-eqk6pN20TX6hzR2xeppjjWuelyqOf87i8e4eRQ,7497
+sqlframe-1.14.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+sqlframe-1.14.0.dist-info/top_level.txt,sha256=T0_RpoygaZSF6heeWwIDQgaP0varUdSK1pzjeJZRjM8,9
+sqlframe-1.14.0.dist-info/RECORD,,

{sqlframe-1.13.0.dist-info → sqlframe-1.14.0.dist-info}/LICENSE
File without changes

{sqlframe-1.13.0.dist-info → sqlframe-1.14.0.dist-info}/WHEEL
File without changes

{sqlframe-1.13.0.dist-info → sqlframe-1.14.0.dist-info}/top_level.txt
File without changes