pytrilogy 0.0.3.57__tar.gz → 0.0.3.61__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pytrilogy might be problematic; see the registry's advisory page for more details.

Files changed (149)
  1. {pytrilogy-0.0.3.57/pytrilogy.egg-info → pytrilogy-0.0.3.61}/PKG-INFO +1 -1
  2. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61/pytrilogy.egg-info}/PKG-INFO +1 -1
  3. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/pytrilogy.egg-info/SOURCES.txt +1 -0
  4. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_imports.py +20 -0
  5. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_models.py +79 -1
  6. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_parsing.py +13 -0
  7. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/__init__.py +1 -1
  8. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/constants.py +1 -0
  9. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/enums.py +2 -0
  10. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/models/build.py +1 -1
  11. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/models/core.py +5 -1
  12. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/models/environment.py +3 -1
  13. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/optimization.py +1 -1
  14. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/discovery_node_factory.py +1 -1
  15. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/node_generators/basic_node.py +12 -1
  16. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/node_generators/node_merge_node.py +15 -7
  17. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/node_generators/synonym_node.py +0 -1
  18. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/node_generators/unnest_node.py +1 -1
  19. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/dialect/base.py +20 -0
  20. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/dialect/bigquery.py +2 -2
  21. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/dialect/common.py +12 -4
  22. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/dialect/presto.py +8 -2
  23. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/parsing/common.py +7 -2
  24. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/parsing/parse_engine.py +34 -6
  25. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/parsing/render.py +12 -0
  26. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/parsing/trilogy.lark +2 -2
  27. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/std/date.preql +4 -1
  28. pytrilogy-0.0.3.61/trilogy/std/ranking.preql +6 -0
  29. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/LICENSE.md +0 -0
  30. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/README.md +0 -0
  31. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/pyproject.toml +0 -0
  32. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/pytrilogy.egg-info/dependency_links.txt +0 -0
  33. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/pytrilogy.egg-info/entry_points.txt +0 -0
  34. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/pytrilogy.egg-info/requires.txt +0 -0
  35. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/pytrilogy.egg-info/top_level.txt +0 -0
  36. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/setup.cfg +0 -0
  37. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/setup.py +0 -0
  38. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_datatypes.py +0 -0
  39. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_declarations.py +0 -0
  40. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_derived_concepts.py +0 -0
  41. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_discovery_nodes.py +0 -0
  42. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_enums.py +0 -0
  43. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_environment.py +0 -0
  44. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_executor.py +0 -0
  45. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_failure.py +0 -0
  46. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_functions.py +0 -0
  47. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_metadata.py +0 -0
  48. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_multi_join_assignments.py +0 -0
  49. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_parse_engine.py +0 -0
  50. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_parsing_failures.py +0 -0
  51. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_partial_handling.py +0 -0
  52. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_query_processing.py +0 -0
  53. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_query_render.py +0 -0
  54. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_select.py +0 -0
  55. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_show.py +0 -0
  56. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_statements.py +0 -0
  57. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_typing.py +0 -0
  58. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_undefined_concept.py +0 -0
  59. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_user_functions.py +0 -0
  60. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/tests/test_where_clause.py +0 -0
  61. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/authoring/__init__.py +0 -0
  62. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/compiler.py +0 -0
  63. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/constants.py +0 -0
  64. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/__init__.py +0 -0
  65. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/env_processor.py +0 -0
  66. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/environment_helpers.py +0 -0
  67. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/ergonomics.py +0 -0
  68. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/exceptions.py +0 -0
  69. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/functions.py +0 -0
  70. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/graph_models.py +0 -0
  71. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/internal.py +0 -0
  72. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/models/__init__.py +0 -0
  73. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/models/author.py +0 -0
  74. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/models/build_environment.py +0 -0
  75. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/models/datasource.py +0 -0
  76. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/models/execute.py +0 -0
  77. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/optimizations/__init__.py +0 -0
  78. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/optimizations/base_optimization.py +0 -0
  79. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/optimizations/inline_datasource.py +0 -0
  80. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/optimizations/predicate_pushdown.py +0 -0
  81. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/__init__.py +0 -0
  82. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/concept_strategies_v3.py +0 -0
  83. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/discovery_loop.py +0 -0
  84. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/discovery_utility.py +0 -0
  85. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/discovery_validation.py +0 -0
  86. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/graph_utils.py +0 -0
  87. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/node_generators/__init__.py +0 -0
  88. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/node_generators/common.py +0 -0
  89. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/node_generators/filter_node.py +0 -0
  90. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/node_generators/group_node.py +0 -0
  91. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/node_generators/group_to_node.py +0 -0
  92. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/node_generators/multiselect_node.py +0 -0
  93. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/node_generators/recursive_node.py +0 -0
  94. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/node_generators/rowset_node.py +0 -0
  95. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
  96. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +0 -0
  97. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/node_generators/select_merge_node.py +0 -0
  98. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/node_generators/select_node.py +0 -0
  99. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/node_generators/union_node.py +0 -0
  100. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/node_generators/window_node.py +0 -0
  101. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/nodes/__init__.py +0 -0
  102. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/nodes/base_node.py +0 -0
  103. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/nodes/filter_node.py +0 -0
  104. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/nodes/group_node.py +0 -0
  105. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/nodes/merge_node.py +0 -0
  106. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/nodes/recursive_node.py +0 -0
  107. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/nodes/select_node_v2.py +0 -0
  108. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/nodes/union_node.py +0 -0
  109. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/nodes/unnest_node.py +0 -0
  110. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/nodes/window_node.py +0 -0
  111. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/processing/utility.py +0 -0
  112. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/query_processor.py +0 -0
  113. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/statements/__init__.py +0 -0
  114. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/statements/author.py +0 -0
  115. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/statements/build.py +0 -0
  116. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/statements/common.py +0 -0
  117. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/statements/execute.py +0 -0
  118. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/core/utility.py +0 -0
  119. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/dialect/__init__.py +0 -0
  120. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/dialect/config.py +0 -0
  121. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/dialect/dataframe.py +0 -0
  122. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/dialect/duckdb.py +0 -0
  123. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/dialect/enums.py +0 -0
  124. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/dialect/postgres.py +0 -0
  125. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/dialect/snowflake.py +0 -0
  126. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/dialect/sql_server.py +0 -0
  127. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/engine.py +0 -0
  128. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/executor.py +0 -0
  129. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/hooks/__init__.py +0 -0
  130. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/hooks/base_hook.py +0 -0
  131. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/hooks/graph_hook.py +0 -0
  132. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/hooks/query_debugger.py +0 -0
  133. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/metadata/__init__.py +0 -0
  134. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/parser.py +0 -0
  135. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/parsing/__init__.py +0 -0
  136. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/parsing/config.py +0 -0
  137. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/parsing/exceptions.py +0 -0
  138. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/parsing/helpers.py +0 -0
  139. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/py.typed +0 -0
  140. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/render.py +0 -0
  141. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/scripts/__init__.py +0 -0
  142. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/scripts/trilogy.py +0 -0
  143. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/std/__init__.py +0 -0
  144. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/std/display.preql +0 -0
  145. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/std/geography.preql +0 -0
  146. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/std/money.preql +0 -0
  147. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/std/net.preql +0 -0
  148. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/std/report.preql +0 -0
  149. {pytrilogy-0.0.3.57 → pytrilogy-0.0.3.61}/trilogy/utility.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pytrilogy
3
- Version: 0.0.3.57
3
+ Version: 0.0.3.61
4
4
  Summary: Declarative, typed query language that compiles to SQL.
5
5
  Home-page:
6
6
  Author:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pytrilogy
3
- Version: 0.0.3.57
3
+ Version: 0.0.3.61
4
4
  Summary: Declarative, typed query language that compiles to SQL.
5
5
  Home-page:
6
6
  Author:
@@ -143,4 +143,5 @@ trilogy/std/display.preql
143
143
  trilogy/std/geography.preql
144
144
  trilogy/std/money.preql
145
145
  trilogy/std/net.preql
146
+ trilogy/std/ranking.preql
146
147
  trilogy/std/report.preql
@@ -52,3 +52,23 @@ def test_import_concept_resolution():
52
52
  materialized = basic.materialize_for_select()
53
53
  assert "one.two.import_key" in materialized.materialized_concepts
54
54
  assert "two.two.import_key" in materialized.materialized_concepts
55
+
56
+
57
+ def test_import_basics():
58
+ basic = Environment(working_path=Path(__file__).parent)
59
+
60
+ basic.parse(
61
+ """
62
+ import test_env;
63
+
64
+ key id2 int;
65
+
66
+
67
+ """,
68
+ )
69
+
70
+ assert len(basic.imports["local"]) == 1, basic.imports
71
+ importz = basic.imports["local"][0]
72
+ assert importz.path == Path("test_env")
73
+ expected = Path(__file__).parent / "test_env.preql"
74
+ assert importz.input_path == expected
@@ -2,7 +2,7 @@ from copy import deepcopy
2
2
 
3
3
  from pytest import raises
4
4
 
5
- from trilogy import parse
5
+ from trilogy import Dialects, parse
6
6
  from trilogy.core.enums import (
7
7
  BooleanOperator,
8
8
  ComparisonOperator,
@@ -360,3 +360,81 @@ def test_parenthetical(test_environment: Environment):
360
360
 
361
361
  merged = x + x
362
362
  assert isinstance(merged, Conditional)
363
+
364
+
365
+ def test_datasource_grain_application():
366
+ env, statements = parse(
367
+ """
368
+ key x string;
369
+ property x.part_1 string;
370
+ property x.part_2 string;
371
+ auto _x <- concat(x.part_1, '.', x.part_2);
372
+
373
+ merge _x into x;
374
+
375
+ datasource xes (
376
+ x:x,
377
+ )
378
+ grain (x)
379
+ address x_table;
380
+
381
+ datasource parts (
382
+ part_1: part_1,
383
+ part_2:part_2,
384
+ concat(x.part_1, '.', x.part_2): x
385
+ )
386
+ grain (part_1, part_2)
387
+ address parts;
388
+
389
+
390
+
391
+ """
392
+ )
393
+
394
+ test_grain = Grain.from_concepts(
395
+ concepts=["x", "part_1", "part_2"], environment=env
396
+ )
397
+ assert test_grain.components == {
398
+ "local.x",
399
+ }
400
+
401
+
402
+ def test_datasource_merge_generation():
403
+ env, statements = parse(
404
+ """
405
+ key x string;
406
+ property x.part_1 string;
407
+ property x.part_2 string;
408
+ auto _x <- concat(part_1, '.', part_2);
409
+
410
+ merge _x into x;
411
+
412
+ datasource xes (
413
+ x:x,
414
+ )
415
+ grain (x)
416
+ address x_table;
417
+
418
+ datasource parts (
419
+ part_1: part_1,
420
+ part_2:part_2,
421
+ concat(part_1, '.', part_2): x
422
+ )
423
+ grain (part_1, part_2)
424
+ address parts;
425
+
426
+
427
+ select x,
428
+ part_1,
429
+ part_2
430
+ ;
431
+
432
+
433
+ """
434
+ )
435
+
436
+ q = Dialects.DUCK_DB.default_executor(environment=env).generate_sql(statements[-1])[
437
+ 0
438
+ ]
439
+
440
+ assert "JOIN" not in q, q
@@ -426,6 +426,19 @@ property id.labels map<string, int>;
426
426
  assert env.concepts["labels"].datatype.value_type == DataType.INTEGER
427
427
 
428
428
 
429
+ def test_map_concept_definition():
430
+ env, parsed = parse_text(
431
+ """
432
+ key id int;
433
+ property id.label string;
434
+ key map_store map<id, string>;
435
+
436
+ """
437
+ )
438
+ assert env.concepts["map_store"].datatype.key_data_type == DataType.INTEGER
439
+ assert env.concepts["map_store"].datatype.value_data_type == DataType.STRING
440
+
441
+
429
442
  def test_map_string_access():
430
443
  env, parsed = parse_text(
431
444
  """
@@ -4,6 +4,6 @@ from trilogy.dialect.enums import Dialects
4
4
  from trilogy.executor import Executor
5
5
  from trilogy.parser import parse
6
6
 
7
- __version__ = "0.0.3.57"
7
+ __version__ = "0.0.3.61"
8
8
 
9
9
  __all__ = ["parse", "Executor", "Dialects", "Environment", "CONFIG"]
@@ -2,3 +2,4 @@ CONSTANT_DATASET: str = "preql_internal_constant_dataset"
2
2
  ALL_ROWS_CONCEPT = "all_rows"
3
3
  INTERNAL_NAMESPACE = "__preql_internal"
4
4
  PERSISTED_CONCEPT_PREFIX = "__pre_persist"
5
+ UNNEST_NAME = "_unnest_alias"
@@ -7,7 +7,9 @@ class UnnestMode(Enum):
7
7
  DIRECT = "direct"
8
8
  CROSS_APPLY = "cross_apply"
9
9
  CROSS_JOIN = "cross_join"
10
+ CROSS_JOIN_UNNEST = "cross_join_unnest"
10
11
  CROSS_JOIN_ALIAS = "cross_join_alias"
12
+ PRESTO = "presto"
11
13
  SNOWFLAKE = "snowflake"
12
14
 
13
15
 
@@ -131,7 +131,7 @@ def concept_is_relevant(
131
131
  if (
132
132
  concept.purpose == Purpose.KEY
133
133
  and concept.keys
134
- and all([c in others for c in concept.keys])
134
+ and all([c in others and c != concept.address for c in concept.keys])
135
135
  ):
136
136
  return False
137
137
  if concept.purpose in (Purpose.METRIC,):
@@ -175,13 +175,17 @@ class ListType(BaseModel):
175
175
 
176
176
 
177
177
  class MapType(BaseModel):
178
- key_type: DataType
178
+ key_type: TYPEDEF_TYPES
179
179
  value_type: TYPEDEF_TYPES
180
180
 
181
181
  @field_validator("value_type", mode="plain")
182
182
  def validate_type(cls, v):
183
183
  return v
184
184
 
185
+ @field_validator("key_type", mode="plain")
186
+ def validate_key_type(cls, v):
187
+ return v
188
+
185
189
  @property
186
190
  def data_type(self):
187
191
  return DataType.MAP
@@ -58,7 +58,9 @@ if TYPE_CHECKING:
58
58
  class Import:
59
59
  alias: str
60
60
  path: Path
61
- input_path: str | None = None
61
+ input_path: Path | None = (
62
+ None # filepath where the text came from (path is the import path, but may be resolved from a dictionary for some resolvers)
63
+ )
62
64
 
63
65
 
64
66
  class BaseImportResolver(BaseModel):
@@ -52,7 +52,7 @@ def reorder_ctes(
52
52
  return input
53
53
  return [mapping[x] for x in topological_order]
54
54
  except nx.NetworkXUnfeasible as e:
55
- print(
55
+ logger.error(
56
56
  "The graph is not a DAG (contains cycles) and cannot be topologically sorted."
57
57
  )
58
58
  raise e
@@ -373,7 +373,7 @@ class RootNodeHandler:
373
373
  ) -> Optional[StrategyNode]:
374
374
  logger.info(
375
375
  f"{depth_to_prefix(self.ctx.depth)}{LOGGER_PREFIX} "
376
- f"Could not resolve root concepts, checking for synonyms"
376
+ f"Could not resolve root concepts, checking for synonyms for {root_targets}"
377
377
  )
378
378
 
379
379
  if not self.ctx.history.check_started(
@@ -21,6 +21,8 @@ def is_equivalent_basic_function_lineage(
21
21
  y.lineage, BuildFunction
22
22
  ):
23
23
  return False
24
+ if x.lineage.operator == y.lineage.operator == FunctionType.ATTR_ACCESS:
25
+ return x.lineage.concept_arguments == y.lineage.concept_arguments
24
26
  if x.lineage.operator == y.lineage.operator:
25
27
  return True
26
28
  if (
@@ -55,9 +57,14 @@ def gen_basic_node(
55
57
  f"{depth_prefix}{LOGGER_PREFIX} checking for synonyms for attribute access"
56
58
  )
57
59
  for x in local_optional:
60
+ found = False
58
61
  for z in x.pseudonyms:
62
+ # gate to ensure we don't match to multiple synonyms
63
+ if found:
64
+ continue
59
65
  s_concept = environment.alias_origin_lookup[z]
60
66
  if is_equivalent_basic_function_lineage(concept, s_concept):
67
+ found = True
61
68
  synonyms.append(s_concept)
62
69
  ignored_optional.add(x.address)
63
70
  equivalent_optional = [
@@ -72,7 +79,11 @@ def gen_basic_node(
72
79
  f"{depth_prefix}{LOGGER_PREFIX} basic node for {concept} has equivalent optional {[x.address for x in equivalent_optional]}"
73
80
  )
74
81
  for eo in equivalent_optional:
75
- parent_concepts += resolve_function_parent_concepts(eo, environment=environment)
82
+ new_parents = resolve_function_parent_concepts(eo, environment=environment)
83
+ logger.info(
84
+ f"{depth_prefix}{LOGGER_PREFIX} equivalent optional {eo.address} has parents {[x.address for x in new_parents]}"
85
+ )
86
+ parent_concepts += new_parents
76
87
  non_equivalent_optional = [
77
88
  x
78
89
  for x in local_optional
@@ -88,7 +88,10 @@ def determine_induced_minimal_nodes(
88
88
  for node in G.nodes:
89
89
  if concepts.get(node):
90
90
  lookup: BuildConcept = concepts[node]
91
- if lookup.derivation in (Derivation.CONSTANT,):
91
+ # inclusion of aggregates can create ambiguous node relation chains
92
+ # there may be a better way to handle this
93
+ # can be revisited if we need to connect a derived synonym based on an aggregate
94
+ if lookup.derivation in (Derivation.CONSTANT, Derivation.AGGREGATE):
92
95
  nodes_to_remove.append(node)
93
96
  # purge a node if we're already looking for all it's parents
94
97
  if filter_downstream and lookup.derivation not in (Derivation.ROOT,):
@@ -112,6 +115,7 @@ def determine_induced_minimal_nodes(
112
115
  return None
113
116
  H.remove_nodes_from(list(x for x in H.nodes if x not in paths))
114
117
  sG: nx.Graph = ax.steinertree.steiner_tree(H, nodelist).copy()
118
+ logger.debug("Steiner tree found for nodes %s", nodelist)
115
119
  final: nx.DiGraph = nx.subgraph(G, sG.nodes).copy()
116
120
 
117
121
  for edge in G.edges:
@@ -228,11 +232,14 @@ def resolve_weak_components(
228
232
  # to ensure there are not ambiguous discovery paths
229
233
  # (if we did not care about raising ambiguity errors, we could just use the first one)
230
234
  count = 0
231
- node_list = [
232
- concept_to_node(c.with_default_grain())
233
- for c in all_concepts
234
- if "__preql_internal" not in c.address
235
- ]
235
+ node_list = sorted(
236
+ [
237
+ concept_to_node(c.with_default_grain())
238
+ for c in all_concepts
239
+ if "__preql_internal" not in c.address
240
+ ]
241
+ )
242
+ logger.debug(f"Resolving weak components for {node_list} in {search_graph.nodes}")
236
243
  synonyms: set[str] = set()
237
244
  for x in all_concepts:
238
245
  synonyms = synonyms.union(x.pseudonyms)
@@ -354,7 +361,7 @@ def subgraphs_to_merge_node(
354
361
  parents.append(parent)
355
362
  input_c = []
356
363
  for x in parents:
357
- for y in x.output_concepts:
364
+ for y in x.usable_outputs:
358
365
  input_c.append(y)
359
366
  if len(parents) == 1 and enable_early_exit:
360
367
  logger.info(
@@ -392,6 +399,7 @@ def gen_merge_node(
392
399
  )
393
400
  else:
394
401
  all_search_concepts = all_concepts
402
+ all_search_concepts = sorted(all_search_concepts, key=lambda x: x.address)
395
403
  for filter_downstream in [True, False]:
396
404
  weak_resolve = resolve_weak_components(
397
405
  all_search_concepts,
@@ -64,6 +64,5 @@ def gen_synonym_node(
64
64
  )
65
65
  if attempt:
66
66
  logger.info(f"{local_prefix} found inputs with {combo}")
67
- print(attempt.output_concepts)
68
67
  return attempt
69
68
  return None
@@ -31,7 +31,7 @@ def gen_unnest_node(
31
31
  ]
32
32
  all_parents = arguments + non_equivalent_optional
33
33
  logger.info(
34
- f"{depth_prefix}{LOGGER_PREFIX} unnest node for {concept} with lineage {concept.lineage} has parents {all_parents} and equivalent optional {equivalent_optional}"
34
+ f"{depth_prefix}{LOGGER_PREFIX} unnest node for {concept} with lineage {concept.lineage} has parents + optional {all_parents} and equivalent optional {equivalent_optional}"
35
35
  )
36
36
  if arguments or local_optional:
37
37
  parent = source_concepts(
@@ -10,6 +10,7 @@ from trilogy.constants import (
10
10
  Rendering,
11
11
  logger,
12
12
  )
13
+ from trilogy.core.constants import UNNEST_NAME
13
14
  from trilogy.core.enums import (
14
15
  DatePart,
15
16
  FunctionType,
@@ -756,6 +757,16 @@ class BaseDialect:
756
757
  f"{self.QUOTE_CHARACTER}{c.safe_address}{self.QUOTE_CHARACTER}"
757
758
  for c in cte.join_derived_concepts
758
759
  ]
760
+ elif self.UNNEST_MODE in (UnnestMode.CROSS_JOIN_UNNEST, UnnestMode.PRESTO):
761
+ select_columns = [
762
+ self.render_concept_sql(c, cte)
763
+ for c in cte.output_columns
764
+ if c.address not in [y.address for y in cte.join_derived_concepts]
765
+ and c.address not in cte.hidden_concepts
766
+ ] + [
767
+ f"{UNNEST_NAME} as {self.QUOTE_CHARACTER}{c.safe_address}{self.QUOTE_CHARACTER}"
768
+ for c in cte.join_derived_concepts
769
+ ]
759
770
  else:
760
771
  # otherwse, assume we are unnesting directly in the select
761
772
  select_columns = [
@@ -770,11 +781,20 @@ class BaseDialect:
770
781
  if len(cte.joins) > 0:
771
782
  if cte.join_derived_concepts and self.UNNEST_MODE in (
772
783
  UnnestMode.CROSS_JOIN_ALIAS,
784
+ # UnnestMode.CROSS_JOIN_UNNEST,
773
785
  UnnestMode.CROSS_JOIN,
774
786
  UnnestMode.CROSS_APPLY,
775
787
  ):
776
788
 
777
789
  source = f"{render_unnest(self.UNNEST_MODE, self.QUOTE_CHARACTER, cte.join_derived_concepts[0], self.render_expr, cte)}"
790
+ elif cte.join_derived_concepts and self.UNNEST_MODE in (
791
+ UnnestMode.CROSS_JOIN_UNNEST,
792
+ ):
793
+ source = f"{self.render_expr(cte.join_derived_concepts[0], cte)} as {self.QUOTE_CHARACTER}{UNNEST_NAME}{self.QUOTE_CHARACTER}"
794
+ elif cte.join_derived_concepts and self.UNNEST_MODE in (
795
+ UnnestMode.PRESTO,
796
+ ):
797
+ source = f"{self.render_expr(cte.join_derived_concepts[0], cte)} as t({self.QUOTE_CHARACTER}{UNNEST_NAME}{self.QUOTE_CHARACTER})"
778
798
  elif (
779
799
  cte.join_derived_concepts
780
800
  and self.UNNEST_MODE == UnnestMode.SNOWFLAKE
@@ -22,7 +22,7 @@ FUNCTION_MAP = {
22
22
  FunctionType.MINUTE: lambda x: f"EXTRACT(MINUTE from {x[0]})",
23
23
  FunctionType.SECOND: lambda x: f"EXTRACT(SECOND from {x[0]})",
24
24
  FunctionType.HOUR: lambda x: f"EXTRACT(HOUR from {x[0]})",
25
- FunctionType.DAY_OF_WEEK: lambda x: f"EXTRACT(DAYOFWEEK from {x[0]})",
25
+ FunctionType.DAY_OF_WEEK: lambda x: f"EXTRACT(DAYOFWEEK from {x[0]})-1", # BigQuery's DAYOFWEEK returns 1 for Sunday
26
26
  FunctionType.DAY: lambda x: f"EXTRACT(DAY from {x[0]})",
27
27
  FunctionType.YEAR: lambda x: f"EXTRACT(YEAR from {x[0]})",
28
28
  FunctionType.MONTH: lambda x: f"EXTRACT(MONTH from {x[0]})",
@@ -97,5 +97,5 @@ class BigqueryDialect(BaseDialect):
97
97
  }
98
98
  QUOTE_CHARACTER = "`"
99
99
  SQL_TEMPLATE = BQ_SQL_TEMPLATE
100
- UNNEST_MODE = UnnestMode.CROSS_JOIN
100
+ UNNEST_MODE = UnnestMode.CROSS_JOIN_UNNEST
101
101
  DATATYPE_MAP = DATATYPE_MAP
@@ -1,5 +1,6 @@
1
1
  from typing import Callable
2
2
 
3
+ from trilogy.core.constants import UNNEST_NAME
3
4
  from trilogy.core.enums import Modifier, UnnestMode
4
5
  from trilogy.core.models.build import (
5
6
  BuildComparison,
@@ -34,11 +35,15 @@ def render_unnest(
34
35
  cte: CTE,
35
36
  ):
36
37
  if not isinstance(concept, (BuildConcept, BuildParamaterizedConceptReference)):
37
- address = "anon_function"
38
+ address = UNNEST_NAME
38
39
  else:
39
40
  address = concept.safe_address
40
41
  if unnest_mode == UnnestMode.CROSS_JOIN:
41
42
  return f"{render_func(concept, cte)} as {quote_character}{address}{quote_character}"
43
+ elif unnest_mode == UnnestMode.CROSS_JOIN_UNNEST:
44
+ return f"unnest({render_func(concept, cte)}) as {quote_character}{address}{quote_character}"
45
+ elif unnest_mode == UnnestMode.PRESTO:
46
+ return f"unnest({render_func(concept, cte)}) as t({quote_character}{UNNEST_NAME}{quote_character})"
42
47
  elif unnest_mode == UnnestMode.CROSS_JOIN_ALIAS:
43
48
  return f"{render_func(concept, cte)} as unnest_wrapper ({quote_character}{address}{quote_character})"
44
49
  elif unnest_mode == UnnestMode.SNOWFLAKE:
@@ -95,9 +100,12 @@ def render_join(
95
100
  return None
96
101
  if not cte:
97
102
  raise ValueError("must provide a cte to build an unnest joins")
98
- if unnest_mode == UnnestMode.CROSS_JOIN:
99
- return f"CROSS JOIN {render_unnest(unnest_mode, quote_character, join.object_to_unnest, render_expr_func, cte)}"
100
- if unnest_mode == UnnestMode.CROSS_JOIN_ALIAS:
103
+ if unnest_mode in (
104
+ UnnestMode.CROSS_JOIN,
105
+ UnnestMode.CROSS_JOIN_UNNEST,
106
+ UnnestMode.CROSS_JOIN_ALIAS,
107
+ UnnestMode.PRESTO,
108
+ ):
101
109
  return f"CROSS JOIN {render_unnest(unnest_mode, quote_character, join.object_to_unnest, render_expr_func, cte)}"
102
110
  if unnest_mode == UnnestMode.SNOWFLAKE:
103
111
  return f"LEFT JOIN LATERAL {render_unnest(unnest_mode, quote_character, join.object_to_unnest, render_expr_func, cte)}"
@@ -33,6 +33,8 @@ FUNCTION_MAP = {
33
33
  FunctionType.CURRENT_DATE: lambda x: "CURRENT_DATE",
34
34
  FunctionType.CURRENT_DATETIME: lambda x: "CURRENT_TIMESTAMP",
35
35
  FunctionType.ARRAY: lambda x: f"ARRAY[{', '.join(x)}]",
36
+ # regex
37
+ FunctionType.REGEXP_CONTAINS: lambda x: f"REGEXP_LIKE({x[0]}, {x[1]})",
36
38
  }
37
39
 
38
40
  FUNCTION_GRAIN_MATCH_MAP = {
@@ -85,8 +87,12 @@ class PrestoDialect(BaseDialect):
85
87
  }
86
88
  QUOTE_CHARACTER = '"'
87
89
  SQL_TEMPLATE = SQL_TEMPLATE
88
- DATATYPE_MAP = {**BaseDialect.DATATYPE_MAP, DataType.NUMERIC: "DECIMAL"}
89
- UNNEST_MODE = UnnestMode.CROSS_JOIN_ALIAS
90
+ DATATYPE_MAP = {
91
+ **BaseDialect.DATATYPE_MAP,
92
+ DataType.NUMERIC: "DECIMAL",
93
+ DataType.STRING: "VARCHAR",
94
+ }
95
+ UNNEST_MODE = UnnestMode.PRESTO
90
96
 
91
97
 
92
98
  class TrinoDialect(PrestoDialect):
@@ -38,6 +38,7 @@ from trilogy.core.models.author import (
38
38
  Parenthetical,
39
39
  RowsetItem,
40
40
  RowsetLineage,
41
+ TraitDataType,
41
42
  UndefinedConcept,
42
43
  WhereClause,
43
44
  WindowItem,
@@ -608,6 +609,9 @@ def window_item_to_concept(
608
609
  and set([x.address for x in item.expr.by]) == keys
609
610
  ):
610
611
  continue
612
+ elif isinstance(item.expr, AggregateWrapper):
613
+
614
+ grain_components += item.expr.by
611
615
  else:
612
616
  grain_components += item.concept_arguments
613
617
  else:
@@ -617,19 +621,20 @@ def window_item_to_concept(
617
621
  modifiers = get_upstream_modifiers(bcontent.concept_arguments, environment)
618
622
  datatype = parent.content.datatype
619
623
  if parent.type in (
620
- WindowType.RANK,
624
+ # WindowType.RANK,
621
625
  WindowType.ROW_NUMBER,
622
626
  WindowType.COUNT,
623
627
  WindowType.COUNT_DISTINCT,
624
628
  ):
625
629
  datatype = DataType.INTEGER
630
+ if parent.type == WindowType.RANK:
631
+ datatype = TraitDataType(type=DataType.INTEGER, traits=["rank"])
626
632
  return Concept(
627
633
  name=name,
628
634
  datatype=datatype,
629
635
  purpose=local_purpose,
630
636
  lineage=parent,
631
637
  metadata=fmetadata,
632
- # filters are implicitly at the grain of the base item
633
638
  grain=final_grain,
634
639
  namespace=namespace,
635
640
  keys=keys,
@@ -460,13 +460,22 @@ class ParseToObjects(Transformer):
460
460
  )
461
461
 
462
462
  def list_type(self, args) -> ListType:
463
- return ListType(type=args[0])
463
+ content = args[0]
464
+ if isinstance(content, str):
465
+ content = self.environment.concepts[content]
466
+ return ListType(type=content)
464
467
 
465
468
  def numeric_type(self, args) -> NumericType:
466
469
  return NumericType(precision=args[0], scale=args[1])
467
470
 
468
471
  def map_type(self, args) -> MapType:
469
- return MapType(key_type=args[0], value_type=args[1])
472
+ key = args[0]
473
+ value = args[1]
474
+ if isinstance(key, str):
475
+ key = self.environment.concepts[key]
476
+ elif isinstance(value, str):
477
+ value = self.environment.concepts[value]
478
+ return MapType(key_type=key, value_type=value)
470
479
 
471
480
  @v_args(meta=True)
472
481
  def data_type(
@@ -842,6 +851,17 @@ class ParseToObjects(Transformer):
842
851
  continue
843
852
 
844
853
  key_inputs = grain.components
854
+ eligible = True
855
+ for key in key_inputs:
856
+ # never overwrite a key with a dependency on a property
857
+ # for example - binding a datasource with a grain of <x>.fun should
858
+ # never override the grain of x to <fun>
859
+ if column.concept.address in (
860
+ self.environment.concepts[key].keys or set()
861
+ ):
862
+ eligible = False
863
+ if not eligible:
864
+ continue
845
865
  keys = [self.environment.concepts[grain] for grain in key_inputs]
846
866
  # target_c.purpose = Purpose.PROPERTY
847
867
  target_c.keys = set([x.address for x in keys])
@@ -1030,6 +1050,9 @@ class ParseToObjects(Transformer):
1030
1050
 
1031
1051
  def import_statement(self, args: list[str]) -> ImportStatement:
1032
1052
  start = datetime.now()
1053
+ is_file_resolver = isinstance(
1054
+ self.environment.config.import_resolver, FileSystemImportResolver
1055
+ )
1033
1056
  if len(args) == 2:
1034
1057
  alias = args[-1]
1035
1058
  cache_key = args[-1]
@@ -1043,9 +1066,7 @@ class ParseToObjects(Transformer):
1043
1066
  is_stdlib = True
1044
1067
  target = join(STDLIB_ROOT, *path) + ".preql"
1045
1068
  token_lookup: Path | str = Path(target)
1046
- elif isinstance(
1047
- self.environment.config.import_resolver, FileSystemImportResolver
1048
- ):
1069
+ elif is_file_resolver:
1049
1070
  target = join(self.environment.working_path, *path) + ".preql"
1050
1071
  # tokens + text are cached by path
1051
1072
  token_lookup = Path(target)
@@ -1125,7 +1146,13 @@ class ParseToObjects(Transformer):
1125
1146
  imps = ImportStatement(alias=alias, input_path=input_path, path=parsed_path)
1126
1147
 
1127
1148
  self.environment.add_import(
1128
- alias, new_env, Import(alias=alias, path=parsed_path)
1149
+ alias,
1150
+ new_env,
1151
+ Import(
1152
+ alias=alias,
1153
+ path=parsed_path,
1154
+ input_path=Path(target) if is_file_resolver else None,
1155
+ ),
1129
1156
  )
1130
1157
  end = datetime.now()
1131
1158
  perf_logger.debug(
@@ -1677,6 +1704,7 @@ class ParseToObjects(Transformer):
1677
1704
 
1678
1705
  @v_args(meta=True)
1679
1706
  def unnest(self, meta, args):
1707
+
1680
1708
  return self.function_factory.create_function(args, FunctionType.UNNEST, meta)
1681
1709
 
1682
1710
  @v_args(meta=True)
@@ -31,6 +31,7 @@ from trilogy.core.models.core import (
31
31
  DataType,
32
32
  ListType,
33
33
  ListWrapper,
34
+ MapWrapper,
34
35
  NumericType,
35
36
  TraitDataType,
36
37
  TupleWrapper,
@@ -250,6 +251,17 @@ class Renderer:
250
251
  def _(self, arg: TupleWrapper):
251
252
  return "(" + ", ".join([self.to_string(x) for x in arg]) + ")"
252
253
 
254
+ @to_string.register
255
+ def _(self, arg: MapWrapper):
256
+ def process_key_value(key, value):
257
+ return f"{self.to_string(key)}: {self.to_string(value)}"
258
+
259
+ return (
260
+ "{"
261
+ + ", ".join([process_key_value(key, value) for key, value in arg.items()])
262
+ + "}"
263
+ )
264
+
253
265
  @to_string.register
254
266
  def _(self, arg: DatePart):
255
267
  return arg.value
@@ -402,11 +402,11 @@
402
402
 
403
403
  struct_type: "struct"i "<" ((data_type | IDENTIFIER) ",")* (data_type | IDENTIFIER) ","? ">"
404
404
 
405
- list_type: ("list"i "<" data_type ">" ) | ("array"i "<" data_type ">" )
405
+ list_type: ("list"i | "array"i) "<" (data_type | IDENTIFIER) ">"
406
406
 
407
407
  numeric_type: "numeric"i "(" int_lit "," int_lit ")"
408
408
 
409
- map_type: "map"i "<" data_type "," data_type ">"
409
+ map_type: "map"i "<" (data_type | IDENTIFIER) "," (data_type | IDENTIFIER) ">"
410
410
 
411
411
  !data_type: ("string"i | "number"i | "numeric"i | "map"i | "list"i | "array"i | "any"i | "int"i | "bigint"i | "date"i | "datetime"i | "timestamp"i | "float"i | "bool"i | numeric_type | map_type | struct_type | list_type) ("::" IDENTIFIER)?
412
412
 
@@ -2,9 +2,12 @@
2
2
 
3
3
  type year int;
4
4
  type month int;
5
+ type month_name string;
5
6
  type week int;
6
7
  type day int;
7
8
  type hour int;
8
9
  type minute int;
9
10
  type second int;
10
- type day_of_week int;
11
+ type day_of_week int;
12
+ type day_of_week_name string;
13
+
@@ -0,0 +1,6 @@
1
+
2
+ type rank int;
3
+ type score int;
4
+ type position int;
5
+ type index int;
6
+ type grade int;
File without changes
File without changes
File without changes
File without changes