pytrilogy 0.0.3.51__tar.gz → 0.0.3.53__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (141)
  1. {pytrilogy-0.0.3.51/pytrilogy.egg-info → pytrilogy-0.0.3.53}/PKG-INFO +1 -1
  2. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53/pytrilogy.egg-info}/PKG-INFO +1 -1
  3. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/pytrilogy.egg-info/SOURCES.txt +1 -0
  4. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_functions.py +3 -0
  5. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_parse_engine.py +30 -0
  6. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_parsing.py +14 -6
  7. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_show.py +1 -1
  8. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/__init__.py +1 -1
  9. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/enums.py +5 -0
  10. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/functions.py +23 -2
  11. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/models/author.py +6 -7
  12. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/models/build.py +3 -9
  13. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/models/execute.py +43 -24
  14. pytrilogy-0.0.3.53/trilogy/core/utility.py +8 -0
  15. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/dialect/base.py +15 -13
  16. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/dialect/bigquery.py +1 -0
  17. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/dialect/common.py +5 -4
  18. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/parsing/common.py +3 -3
  19. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/parsing/parse_engine.py +29 -6
  20. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/parsing/trilogy.lark +11 -4
  21. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/LICENSE.md +0 -0
  22. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/README.md +0 -0
  23. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/pyproject.toml +0 -0
  24. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/pytrilogy.egg-info/dependency_links.txt +0 -0
  25. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/pytrilogy.egg-info/entry_points.txt +0 -0
  26. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/pytrilogy.egg-info/requires.txt +0 -0
  27. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/pytrilogy.egg-info/top_level.txt +0 -0
  28. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/setup.cfg +0 -0
  29. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/setup.py +0 -0
  30. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_datatypes.py +0 -0
  31. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_declarations.py +0 -0
  32. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_derived_concepts.py +0 -0
  33. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_discovery_nodes.py +0 -0
  34. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_enums.py +0 -0
  35. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_environment.py +0 -0
  36. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_executor.py +0 -0
  37. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_failure.py +0 -0
  38. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_imports.py +0 -0
  39. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_metadata.py +0 -0
  40. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_models.py +0 -0
  41. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_multi_join_assignments.py +0 -0
  42. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_parsing_failures.py +0 -0
  43. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_partial_handling.py +0 -0
  44. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_query_processing.py +0 -0
  45. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_query_render.py +0 -0
  46. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_select.py +0 -0
  47. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_statements.py +0 -0
  48. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_typing.py +0 -0
  49. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_undefined_concept.py +0 -0
  50. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_user_functions.py +0 -0
  51. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_where_clause.py +0 -0
  52. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/authoring/__init__.py +0 -0
  53. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/compiler.py +0 -0
  54. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/constants.py +0 -0
  55. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/__init__.py +0 -0
  56. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/constants.py +0 -0
  57. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/env_processor.py +0 -0
  58. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/environment_helpers.py +0 -0
  59. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/ergonomics.py +0 -0
  60. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/exceptions.py +0 -0
  61. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/graph_models.py +0 -0
  62. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/internal.py +0 -0
  63. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/models/__init__.py +0 -0
  64. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/models/build_environment.py +0 -0
  65. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/models/core.py +0 -0
  66. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/models/datasource.py +0 -0
  67. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/models/environment.py +0 -0
  68. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/optimization.py +0 -0
  69. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/optimizations/__init__.py +0 -0
  70. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/optimizations/base_optimization.py +0 -0
  71. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/optimizations/inline_datasource.py +0 -0
  72. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/optimizations/predicate_pushdown.py +0 -0
  73. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/__init__.py +0 -0
  74. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/concept_strategies_v3.py +0 -0
  75. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/graph_utils.py +0 -0
  76. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/node_generators/__init__.py +0 -0
  77. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/node_generators/basic_node.py +0 -0
  78. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/node_generators/common.py +0 -0
  79. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/node_generators/filter_node.py +0 -0
  80. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/node_generators/group_node.py +0 -0
  81. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/node_generators/group_to_node.py +0 -0
  82. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/node_generators/multiselect_node.py +0 -0
  83. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/node_generators/node_merge_node.py +0 -0
  84. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/node_generators/rowset_node.py +0 -0
  85. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
  86. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +0 -0
  87. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/node_generators/select_merge_node.py +0 -0
  88. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/node_generators/select_node.py +0 -0
  89. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/node_generators/synonym_node.py +0 -0
  90. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/node_generators/union_node.py +0 -0
  91. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/node_generators/unnest_node.py +0 -0
  92. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/node_generators/window_node.py +0 -0
  93. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/nodes/__init__.py +0 -0
  94. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/nodes/base_node.py +0 -0
  95. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/nodes/filter_node.py +0 -0
  96. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/nodes/group_node.py +0 -0
  97. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/nodes/merge_node.py +0 -0
  98. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/nodes/select_node_v2.py +0 -0
  99. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/nodes/union_node.py +0 -0
  100. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/nodes/unnest_node.py +0 -0
  101. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/nodes/window_node.py +0 -0
  102. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/processing/utility.py +0 -0
  103. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/query_processor.py +0 -0
  104. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/statements/__init__.py +0 -0
  105. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/statements/author.py +0 -0
  106. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/statements/build.py +0 -0
  107. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/statements/common.py +0 -0
  108. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/statements/execute.py +0 -0
  109. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/dialect/__init__.py +0 -0
  110. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/dialect/config.py +0 -0
  111. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/dialect/dataframe.py +0 -0
  112. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/dialect/duckdb.py +0 -0
  113. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/dialect/enums.py +0 -0
  114. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/dialect/postgres.py +0 -0
  115. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/dialect/presto.py +0 -0
  116. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/dialect/snowflake.py +0 -0
  117. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/dialect/sql_server.py +0 -0
  118. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/engine.py +0 -0
  119. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/executor.py +0 -0
  120. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/hooks/__init__.py +0 -0
  121. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/hooks/base_hook.py +0 -0
  122. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/hooks/graph_hook.py +0 -0
  123. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/hooks/query_debugger.py +0 -0
  124. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/metadata/__init__.py +0 -0
  125. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/parser.py +0 -0
  126. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/parsing/__init__.py +0 -0
  127. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/parsing/config.py +0 -0
  128. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/parsing/exceptions.py +0 -0
  129. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/parsing/helpers.py +0 -0
  130. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/parsing/render.py +0 -0
  131. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/py.typed +0 -0
  132. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/render.py +0 -0
  133. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/scripts/__init__.py +0 -0
  134. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/scripts/trilogy.py +0 -0
  135. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/std/__init__.py +0 -0
  136. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/std/date.preql +0 -0
  137. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/std/display.preql +0 -0
  138. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/std/geography.preql +0 -0
  139. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/std/money.preql +0 -0
  140. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/std/report.preql +0 -0
  141. {pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/utility.py +0 -0
{pytrilogy-0.0.3.51/pytrilogy.egg-info → pytrilogy-0.0.3.53}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pytrilogy
-Version: 0.0.3.51
+Version: 0.0.3.53
 Summary: Declarative, typed query language that compiles to SQL.
 Home-page:
 Author:
{pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53/pytrilogy.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pytrilogy
-Version: 0.0.3.51
+Version: 0.0.3.53
 Summary: Declarative, typed query language that compiles to SQL.
 Home-page:
 Author:
{pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/pytrilogy.egg-info/SOURCES.txt
@@ -56,6 +56,7 @@ trilogy/core/graph_models.py
 trilogy/core/internal.py
 trilogy/core/optimization.py
 trilogy/core/query_processor.py
+trilogy/core/utility.py
 trilogy/core/models/__init__.py
 trilogy/core/models/author.py
 trilogy/core/models/build.py
{pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_functions.py
@@ -261,6 +261,9 @@ def test_string_functions(test_environment):
 property strpos_name <- strpos(category_name, 'a');
 property like_name <- like(category_name, 'a%');
 property like_alt <- category_name like 'a%';
+property regex_contains <- regexp_contains(category_name, 'a');
+property regex_substring <- regexp_extract(category_name, 'a');
+property regex_replace <- regexp_replace(category_name, 'a', 'b');
 
 select
     test_name,
{pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_parse_engine.py
@@ -46,6 +46,36 @@ FROM a
 """
 
 
+def test_parse_datatype_in_datasource():
+    env = Environment()
+    x = ParseToObjects(environment=env)
+    test_text = """
+key x int;
+property x.timestamp timestamp;
+
+datasource funky (
+    x: x,
+    timestamp:timestamp)
+address fun;
+
+"""
+    x.set_text(test_text)
+
+    tokens = PARSER.parse(test_text)
+    x.transform(tokens)
+    x.run_second_parse_pass()
+
+
+TEXT2 = """
+const a <- 1;
+
+select
+    a,
+FROM a
+;
+"""
+
+
 def test_from_error():
     env = Environment()
 
{pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_parsing.py
@@ -92,7 +92,12 @@ def test_arg_to_datatype():
 
 
 def test_argument_to_purpose(test_environment: Environment):
-    assert argument_to_purpose(1.00) == Purpose.CONSTANT
+    assert (
+        argument_to_purpose(
+            1.00,
+        )
+        == Purpose.CONSTANT
+    )
     assert argument_to_purpose("test") == Purpose.CONSTANT
     assert argument_to_purpose(test_environment.concepts["order_id"]) == Purpose.KEY
     assert (
@@ -100,19 +105,22 @@ def test_argument_to_purpose(test_environment: Environment):
            [
                "test",
                1.00,
-            ]
+            ],
+            test_environment,
        )
        == Purpose.CONSTANT
    )
    assert (
        function_args_to_output_purpose(
-            ["test", 1.00, test_environment.concepts["order_id"]]
+            ["test", 1.00, test_environment.concepts["order_id"]], test_environment
        )
        == Purpose.PROPERTY
    )
    unnest_env, parsed = parse_text("const random <- unnest([1,2,3,4]);")
    assert (
-        function_args_to_output_purpose([unnest_env.concepts["random"]])
+        function_args_to_output_purpose(
+            [unnest_env.concepts["random"]], test_environment
+        )
        == Purpose.PROPERTY
    )
 
@@ -488,7 +496,7 @@ select x;
 
    results = Dialects.DUCK_DB.default_executor().generate_sql(text)[0]
 
-    assert '"abc:def" as test' in results
+    assert '"abc:def" as "test"' in results
 
    text = """
 key x int;
@@ -508,7 +516,7 @@ select x;
 
    results = Dialects.DUCK_DB.default_executor().generate_sql(text)[0]
 
-    assert "abcdef as test" in results, results
+    assert '"abcdef" as "test"' in results, results
 
 
 def test_datasource_where_equivalent():
{pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/tests/test_show.py
@@ -53,5 +53,5 @@ def test_show_bigquery():
        .fetchall()
    )
    assert (
-        "FULL JOIN cheerful on 1=1" in query[0]["__preql_internal_query_text"]
+        'FULL JOIN "cheerful" on 1=1' in query[0]["__preql_internal_query_text"]
    ), query[0]["__preql_internal_query_text"]
{pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/__init__.py
@@ -4,6 +4,6 @@ from trilogy.dialect.enums import Dialects
 from trilogy.executor import Executor
 from trilogy.parser import parse
 
-__version__ = "0.0.3.51"
+__version__ = "0.0.3.53"
 
 __all__ = ["parse", "Executor", "Dialects", "Environment", "CONFIG"]
{pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/enums.py
@@ -181,6 +181,11 @@ class FunctionType(Enum):
    STRPOS = "strpos"
    CONTAINS = "contains"
 
+    # STRING REGEX
+    REGEXP_CONTAINS = "regexp_contains"
+    REGEXP_EXTRACT = "regexp_extract"
+    REGEXP_REPLACE = "regexp_replace"
+
    # Dates
    DATE = "date"
    DATETIME = "datetime"
{pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/functions.py
@@ -360,6 +360,24 @@ FUNCTION_REGISTRY: dict[FunctionType, FunctionConfig] = {
        output_type=DataType.STRING,
        arg_count=1,
    ),
+    FunctionType.REGEXP_CONTAINS: FunctionConfig(
+        valid_inputs={DataType.STRING},
+        output_purpose=Purpose.PROPERTY,
+        output_type=DataType.BOOL,
+        arg_count=2,
+    ),
+    FunctionType.REGEXP_EXTRACT: FunctionConfig(
+        valid_inputs={DataType.STRING},
+        output_purpose=Purpose.PROPERTY,
+        output_type=DataType.STRING,
+        arg_count=2,
+    ),
+    FunctionType.REGEXP_REPLACE: FunctionConfig(
+        valid_inputs={DataType.STRING},
+        output_purpose=Purpose.PROPERTY,
+        output_type=DataType.STRING,
+        arg_count=3,
+    ),
    FunctionType.DATE: FunctionConfig(
        valid_inputs={
            DataType.DATE,
@@ -809,13 +827,14 @@ def create_function_derived_concept(
    namespace: str,
    operator: FunctionType,
    arguments: list[Concept],
+    environment: Environment,
    output_type: Optional[
        DataType | ListType | StructType | MapType | NumericType | TraitDataType
    ] = None,
    output_purpose: Optional[Purpose] = None,
 ) -> Concept:
    purpose = (
-        function_args_to_output_purpose(arguments)
+        function_args_to_output_purpose(arguments, environment=environment)
        if output_purpose is None
        else output_purpose
    )
@@ -868,13 +887,15 @@ def argument_to_purpose(arg) -> Purpose:
    raise ValueError(f"Cannot parse arg purpose for {arg} of type {type(arg)}")
 
 
-def function_args_to_output_purpose(args) -> Purpose:
+def function_args_to_output_purpose(args, environment: Environment) -> Purpose:
    has_metric = False
    has_non_constant = False
    has_non_single_row_constant = False
    if not args:
        return Purpose.CONSTANT
    for arg in args:
+        if isinstance(arg, ConceptRef):
+            arg = environment.concepts[arg.address]
        purpose = argument_to_purpose(arg)
        if purpose == Purpose.METRIC:
            has_metric = True
{pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/models/author.py
@@ -25,6 +25,7 @@ from pydantic import (
    ValidationInfo,
    computed_field,
    field_validator,
+    model_validator,
 )
 
 from trilogy.constants import DEFAULT_NAMESPACE, MagicConstants
@@ -621,8 +622,8 @@ class Comparison(ConceptArgs, Mergeable, DataTyped, Namespaced, BaseModel):
            return v.reference
        return v
 
-    def __init__(self, *args, **kwargs) -> None:
-        super().__init__(*args, **kwargs)
+    @model_validator(mode="after")
+    def validate_comparison(self):
        if self.operator in (ComparisonOperator.IS, ComparisonOperator.IS_NOT):
            if self.right != MagicConstants.NULL and DataType.BOOL != arg_to_datatype(
                self.right
@@ -632,7 +633,6 @@ class Comparison(ConceptArgs, Mergeable, DataTyped, Namespaced, BaseModel):
            )
        elif self.operator in (ComparisonOperator.IN, ComparisonOperator.NOT_IN):
            right_type = arg_to_datatype(self.right)
-
            if isinstance(right_type, ListType) and not is_compatible_datatype(
                arg_to_datatype(self.left), right_type.value_data_type
            ):
@@ -653,6 +653,8 @@ class Comparison(ConceptArgs, Mergeable, DataTyped, Namespaced, BaseModel):
                f"Cannot compare {arg_to_datatype(self.left)} and {arg_to_datatype(self.right)} of different types with operator {self.operator} in {str(self)}"
            )
 
+        return self
+
    def __add__(self, other):
        if other is None:
            return self
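Note on the change above (illustrative, not part of the diff): the checks that previously ran in a Comparison.__init__ override now run in a pydantic @model_validator(mode="after") hook, which executes on the fully constructed instance and must return self. A minimal standalone sketch of that pattern, using a made-up Range model rather than trilogy's classes:

    from pydantic import BaseModel, ValidationError, model_validator

    class Range(BaseModel):
        low: int
        high: int

        # Runs after field validation, on the constructed instance; must return self.
        @model_validator(mode="after")
        def validate_range(self) -> "Range":
            if self.low > self.high:
                raise ValueError(f"low ({self.low}) must not exceed high ({self.high})")
            return self

    Range(low=1, high=5)  # ok
    try:
        Range(low=5, high=1)
    except ValidationError as exc:
        print(exc)  # the ValueError is surfaced as a pydantic ValidationError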
@@ -1022,7 +1024,7 @@ class Concept(Addressable, DataTyped, ConceptArgs, Mergeable, Namespaced, BaseMo
            keys = self.keys
 
        if self.is_aggregate and isinstance(new_lineage, Function) and grain.components:
-            grain_components = [
+            grain_components: list[ConceptRef | Concept] = [
                environment.concepts[c].reference for c in grain.components
            ]
            new_lineage = AggregateWrapper(function=new_lineage, by=grain_components)
@@ -1847,9 +1849,6 @@ class AggregateWrapper(Mergeable, DataTyped, ConceptArgs, Namespaced, BaseModel)
    function: Function
    by: List[ConceptRef | Concept] = Field(default_factory=list)
 
-    def __init__(self, **kwargs):
-        super().__init__(**kwargs)
-
    @field_validator("by", mode="before")
    @classmethod
    def enforce_concept_ref(cls, v):
{pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/models/build.py
@@ -262,11 +262,8 @@ class BuildGrain(BaseModel):
    components: set[str] = Field(default_factory=set)
    where_clause: Optional[BuildWhereClause] = None
 
-    def __init__(self, **kwargs):
-        super().__init__(**kwargs)
-
    def without_condition(self):
-        return BuildGrain(components=self.components)
+        return BuildGrain.model_construct(components=self.components)
 
    @classmethod
    def from_concepts(
@@ -321,12 +318,12 @@ class BuildGrain(BaseModel):
        # raise NotImplementedError(
        #     f"Cannot merge grains with where clauses, self {self.where_clause} other {other.where_clause}"
        # )
-        return BuildGrain(
+        return BuildGrain.model_construct(
            components=self.components.union(other.components), where_clause=where
        )
 
    def __sub__(self, other: "BuildGrain") -> "BuildGrain":
-        return BuildGrain(
+        return BuildGrain.model_construct(
            components=self.components.difference(other.components),
            where_clause=self.where_clause,
        )
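Side note on the BuildGrain changes above (illustrative, not part of the diff): pydantic's model_construct builds an instance without running validators or type coercion, presumably chosen here to avoid re-validating components that are already validated. A minimal standalone sketch with a made-up model:

    from pydantic import BaseModel

    class Point(BaseModel):
        x: int
        y: int = 0

    # model_construct assigns fields as-is and fills defaults; no validation or coercion runs.
    p = Point.model_construct(x="1")
    print(p.x, type(p.x))  # 1 <class 'str'> -- the string is kept untouched
    print(p.y)             # 0 (default applied)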
@@ -637,9 +634,6 @@ class BuildComparison(BuildConceptArgs, ConstantInlineable, BaseModel):
    ]
    operator: ComparisonOperator
 
-    def __init__(self, *args, **kwargs) -> None:
-        super().__init__(*args, **kwargs)
-
    def __add__(self, other):
        if other is None:
            return self
{pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/core/models/execute.py
@@ -1,9 +1,16 @@
 from __future__ import annotations
 
 from collections import defaultdict
-from typing import Any, Dict, List, Optional, Set, Union
-
-from pydantic import BaseModel, Field, ValidationInfo, computed_field, field_validator
+from typing import Dict, List, Optional, Set, Union
+
+from pydantic import (
+    BaseModel,
+    Field,
+    ValidationInfo,
+    computed_field,
+    field_validator,
+    model_validator,
+)
 
 from trilogy.constants import CONFIG, logger
 from trilogy.core.constants import CONSTANT_DATASET
@@ -30,6 +37,7 @@ from trilogy.core.models.build import (
    LooseBuildConceptList,
 )
 from trilogy.core.models.datasource import Address
+from trilogy.core.utility import safe_quote
 from trilogy.utility import unique
 
 LOGGER_PREFIX = "[MODELS_EXECUTE]"
@@ -194,6 +202,7 @@ class CTE(BaseModel):
        self.base_name_override = ds_being_inlined.safe_location
        self.base_alias_override = ds_being_inlined.safe_identifier
 
+        # if we have a join to the parent, we need to remove it
        for join in self.joins:
            if isinstance(join, InstantiatedUnnestJoin):
                continue
@@ -315,17 +324,20 @@ class CTE(BaseModel):
        return self.source.name
 
    @property
-    def quote_address(self) -> dict[str, bool]:
-
-        return {
-            candidate.safe_identifier: (
-                candidate.address.quoted and not candidate.address.is_query
-                if isinstance(candidate, BuildDatasource)
-                and isinstance(candidate.address, Address)
-                else False
-            )
-            for candidate in self.source.datasources
-        }
+    def quote_address(self) -> bool:
+        if self.is_root_datasource:
+            root = self.source.datasources[0]
+            if isinstance(root, BuildDatasource) and isinstance(root.address, Address):
+                return not root.address.is_query
+            return True
+        elif not self.source.datasources:
+            return False
+        base = self.source.datasources[0]
+        if isinstance(base, BuildDatasource):
+            if isinstance(base.address, Address):
+                return not base.address.is_query
+            return True
+        return True
 
    @property
    def base_alias(self) -> str:
@@ -473,8 +485,8 @@
    left_datasource: Optional[Union[BuildDatasource, "QueryDatasource"]] = None
    concept_pairs: list[ConceptPair] | None = None
 
-    def __init__(self, **data: Any):
-        super().__init__(**data)
+    @model_validator(mode="after")
+    def validate_join(self) -> "BaseJoin":
        if (
            self.left_datasource
            and self.left_datasource.identifier == self.right_datasource.identifier
@@ -483,14 +495,18 @@
                f"Cannot join a dataself to itself, joining {self.left_datasource} and"
                f" {self.right_datasource}"
            )
-        final_concepts = []
 
-        # if we have a list of concept pairs
+        # Early returns maintained as in original code
        if self.concept_pairs:
-            return
+            return self
+
        if self.concepts == []:
-            return
+            return self
+
+        # Validation logic
+        final_concepts = []
        assert self.left_datasource and self.right_datasource
+
        for concept in self.concepts or []:
            include = True
            for ds in [self.left_datasource, self.right_datasource]:
@@ -507,6 +523,7 @@
                )
            if include:
                final_concepts.append(concept)
+
        if not final_concepts and self.concepts:
            # if one datasource only has constants
            # we can join on 1=1
@@ -519,11 +536,11 @@
                ]
            ):
                self.concepts = []
-                return
+                return self
            # if everything is at abstract grain, we can skip joins
            if all([c.grain.abstract for c in ds.output_concepts]):
                self.concepts = []
-                return
+                return self
 
        left_keys = [c.address for c in self.left_datasource.output_concepts]
        right_keys = [c.address for c in self.right_datasource.output_concepts]
@@ -535,7 +552,9 @@
                f" right_keys {right_keys},"
                f" provided join concepts {match_concepts}"
            )
+
        self.concepts = final_concepts
+        return self
 
    @property
    def unique_id(self) -> str:
@@ -912,8 +931,8 @@
    def right_ref(self) -> str:
        if self.quote:
            if self.right_cte.identifier in self.inlined_ctes:
-                return f"{self.quote}{self.right_cte.source.datasources[0].safe_location}{self.quote} as {self.right_cte.source.datasources[0].safe_identifier}"
-            return self.right_cte.safe_identifier
+                return f"{safe_quote(self.right_cte.source.datasources[0].safe_location, self.quote)} as {self.quote}{self.right_cte.source.datasources[0].safe_identifier}{self.quote}"
+            return f"{self.quote}{self.right_cte.safe_identifier}{self.quote}"
        if self.right_cte.identifier in self.inlined_ctes:
            return f"{self.right_cte.source.datasources[0].safe_location} as {self.right_cte.source.datasources[0].safe_identifier}"
        return self.right_cte.safe_identifier
pytrilogy-0.0.3.53/trilogy/core/utility.py
@@ -0,0 +1,8 @@
+def safe_quote(string: str, quote_char: str):
+    # split dotted identifiers
+    # TODO: evaluate if we need smarter parsing for strings that could actually include .
+    if string.startswith("https://"):
+        # it's a url, no splitting
+        return f"{quote_char}{string}{quote_char}"
+    components = string.split(".")
+    return ".".join([f"{quote_char}{string}{quote_char}" for string in components])
{pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/dialect/base.py
@@ -74,6 +74,7 @@ from trilogy.core.statements.execute import (
    ProcessedRawSQLStatement,
    ProcessedShowStatement,
 )
+from trilogy.core.utility import safe_quote
 from trilogy.dialect.common import render_join, render_unnest
 from trilogy.hooks.base_hook import BaseHook
 
@@ -204,6 +205,9 @@ FUNCTION_MAP = {
    FunctionType.SUBSTRING: lambda x: f"SUBSTRING({x[0]},{x[1]},{x[2]})",
    FunctionType.STRPOS: lambda x: f"STRPOS({x[0]},{x[1]})",
    FunctionType.CONTAINS: lambda x: f"CONTAINS({x[0]},{x[1]})",
+    FunctionType.REGEXP_CONTAINS: lambda x: f"REGEXP_CONTAINS({x[0]},{x[1]})",
+    FunctionType.REGEXP_EXTRACT: lambda x: f"REGEXP_EXTRACT({x[0]},{x[1]})",
+    FunctionType.REGEXP_REPLACE: lambda x: f"REGEXP_REPLACE({x[0]},{x[1]}, {x[2]})",
    # FunctionType.NOT_LIKE: lambda x: f" CASE WHEN {x[0]} like {x[1]} THEN 0 ELSE 1 END",
    # date types
    FunctionType.DATE_TRUNCATE: lambda x: f"date_trunc({x[0]},{x[1]})",
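For reference, the new entries emit SQL fragments the same way the surrounding string functions do, with x being the list of already-rendered argument expressions; a tiny sketch with placeholder arguments (not an actual compilation path):

    regexp_replace = lambda x: f"REGEXP_REPLACE({x[0]},{x[1]}, {x[2]})"
    print(regexp_replace(['"category"."name"', "'a'", "'b'"]))
    # REGEXP_REPLACE("category"."name",'a', 'b')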
@@ -270,13 +274,6 @@ ORDER BY{% for order in order_by %}
 )
 
 
-def safe_quote(string: str, quote_char: str):
-    # split dotted identifiers
-    # TODO: evaluate if we need smarter parsing for strings that could actually include .
-    components = string.split(".")
-    return ".".join([f"{quote_char}{string}{quote_char}" for string in components])
-
-
 def safe_get_cte_value(coalesce, cte: CTE | UnionCTE, c: BuildConcept, quote_char: str):
    address = c.address
    raw = cte.source_map.get(address, None)
@@ -285,12 +282,17 @@ def safe_get_cte_value(coalesce, cte: CTE | UnionCTE, c: BuildConcept, quote_cha
        return None
    if isinstance(raw, str):
        rendered = cte.get_alias(c, raw)
-        return f"{raw}.{safe_quote(rendered, quote_char)}"
+        return f"{quote_char}{raw}{quote_char}.{safe_quote(rendered, quote_char)}"
    if isinstance(raw, list) and len(raw) == 1:
        rendered = cte.get_alias(c, raw[0])
-        return f"{raw[0]}.{safe_quote(rendered, quote_char)}"
+        return f"{quote_char}{raw[0]}{quote_char}.{safe_quote(rendered, quote_char)}"
    return coalesce(
-        sorted([f"{x}.{safe_quote(cte.get_alias(c, x), quote_char)}" for x in raw])
+        sorted(
+            [
+                f"{quote_char}{x}{quote_char}.{safe_quote(cte.get_alias(c, x), quote_char)}"
+                for x in raw
+            ]
+        )
    )
 
 
@@ -783,12 +785,12 @@ class BaseDialect:
            else:
                source = None
        else:
-            if cte.quote_address.get(cte.source.datasources[0].safe_identifier, False):
-                source = f"{self.QUOTE_CHARACTER}{cte.base_name}{self.QUOTE_CHARACTER}"
+            if cte.quote_address:
+                source = safe_quote(cte.base_name, self.QUOTE_CHARACTER)
            else:
                source = cte.base_name
            if cte.base_name != cte.base_alias:
-                source = f"{source} as {cte.base_alias}"
+                source = f"{source} as {self.QUOTE_CHARACTER}{cte.base_alias}{self.QUOTE_CHARACTER}"
        if not cte.render_from_clause:
            final_joins = []
        else:
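To make the quoting change above concrete, a sketch of how the FROM fragment is now assembled for a quoted base datasource (names are placeholders and QUOTE stands in for the dialect's QUOTE_CHARACTER):

    from trilogy.core.utility import safe_quote

    QUOTE = '"'
    base_name, base_alias = "analytics.orders", "orders_base"
    source = safe_quote(base_name, QUOTE)            # "analytics"."orders"
    if base_name != base_alias:
        source = f"{source} as {QUOTE}{base_alias}{QUOTE}"
    print(source)  # "analytics"."orders" as "orders_base"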
{pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/dialect/bigquery.py
@@ -18,6 +18,7 @@ FUNCTION_MAP = {
    FunctionType.LIKE: lambda x: (
        f" CASE WHEN {x[0]} like {x[1]} THEN True ELSE False END"
    ),
+    FunctionType.IS_NULL: lambda x: f"CASE WHEN {x[0]} IS NULL THEN True ELSE False END",
    FunctionType.MINUTE: lambda x: f"EXTRACT(MINUTE from {x[0]})",
    FunctionType.SECOND: lambda x: f"EXTRACT(SECOND from {x[0]})",
    FunctionType.HOUR: lambda x: f"EXTRACT(HOUR from {x[0]})",
{pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/dialect/common.py
@@ -63,8 +63,8 @@ def render_join_concept(
        elif isinstance(raw_content, BuildFunction):
            rval = render_expr(raw_content, cte=cte)
            return rval
-        return f"{name}.{quote_character}{raw_content}{quote_character}"
-    return f"{name}.{quote_character}{concept.safe_address}{quote_character}"
+        return f"{quote_character}{name}{quote_character}.{quote_character}{raw_content}{quote_character}"
+    return f"{quote_character}{name}{quote_character}.{quote_character}{concept.safe_address}{quote_character}"
 
 
 def render_join(
@@ -91,8 +91,9 @@ def render_join(
        return f"FULL JOIN {render_unnest(unnest_mode, quote_character, join.object_to_unnest, render_expr_func, cte)}"
    # left_name = join.left_name
    right_name = join.right_name
-    if cte.quote_address.get(join.right_name, False):
-        join.quote = quote_character
+    join.quote = quote_character
+    # if cte.quote_address.get(join.right_name, False):
+    #     join.quote = quote_character
    right_base = join.right_ref
    base_joinkeys = []
    if join.joinkey_pairs:
{pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/parsing/common.py
@@ -62,7 +62,7 @@ def process_function_arg(
            operator=FunctionType.PARENTHETICAL,
            arguments=processed,
            output_datatype=arg_to_datatype(processed[0]),
-            output_purpose=function_args_to_output_purpose(processed),
+            output_purpose=function_args_to_output_purpose(processed, environment),
        )
    elif isinstance(arg, Function):
        # if it's not an aggregate function, we can skip the virtual concepts
@@ -140,7 +140,7 @@ def get_purpose_and_keys(
    args: Tuple[ConceptRef | Concept, ...] | None,
    environment: Environment,
 ) -> Tuple[Purpose, set[str] | None]:
-    local_purpose = purpose or function_args_to_output_purpose(args)
+    local_purpose = purpose or function_args_to_output_purpose(args, environment)
    if local_purpose in (Purpose.PROPERTY, Purpose.METRIC) and args:
        keys = concept_list_to_keys(args, environment)
    else:
@@ -548,7 +548,7 @@ def filter_item_to_concept(
 
    else:
        raise NotImplementedError(
-            f"Filter item with non ref content {parent.content} not yet supported"
+            f"Filter item with non ref content {parent.content} ({type(parent.content)}) not yet supported"
        )
    modifiers = get_upstream_modifiers(
        cparent.concept_arguments, environment=environment
{pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/parsing/parse_engine.py
@@ -1257,16 +1257,21 @@ class ParseToObjects(Transformer):
        intersection = base.locally_derived.intersection(pre_keys)
        if intersection:
            for x in intersection:
-                if (
-                    base.local_concepts[x].derivation
-                    == self.environment.concepts[x].derivation
+                if str(base.local_concepts[x].lineage) == str(
+                    self.environment.concepts[x].lineage
                ):
+                    local = base.local_concepts[x]
+                    friendly_name = (
+                        local.name
+                        if local.namespace == DEFAULT_NAMESPACE
+                        else local.namespace
+                    )
                    raise NameShadowError(
-                        f"Select statement {base} derives concept {x} with identical derivation as named concept. Use the named concept directly."
+                        f"Select statement {base} creates a new concept '{friendly_name}' with identical definition as the existing concept '{friendly_name}'. Replace {base.local_concepts[x].lineage} with a direct reference to {friendly_name}."
                    )
                else:
                    raise NameShadowError(
-                        f"Select statement {base} creates new derived concepts {list(intersection)} with identical name(s) to existing concept(s). If these are identical, reference the concept directly. Otherwise alias your column as a new name."
+                        f"Select statement {base} creates new named concepts from calculations {list(intersection)} with identical name(s) to existing concept(s). Use new unique names for these."
                    )
        return base
 
@@ -1739,9 +1744,27 @@
        return self.function_factory.create_function(args, FunctionType.SUBSTRING, meta)
 
    @v_args(meta=True)
-    def lower(self, meta, args):
+    def flower(self, meta, args):
        return self.function_factory.create_function(args, FunctionType.LOWER, meta)
 
+    @v_args(meta=True)
+    def fregexp_contains(self, meta, args):
+        return self.function_factory.create_function(
+            args, FunctionType.REGEXP_CONTAINS, meta
+        )
+
+    @v_args(meta=True)
+    def fregexp_extract(self, meta, args):
+        return self.function_factory.create_function(
+            args, FunctionType.REGEXP_EXTRACT, meta
+        )
+
+    @v_args(meta=True)
+    def fregexp_replace(self, meta, args):
+        return self.function_factory.create_function(
+            args, FunctionType.REGEXP_REPLACE, meta
+        )
+
    # date functions
    @v_args(meta=True)
    def fdate(self, meta, args):
{pytrilogy-0.0.3.51 → pytrilogy-0.0.3.53}/trilogy/parsing/trilogy.lark
@@ -267,7 +267,7 @@
 _UPPER.1: "upper("i
 upper: _UPPER expr ")"
 _LOWER.1: "lower("i
-lower: _LOWER expr ")"
+flower: _LOWER expr ")"
 _SPLIT.1: "split("i
 fsplit: _SPLIT expr "," string_lit ")"
 _STRPOS.1: "strpos("i
@@ -276,8 +276,14 @@
 fcontains: _CONTAINS expr "," expr ")"
 _SUBSTRING.1: "substring("i
 fsubstring: _SUBSTRING expr "," expr "," expr ")"
-
-_string_functions: like | ilike | upper | lower | fsplit | fstrpos | fsubstring | fcontains
+_REGEXP_EXTRACT.1: "regexp_extract("
+fregexp_extract: _REGEXP_EXTRACT expr "," expr ")"
+_REGEXP_CONTAINS.1: "regexp_contains("
+fregexp_contains: _REGEXP_CONTAINS expr "," expr ")"
+_REGEXP_REPLACE.1: "regexp_replace("
+fregexp_replace: _REGEXP_REPLACE expr "," expr "," expr ")"
+
+_string_functions: like | ilike | upper | flower | fsplit | fstrpos | fsubstring | fcontains | fregexp_extract | fregexp_contains | fregexp_replace
 
 // special aggregate
 _GROUP.1: "group("i
@@ -311,7 +317,8 @@
 _DATE.1: "date("i
 fdate: _DATE expr ")"
 fdatetime: "datetime"i "(" expr ")"
-ftimestamp: "timestamp"i "(" expr ")"
+_TIMESTAMP.1: "timestamp("i
+ftimestamp: _TIMESTAMP expr ")"
 
 _SECOND.1: "second("i
 fsecond: _SECOND expr ")"