relationalai 0.12.7__py3-none-any.whl → 0.12.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. relationalai/clients/snowflake.py +37 -5
  2. relationalai/clients/use_index_poller.py +11 -1
  3. relationalai/semantics/internal/internal.py +29 -7
  4. relationalai/semantics/lqp/compiler.py +1 -1
  5. relationalai/semantics/lqp/constructors.py +6 -0
  6. relationalai/semantics/lqp/executor.py +23 -38
  7. relationalai/semantics/lqp/intrinsics.py +4 -3
  8. relationalai/semantics/lqp/model2lqp.py +6 -12
  9. relationalai/semantics/lqp/passes.py +4 -2
  10. relationalai/semantics/lqp/rewrite/__init__.py +2 -1
  11. relationalai/semantics/lqp/rewrite/function_annotations.py +91 -56
  12. relationalai/semantics/lqp/rewrite/functional_dependencies.py +282 -0
  13. relationalai/semantics/metamodel/builtins.py +6 -0
  14. relationalai/semantics/metamodel/rewrite/__init__.py +2 -1
  15. relationalai/semantics/metamodel/rewrite/dnf_union_splitter.py +1 -1
  16. relationalai/semantics/metamodel/rewrite/extract_nested_logicals.py +9 -9
  17. relationalai/semantics/metamodel/rewrite/flatten.py +18 -149
  18. relationalai/semantics/metamodel/rewrite/format_outputs.py +165 -0
  19. relationalai/semantics/reasoners/graph/core.py +98 -70
  20. relationalai/semantics/reasoners/optimization/__init__.py +55 -10
  21. relationalai/semantics/reasoners/optimization/common.py +63 -8
  22. relationalai/semantics/reasoners/optimization/solvers_dev.py +39 -33
  23. relationalai/semantics/reasoners/optimization/solvers_pb.py +1033 -385
  24. relationalai/semantics/rel/compiler.py +21 -2
  25. relationalai/semantics/tests/test_snapshot_abstract.py +3 -0
  26. relationalai/tools/cli.py +10 -0
  27. relationalai/tools/cli_controls.py +15 -0
  28. relationalai/util/otel_handler.py +10 -4
  29. {relationalai-0.12.7.dist-info → relationalai-0.12.9.dist-info}/METADATA +1 -1
  30. {relationalai-0.12.7.dist-info → relationalai-0.12.9.dist-info}/RECORD +33 -31
  31. {relationalai-0.12.7.dist-info → relationalai-0.12.9.dist-info}/WHEEL +0 -0
  32. {relationalai-0.12.7.dist-info → relationalai-0.12.9.dist-info}/entry_points.txt +0 -0
  33. {relationalai-0.12.7.dist-info → relationalai-0.12.9.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,282 @@
+ from __future__ import annotations
+ from typing import Optional, Sequence
+ from relationalai.semantics.internal import internal
+ from relationalai.semantics.metamodel.ir import (
+     Require, Logical, Var, Relation, Lookup, ScalarType
+ )
+ from relationalai.semantics.metamodel import builtins
+
+
+ """
11
+ Helper functions for converting `Require` nodes with unique constraints to functional
12
+ dependencies. The main functionalities provided are:
13
+ 1. Check whether a `Require` node is a valid unique constraint representation
14
+ 2. Represent the uniqueness constraint as a functional dependency
15
+ 3. Check if the functional dependency is structural i.e., can be represented with
16
+ `@function(k)` annotation on a single relation.
17
+
18
+ =========================== Structure of unique constraints ================================
19
+ A `Require` node represents a _unique constraint_ if it meets the following criteria:
20
+ * the `Require` node's `domain` is an empty `Logical` node
21
+ * the `Require` node's `checks` has a single `Check` node
22
+ * the single `Check` node has `Logical` task that is a list of `Lookup` tasks
23
+ * precisely one `Lookup` task in the `Check` uses the `unique` builtin relation name
24
+ * the `unique` lookup has precisely one argument, which is a `TupleArg` or a `tuple`
25
+ containing at least one `Var`
26
+ * all `Lookup` nodes use variables only (no constants)
27
+ * the variables used in the `unique` lookup are a subset of the variables used in other
28
+ lookups
29
+ ============================================================================================
30
+
31
+ We use the following unique constraint as the running example.
32
+
33
+ ```
34
+ Require
35
+ domain
36
+ Logical
37
+ checks:
38
+ Check
39
+ check:
40
+ Logical
41
+ Person(person::Person)
42
+ first_name(person::Person, firstname::String)
43
+ last_name(person::Person, lastname::String)
44
+ unique((firstname::String, lastname::String))
45
+ error:
46
+ ...
47
+ ```
48
+
49
+ =========================== Semantics of unique constraints ================================
50
+ A unique constraint states that the columns declared in the `unique` predicate must be
51
+ unique in the result of the conjunctive query consisting of all remaining predicates.
52
+ ============================================================================================
53
+
54
+ In the running example, the conjunctive query computes a table with 3 columns, the person id
55
+ `person::Person`, the first name `firstname::String`, and the last name `lastname::String`.
56
+ The uniqueness predicate `unique((firstname::String, lastname::String))` states that no person
57
+ can have more than a single combination of first and last name.
58
+
59
+ The unique constraint in the running example above corresponds to the following functional
60
+ dependency.
61
+
62
+ ```
63
+ Person(x) ∧ first_name(x, y) ∧ last_name(x, z): {y, z} -> {x}
64
+ ```
65
+
66
+ ------------------------------ Redundant Type Atoms ----------------------------------------
67
+ At the time of writing, PyRel does not yet remove redundant unary atoms. For instance, in
68
+ the running example, the atom `Person(person::Person)` is redundant because the type of the
69
+ `person` variable is specified in the other two atoms `first_name` and `last_name`.
70
+ Consequently, we identify redundant atoms and remove them from the definition of the
71
+ corresponding functional dependency.
72
+
73
+ Formally, a _guard_ atom is any `Lookup` node whose relation name is not `unique`. Now, a
74
+ unary guard atom `T(x::T)` is _redundant_ if the uniqueness constraint has a non-unary guard
75
+ atom `R(...,x::T,...)`.
76
+
77
+ ================================ Normalized FDs ============================================
78
+ Now, the _(normalized)_ functional dependency_ corresponding to a unique constraint is an
79
+ object of the form `φ: X → Y`, where :
80
+ 1. `φ` is the set of all non-redundant guard atoms.
81
+ 2. `X` is the set of variables used in the `unique` atom
82
+ 3. `Y` is the set of all other variables used in the constraint
83
+ ============================================================================================
84
+
85
+ The normalized functional dependency corresponding to the unique constraints from the running
86
+ example is :
87
+ ```
88
+ first_name(person::Person, firstname::String) ∧ last_name(person::Person, lastname::String): {firstname:String, lastname:String} -> {person:Person}
89
+ ```
90
+ Note that the unary atom `Person(person::Person)` is redundant and thus omitted from the
91
+ decomposition.
92
+
93
+ Some simple functional dependencies can, however, be expressed simply with `@function(k)`
94
+ attribute of a single relation. Specifically, a functional dependency `φ: X → Y` is
95
+ _structural_ if φ consists of a single atom `R(x1,...,xm,y1,...,yk)` and `X = {x1,...,xm}`.
96
+ """
+
+ #
+ # Checks that the input `Require` node is a valid unique constraint. Returns `None` if it
+ # is not. Otherwise, returns the decomposition of the unique constraint as a tuple
+ # `(all_vars, unique_vars, guard)`, where
+ # - `all_vars` is the list of all variables used in the constraint
+ # - `unique_vars` is the list of variables used in the `unique` atom
+ # - `guard` is the list of all other `Lookup` atoms
+ #
+ def _split_unique_require_node(node: Require) -> Optional[tuple[list[Var], list[Var], list[Lookup]]]:
+     if not isinstance(node.domain, Logical):
+         return None
+     if len(node.domain.body) != 0:
+         return None
+     if len(node.checks) != 1:
+         return None
+     check = node.checks[0]
+     if not isinstance(check.check, Logical):
+         return None
+
+     unique_atom: Optional[Lookup] = None
+     guard: list[Lookup] = []
+     for task in check.check.body:
+         if not isinstance(task, Lookup):
+             return None
+         if task.relation.name == builtins.unique.name:
+             if unique_atom is not None:
+                 return None
+             unique_atom = task
+         else:
+             guard.append(task)
+
+     if unique_atom is None:
+         return None
+
+     # collect variables
+     all_vars: set[Var] = set()
+     for lookup in guard:
+         for arg in lookup.args:
+             if not isinstance(arg, Var):
+                 return None
+             all_vars.add(arg)
+
+     unique_vars: set[Var] = set()
+     if len(unique_atom.args) != 1:
+         return None
+     if not isinstance(unique_atom.args[0], (internal.TupleArg, tuple)):
+         return None
+     if len(unique_atom.args[0]) == 0:
+         return None
+     for arg in unique_atom.args[0]:
+         if not isinstance(arg, Var):
+             return None
+         unique_vars.add(arg)
+
+     # check that unique vars are a subset of other vars
+     if not unique_vars.issubset(all_vars):
+         return None
+
+     return list(all_vars), list(unique_vars), guard
+
+
+ def is_valid_unique_constraint(node: Require) -> bool:
+     """
+     Checks whether the input `Require` node is a valid unique constraint. See description at
+     the top of the file for details.
+     """
+     return _split_unique_require_node(node) is not None
+
+ #
+ # A unary guard atom `T(x::T)` is redundant if the constraint contains a non-unary atom
+ # `R(...,x::T,...)`. We discard all redundant guard atoms in the constructed FD.
+ #
+ def normalized_fd(node: Require) -> Optional[FunctionalDependency]:
+     """
+     If the input `Require` node is a uniqueness constraint, constructs its reduced
+     functional dependency `φ: X -> Y`, where `φ` contains all non-redundant guard atoms,
+     `X` are the variables used in the `unique` atom, and `Y` are the remaining variables.
+     Returns `None` if the input node is not a valid uniqueness constraint.
+     """
+     parts = _split_unique_require_node(node)
+     if parts is None:
+         return None
+     all_vars, unique_vars, guard_atoms = parts
+
+     # remove redundant lookups
+     redundant_guard_atoms: list[Lookup] = []
+     for atom in guard_atoms:
+         # the atom is unary: A(x::T)
+         if len(atom.args) != 1:
+             continue
+         var = atom.args[0]
+         assert isinstance(var, Var)
+         # T is a scalar type (which includes entity types)
+         var_type = var.type
+         if not isinstance(var_type, ScalarType):
+             continue
+         # the atom is an entity typing T(x::T), i.e., T = A (and hence not a Boolean property)
+         var_type_name = var_type.name
+         rel_name = atom.relation.name
+         if rel_name != var_type_name:
+             continue
+         # found an atom of the form T(x::T);
+         # check whether there is another atom R(...,x::T,...)
+         for typed_atom in guard_atoms:
+             if len(typed_atom.args) == 1:
+                 continue
+             if var in typed_atom.args:
+                 redundant_guard_atoms.append(atom)
+                 break
+
+     guard = [atom for atom in guard_atoms if atom not in redundant_guard_atoms]
+     keys = unique_vars
+     values = [v for v in all_vars if v not in keys]
+
+     return FunctionalDependency(guard, keys, values)
+
+ class FunctionalDependency:
+     """
+     Represents a functional dependency of the form `φ: X -> Y`, where
+     - `φ` is a set of `Lookup` atoms
+     - `X` and `Y` are disjoint and covering sets of variables used in `φ`
+     """
+     def __init__(self, guard: Sequence[Lookup], keys: Sequence[Var], values: Sequence[Var]):
+         self.guard = frozenset(guard)
+         self.keys = frozenset(keys)
+         self.values = frozenset(values)
+         assert self.keys.isdisjoint(self.values), "Keys and values must be disjoint"
+
+         # for structural fd check
+         self._is_structural: bool = False
+         self._structural_relation: Optional[Relation] = None
+         self._structural_rank: Optional[int] = None
+
+         self._determine_is_structural()
+
+     # A functional dependency `φ: X → Y` is _structural_ (with rank k) if `φ` consists of a
+     # single atom `R(x1,...,xm,y1,...,yk)` and `X = {x1,...,xm}`. Not all functional
+     # dependencies are structural: `R(x, y, z): {y, z} → {x}` cannot be expressed with
+     # `@function`, and neither can `R(x, y) ∧ P(x, z): {x} → {y, z}`.
+     def _determine_is_structural(self):
+         if len(self.guard) != 1:
+             self._is_structural = False
+             return
+         atom = next(iter(self.guard))
+         atom_vars = atom.args
+         if len(atom_vars) <= len(self.keys):  # @function(0) provides no information
+             self._is_structural = False
+             return
+         prefix_vars = atom_vars[:len(self.keys)]
+         if set(prefix_vars) != set(self.keys):
+             self._is_structural = False
+             return
+         self._is_structural = True
+         self._structural_relation = atom.relation
+         self._structural_rank = len(atom_vars) - len(self.keys)
+
+     @property
+     def is_structural(self) -> bool:
+         """
+         Whether the functional dependency is structural, i.e., can be represented
+         with the `@function(k)` annotation on a single relation.
+         """
+         return self._is_structural
+
+     @property
+     def structural_relation(self) -> Relation:
+         """
+         The relation of a structural functional dependency. Raises a ValueError if
+         the functional dependency is not structural.
+         """
+         if not self._is_structural:
+             raise ValueError("Functional dependency is not structural")
+         assert self._structural_relation is not None
+         return self._structural_relation
+
+     @property
+     def structural_rank(self) -> int:
+         """
+         The rank `k` of a structural functional dependency. Raises a ValueError if
+         the functional dependency is not structural.
+         """
+         if not self._is_structural:
+             raise ValueError("Functional dependency is not structural")
+         assert self._structural_rank is not None
+         return self._structural_rank
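
For intuition, the structural test at the heart of `FunctionalDependency` can be sketched standalone. The following toy re-implementation (hypothetical `Atom` tuples in place of the package's `Lookup` IR nodes; not part of the wheel) shows why `R(x, y): {x} -> {y}` is structural with rank 1 while `R(x, y, z): {y, z} -> {x}` is not:

```
from typing import NamedTuple, Optional

class Atom(NamedTuple):
    relation: str
    args: tuple  # variable names, in relation-argument order

def structural_rank(guard: set, keys: set) -> Optional[int]:
    # φ must be a single atom R(x1,...,xm,y1,...,yk)
    if len(guard) != 1:
        return None
    atom = next(iter(guard))
    # @function(0) provides no information
    if len(atom.args) <= len(keys):
        return None
    # the keys X must be exactly the m-argument prefix
    if set(atom.args[:len(keys)]) != keys:
        return None
    return len(atom.args) - len(keys)  # k = arity of the value suffix

print(structural_rank({Atom("R", ("x", "y"))}, {"x"}))            # 1
print(structural_rank({Atom("R", ("x", "y", "z"))}, {"y", "z"}))  # None
```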
@@ -495,6 +495,11 @@ output_keys_annotation = f.annotation(output_keys, [])
  function = f.relation("function", [f.input_field("code", types.Symbol)])
  function_checked_annotation = f.annotation(function, [f.lit("checked")])
  function_annotation = f.annotation(function, [])
+ function_ranked = f.relation("function", [f.input_field("code", types.Symbol), f.input_field("rank", types.Int64)])
+ def function_ranked_checked_annotation(k: int) -> ir.Annotation:
+     return f.annotation(function_ranked, [f.lit("checked"), f.lit(k)])
+ def function_ranked_annotation(k: int) -> ir.Annotation:
+     return f.annotation(function_ranked, [f.lit(k)])

  # Indicates this relation should be tracked in telemetry. Supported for Relationships and Concepts.
  # `RAI_BackIR.with_relation_tracking` produces log messages at the start and end of each
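
These ranked helpers pair naturally with the new functional-dependency module. A plausible consumer, sketched here for illustration only (the function `annotation_for_fd` is hypothetical; `function_ranked_annotation` and `FunctionalDependency` are the APIs added in this release):

```
from typing import Optional

from relationalai.semantics.metamodel import builtins, ir
from relationalai.semantics.lqp.rewrite.functional_dependencies import FunctionalDependency

def annotation_for_fd(fd: FunctionalDependency) -> Optional[ir.Annotation]:
    # only structural FDs have a single-relation @function(k) form
    if not fd.is_structural:
        return None
    return builtins.function_ranked_annotation(fd.structural_rank)
```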
@@ -654,6 +659,7 @@ rel_primitive_solverlib_ho_appl = aggregation("rel_primitive_solverlib_ho_appl",
  ])
  implies = f.relation("implies", [f.input_field("a", types.Bool), f.input_field("b", types.Bool)])
  all_different = aggregation("all_different", [f.input_field("over", types.Any)])
+ special_ordered_set_type_2 = aggregation("special_ordered_set_type_2", [f.input_field("rank", types.Any)])

  # graph primitive algorithm helpers
  infomap = aggregation("infomap", [
@@ -2,5 +2,6 @@ from .discharge_constraints import DischargeConstraints
  from .dnf_union_splitter import DNFUnionSplitter
  from .extract_nested_logicals import ExtractNestedLogicals
  from .flatten import Flatten
+ from .format_outputs import FormatOutputs

- __all__ = ["DischargeConstraints", "DNFUnionSplitter", "ExtractNestedLogicals", "Flatten"]
+ __all__ = ["DischargeConstraints", "DNFUnionSplitter", "ExtractNestedLogicals", "Flatten", "FormatOutputs"]
@@ -150,7 +150,7 @@ class DNFExtractor(Visitor):

          replacement_tasks: list[ir.Task] = []
          for body in replacement_bodies:
-             new_task = f.logical(body)
+             new_task = f.logical(body, node.hoisted)
              replacement_tasks.append(new_task)
          self.replaced_by[node] = replacement_tasks

@@ -1,7 +1,7 @@
  from __future__ import annotations

  from relationalai.semantics.metamodel import ir, factory as f, helpers
- from relationalai.semantics.metamodel.visitor import Rewriter
+ from relationalai.semantics.metamodel.visitor import Rewriter, collect_by_type
  from relationalai.semantics.metamodel.compiler import Pass
  from relationalai.semantics.metamodel.util import OrderedSet, ordered_set, NameCache
  from relationalai.semantics.metamodel import dependency
@@ -48,10 +48,10 @@ class LogicalExtractor(Rewriter):
          # variables (which is currently done by flatten), such as when the parent is a Match
          # or a Union, or if the logical has a Rank.
          if not (
-             node.hoisted and
+             logical.hoisted and
              not isinstance(parent, (ir.Match, ir.Union)) and
-             all(isinstance(v, ir.Var) for v in node.hoisted) and
-             not any(isinstance(c, ir.Rank) for c in node.body)
+             all(isinstance(v, ir.Var) for v in logical.hoisted) and
+             not any(isinstance(c, ir.Rank) for c in logical.body)
          ):
              return logical

@@ -61,11 +61,11 @@ class LogicalExtractor(Rewriter):

          # if there are aggregations, make sure we don't expose the projected and input vars,
          # but expose groupbys
-         for child in node.body:
-             if isinstance(child, ir.Aggregate):
-                 exposed_vars.difference_update(child.projection)
-                 exposed_vars.difference_update(helpers.aggregate_inputs(child))
-                 exposed_vars.update(child.group)
+         for agg in collect_by_type(ir.Aggregate, logical):
+             exposed_vars.difference_update(agg.projection)
+             exposed_vars.difference_update(helpers.aggregate_inputs(agg))
+             exposed_vars.update(agg.group)
+
          # add the values (hoisted)
          exposed_vars.update(helpers.hoisted_vars(logical.hoisted))

@@ -3,12 +3,11 @@ from dataclasses import dataclass
  from typing import cast, Optional, TypeVar
  from typing import Tuple

- from relationalai.semantics.metamodel import builtins, ir, factory as f, helpers, types
+ from relationalai.semantics.metamodel import builtins, ir, factory as f, helpers
  from relationalai.semantics.metamodel.compiler import Pass, group_tasks
  from relationalai.semantics.metamodel.util import OrderedSet, ordered_set, NameCache
  from relationalai.semantics.metamodel import dependency
- from relationalai.semantics.metamodel.util import FrozenOrderedSet, filter_by_type
- from relationalai.semantics.metamodel.typer.typer import to_type, is_primitive
+ from relationalai.semantics.metamodel.typer.typer import to_type

  class Flatten(Pass):
      """
@@ -225,15 +224,26 @@ class Flatten(Pass):
              "ranks": ir.Rank,
          })

-         # if there are outputs, adjust them (depending on the config for wide vs gnf)
+         # If there are outputs, flatten each into its own top-level rule, along with its
+         # dependencies.
          if groups["outputs"]:
-             if self._handle_outputs:
-                 return self.adjust_outputs(task, body, groups, ctx)
-             else:
-                 # When we do not handle outputs. For example, in SQL compiler. We need to leave output as a top-level element.
+             if not self._handle_outputs:
                  ctx.rewrite_ctx.top_level.append(ir.Logical(task.engine, task.hoisted, tuple(body), task.annotations))
                  return Flatten.HandleResult(None)

+             # Analyze the dependencies in the newly rewritten body
+             new_logical = ir.Logical(task.engine, task.hoisted, tuple(body))
+             info = dependency.analyze(new_logical)
+
+             for output in groups["outputs"]:
+                 assert(isinstance(output, ir.Output))
+                 new_body = info.task_dependencies(output)
+                 new_body.update(ctx.extra_tasks)
+                 new_body.add(output)
+                 ctx.rewrite_ctx.top_level.append(ir.Logical(task.engine, task.hoisted, tuple(new_body), task.annotations))
+
+             return Flatten.HandleResult(None)
+
          # if there are updates, extract as a new top level rule
          if groups["updates"]:
              # add task dependencies to the body
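
The new output handling follows a simple pattern: analyze dependencies once, then emit one top-level rule per output carrying exactly the tasks that output needs. A toy illustration of that pattern with hypothetical `Task` objects (not the package's IR):

```
from dataclasses import dataclass

@dataclass(frozen=True)
class Task:
    name: str
    deps: tuple = ()

def dependencies_of(task: Task) -> list:
    # transitive dependencies, depth-first, deduplicated in order
    seen: list = []
    def walk(t: Task):
        for d in t.deps:
            walk(d)
            if d not in seen:
                seen.append(d)
    walk(task)
    return seen

def split_outputs(outputs: list) -> list:
    # one top-level rule per output: its dependencies followed by the output itself
    return [dependencies_of(out) + [out] for out in outputs]

lookup = Task("lookup_person")
rules = split_outputs([Task("output_name", (lookup,)), Task("output_age", (lookup,))])
print([[t.name for t in rule] for rule in rules])
# [['lookup_person', 'output_name'], ['lookup_person', 'output_age']]
```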
@@ -455,147 +465,6 @@ class Flatten(Pass):
              task.annotations
          ))

-     #--------------------------------------------------
-     # GNF vs wide output support
-     #--------------------------------------------------
-     def adjust_outputs(self, task: ir.Logical, body: OrderedSet[ir.Task], groups: dict[str, OrderedSet[ir.Task]], ctx: Context):
-
-         # for wide outputs, only adjust the output task to include the keys.
-         if ctx.options.get("wide_outputs", False):
-             for output in groups["outputs"]:
-                 assert(isinstance(output, ir.Output))
-                 if output.keys:
-                     body.remove(output)
-                     body.add(self.rewrite_wide_output(output))
-             # self.remove_subsumptions(body, ctx)
-             return Flatten.HandleResult(ir.Logical(task.engine, task.hoisted, tuple(body), task.annotations))
-
-         # for GNF outputs we need to generate a rule for each "column" in the output
-         else:
-             # first split outputs in potentially multiple outputs, one for each "column"
-             for output in groups["outputs"]:
-                 assert(isinstance(output, ir.Output))
-                 if output.keys:
-                     # we will replace the output bellow,
-                     body.remove(output)
-
-                     is_export = builtins.export_annotation in output.annotations
-
-                     # generate an output for each "column"
-                     # output looks like def output(:cols, :col000, key0, key1, value):
-                     original_cols = OrderedSet()
-                     for idx, alias in enumerate(output.aliases):
-                         # skip None values which are used as a placeholder for missing values
-                         if alias[1] is None:
-                             continue
-                         original_cols.add(alias[1])
-                         self._generate_output_column(body, output, idx, alias, is_export)
-
-                     idx = len(output.aliases)
-                     for key in output.keys:
-                         if key not in original_cols:
-                             self._generate_output_column(body, output, idx, (key.name, key), is_export)
-                             idx += 1
-
-             # analyse the resulting logical to be able to pull dependencies
-             logical = ir.Logical(task.engine, task.hoisted, tuple(body), task.annotations)
-             info = dependency.analyze(logical)
-
-             # now extract a logical for each output, bringing together its dependencies
-             for output in filter_by_type(body, ir.Output):
-                 deps = info.task_dependencies(output)
-                 # TODO: verify safety of doing this
-                 # self.remove_subsumptions(deps, ctx)
-
-                 deps.add(output)
-                 ctx.rewrite_ctx.top_level.append(ir.Logical(task.engine, tuple(), tuple(deps)))
-
-             return Flatten.HandleResult(None)
-
-     def _generate_output_column(self, body: OrderedSet[ir.Task], output: ir.Output, idx: int, alias: tuple[str, ir.Value], is_export: bool):
-         if not output.keys:
-             return output
-
-         aliases = [("cols", f.literal("cols", types.Symbol))] if not is_export else []
-         aliases.append(("col", f.literal(f"col{idx:03}", types.Symbol)))
-
-         for k in output.keys:
-             aliases.append((f"key_{k.name}_{idx}", k))
-
-         if (is_export and
-             isinstance(alias[1], ir.Var) and
-             (not is_primitive(alias[1].type) or alias[1].type == types.Hash)):
-
-             uuid = f.var(f"{alias[0]}_{idx}_uuid", types.String)
-             body.add(f.lookup(builtins.uuid_to_string, [alias[1], uuid]))
-             aliases.append((uuid.name, uuid))
-         else:
-             aliases.append(alias)
-
-         body.add(ir.Output(
-             output.engine,
-             FrozenOrderedSet.from_iterable(aliases),
-             output.keys,
-             output.annotations
-         ))
-
-
-     def remove_subsumptions(self, body: OrderedSet[ir.Task], ctx: Context):
-         # remove from the body all the tasks that are subsumed by some other task in the set;
-         # this can be done because some tasks are references to extracted nested logical that
-         # contain filters they dependend on, so we don't need those filters here if the
-         # reference is present.
-         for logical in filter_by_type(body, ir.Logical):
-             if logical.id in ctx.included:
-                 # if the logical id is included, it means it's a reference to an extracted
-                 # rule, so remove all other items in the body that are already included in
-                 # the body referenced by it
-                 for item in body:
-                     if item in ctx.included[logical.id]:
-                         body.remove(item)
-
-
-     def rewrite_wide_output(self, output: ir.Output):
-         assert(output.keys)
-
-         # only append keys that are not already in the output
-         suffix_keys = []
-         for key in output.keys:
-             if all([val is not key for _, val in output.aliases]):
-                 suffix_keys.append(key)
-
-         aliases: OrderedSet[Tuple[str, ir.Value]] = ordered_set()
-
-         # add the remaining args, unless it is already a key
-         for name, val in output.aliases:
-             if not isinstance(val, ir.Var) or val not in suffix_keys:
-                 aliases.add((name, val))
-
-         # add the keys to the output
-         for key in suffix_keys:
-             aliases.add((key.name, key))
-
-         # TODO - we are assuming that the Rel compiler will translate nullable lookups
-         # properly, returning a `Missing` if necessary, like this:
-         # (nested_192(_adult, _adult_name) or (not nested_192(_adult, _) and _adult_name = Missing)) and
-         return ir.Output(
-             output.engine,
-             aliases.frozen(),
-             output.keys,
-             output.annotations
-         )
-
-     # TODO: in the rel compiler, see if we can do this outer join
-     # 1. number of keys
-     # 2. each relation
-     # 3. each variable, starting with the keys
-     # 4. tag output with @arrow
-
-     # @arrow def output(_book, _book_title, _author_name):
-     #     rel_primitive_outer_join(#1, book_title, author_name, _book, _book_title, _author_name)
-     # def output(p, n, c):
-     #     rel_primitive_outer_join(#1, name, coolness, p, n, c)
-
      #--------------------------------------------------
      # Helpers
      #--------------------------------------------------