relationalai 0.12.9__py3-none-any.whl → 0.12.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. relationalai/__init__.py +9 -0
  2. relationalai/clients/__init__.py +2 -2
  3. relationalai/clients/local.py +571 -0
  4. relationalai/clients/snowflake.py +106 -83
  5. relationalai/debugging.py +5 -2
  6. relationalai/semantics/__init__.py +2 -2
  7. relationalai/semantics/internal/__init__.py +2 -2
  8. relationalai/semantics/internal/internal.py +24 -7
  9. relationalai/semantics/lqp/README.md +34 -0
  10. relationalai/semantics/lqp/constructors.py +2 -1
  11. relationalai/semantics/lqp/executor.py +13 -2
  12. relationalai/semantics/lqp/ir.py +4 -0
  13. relationalai/semantics/lqp/model2lqp.py +41 -2
  14. relationalai/semantics/lqp/passes.py +6 -4
  15. relationalai/semantics/lqp/rewrite/__init__.py +2 -0
  16. relationalai/semantics/lqp/rewrite/annotate_constraints.py +55 -0
  17. relationalai/semantics/lqp/rewrite/extract_keys.py +22 -3
  18. relationalai/semantics/lqp/rewrite/functional_dependencies.py +42 -10
  19. relationalai/semantics/lqp/rewrite/quantify_vars.py +14 -0
  20. relationalai/semantics/lqp/validators.py +3 -0
  21. relationalai/semantics/metamodel/builtins.py +5 -0
  22. relationalai/semantics/metamodel/rewrite/flatten.py +10 -4
  23. relationalai/semantics/metamodel/typer/typer.py +13 -0
  24. relationalai/semantics/metamodel/types.py +2 -1
  25. relationalai/semantics/reasoners/graph/core.py +44 -53
  26. relationalai/tools/debugger.py +4 -2
  27. relationalai/tools/qb_debugger.py +5 -3
  28. {relationalai-0.12.9.dist-info → relationalai-0.12.10.dist-info}/METADATA +2 -2
  29. {relationalai-0.12.9.dist-info → relationalai-0.12.10.dist-info}/RECORD +32 -29
  30. {relationalai-0.12.9.dist-info → relationalai-0.12.10.dist-info}/WHEEL +0 -0
  31. {relationalai-0.12.9.dist-info → relationalai-0.12.10.dist-info}/entry_points.txt +0 -0
  32. {relationalai-0.12.9.dist-info → relationalai-0.12.10.dist-info}/licenses/LICENSE +0 -0
relationalai/semantics/lqp/rewrite/annotate_constraints.py
@@ -0,0 +1,55 @@
+ from __future__ import annotations
+
+ from relationalai.semantics.metamodel import builtins
+ from relationalai.semantics.metamodel.ir import Node, Model, Require
+ from relationalai.semantics.metamodel.compiler import Pass
+ from relationalai.semantics.metamodel.rewrite.discharge_constraints import (
+     DischargeConstraintsVisitor
+ )
+ from relationalai.semantics.lqp.rewrite.functional_dependencies import (
+     is_valid_unique_constraint, normalized_fd
+ )
+
+
+ class AnnotateConstraints(Pass):
+     """
+     Extends the `DischargeConstraints` pass by discharging only those Require nodes
+     that cannot be declared as constraints in LQP.
+
+     More precisely, the pass annotates Require nodes depending on how they should be
+     treated when generating code:
+     * `@declare_constraint` if the Require represents a constraint that can be
+       declared in LQP.
+     * `@discharge` if the Require represents a constraint that should be dismissed
+       during code generation, namely when it cannot be declared in LQP and uses one
+       of the `unique`, `exclusive`, `anyof` builtins. These nodes are removed from
+       the IR model in the Flatten pass.
+     """
+
+     def rewrite(self, model: Model, options: dict = {}) -> Model:
+         return AnnotateConstraintsRewriter().walk(model)
+
+
+ class AnnotateConstraintsRewriter(DischargeConstraintsVisitor):
+     """
+     This visitor marks every node that should be removed from the IR model with the
+     `discharge` annotation.
+     """
+
+     def _should_be_declarable_constraint(self, node: Require) -> bool:
+         if not is_valid_unique_constraint(node):
+             return False
+         # Currently, we only declare non-structural functional dependencies.
+         fd = normalized_fd(node)
+         assert fd is not None  # already checked by is_valid_unique_constraint
+         return not fd.is_structural
+
+     def handle_require(self, node: Require, parent: Node):
+         if self._should_be_declarable_constraint(node):
+             return node.reconstruct(
+                 node.engine,
+                 node.domain,
+                 node.checks,
+                 node.annotations | [builtins.declare_constraint_annotation]
+             )
+
+         return super().handle_require(node, parent)
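
The decision this pass encodes is easy to state in isolation. Below is a minimal, hypothetical reduction of it — the `FD` and `Require` classes are stand-ins, not the real metamodel types: declare only valid unique constraints whose normalized functional dependency is non-structural; everything else falls through to the parent visitor's discharge behavior.

```python
# Hypothetical stand-ins for the metamodel types; this illustrates only the
# dispatch rule, not the real Pass machinery.
from dataclasses import dataclass, field
from typing import Optional

@dataclass
class FD:
    is_structural: bool

@dataclass
class Require:
    annotations: set = field(default_factory=set)

def annotate(node: Require, fd: Optional[FD]) -> str:
    # Mirrors AnnotateConstraintsRewriter: declare only valid, non-structural
    # functional dependencies; everything else falls through to discharge.
    if fd is not None and not fd.is_structural:
        return "declare_constraint"
    return "discharge"

assert annotate(Require(), FD(is_structural=False)) == "declare_constraint"
assert annotate(Require(), FD(is_structural=True)) == "discharge"
assert annotate(Require(), None) == "discharge"
```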
relationalai/semantics/lqp/rewrite/extract_keys.py
@@ -249,6 +249,24 @@ class ExtractKeysRewriter(Rewriter):
 
          return f.logical(tuple(outer_body), [])
 
+     def noop_logical(self, node: ir.Logical) -> bool:
+         # logicals that don't hoist variables are essentially filters, like lookups
+         if not node.hoisted:
+             return True
+         if len(node.body) != 1:
+             return False
+         inner = node.body[0]
+         if not isinstance(inner, (ir.Match, ir.Union)):
+             return False
+         outer_vars = helpers.hoisted_vars(node.hoisted)
+         inner_vars = helpers.hoisted_vars(inner.hoisted)
+         for v in outer_vars:
+             if v not in inner_vars:
+                 return False
+         # all vars hoisted by the outer logical are also
+         # hoisted by the inner Match/Union
+         return True
+
      # compute initial information that's needed for later steps. E.g., what's
      # nullable or not, do some output columns have a default value, etc.
      def preprocess_logical(self, node: ir.Logical, output_keys: Iterable[ir.Var]):
@@ -264,10 +282,11 @@ class ExtractKeysRewriter(Rewriter):
                  non_nullable_vars.update(vars)
                  top_level_tasks.add(task)
              elif isinstance(task, ir.Logical):
-                 # logicals that don't hoist variables are essentially filters like lookups
-                 if not task.hoisted:
+                 if self.noop_logical(task):
                      top_level_tasks.add(task)
-                     # TODO: should we do something about the inner variables?
+                     non_nullable_vars.update(helpers.hoisted_vars(task.hoisted))
+                     continue
+
                  for h in task.hoisted:
                      # Hoisted vars without a default are not nullable
                      if isinstance(h, ir.Var):
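
The new `noop_logical` predicate is self-contained enough to check on toy data. A minimal sketch with hypothetical stand-in classes (not the real `ir` nodes):

```python
# Toy illustration of the noop_logical test above; Match/Logical here are
# hypothetical stand-ins for the real IR nodes.
from dataclasses import dataclass
from typing import Tuple

@dataclass
class Match:
    hoisted: Tuple[str, ...] = ()

@dataclass
class Logical:
    hoisted: Tuple[str, ...] = ()
    body: tuple = ()

def noop_logical(node: Logical) -> bool:
    if not node.hoisted:            # pure filter, nothing escapes
        return True
    if len(node.body) != 1 or not isinstance(node.body[0], Match):
        return False
    # the wrapper is a no-op only if the inner Match already hoists everything
    return all(v in node.body[0].hoisted for v in node.hoisted)

assert noop_logical(Logical())                                  # filter
assert noop_logical(Logical(("x",), (Match(("x", "y")),)))      # pass-through
assert not noop_logical(Logical(("x", "z"), (Match(("x",)),)))  # z escapes
```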
relationalai/semantics/lqp/rewrite/functional_dependencies.py
@@ -2,7 +2,7 @@ from __future__ import annotations
  from typing import Optional, Sequence
  from relationalai.semantics.internal import internal
  from relationalai.semantics.metamodel.ir import (
-     Require, Logical, Var, Relation, Lookup, ScalarType
+     Node, Require, Logical, Var, Relation, Lookup, ScalarType
  )
  from relationalai.semantics.metamodel import builtins
 
@@ -130,14 +130,16 @@ def _split_unique_require_node(node: Require) -> Optional[tuple[list[Var], list[
          return None
 
      # collect variables
-     all_vars: set[Var] = set()
+     all_vars: list[Var] = []
      for lookup in guard:
          for arg in lookup.args:
              if not isinstance(arg, Var):
                  return None
-             all_vars.add(arg)
+             if arg in all_vars:
+                 continue
+             all_vars.append(arg)
 
-     unique_vars: set[Var] = set()
+     unique_vars: list[Var] = []
      if len(unique_atom.args) != 1:
          return None
      if not isinstance(unique_atom.args[0], (internal.TupleArg, tuple)):
@@ -147,10 +149,12 @@ def _split_unique_require_node(node: Require) -> Optional[tuple[list[Var], list[
      for arg in unique_atom.args[0]:
          if not isinstance(arg, Var):
              return None
-         unique_vars.add(arg)
+         if arg in unique_vars:
+             return None
+         unique_vars.append(arg)
 
      # check that unique vars are a subset of other vars
-     if not unique_vars.issubset(all_vars):
+     if not set(unique_vars).issubset(set(all_vars)):
          return None
 
      return list(all_vars), list(unique_vars), guard
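
The switch from `set` to `list` in both collections trades O(1) membership tests for deterministic, first-occurrence ordering of the collected variables. The same dedup idiom in isolation:

```python
# First-occurrence dedup: unlike a set, it preserves the order in which
# variables appear in the guard, so downstream code sees a stable ordering.
def ordered_dedup(items):
    seen = []
    for item in items:
        if item in seen:
            continue
        seen.append(item)
    return seen

assert ordered_dedup(["y", "x", "y", "z"]) == ["y", "x", "z"]  # a set would lose this order
```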
@@ -218,10 +222,10 @@ class FunctionalDependency:
      - `X` and `Y` are disjoint and covering sets of variables used in `φ`
      """
      def __init__(self, guard: Sequence[Lookup], keys: Sequence[Var], values: Sequence[Var]):
-         self.guard = frozenset(guard)
-         self.keys = frozenset(keys)
-         self.values = frozenset(values)
-         assert self.keys.isdisjoint(self.values), "Keys and values must be disjoint"
+         self.guard = tuple(guard)
+         self.keys = tuple(keys)
+         self.values = tuple(values)
+         assert set(self.keys).isdisjoint(set(self.values)), "Keys and values must be disjoint"
 
          # for structural fd check
          self._is_structural:bool = False
@@ -280,3 +284,31 @@ class FunctionalDependency:
              raise ValueError("Functional dependency is not structural")
          assert self._structural_rank is not None
          return self._structural_rank
+
+     def __str__(self) -> str:
+         guard_str = " ∧ ".join([str(atom) for atom in self.guard]).strip()
+         keys_str = ", ".join([str(var) for var in self.keys]).strip()
+         values_str = ", ".join([str(var) for var in self.values]).strip()
+         return f"{guard_str}: {{{keys_str}}} -> {{{values_str}}}"
+
+ def contains_only_declarable_constraints(node: Node) -> bool:
+     """
+     Checks whether the input `Logical` node contains only `Require` nodes annotated
+     with `declare_constraint`.
+     """
+     if not isinstance(node, Logical):
+         return False
+     if len(node.body) == 0:
+         return False
+     for task in node.body:
+         if not isinstance(task, Require):
+             return False
+         if not is_declarable_constraint(task):
+             return False
+     return True
+
+ def is_declarable_constraint(node: Require) -> bool:
+     """
+     Checks whether the input `Require` node is annotated with `declare_constraint`.
+     """
+     return builtins.declare_constraint_annotation in node.annotations
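
One detail worth calling out in the new `__str__`: the triple braces emit literal set braces around the variable lists. A quick standalone check:

```python
# Doubled braces in an f-string render literally, so the third brace pair
# produces the {…} set notation around each variable list.
guard_str, keys_str, values_str = "t(x, y)", "x", "y"
assert f"{guard_str}: {{{keys_str}}} -> {{{values_str}}}" == "t(x, y): {x} -> {y}"
```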
relationalai/semantics/lqp/rewrite/quantify_vars.py
@@ -5,6 +5,7 @@ from relationalai.semantics.metamodel.compiler import Pass
  from relationalai.semantics.metamodel.visitor import Visitor, Rewriter
  from relationalai.semantics.metamodel.util import OrderedSet, ordered_set
  from typing import Optional, Any, Tuple, Iterable
+ from .functional_dependencies import contains_only_declarable_constraints
 
  class QuantifyVars(Pass):
      """
@@ -67,6 +68,7 @@ class VarScopeInfo(Visitor):
      IGNORED_NODES = (ir.Type,
                       ir.Var, ir.Literal, ir.Relation, ir.Field,
                       ir.Default, ir.Output, ir.Update, ir.Aggregate,
+                      ir.Check, ir.Require,
                       ir.Annotation, ir.Rank)
 
      def __init__(self):
@@ -74,6 +76,9 @@ class VarScopeInfo(Visitor):
          self._vars_in_scope = {}
 
      def leave(self, node: ir.Node, parent: Optional[ir.Node]=None):
+         if contains_only_declarable_constraints(node):
+             return node
+
          if isinstance(node, ir.Lookup):
              self._record(node, helpers.vars(node.args))
 
@@ -189,6 +194,9 @@ class FindQuantificationNodes(Visitor):
          self.node_quantifies_vars = {}
 
      def enter(self, node: ir.Node, parent: Optional[ir.Node]=None) -> "Visitor":
+         if contains_only_declarable_constraints(node):
+             return self
+
          if isinstance(node, (ir.Logical, ir.Not)):
              ignored_vars = _ignored_vars(node)
              self._handled_vars.update(ignored_vars)
@@ -202,6 +210,9 @@ class FindQuantificationNodes(Visitor):
          return self
 
      def leave(self, node: ir.Node, parent: Optional[ir.Node]=None) -> ir.Node:
+         if contains_only_declarable_constraints(node):
+             return node
+
          if isinstance(node, (ir.Logical, ir.Not)):
              ignored_vars = _ignored_vars(node)
              self._handled_vars.difference_update(ignored_vars)
@@ -221,6 +232,9 @@ class QuantifyVarsRewriter(Rewriter):
          self.node_quantifies_vars = quant.node_quantifies_vars
 
      def handle_logical(self, node: ir.Logical, parent: ir.Node, ctx:Optional[Any]=None) -> ir.Logical:
+         if contains_only_declarable_constraints(node):
+             return node
+
          new_body = self.walk_list(node.body, node)
 
          if node.id in self.node_quantifies_vars:
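
The same guard, `contains_only_declarable_constraints`, now short-circuits every visitor entry point so that declared-constraint subtrees pass through quantification untouched. A minimal sketch of that skip-subtree pattern, with a hypothetical `Node` stand-in rather than the real IR class:

```python
# Recursive walker that returns a subtree unchanged when a predicate says
# to leave it alone — the shape of the guards added above.
from dataclasses import dataclass
from typing import Callable, Tuple

@dataclass(frozen=True)
class Node:
    name: str
    children: Tuple["Node", ...] = ()

def rewrite(node: Node, skip: Callable[[Node], bool]) -> Node:
    if skip(node):                    # e.g. contains_only_declarable_constraints
        return node                   # leave the whole subtree as-is
    kids = tuple(rewrite(c, skip) for c in node.children)
    return Node(node.name.upper(), kids)  # stand-in for the real rewrite

tree = Node("a", (Node("keep"), Node("b")))
out = rewrite(tree, skip=lambda n: n.name == "keep")
assert out.children[0].name == "keep" and out.children[1].name == "B"
```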
relationalai/semantics/lqp/validators.py
@@ -21,6 +21,9 @@ CompilableType = Union[
      # Effects
      ir.Output,
      ir.Update,
+
+     # Constraints
+     ir.Require,
  ]
 
  # Preconditions
relationalai/semantics/metamodel/builtins.py
@@ -524,6 +524,11 @@ recursion_config_annotation = f.annotation(recursion_config, [])
  discharged = f.relation("discharged", [])
  discharged_annotation = f.annotation(discharged, [])
 
+ # Require nodes with this annotation will be kept in the final metamodel to be emitted
+ # as constraint declarations (LQP)
+ declare_constraint = f.relation("declare_constraint", [])
+ declare_constraint_annotation = f.annotation(declare_constraint, [])
+
  #
  # Aggregations
  #
relationalai/semantics/metamodel/rewrite/flatten.py
@@ -5,7 +5,7 @@ from typing import Tuple
 
  from relationalai.semantics.metamodel import builtins, ir, factory as f, helpers
  from relationalai.semantics.metamodel.compiler import Pass, group_tasks
-  from relationalai.semantics.metamodel.util import OrderedSet, ordered_set, NameCache
+  from relationalai.semantics.metamodel.util import NameCache, OrderedSet, ordered_set
  from relationalai.semantics.metamodel import dependency
  from relationalai.semantics.metamodel.typer.typer import to_type
 
@@ -419,9 +419,15 @@ class Flatten(Pass):
      def handle_require(self, req: ir.Require, ctx: Context):
          # only extract the domain if it is a somewhat complex Logical and there's more than
          # one check, otherwise insert it straight into all checks
-         domain = req.domain
-         # only generate logic for not discharged requires
-         if builtins.discharged_annotation not in req.annotations:
+         if builtins.discharged_annotation in req.annotations:
+             # remove discharged Requires
+             return Flatten.HandleResult(None)
+         elif builtins.declare_constraint_annotation in req.annotations:
+             # leave Requires that are declared constraints
+             return Flatten.HandleResult(req)
+         else:
+             # generate logic for remaining requires
+             domain = req.domain
              if len(req.checks) > 1 and isinstance(domain, ir.Logical) and len(domain.body) > 1:
                  body = OrderedSet.from_iterable(domain.body)
                  vars = helpers.hoisted_vars(domain.hoisted)
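
`handle_require` now dispatches three ways on the annotations instead of two. Reduced to a toy function — the string markers below are hypothetical stand-ins for the builtin annotation objects:

```python
# The three-way dispatch of handle_require, in miniature.
DISCHARGED, DECLARE = "discharged", "declare_constraint"

def handle_require(annotations: set):
    if DISCHARGED in annotations:
        return None        # drop the Require from the model
    if DECLARE in annotations:
        return "keep"      # pass it through for LQP constraint emission
    return "lower"         # flatten into ordinary checking logic

assert handle_require({DISCHARGED}) is None
assert handle_require({DECLARE}) == "keep"
assert handle_require(set()) == "lower"
```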
relationalai/semantics/metamodel/typer/typer.py
@@ -156,6 +156,10 @@ def type_matches(actual:ir.Type, expected:ir.Type, allow_expected_parents=False)
      if actual == types.Any or expected == types.Any:
          return True
 
+     # any entity matches any entity (surprise surprise!)
+     if extends_any_entity(expected) and not is_primitive(actual):
+         return True
+
      # all decimals match across each other
      if types.is_decimal(actual) and types.is_decimal(expected):
          return True
@@ -288,6 +292,15 @@ def is_base_primitive(type:ir.Type) -> bool:
  def is_primitive(type:ir.Type) -> bool:
      return to_base_primitive(type) is not None
 
+ def extends_any_entity(type:ir.Type) -> bool:
+     if type == types.AnyEntity:
+         return True
+     if isinstance(type, ir.ScalarType):
+         for parent in type.super_types:
+             if extends_any_entity(parent):
+                 return True
+     return False
+
  def invalid_type(type:ir.Type) -> bool:
      if isinstance(type, ir.UnionType):
          # if there are multiple primitives, or a primitive and a non-primitive
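
The new `extends_any_entity` is a plain recursive walk up `super_types`. A standalone version with a minimal `ScalarType` stand-in behaves the same way:

```python
# Standalone version of the supertype walk: climb super_types until
# AnyEntity is found. ScalarType here is a stand-in for ir.ScalarType.
from dataclasses import dataclass
from typing import Tuple

@dataclass(frozen=True)
class ScalarType:
    name: str
    super_types: Tuple["ScalarType", ...] = ()

ANY_ENTITY = ScalarType("AnyEntity")

def extends_any_entity(t: ScalarType) -> bool:
    if t == ANY_ENTITY:
        return True
    return any(extends_any_entity(p) for p in t.super_types)

person = ScalarType("Person", (ANY_ENTITY,))
employee = ScalarType("Employee", (person,))
assert extends_any_entity(employee)               # transitively via Person
assert not extends_any_entity(ScalarType("Int64"))
```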
relationalai/semantics/metamodel/types.py
@@ -80,6 +80,7 @@ GenericDecimal = ir.ScalarType("GenericDecimal", util.frozen())
  #
  Null = ir.ScalarType("Null", util.frozen())
  Any = ir.ScalarType("Any", util.frozen())
+ AnyEntity = ir.ScalarType("AnyEntity", util.frozen())
  Hash = ir.ScalarType("Hash", util.frozen())
  String = ir.ScalarType("String", util.frozen())
  Int64 = ir.ScalarType("Int64")
@@ -144,7 +145,7 @@ def is_null(t: ir.Type) -> bool:
 
  def is_abstract_type(t: ir.Type) -> bool:
      if isinstance(t, ir.ScalarType):
-         return t in [Any, Number, GenericDecimal]
+         return t in [Any, AnyEntity, Number, GenericDecimal]
      elif isinstance(t, ir.ListType):
          return is_abstract_type(t.element_type)
      elif isinstance(t, ir.TupleType):
relationalai/semantics/reasoners/graph/core.py
@@ -20,7 +20,7 @@ from relationalai.semantics import (
      count, sum, avg,
  )
  from relationalai.docutils import include_in_docs
-  from relationalai.semantics.internal import annotations
+  from relationalai.semantics.internal import annotations, AnyEntity
  from relationalai.semantics.internal import internal as builder_internal  # For primitive graph algorithms.
  from relationalai.semantics.std.math import abs, isnan, isinf, maximum, natural_log, sqrt
  from relationalai.semantics.std.integers import int64
@@ -158,7 +158,7 @@ class Graph():
              f"but is a `{type(weighted).__name__}`."
          )
          assert isinstance(model, Model), (
-             "The `model` argument must be a `builder.Model`, "
+             "The `model` argument must be a `relationalai.semantics.Model`, "
              f"but is a `{type(model).__name__}`."
          )
          self.directed = directed
@@ -355,7 +355,7 @@ class Graph():
      @cached_property
      def Node(self) -> Concept:
          """Lazily define and cache the self.Node concept."""
-         _Node = self._user_node_concept or self._model.Concept(self._NodeConceptStr)
+         _Node = self._user_node_concept or self._model.Concept(self._NodeConceptStr, extends=[AnyEntity])
          _Node.annotate(annotations.track("graphs", "Node"))
          return _Node
 
@@ -2304,14 +2304,14 @@ class Graph():
          # neighbor_a_rel = self._neighbor_of(node_subset_from)
          #
          # domain_w = Relationship(f"{{node:{self._NodeConceptStr}}} is the domain of `w` in `common_neighbor(u, v, w)`")
-         # node_x, node_y = graph.Node.ref(), graph.Node.ref()
-         # where(neighbor_a_rel(node_x, node_y)).define(domain_w(node_y))
+         # where(neighbor_a_rel(node_a, node_b)).define(domain_w(node_b))
          # neighbor_b_rel = self._neighbor_of(domain_w)
          #
          # node_constraint = []
          #
-         # # need to reverse the args of `neighbor_b_rel()`, due to its domain constraint
-         # # relies on the symmetry of `neighbor`
+         # # For this case only, we reverse the args of `neighbor_b_rel()`, which
+         # # is allowed by the symmetry of `neighbor`, in order to take advantage
+         # # of the domain constraint on `neighbor_b_rel()`.
          # where(
          #     *node_constraint,
          #     neighbor_a_rel(node_a, neighbor_node),
@@ -2748,15 +2748,15 @@ class Graph():
          if node_subset is None:
              # No constraint - use cached count_inneighbor relationship and all nodes
              count_inneighbor_rel = self._count_inneighbor
-             node_set = self.Node
+             node_constraint = []
          else:
              # Constrained to nodes in the subset - use constrained count_inneighbor relationship
              count_inneighbor_rel = self._count_inneighbor_of(node_subset)
-             node_set = node_subset
+             node_constraint = [node_subset(self.Node)]
 
          # Apply the same indegree logic for both cases
          where(
-             node_set(self.Node),
+             *node_constraint,
              _indegree := where(count_inneighbor_rel(self.Node, Integer)).select(Integer) | 0,
          ).define(_indegree_rel(self.Node, _indegree))
 
@@ -2933,15 +2933,15 @@ class Graph():
          if node_subset is None:
              # No constraint - use cached count_outneighbor relationship and all nodes
              count_outneighbor_rel = self._count_outneighbor
-             node_set = self.Node
+             node_constraint = []
          else:
              # Constrained to nodes in the subset - use constrained count_outneighbor relationship
              count_outneighbor_rel = self._count_outneighbor_of(node_subset)
-             node_set = node_subset
+             node_constraint = [node_subset(self.Node)]
 
          # Apply the same outdegree logic for both cases
          where(
-             node_set(self.Node),
+             *node_constraint,
              _outdegree := where(count_outneighbor_rel(self.Node, Integer)).select(Integer) | 0,
          ).define(_outdegree_rel(self.Node, _outdegree))
 
@@ -3099,12 +3099,12 @@ class Graph():
          node, neighbor, weight = self.Node.ref(), self.Node.ref(), Float.ref()
 
          if node_subset is None:
-             node_constraint = node  # No constraint on nodes.
+             node_constraint = []  # No constraint on nodes.
          else:
-             node_constraint = node_subset(node)  # Nodes constrained to given subset.
+             node_constraint = [node_subset(node)]  # Nodes constrained to given subset.
 
          where(
-             node_constraint,
+             *node_constraint,
              weighted_degree_no_loops := sum(neighbor, weight).per(node).where(
                  self._weight(node, neighbor, weight),
                  node != neighbor,
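
All of these degree-style hunks apply the same refactor: `node_constraint` becomes a (possibly empty) list that is splatted into `where(...)`, so the unconstrained case contributes no clause instead of a redundant `self.Node(self.Node)` self-join. A sketch of the idiom with a stub `where` (the real one builds a query fragment):

```python
# An empty list splats into nothing, so `where(*node_constraint, ...)`
# adds a clause only when a subset is given.
def where(*clauses):
    return list(clauses)  # stub; stands in for the query builder

def degree_clauses(node_subset=None):
    node_constraint = [] if node_subset is None else [f"{node_subset}(Node)"]
    return where(*node_constraint, "count_neighbor(Node, Integer)")

assert degree_clauses() == ["count_neighbor(Node, Integer)"]
assert degree_clauses("Subset") == ["Subset(Node)", "count_neighbor(Node, Integer)"]
```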
@@ -3172,7 +3172,7 @@ class Graph():
          >>> define(n1, n2, n3)
          >>> define(
          ...     Edge.new(src=n1, dst=n2, weight=1.0),
-         ...     Edge.new(src=n2, dst=n1, weight=-1.0),
+         ...     Edge.new(src=n2, dst=n1, weight=0.0),
          ...     Edge.new(src=n2, dst=n3, weight=1.0),
          ... )
          >>>
@@ -3186,7 +3186,7 @@ class Graph():
          ... ).inspect()
          ▰▰▰▰ Setup complete
             id  node_weighted_indegree
-         0   1                    -1.0
+         0   1                     0.0
          1   2                     1.0
          2   3                     1.0
          >>>
@@ -3218,9 +3218,6 @@ class Graph():
              weighted_outdegree
 
          """
-         # TODO: It looks like the weights in the example in the docstring above
-         # are holdovers from a version of the library that did not disallow
-         # negative weights. Need to update the example to use only non-negative weights.
          if of is None:
              return self._weighted_indegree
          else:
@@ -3251,20 +3248,15 @@ class Graph():
          # Choose the appropriate node set
          if node_subset is None:
              # No constraint - use all nodes
-             node_set = self.Node
+             node_constraint = []
          else:
              # Constrained to nodes in the subset
-             node_set = node_subset
-             # TODO: In a future cleanup pass, replace `node_set` with a `node_constraint`
-             # that replaces the `node_set(self.Node)` in the where clause below,
-             # and generates only `self.Node` (rather than `self.Node(self.Node)`)
-             # in the `subset is None` case. This applies to a couple other
-             # degree-of type relations as well.
+             node_constraint = [node_subset(self.Node)]
 
          # Apply the weighted indegree logic for both cases
          src, inweight = self.Node.ref(), Float.ref()
          where(
-             node_set(self.Node),
+             *node_constraint,
              _weighted_indegree := sum(src, inweight).per(self.Node).where(self._weight(src, self.Node, inweight)) | 0.0,
          ).define(_weighted_indegree_rel(self.Node, _weighted_indegree))
 
@@ -3324,7 +3316,7 @@ class Graph():
          >>> define(n1, n2, n3)
          >>> define(
          ...     Edge.new(src=n1, dst=n2, weight=1.0),
-         ...     Edge.new(src=n2, dst=n1, weight=-1.0),
+         ...     Edge.new(src=n2, dst=n1, weight=0.0),
          ...     Edge.new(src=n2, dst=n3, weight=1.0),
          ... )
          >>>
@@ -3339,7 +3331,7 @@ class Graph():
          ▰▰▰▰ Setup complete
             id  node_weighted_outdegree
          0   1                     1.0
-         1   2                     0.0
+         1   2                     1.0
          2   3                     0.0
          >>>
          >>> # 4. Use 'of' parameter to constrain the set of nodes to compute weighted outdegree of
@@ -3355,7 +3347,7 @@ class Graph():
          ▰▰▰▰ Setup complete
             id  node_weighted_outdegree
          0   1                     1.0
-         1   2                     0.0
+         1   2                     1.0
 
          Notes
          -----
@@ -3404,15 +3396,15 @@ class Graph():
          # Choose the appropriate node set
          if node_subset is None:
              # No constraint - use all nodes
-             node_set = self.Node
+             node_constraint = []
          else:
              # Constrained to nodes in the subset
-             node_set = node_subset
+             node_constraint = [node_subset(self.Node)]
 
          # Apply the weighted outdegree logic for both cases
          dst, outweight = self.Node.ref(), Float.ref()
          where(
-             node_set(self.Node),
+             *node_constraint,
              _weighted_outdegree := sum(dst, outweight).per(self.Node).where(self._weight(self.Node, dst, outweight)) | 0.0,
          ).define(_weighted_outdegree_rel(self.Node, _weighted_outdegree))
 
@@ -4537,12 +4529,12 @@ class Graph():
          _triangle_count_rel = self._model.Relationship(f"{{node:{self._NodeConceptStr}}} belongs to {{count:Integer}} triangles")
 
          if node_subset is None:
-             node_constraint = self.Node  # No constraint on nodes.
+             node_constraint = []  # No constraint on nodes.
          else:
-             node_constraint = node_subset(self.Node)  # Nodes constrained to given subset.
+             node_constraint = [node_subset(self.Node)]  # Nodes constrained to given subset.
 
          where(
-             node_constraint,
+             *node_constraint,
              _count := self._nonzero_triangle_count_fragment(self.Node) | 0
          ).define(_triangle_count_rel(self.Node, _count))
 
@@ -4819,16 +4811,16 @@ class Graph():
          if node_subset is None:
              degree_no_self_rel = self._degree_no_self
              triangle_count_rel = self._triangle_count
-             node_constraint = node  # No constraint on nodes.
+             node_constraint = []  # No constraint on nodes.
          else:
              degree_no_self_rel = self._degree_no_self_of(node_subset)
              triangle_count_rel = self._triangle_count_of(node_subset)
-             node_constraint = node_subset(node)  # Nodes constrained to given subset.
+             node_constraint = [node_subset(node)]  # Nodes constrained to given subset.
 
          degree_no_self = Integer.ref()
          triangle_count = Integer.ref()
          where(
-             node_constraint,
+             *node_constraint,
              _lcc := where(
                  degree_no_self_rel(node, degree_no_self),
                  triangle_count_rel(node, triangle_count),
@@ -4866,12 +4858,12 @@ class Graph():
          node, neighbor = self.Node.ref(), self.Node.ref()
 
          if node_subset is None:
-             node_constraint = node  # No constraint on nodes.
+             node_constraint = []  # No constraint on nodes.
          else:
-             node_constraint = node_subset(node)  # Nodes constrained to given subset.
+             node_constraint = [node_subset(node)]  # Nodes constrained to given subset.
 
          where(
-             node_constraint,
+             *node_constraint,
              _dns := count(neighbor).per(node).where(self._no_loop_edge(node, neighbor)) | 0,
          ).define(_degree_no_self_rel(node, _dns))
 
@@ -7296,7 +7288,7 @@ class Graph():
 
          # TODO: Optimization opportunity. In some of the cases below
          # (unweighted in particular), the node_constraint is redundant with
-         # the constraints baked into the _count_outneigherbor_of and
+         # the constraints baked into the _count_outneighbor_of and
          # _outneighbor_of relationships. The join with node_constraint
          # could be eliminated in those cases. Possibly also relevant to
          # other domain-constrained relations.
7366
7358
  # Define cosine similarity logic for both weighted and unweighted cases.
7367
7359
  if not self.weighted:
7368
7360
  # Unweighted case: use count of common outneighbors.
7369
- count_outneighor_u, count_outneighor_v = Integer.ref(), Integer.ref()
7361
+ count_outneighbor_u, count_outneighbor_v = Integer.ref(), Integer.ref()
7370
7362
  common_outneighbor_node = self.Node.ref()
7371
- score = Float.ref()
7372
7363
 
7373
7364
  where(
7374
7365
  *node_constraints,
7375
- count_outneighbor_u_rel(node_u, count_outneighor_u),
7376
- count_outneighbor_v_rel(node_v, count_outneighor_v),
7366
+ count_outneighbor_u_rel(node_u, count_outneighbor_u),
7367
+ count_outneighbor_v_rel(node_v, count_outneighbor_v),
7377
7368
  c_common := count(common_outneighbor_node).per(node_u, node_v).where(
7378
7369
  outneighbor_u_rel(node_u, common_outneighbor_node),
7379
7370
  outneighbor_v_rel(node_v, common_outneighbor_node),
7380
7371
  ),
7381
- score := c_common / sqrt(count_outneighor_u * count_outneighor_v),
7372
+ score := c_common / sqrt(count_outneighbor_u * count_outneighbor_v),
7382
7373
  ).define(
7383
7374
  _cosine_similarity_rel(node_u, node_v, score)
7384
7375
  )
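
For reference, the unweighted branch computes the usual set-based cosine similarity, |out(u) ∩ out(v)| / sqrt(|out(u)| · |out(v)|). A plain-Python restatement of the same formula:

```python
# The unweighted cosine similarity computed above, restated over plain sets.
from math import isclose, sqrt

def cosine_similarity(out_u: set, out_v: set) -> float:
    common = len(out_u & out_v)
    return common / sqrt(len(out_u) * len(out_v))

assert isclose(cosine_similarity({1, 2, 3}, {2, 3, 4}), 2 / 3)
```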
@@ -8186,13 +8177,13 @@ class Graph():
          neighbor_node = self.Node.ref()
          if node_subset is not None:
              neighbor_rel = self._neighbor_of(node_subset)
-             node_constraint = node_subset(self.Node)
+             node_constraint = [node_subset(self.Node)]
          else:
              neighbor_rel = self._neighbor
-             node_constraint = self.Node
+             node_constraint = []
 
          where(
-             node_constraint,
+             *node_constraint,
              not_(neighbor_rel(self.Node, neighbor_node))
          ).define(_isolated_node_rel(self.Node))
 
relationalai/tools/debugger.py
@@ -3,6 +3,8 @@ import re
  from nicegui import ui
  import json
 
+ from relationalai.debugging import DEBUG_LOG_FILE
+
 
  last_mod_time = None
  current_json_objects = []
@@ -141,12 +143,12 @@ def poll():
      global current_json_objects
      # Check the last modification time of the file
      try:
-         mod_time = os.path.getmtime('debug.jsonl')
+         mod_time = os.path.getmtime(DEBUG_LOG_FILE)
          if last_mod_time is None or mod_time > last_mod_time:
              last_mod_time = mod_time
              # File has changed, read and parse the new content
              new_objects = []
-             with open('debug.jsonl', 'r') as file:
+             with open(DEBUG_LOG_FILE, 'r') as file:
                  for line in file:
                      try:
                          # Parse each JSON object and add it to the list
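
The poll loop's structure is unchanged; only the hard-coded 'debug.jsonl' path moves behind `DEBUG_LOG_FILE`. A self-contained sketch of the same mtime-gated reload, where `LOG_PATH` is a placeholder for the real constant:

```python
# Re-read the JSONL log only when its modification time advances.
import json
import os

LOG_PATH = "debug.jsonl"  # placeholder; the tool uses DEBUG_LOG_FILE
last_mod_time = None

def poll_jsonl():
    global last_mod_time
    try:
        mod_time = os.path.getmtime(LOG_PATH)
    except FileNotFoundError:
        return None                  # nothing to read yet
    if last_mod_time is not None and mod_time <= last_mod_time:
        return None                  # unchanged since last poll
    last_mod_time = mod_time
    objects = []
    with open(LOG_PATH, "r") as file:
        for line in file:
            try:
                objects.append(json.loads(line))
            except json.JSONDecodeError:
                pass                 # skip partially written lines
    return objects
```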