relationalai 1.0.0a3__py3-none-any.whl → 1.0.0a5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (118)
  1. relationalai/config/config.py +47 -21
  2. relationalai/config/connections/__init__.py +5 -2
  3. relationalai/config/connections/duckdb.py +2 -2
  4. relationalai/config/connections/local.py +31 -0
  5. relationalai/config/connections/snowflake.py +0 -1
  6. relationalai/config/external/raiconfig_converter.py +235 -0
  7. relationalai/config/external/raiconfig_models.py +202 -0
  8. relationalai/config/external/utils.py +31 -0
  9. relationalai/config/shims.py +1 -0
  10. relationalai/semantics/__init__.py +10 -8
  11. relationalai/semantics/backends/sql/sql_compiler.py +1 -4
  12. relationalai/semantics/experimental/__init__.py +0 -0
  13. relationalai/semantics/experimental/builder.py +295 -0
  14. relationalai/semantics/experimental/builtins.py +154 -0
  15. relationalai/semantics/frontend/base.py +67 -42
  16. relationalai/semantics/frontend/core.py +34 -6
  17. relationalai/semantics/frontend/front_compiler.py +209 -37
  18. relationalai/semantics/frontend/pprint.py +6 -2
  19. relationalai/semantics/metamodel/__init__.py +7 -0
  20. relationalai/semantics/metamodel/metamodel.py +2 -0
  21. relationalai/semantics/metamodel/metamodel_analyzer.py +58 -16
  22. relationalai/semantics/metamodel/pprint.py +6 -1
  23. relationalai/semantics/metamodel/rewriter.py +11 -7
  24. relationalai/semantics/metamodel/typer.py +116 -41
  25. relationalai/semantics/reasoners/__init__.py +11 -0
  26. relationalai/semantics/reasoners/graph/__init__.py +35 -0
  27. relationalai/semantics/reasoners/graph/core.py +9028 -0
  28. relationalai/semantics/std/__init__.py +30 -10
  29. relationalai/semantics/std/aggregates.py +641 -12
  30. relationalai/semantics/std/common.py +146 -13
  31. relationalai/semantics/std/constraints.py +71 -1
  32. relationalai/semantics/std/datetime.py +904 -21
  33. relationalai/semantics/std/decimals.py +143 -2
  34. relationalai/semantics/std/floats.py +57 -4
  35. relationalai/semantics/std/integers.py +98 -4
  36. relationalai/semantics/std/math.py +857 -35
  37. relationalai/semantics/std/numbers.py +216 -20
  38. relationalai/semantics/std/re.py +213 -5
  39. relationalai/semantics/std/strings.py +437 -44
  40. relationalai/shims/executor.py +60 -52
  41. relationalai/shims/fixtures.py +85 -0
  42. relationalai/shims/helpers.py +26 -2
  43. relationalai/shims/hoister.py +28 -9
  44. relationalai/shims/mm2v0.py +204 -173
  45. relationalai/tools/cli/cli.py +192 -10
  46. relationalai/tools/cli/components/progress_reader.py +1 -1
  47. relationalai/tools/cli/docs.py +394 -0
  48. relationalai/tools/debugger.py +11 -4
  49. relationalai/tools/qb_debugger.py +435 -0
  50. relationalai/tools/typer_debugger.py +1 -2
  51. relationalai/util/dataclasses.py +3 -5
  52. relationalai/util/docutils.py +1 -2
  53. relationalai/util/error.py +2 -5
  54. relationalai/util/python.py +23 -0
  55. relationalai/util/runtime.py +1 -2
  56. relationalai/util/schema.py +2 -4
  57. relationalai/util/structures.py +4 -2
  58. relationalai/util/tracing.py +8 -2
  59. {relationalai-1.0.0a3.dist-info → relationalai-1.0.0a5.dist-info}/METADATA +8 -5
  60. {relationalai-1.0.0a3.dist-info → relationalai-1.0.0a5.dist-info}/RECORD +118 -95
  61. {relationalai-1.0.0a3.dist-info → relationalai-1.0.0a5.dist-info}/WHEEL +1 -1
  62. v0/relationalai/__init__.py +1 -1
  63. v0/relationalai/clients/client.py +52 -18
  64. v0/relationalai/clients/exec_txn_poller.py +122 -0
  65. v0/relationalai/clients/local.py +23 -8
  66. v0/relationalai/clients/resources/azure/azure.py +36 -11
  67. v0/relationalai/clients/resources/snowflake/__init__.py +4 -4
  68. v0/relationalai/clients/resources/snowflake/cli_resources.py +12 -1
  69. v0/relationalai/clients/resources/snowflake/direct_access_resources.py +124 -100
  70. v0/relationalai/clients/resources/snowflake/engine_service.py +381 -0
  71. v0/relationalai/clients/resources/snowflake/engine_state_handlers.py +35 -29
  72. v0/relationalai/clients/resources/snowflake/error_handlers.py +43 -2
  73. v0/relationalai/clients/resources/snowflake/snowflake.py +277 -179
  74. v0/relationalai/clients/resources/snowflake/use_index_poller.py +8 -0
  75. v0/relationalai/clients/types.py +5 -0
  76. v0/relationalai/errors.py +19 -1
  77. v0/relationalai/semantics/lqp/algorithms.py +173 -0
  78. v0/relationalai/semantics/lqp/builtins.py +199 -2
  79. v0/relationalai/semantics/lqp/executor.py +68 -37
  80. v0/relationalai/semantics/lqp/ir.py +28 -2
  81. v0/relationalai/semantics/lqp/model2lqp.py +215 -45
  82. v0/relationalai/semantics/lqp/passes.py +13 -658
  83. v0/relationalai/semantics/lqp/rewrite/__init__.py +12 -0
  84. v0/relationalai/semantics/lqp/rewrite/algorithm.py +385 -0
  85. v0/relationalai/semantics/lqp/rewrite/constants_to_vars.py +70 -0
  86. v0/relationalai/semantics/lqp/rewrite/deduplicate_vars.py +104 -0
  87. v0/relationalai/semantics/lqp/rewrite/eliminate_data.py +108 -0
  88. v0/relationalai/semantics/lqp/rewrite/extract_keys.py +25 -3
  89. v0/relationalai/semantics/lqp/rewrite/period_math.py +77 -0
  90. v0/relationalai/semantics/lqp/rewrite/quantify_vars.py +65 -31
  91. v0/relationalai/semantics/lqp/rewrite/unify_definitions.py +317 -0
  92. v0/relationalai/semantics/lqp/utils.py +11 -1
  93. v0/relationalai/semantics/lqp/validators.py +14 -1
  94. v0/relationalai/semantics/metamodel/builtins.py +2 -1
  95. v0/relationalai/semantics/metamodel/compiler.py +2 -1
  96. v0/relationalai/semantics/metamodel/dependency.py +12 -3
  97. v0/relationalai/semantics/metamodel/executor.py +11 -1
  98. v0/relationalai/semantics/metamodel/factory.py +2 -2
  99. v0/relationalai/semantics/metamodel/helpers.py +7 -0
  100. v0/relationalai/semantics/metamodel/ir.py +3 -2
  101. v0/relationalai/semantics/metamodel/rewrite/dnf_union_splitter.py +30 -20
  102. v0/relationalai/semantics/metamodel/rewrite/flatten.py +50 -13
  103. v0/relationalai/semantics/metamodel/rewrite/format_outputs.py +9 -3
  104. v0/relationalai/semantics/metamodel/typer/checker.py +6 -4
  105. v0/relationalai/semantics/metamodel/typer/typer.py +4 -3
  106. v0/relationalai/semantics/metamodel/visitor.py +4 -3
  107. v0/relationalai/semantics/reasoners/optimization/solvers_dev.py +1 -1
  108. v0/relationalai/semantics/reasoners/optimization/solvers_pb.py +336 -86
  109. v0/relationalai/semantics/rel/compiler.py +2 -1
  110. v0/relationalai/semantics/rel/executor.py +3 -2
  111. v0/relationalai/semantics/tests/lqp/__init__.py +0 -0
  112. v0/relationalai/semantics/tests/lqp/algorithms.py +345 -0
  113. v0/relationalai/tools/cli.py +339 -186
  114. v0/relationalai/tools/cli_controls.py +216 -67
  115. v0/relationalai/tools/cli_helpers.py +410 -6
  116. v0/relationalai/util/format.py +5 -2
  117. {relationalai-1.0.0a3.dist-info → relationalai-1.0.0a5.dist-info}/entry_points.txt +0 -0
  118. {relationalai-1.0.0a3.dist-info → relationalai-1.0.0a5.dist-info}/top_level.txt +0 -0
v0/relationalai/clients/resources/snowflake/engine_service.py (new file)
@@ -0,0 +1,381 @@
+from __future__ import annotations
+
+import json
+from typing import Any, Dict, List, Protocol, Tuple
+
+from .... import debugging
+from ....errors import EngineProvisioningFailed
+from ...config import Config
+from ...types import EngineState
+from ...util import poll_with_specified_overhead
+
+# ---------------------------------------------------------------------------
+# CONSTANTS
+# ---------------------------------------------------------------------------
+
+# Keep the same placeholder string as `snowflake.py` so `Resources._exec_sql()` will
+# replace it with the actual app name.
+APP_NAME = "___RAI_APP___"
+ENGINE_SCHEMA = "experimental"
+API_SCHEMA = f"{APP_NAME}.{ENGINE_SCHEMA}"
+
+# Cloud-specific engine sizes
+INTERNAL_ENGINE_SIZES = ["XS", "S", "M", "L"]
+ENGINE_SIZES_AWS = ["HIGHMEM_X64_S", "HIGHMEM_X64_M", "HIGHMEM_X64_L"]
+ENGINE_SIZES_AZURE = ["HIGHMEM_X64_S", "HIGHMEM_X64_M", "HIGHMEM_X64_SL"]
+
+# ---------------------------------------------------------------------------
+# Engine types
+# ---------------------------------------------------------------------------
+
+class EngineType:
+    """Engine type constants with descriptions."""
+
+    LOGIC = "LOGIC"
+    SOLVER = "SOLVER"
+    ML = "ML"
+
+    _LABELS = {
+        LOGIC: "Logic",
+        SOLVER: "Prescriptive",
+        ML: "Predictive",
+    }
+
+    _DESCRIPTIONS = {
+        LOGIC: "Logic engine for deductive reasoning and relational queries",
+        SOLVER: "Optimization engine using mathematical solvers for prescriptive reasoning",
+        ML: "Machine learning engine for pattern recognition and predictive reasoning",
+    }
+
+    @classmethod
+    def get_label(cls, type: str) -> str:
+        """Get the user-friendly label for an engine type."""
+        return cls._LABELS.get(type, type)
+
+    @classmethod
+    def get_label_with_value(cls, type: str) -> str:
+        """Get the user-friendly label with the value in parentheses."""
+        label = cls.get_label(type)
+        return f"{label} ({type})"
+
+    @classmethod
+    def get_description(cls, type: str) -> str:
+        """Get the description for an engine type."""
+        return cls._DESCRIPTIONS.get(type, "Unknown engine type")
+
+    @classmethod
+    def is_valid(cls, type: str) -> bool:
+        """Check if an engine type is valid."""
+        return type in cls._DESCRIPTIONS
+
+    @classmethod
+    def get_all_types(cls) -> List[str]:
+        """Get all valid engine types."""
+        return list(cls._DESCRIPTIONS.keys())
+
+
+class _ExecResources(Protocol):
+    """Minimal surface EngineServiceSQL needs from Resources (composition, no mixins)."""
+
+    config: Config
+
+    def _exec(
+        self,
+        code: str,
+        params: Any | None = None,
+        raw: bool = False,
+        help: bool = True,
+        skip_engine_db_error_retry: bool = False,
+    ) -> Any:
+        """Execute a statement via the owning resources object."""
+        ...
+
+    def get_cloud_provider(self) -> str:
+        """Return the configured cloud provider identifier (e.g. 'aws', 'azure')."""
+        ...
+
+
+# ---------------------------------------------------------------------------
+# Engine Service
+# ---------------------------------------------------------------------------
+class EngineServiceSQL:
+    """Snowflake engine management backed by SQL stored procedures."""
+
+    def __init__(self, resources: _ExecResources):
+        """Create an engine service bound to a resources-like executor."""
+        self._res = resources
+
+    @staticmethod
+    def _parse_settings(val: Any) -> Dict[str, Any] | None:
+        if val is None:
+            return None
+        if isinstance(val, dict):
+            return val
+        if isinstance(val, str):
+            s = val.strip()
+            if not s:
+                return None
+            try:
+                parsed = json.loads(s)
+                return parsed if isinstance(parsed, dict) else {"value": parsed}
+            except Exception:
+                return {"value": val}
+        # Snowflake VARIANT may arrive as a list/tuple/etc; preserve it under a wrapper
+        return {"value": val}
+
+    def list_engines(
+        self,
+        *,
+        state: str | None = None,
+        name: str | None = None,
+        type: str | None = None,
+        size: str | None = None,
+        created_by: str | None = None,
+    ) -> List[Dict[str, Any]]:
+        """
+        List engines with optional filtering.
+
+        Uses parameterized queries (? placeholders) for SQL injection protection.
+        """
+
+        where_conditions: list[str] = []
+        params: list[Any] = []
+
+        if state:
+            where_conditions.append("STATUS = ?")
+            params.append(state.upper())
+        if name:
+            where_conditions.append("UPPER(NAME) LIKE ?")
+            params.append(f"%{name.upper()}%")
+        if type:
+            where_conditions.append("TYPE = ?")
+            params.append(type.upper())
+        if size:
+            where_conditions.append("SIZE = ?")
+            params.append(size)
+        if created_by:
+            where_conditions.append("UPPER(CREATED_BY) LIKE ?")
+            params.append(f"%{created_by.upper()}%")
+
+        where_clause = f"WHERE {' AND '.join(where_conditions)}" if where_conditions else ""
+        where_suffix = f" {where_clause}" if where_clause else ""
+        statement = f"""
+            SELECT
+                NAME, TYPE, ID, SIZE, STATUS, CREATED_BY, CREATED_ON, UPDATED_ON,
+                AUTO_SUSPEND_MINS, SUSPENDS_AT, SETTINGS
+            FROM {API_SCHEMA}.engines{where_suffix}
+            ORDER BY NAME ASC;
+        """
+
+        results = self._res._exec(statement, params)
+        if not results:
+            return []
+
+        return [
+            {
+                "name": row["NAME"],
+                "type": row["TYPE"],
+                "id": row["ID"],
+                "size": row["SIZE"],
+                "state": row["STATUS"],  # callers expect 'state'
+                "created_by": row["CREATED_BY"],
+                "created_on": row["CREATED_ON"],
+                "updated_on": row["UPDATED_ON"],
+                "auto_suspend_mins": row["AUTO_SUSPEND_MINS"],
+                "suspends_at": row["SUSPENDS_AT"],
+                "settings": self._parse_settings(
+                    # Snowpark Row supports dict-style indexing but not `.get()`.
+                    row["SETTINGS"] if "SETTINGS" in row else None
+                ),
+            }
+            for row in results
+        ]
+
+    def get_engine(self, name: str, type: str) -> EngineState | None:
+        """Fetch a single engine by (name, type), returning None if not found."""
+        results = self._res._exec(
+            f"""
+            SELECT
+                NAME, TYPE, ID, SIZE, STATUS, CREATED_BY, CREATED_ON, UPDATED_ON,
+                VERSION, AUTO_SUSPEND_MINS, SUSPENDS_AT, SETTINGS
+            FROM {API_SCHEMA}.engines
+            WHERE NAME = ? AND TYPE = ?;
+            """,
+            [name, type],
+        )
+        if not results:
+            return None
+        engine = results[0]
+        if not engine:
+            return None
+        engine_state: EngineState = {
+            "name": engine["NAME"],
+            "type": engine["TYPE"],
+            "id": engine["ID"],
+            "size": engine["SIZE"],
+            "state": engine["STATUS"],  # callers expect 'state'
+            "created_by": engine["CREATED_BY"],
+            "created_on": engine["CREATED_ON"],
+            "updated_on": engine["UPDATED_ON"],
+            "version": engine["VERSION"],
+            "auto_suspend": engine["AUTO_SUSPEND_MINS"],
+            "suspends_at": engine["SUSPENDS_AT"],
+            "settings": self._parse_settings(engine["SETTINGS"] if "SETTINGS" in engine else None),
+        }
+        return engine_state
+
+    def _create_engine(
+        self,
+        *,
+        name: str,
+        type: str = EngineType.LOGIC,
+        size: str | None = None,
+        auto_suspend_mins: int | None = None,
+        is_async: bool = False,
+        headers: Dict | None = None,
+        settings: Dict[str, Any] | None = None,
+    ) -> None:
+        """Create an engine using the appropriate stored procedure.
+
+        Note: `headers` is accepted for API compatibility; it is not currently used
+        in the Snowflake implementation.
+        """
+        API = "create_engine_async" if is_async else "create_engine"
+        if size is None:
+            size = self._res.config.get_default_engine_size()
+        if auto_suspend_mins is None:
+            auto_suspend_mins = self._res.config.get_default_auto_suspend_mins()
+
+        try:
+            with debugging.span(API, name=name, size=size, auto_suspend_mins=auto_suspend_mins, engine_type=type):
+                payload: Dict[str, Any] | None = None
+                if settings:
+                    payload = {"settings": dict(settings)}
+                if auto_suspend_mins is not None:
+                    payload = dict(payload or {})
+                    payload["auto_suspend_mins"] = auto_suspend_mins
+
+                if payload is None:
+                    self._res._exec(
+                        f"call {API_SCHEMA}.{API}(?, ?, ?, null);",
+                        [type, name, size],
+                    )
+                else:
+                    self._res._exec(
+                        f"call {API_SCHEMA}.{API}(?, ?, ?, PARSE_JSON(?));",
+                        [type, name, size, json.dumps(payload)],
+                    )
+        except Exception as e:
+            raise EngineProvisioningFailed(name, e) from e
+
+    def create_engine(
+        self,
+        name: str,
+        type: str | None = None,
+        size: str | None = None,
+        auto_suspend_mins: int | None = None,
+        headers: Dict | None = None,
+        settings: Dict[str, Any] | None = None,
+    ) -> None:
+        """Create an engine (synchronous variant)."""
+        if type is None:
+            type = EngineType.LOGIC
+        self._create_engine(
+            name=name,
+            type=type,
+            size=size,
+            auto_suspend_mins=auto_suspend_mins,
+            headers=headers,
+            settings=settings,
+        )
+
+    def create_engine_async(
+        self,
+        name: str,
+        type: str = EngineType.LOGIC,
+        size: str | None = None,
+        auto_suspend_mins: int | None = None,
+    ) -> None:
+        """Create an engine asynchronously."""
+        self._create_engine(
+            name=name,
+            type=type,
+            size=size,
+            auto_suspend_mins=auto_suspend_mins,
+            is_async=True,
+        )
+
+    def delete_engine(self, name: str, type: str) -> None:
+        """Delete an engine by (name, type)."""
+        self._res._exec(
+            f"call {API_SCHEMA}.delete_engine(?, ?);",
+            [type, name],
+        )
+
+    def suspend_engine(self, name: str, type: str | None = None) -> None:
+        """Suspend an engine by name (and optional type)."""
+        if type is None:
+            type = EngineType.LOGIC
+        self._res._exec(
+            f"call {API_SCHEMA}.suspend_engine(?, ?);",
+            [type, name],
+        )
+
+    def resume_engine(self, name: str, type: str | None = None, headers: Dict | None = None) -> Dict[str, Any]:
+        """Resume an engine and block until it is READY.
+
+        This preserves historical behavior where `resume_engine` was synchronous.
+        Use `resume_engine_async` for a fire-and-forget resume call.
+        """
+        if type is None:
+            type = EngineType.LOGIC
+        self.resume_engine_async(name, type=type, headers=headers)
+        poll_with_specified_overhead(
+            lambda: (self.get_engine(name, type) or {}).get("state") == "READY",
+            overhead_rate=0.1,
+            max_delay=0.5,
+            timeout=900,
+        )
+        return {}
+
+    def resume_engine_async(self, name: str, type: str | None = None, headers: Dict | None = None) -> Dict[str, Any]:
+        """Resume an engine asynchronously and return an API-compatible payload.
+
+        Note: `headers` is accepted for API compatibility; it is not currently used
+        in the Snowflake implementation.
+        """
+        if type is None:
+            type = EngineType.LOGIC
+        self._res._exec(
+            f"call {API_SCHEMA}.resume_engine_async(?, ?);",
+            [type, name],
+        )
+        return {}
+
+    def validate_engine_size(self, size: str) -> Tuple[bool, List[str]]:
+        """Validate an engine size, returning (is_valid, allowed_sizes_if_invalid)."""
+        if size is not None:
+            sizes = self.get_engine_sizes()
+            if size not in sizes:
+                return False, sizes
+        return True, []
+
+    def get_engine_sizes(self, cloud_provider: str | None = None) -> List[str]:
+        """Return the list of valid engine sizes for the given cloud provider."""
+        if cloud_provider is None:
+            cloud_provider = self._res.get_cloud_provider()
+
+        if cloud_provider == "azure":
+            sizes = ENGINE_SIZES_AZURE
+        else:
+            sizes = ENGINE_SIZES_AWS
+
+        if self._res.config.show_all_engine_sizes():
+            return INTERNAL_ENGINE_SIZES + sizes
+        return sizes
+
+    def alter_engine_pool(self, size: str | None = None, mins: int | None = None, maxs: int | None = None) -> None:
+        """Alter engine pool node limits for Snowflake."""
+        # Keep the exact SQL shape used by Resources for backwards compatibility.
+        self._res._exec(f"call {APP_NAME}.api.alter_engine_pool_node_limits('{size}', {mins}, {maxs});")
+
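Aside (not part of the diff): a minimal sketch of how the new service is driven through composition rather than mixins, per the `_ExecResources` protocol above. The stub classes and canned values below are hypothetical and exist only to satisfy the protocol; in the package the real `Resources` object from snowflake.py supplies `config`, `_exec`, and `get_cloud_provider`.

# Hypothetical stand-ins for illustration only -- not part of the package.
class _StubConfig:
    def get_default_engine_size(self):
        return "HIGHMEM_X64_S"

    def get_default_auto_suspend_mins(self):
        return 60

    def show_all_engine_sizes(self):
        return False

class _StubResources:
    config = _StubConfig()

    def _exec(self, code, params=None, raw=False, help=True, skip_engine_db_error_retry=False):
        # Print the statement instead of hitting Snowflake; return no rows.
        print("SQL:", " ".join(code.split()), "params:", params)
        return []

    def get_cloud_provider(self):
        return "aws"

svc = EngineServiceSQL(_StubResources())   # EngineServiceSQL as defined in the new module above
print(svc.get_engine_sizes())              # ['HIGHMEM_X64_S', 'HIGHMEM_X64_M', 'HIGHMEM_X64_L']
print(svc.list_engines(state="ready"))     # [] with the stub; real calls return Snowpark rows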
v0/relationalai/clients/resources/snowflake/engine_state_handlers.py
@@ -25,8 +25,9 @@ else:
 @dataclass
 class EngineContext:
     """Context for engine state handling."""
-    engine_name: str
-    engine_size: str | None
+    name: str
+    size: str | None
+    type: str
     headers: Dict | None
     requested_size: str | None  # Size explicitly requested by user
     spinner: Spinner | None = None  # For async mode UI updates
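Aside: the rename above (engine_name/engine_size becoming name/size, plus the new type field) is what every hunk below threads through the handlers. A hypothetical construction of the new context, with illustrative values only, looks like:

# Illustrative values; field names follow the dataclass shown above.
context = EngineContext(
    name="my_engine",        # was engine_name
    size="HIGHMEM_X64_S",    # was engine_size
    type="LOGIC",            # new: handlers now pass the engine type through
    headers=None,
    requested_size=None,
)
# Handlers can then call, e.g., resources.get_engine(context.name, context.type)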
@@ -70,7 +71,7 @@ class SyncPendingStateHandler(EngineStateHandler):
         # Warn if requested size doesn't match pending engine size
         if context.requested_size is not None and engine.get("size") != context.requested_size:
             existing_size = engine.get("size") or ""
-            EngineSizeMismatchWarning(context.engine_name, existing_size, context.requested_size)
+            EngineSizeMismatchWarning(context.name, existing_size, context.requested_size)
 
         # Poll until engine is ready
         with Spinner(
@@ -78,14 +79,14 @@ class SyncPendingStateHandler(EngineStateHandler):
             "Engine ready",
         ):
             poll_with_specified_overhead(
-                lambda: resources.is_engine_ready(context.engine_name),
+                lambda: resources.is_engine_ready(context.name, context.type),
                 overhead_rate=0.1,
                 max_delay=0.5,
                 timeout=900
             )
 
         # Return updated engine (should be READY now)
-        updated_engine = resources.get_engine(context.engine_name)
+        updated_engine = resources.get_engine(context.name, context.type)
         return cast(EngineDict, updated_engine) if updated_engine else None
 
 
@@ -100,23 +101,23 @@ class SyncSuspendedStateHandler(EngineStateHandler):
             return None
 
         with Spinner(
-            f"Resuming engine '{context.engine_name}'",
-            f"Engine '{context.engine_name}' resumed",
-            f"Failed to resume engine '{context.engine_name}'"
+            f"Resuming engine '{context.name}'",
+            f"Engine '{context.name}' resumed",
+            f"Failed to resume engine '{context.name}'"
         ):
             try:
-                resources.resume_engine_async(context.engine_name, headers=context.headers)
+                resources.resume_engine_async(context.name, type=context.type, headers=context.headers)
                 poll_with_specified_overhead(
-                    lambda: resources.is_engine_ready(context.engine_name),
+                    lambda: resources.is_engine_ready(context.name, context.type),
                     overhead_rate=0.1,
                     max_delay=0.5,
                     timeout=900
                 )
             except Exception:
-                raise EngineResumeFailed(context.engine_name)
+                raise EngineResumeFailed(context.name)
 
         # Return updated engine (should be READY now)
-        updated_engine = resources.get_engine(context.engine_name)
+        updated_engine = resources.get_engine(context.name, context.type)
         return cast(EngineDict, updated_engine) if updated_engine else None
 
 
@@ -133,7 +134,7 @@ class SyncReadyStateHandler(EngineStateHandler):
         # Warn if requested size doesn't match ready engine size
         if context.requested_size is not None and engine.get("size") != context.requested_size:
             existing_size = engine.get("size") or ""
-            EngineSizeMismatchWarning(context.engine_name, existing_size, context.requested_size)
+            EngineSizeMismatchWarning(context.name, existing_size, context.requested_size)
 
         # Cast to EngineState for _set_active_engine
         if TYPE_CHECKING:
@@ -158,7 +159,7 @@ class SyncGoneStateHandler(EngineStateHandler):
             # "Gone" is abnormal condition when metadata and SF service don't match
            # Therefore, we have to delete the engine and create a new one
             # It could be case that engine is already deleted, so we have to catch the exception
-            resources.delete_engine(context.engine_name, headers=context.headers)
+            resources.delete_engine(context.name, context.type)
             # After deleting the engine, return None so that we can create a new engine
             return None
         except Exception as e:
@@ -167,7 +168,7 @@ class SyncGoneStateHandler(EngineStateHandler):
             if isinstance(e, EngineNotFoundException):
                 return None
             else:
-                raise EngineProvisioningFailed(context.engine_name, e) from e
+                raise EngineProvisioningFailed(context.name, e) from e
 
 
 class SyncMissingEngineHandler(EngineStateHandler):
@@ -180,13 +181,18 @@ class SyncMissingEngineHandler(EngineStateHandler):
         # This handler is called when engine doesn't exist
         # Create engine synchronously with spinner
         with Spinner(
-            f"Auto-creating engine {context.engine_name}",
-            f"Auto-created engine {context.engine_name}",
+            f"Auto-creating engine {context.name}",
+            f"Auto-created engine {context.name}",
             "Engine creation failed",
         ):
-            resources.create_engine(context.engine_name, size=context.engine_size, headers=context.headers)
+            resources.create_engine(
+                context.name,
+                size=context.size,
+                type=context.type,
+                headers=context.headers,
+            )
 
-        return resources.get_engine(context.engine_name)
+        return resources.get_engine(context.name, context.type)
 
 
 # ============================================================================
@@ -206,7 +212,7 @@ class AsyncPendingStateHandler(EngineStateHandler):
         # In async mode, just update spinner - use_index will wait for engine to be ready
         if context.spinner:
             context.spinner.update_messages({
-                "finished_message": f"Starting engine {context.engine_name}",
+                "finished_message": f"Starting engine {context.name}",
             })
 
         return engine
@@ -224,13 +230,13 @@ class AsyncSuspendedStateHandler(EngineStateHandler):
 
         if context.spinner:
             context.spinner.update_messages({
-                "finished_message": f"Resuming engine {context.engine_name}",
+                "finished_message": f"Resuming engine {context.name}",
             })
 
         try:
-            resources.resume_engine_async(context.engine_name)
+            resources.resume_engine_async(context.name, type=context.type)
         except Exception:
-            raise EngineResumeFailed(context.engine_name)
+            raise EngineResumeFailed(context.name)
 
         return engine
 
@@ -247,7 +253,7 @@ class AsyncReadyStateHandler(EngineStateHandler):
 
         if context.spinner:
             context.spinner.update_messages({
-                "finished_message": f"Engine {context.engine_name} initialized",
+                "finished_message": f"Engine {context.name} initialized",
             })
 
         # Cast to EngineState for _set_active_engine
@@ -271,7 +277,7 @@ class AsyncGoneStateHandler(EngineStateHandler):
 
         if context.spinner:
             context.spinner.update_messages({
-                "message": f"Restarting engine {context.engine_name}",
+                "message": f"Restarting engine {context.name}",
             })
 
         try:
@@ -279,7 +285,7 @@ class AsyncGoneStateHandler(EngineStateHandler):
             # Therefore, we have to delete the engine and create a new one
             # It could be case that engine is already deleted, so we have to catch the exception
             # Set it to None so that we can create a new engine asynchronously
-            resources.delete_engine(context.engine_name)
+            resources.delete_engine(context.name, context.type)
             return None
         except Exception as e:
             # If engine is already deleted, we will get an exception
@@ -287,7 +293,7 @@ class AsyncGoneStateHandler(EngineStateHandler):
             if isinstance(e, EngineNotFoundException):
                 return None
             else:
-                raise EngineProvisioningFailed(context.engine_name, e) from e
+                raise EngineProvisioningFailed(context.name, e) from e
 
 
 class AsyncMissingEngineHandler(EngineStateHandler):
@@ -299,11 +305,11 @@ class AsyncMissingEngineHandler(EngineStateHandler):
     def handle(self, engine: EngineDict | None, context: EngineContext, resources: 'Resources') -> EngineDict | None:
         # This handler is called when engine doesn't exist
         # Create engine asynchronously
-        resources.create_engine_async(context.engine_name, size=context.engine_size)
+        resources.create_engine_async(context.name, size=context.size, type=context.type)
 
         if context.spinner:
             context.spinner.update_messages({
-                "finished_message": f"Starting engine {context.engine_name}...",
+                "finished_message": f"Starting engine {context.name}...",
             })
         return None  # Engine is being created asynchronously
 
v0/relationalai/clients/resources/snowflake/error_handlers.py
@@ -71,6 +71,39 @@ class AppMissingErrorHandler(ErrorHandler):
         raise SnowflakeAppMissingException(rai_app, resources.config.get("role"))
 
 
+class AppFunctionMissingErrorHandler(ErrorHandler):
+    """Handle missing RAI app when app-scoped UDFs are unknown.
+
+    When the RelationalAI Snowflake native application isn't installed (or is installed
+    under a different name / not authorized), Snowflake can surface errors like:
+
+        SQL compilation error: Unknown user-defined function <APP>.<SCHEMA>.<FUNC>.
+
+    This should be presented to users as an "app missing" configuration issue, not
+    as a raw SQL error.
+    """
+
+    def matches(self, error: Exception, message: str, ctx: 'ExecContext', resources: 'Resources') -> bool:
+        rai_app = resources.config.get("rai_app_name", "")
+        if not isinstance(rai_app, str) or not rai_app:
+            return False
+
+        rai_app_lower = rai_app.lower()
+        # Normalize whitespace/newlines because Snowpark error strings may wrap.
+        messages = [" ".join(msg.split()).lower() for msg in collect_error_messages(error)]
+
+        # Examples:
+        # - "unknown user-defined function sqlib_ia_na_app.experimental.resume_engine_async."
+        # - "unknown user-defined function sqlib_ia_na_app.api.alter_engine_pool_node_limits."
+        needle = f"unknown user-defined function {rai_app_lower}."
+        return any(needle in msg for msg in messages)
+
+    def handle(self, error: Exception, ctx: 'ExecContext', resources: 'Resources') -> Any | None:
+        rai_app = resources.config.get("rai_app_name", "")
+        assert isinstance(rai_app, str), f"rai_app_name must be a string, not {type(rai_app)}"
+        raise SnowflakeAppMissingException(rai_app, resources.config.get("role"))
+
+
 class DatabaseErrorsHandler(ErrorHandler):
     """Handle database-related errors from Snowflake/RAI."""
 
@@ -122,8 +155,16 @@ class ServiceNotStartedErrorHandler(ErrorHandler):
     """Handle RAI service not started errors."""
 
     def matches(self, error: Exception, message: str, ctx: 'ExecContext', resources: 'Resources') -> bool:
-        messages = collect_error_messages(error)
-        return any(re.search(r"service has not been started", msg) for msg in messages)
+        messages = [" ".join(msg.split()).lower() for msg in collect_error_messages(error)]
+        return any(
+            (
+                # Native app not activated / service not started
+                "service has not been started" in msg
+                # Native app suspended/deactivated; SPCS control plane is unreachable
+                or "not reachable: service suspended" in msg
+            )
+            for msg in messages
+        )
 
     def handle(self, error: Exception, ctx: 'ExecContext', resources: 'Resources') -> Any | None:
         rai_app = resources.config.get("rai_app_name", "")
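Aside: both updated matchers normalize the wrapped, mixed-case text Snowpark returns before doing plain substring checks. A self-contained sketch of that normalization (the error string below is illustrative, not a captured message):

# Same normalization as the handlers above: collapse whitespace, lowercase, then substring-match.
def normalize(msg: str) -> str:
    return " ".join(msg.split()).lower()

wrapped = "SQL compilation error:\n  Unknown user-defined function\n  MY_APP.experimental.resume_engine_async."
needle = "unknown user-defined function my_app."
print(needle in normalize(wrapped))  # True: line wrapping and casing no longer defeat the match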