relationalai 1.0.0a3__py3-none-any.whl → 1.0.0a5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (118)
  1. relationalai/config/config.py +47 -21
  2. relationalai/config/connections/__init__.py +5 -2
  3. relationalai/config/connections/duckdb.py +2 -2
  4. relationalai/config/connections/local.py +31 -0
  5. relationalai/config/connections/snowflake.py +0 -1
  6. relationalai/config/external/raiconfig_converter.py +235 -0
  7. relationalai/config/external/raiconfig_models.py +202 -0
  8. relationalai/config/external/utils.py +31 -0
  9. relationalai/config/shims.py +1 -0
  10. relationalai/semantics/__init__.py +10 -8
  11. relationalai/semantics/backends/sql/sql_compiler.py +1 -4
  12. relationalai/semantics/experimental/__init__.py +0 -0
  13. relationalai/semantics/experimental/builder.py +295 -0
  14. relationalai/semantics/experimental/builtins.py +154 -0
  15. relationalai/semantics/frontend/base.py +67 -42
  16. relationalai/semantics/frontend/core.py +34 -6
  17. relationalai/semantics/frontend/front_compiler.py +209 -37
  18. relationalai/semantics/frontend/pprint.py +6 -2
  19. relationalai/semantics/metamodel/__init__.py +7 -0
  20. relationalai/semantics/metamodel/metamodel.py +2 -0
  21. relationalai/semantics/metamodel/metamodel_analyzer.py +58 -16
  22. relationalai/semantics/metamodel/pprint.py +6 -1
  23. relationalai/semantics/metamodel/rewriter.py +11 -7
  24. relationalai/semantics/metamodel/typer.py +116 -41
  25. relationalai/semantics/reasoners/__init__.py +11 -0
  26. relationalai/semantics/reasoners/graph/__init__.py +35 -0
  27. relationalai/semantics/reasoners/graph/core.py +9028 -0
  28. relationalai/semantics/std/__init__.py +30 -10
  29. relationalai/semantics/std/aggregates.py +641 -12
  30. relationalai/semantics/std/common.py +146 -13
  31. relationalai/semantics/std/constraints.py +71 -1
  32. relationalai/semantics/std/datetime.py +904 -21
  33. relationalai/semantics/std/decimals.py +143 -2
  34. relationalai/semantics/std/floats.py +57 -4
  35. relationalai/semantics/std/integers.py +98 -4
  36. relationalai/semantics/std/math.py +857 -35
  37. relationalai/semantics/std/numbers.py +216 -20
  38. relationalai/semantics/std/re.py +213 -5
  39. relationalai/semantics/std/strings.py +437 -44
  40. relationalai/shims/executor.py +60 -52
  41. relationalai/shims/fixtures.py +85 -0
  42. relationalai/shims/helpers.py +26 -2
  43. relationalai/shims/hoister.py +28 -9
  44. relationalai/shims/mm2v0.py +204 -173
  45. relationalai/tools/cli/cli.py +192 -10
  46. relationalai/tools/cli/components/progress_reader.py +1 -1
  47. relationalai/tools/cli/docs.py +394 -0
  48. relationalai/tools/debugger.py +11 -4
  49. relationalai/tools/qb_debugger.py +435 -0
  50. relationalai/tools/typer_debugger.py +1 -2
  51. relationalai/util/dataclasses.py +3 -5
  52. relationalai/util/docutils.py +1 -2
  53. relationalai/util/error.py +2 -5
  54. relationalai/util/python.py +23 -0
  55. relationalai/util/runtime.py +1 -2
  56. relationalai/util/schema.py +2 -4
  57. relationalai/util/structures.py +4 -2
  58. relationalai/util/tracing.py +8 -2
  59. {relationalai-1.0.0a3.dist-info → relationalai-1.0.0a5.dist-info}/METADATA +8 -5
  60. {relationalai-1.0.0a3.dist-info → relationalai-1.0.0a5.dist-info}/RECORD +118 -95
  61. {relationalai-1.0.0a3.dist-info → relationalai-1.0.0a5.dist-info}/WHEEL +1 -1
  62. v0/relationalai/__init__.py +1 -1
  63. v0/relationalai/clients/client.py +52 -18
  64. v0/relationalai/clients/exec_txn_poller.py +122 -0
  65. v0/relationalai/clients/local.py +23 -8
  66. v0/relationalai/clients/resources/azure/azure.py +36 -11
  67. v0/relationalai/clients/resources/snowflake/__init__.py +4 -4
  68. v0/relationalai/clients/resources/snowflake/cli_resources.py +12 -1
  69. v0/relationalai/clients/resources/snowflake/direct_access_resources.py +124 -100
  70. v0/relationalai/clients/resources/snowflake/engine_service.py +381 -0
  71. v0/relationalai/clients/resources/snowflake/engine_state_handlers.py +35 -29
  72. v0/relationalai/clients/resources/snowflake/error_handlers.py +43 -2
  73. v0/relationalai/clients/resources/snowflake/snowflake.py +277 -179
  74. v0/relationalai/clients/resources/snowflake/use_index_poller.py +8 -0
  75. v0/relationalai/clients/types.py +5 -0
  76. v0/relationalai/errors.py +19 -1
  77. v0/relationalai/semantics/lqp/algorithms.py +173 -0
  78. v0/relationalai/semantics/lqp/builtins.py +199 -2
  79. v0/relationalai/semantics/lqp/executor.py +68 -37
  80. v0/relationalai/semantics/lqp/ir.py +28 -2
  81. v0/relationalai/semantics/lqp/model2lqp.py +215 -45
  82. v0/relationalai/semantics/lqp/passes.py +13 -658
  83. v0/relationalai/semantics/lqp/rewrite/__init__.py +12 -0
  84. v0/relationalai/semantics/lqp/rewrite/algorithm.py +385 -0
  85. v0/relationalai/semantics/lqp/rewrite/constants_to_vars.py +70 -0
  86. v0/relationalai/semantics/lqp/rewrite/deduplicate_vars.py +104 -0
  87. v0/relationalai/semantics/lqp/rewrite/eliminate_data.py +108 -0
  88. v0/relationalai/semantics/lqp/rewrite/extract_keys.py +25 -3
  89. v0/relationalai/semantics/lqp/rewrite/period_math.py +77 -0
  90. v0/relationalai/semantics/lqp/rewrite/quantify_vars.py +65 -31
  91. v0/relationalai/semantics/lqp/rewrite/unify_definitions.py +317 -0
  92. v0/relationalai/semantics/lqp/utils.py +11 -1
  93. v0/relationalai/semantics/lqp/validators.py +14 -1
  94. v0/relationalai/semantics/metamodel/builtins.py +2 -1
  95. v0/relationalai/semantics/metamodel/compiler.py +2 -1
  96. v0/relationalai/semantics/metamodel/dependency.py +12 -3
  97. v0/relationalai/semantics/metamodel/executor.py +11 -1
  98. v0/relationalai/semantics/metamodel/factory.py +2 -2
  99. v0/relationalai/semantics/metamodel/helpers.py +7 -0
  100. v0/relationalai/semantics/metamodel/ir.py +3 -2
  101. v0/relationalai/semantics/metamodel/rewrite/dnf_union_splitter.py +30 -20
  102. v0/relationalai/semantics/metamodel/rewrite/flatten.py +50 -13
  103. v0/relationalai/semantics/metamodel/rewrite/format_outputs.py +9 -3
  104. v0/relationalai/semantics/metamodel/typer/checker.py +6 -4
  105. v0/relationalai/semantics/metamodel/typer/typer.py +4 -3
  106. v0/relationalai/semantics/metamodel/visitor.py +4 -3
  107. v0/relationalai/semantics/reasoners/optimization/solvers_dev.py +1 -1
  108. v0/relationalai/semantics/reasoners/optimization/solvers_pb.py +336 -86
  109. v0/relationalai/semantics/rel/compiler.py +2 -1
  110. v0/relationalai/semantics/rel/executor.py +3 -2
  111. v0/relationalai/semantics/tests/lqp/__init__.py +0 -0
  112. v0/relationalai/semantics/tests/lqp/algorithms.py +345 -0
  113. v0/relationalai/tools/cli.py +339 -186
  114. v0/relationalai/tools/cli_controls.py +216 -67
  115. v0/relationalai/tools/cli_helpers.py +410 -6
  116. v0/relationalai/util/format.py +5 -2
  117. {relationalai-1.0.0a3.dist-info → relationalai-1.0.0a5.dist-info}/entry_points.txt +0 -0
  118. {relationalai-1.0.0a3.dist-info → relationalai-1.0.0a5.dist-info}/top_level.txt +0 -0
v0/relationalai/semantics/reasoners/optimization/solvers_pb.py
@@ -1,8 +1,8 @@
- """Solver model implementation using protobuf format.
+ """Solver model implementation supporting protobuf and CSV formats.
  
  This module provides the SolverModelPB class for defining optimization and
- constraint programming problems that are serialized to protobuf format and
- solved by external solver engines.
+ constraint programming problems that are serialized and solved by external
+ solver engines. Supports both protobuf (default) and CSV (future) exchange formats.
  
  Note: This protobuf-based implementation will be deprecated in favor of the
  development version (solvers_dev.py) in future releases.
@@ -23,7 +23,6 @@ from v0.relationalai.util.timeout import calc_remaining_timeout_minutes
  
  from .common import make_name
  
- 
  # =============================================================================
  # Solver ProtoBuf Format Constants and Helpers
  # =============================================================================
@@ -191,6 +190,7 @@ class SolverModelPB:
  """
  b.define(b.RawSource("rel", textwrap.dedent(install_rel)))
  
+ 
  # -------------------------------------------------------------------------
  # Variable Handling
  # -------------------------------------------------------------------------
@@ -501,69 +501,218 @@ class SolverModelPB:
  # Solving and Result Handling
  # -------------------------------------------------------------------------
  
- def solve(
- self, solver: Solver, log_to_console: bool = False, **kwargs: Any
+ def _export_model_to_csv(
+ self,
+ model_id: str,
+ executor: RelExecutor,
+ prefix_lowercase: str,
+ query_timeout_mins: Optional[int] = None
  ) -> None:
- """Solve the model.
+ """Export model to CSV files in Snowflake stage.
  
  Args:
- solver: Solver instance.
- log_to_console: Whether to show solver output.
- **kwargs: Solver options and parameters.
+ model_id: Unique model identifier for stage paths.
+ executor: RelExecutor instance.
+ prefix_lowercase: Prefix for relation names.
+ query_timeout_mins: Query timeout in minutes.
  """
- options = {**kwargs, "version": 1}
+ stage_base_no_txn = f"snowflake://APP_STATE.RAI_INTERNAL_STAGE/SOLVERS/job_{model_id}"
+ 
+ # Export all model relations using Rel-native export_csv in a single transaction
+ # Transformations (uuid_string, encode_base64) are done inline in the export query
+ export_rel = textwrap.dedent(f"""
+ // Get transaction ID for folder naming - solver service validates ownership
+ // Use uuid_string to get proper UUID format, then replace hyphens with underscores
+ def txn_id_str {{string_replace[uuid_string[current_transaction_id], "-", "_"]}}
+ 
+ // Define base path with txn_id in folder name: model_{{txn_id}}/
+ def base_path {{"{stage_base_no_txn}/model"}}
+ 
+ // Export variable_hash.csv - single column: HASH (UUID string)
+ // Transformation: convert Variable UInt128 to UUID string inline
+ def variable_hash_data(:HASH, v, h):
+ {self.Variable._name}(v) and uuid_string(v, h)
+ 
+ def export[:variable_hash]: {{export_csv[{{
+ (:path, base_path ++ "/variable_hash_" ++ txn_id_str ++ ".csv");
+ (:data, variable_hash_data);
+ (:compression, "gzip")
+ }}]}}
+ 
+ // Export variable_name.csv - columns: HASH (UUID string), VALUE (name string)
+ // Transformation: convert Variable UInt128 to UUID string inline
+ def variable_name_data(:HASH, v, h):
+ {prefix_lowercase}variable_name(v, _) and uuid_string(v, h)
+ def variable_name_data(:VALUE, v, name):
+ {prefix_lowercase}variable_name(v, name)
+ 
+ def export[:variable_name]: {{export_csv[{{
+ (:path, base_path ++ "/variable_name_" ++ txn_id_str ++ ".csv");
+ (:data, variable_name_data);
+ (:compression, "gzip")
+ }}]}}
+ 
+ // Export constraint.csv - single column: VALUE (base64 encoded constraint)
+ // Transformation: encode_base64 done inline
+ def constraint_data(:VALUE, c, e):
+ exists((s) |
+ {self.Constraint._name}(c) and
+ {prefix_lowercase}constraint_serialized(c, s) and
+ encode_base64(s, e))
+ 
+ def export[:constraint]: {{export_csv[{{
+ (:path, base_path ++ "/constraint_" ++ txn_id_str ++ ".csv");
+ (:data, constraint_data);
+ (:compression, "gzip")
+ }}]}}
+ 
+ // Export min_objective.csv - columns: HASH (UUID string), VALUE (base64 encoded)
+ // Transformations: uuid_string and encode_base64 done inline
+ def min_objective_data(:HASH, obj, h):
+ {self.MinObjective._name}(obj) and uuid_string(obj, h)
+ def min_objective_data(:VALUE, obj, e):
+ exists((s) |
+ {self.MinObjective._name}(obj) and
+ {prefix_lowercase}minobjective_serialized(obj, s) and
+ encode_base64(s, e))
+ 
+ def export[:min_objective]: {{export_csv[{{
+ (:path, base_path ++ "/min_objective_" ++ txn_id_str ++ ".csv");
+ (:data, min_objective_data);
+ (:compression, "gzip")
+ }}]}}
+ 
+ // Export max_objective.csv - columns: HASH (UUID string), VALUE (base64 encoded)
+ // Transformations: uuid_string and encode_base64 done inline
+ def max_objective_data(:HASH, obj, h):
+ {self.MaxObjective._name}(obj) and uuid_string(obj, h)
+ def max_objective_data(:VALUE, obj, e):
+ exists((s) |
+ {self.MaxObjective._name}(obj) and
+ {prefix_lowercase}maxobjective_serialized(obj, s) and
+ encode_base64(s, e))
+ 
+ def export[:max_objective]: {{export_csv[{{
+ (:path, base_path ++ "/max_objective_" ++ txn_id_str ++ ".csv");
+ (:data, max_objective_data);
+ (:compression, "gzip")
+ }}]}}
+ """)
+ 
+ executor.execute_raw(export_rel, query_timeout_mins=query_timeout_mins)
+ 
+ def _import_solver_results_from_csv(
+ self,
+ model_id: str,
+ executor: RelExecutor,
+ prefix_lowercase: str,
+ query_timeout_mins: Optional[int] = None
+ ) -> None:
+ """Import solver results from CSV files in Snowflake stage.
  
- # Validate solver options
- for option_key, option_value in options.items():
- if not isinstance(option_key, str):
- raise TypeError(
- f"Solver option keys must be strings, but got {type(option_key).__name__} for key {option_key!r}."
- )
- if not isinstance(option_value, (int, float, str, bool)):
- raise TypeError(
- f"Solver option values must be int, float, str, or bool, "
- f"but got {type(option_value).__name__} for option {option_key!r}."
- )
+ Loads and extracts CSV files in a single transaction to minimize overhead.
  
- # Three-phase solve process:
- # 1. Export model to Snowflake as protobuf
- # 2. Execute solver job (external solver reads from Snowflake)
- # 3. Extract and load results back into the model
- input_id = uuid.uuid4()
- model_uri = f"snowflake://APP_STATE.RAI_INTERNAL_STAGE/job-inputs/solver/{input_id}/model.binpb"
- sf_input_uri = f"snowflake://job-inputs/solver/{input_id}/model.binpb"
- payload: dict[str, Any] = {"solver": solver.solver_name.lower()}
- payload["options"] = options
- payload["model_uri"] = sf_input_uri
+ Args:
+ model_id: Unique model identifier for stage paths.
+ executor: RelExecutor instance.
+ prefix_lowercase: Prefix for relation names.
+ query_timeout_mins: Query timeout in minutes.
+ """
+ result_stage_base = f"snowflake://APP_STATE.RAI_INTERNAL_STAGE/SOLVERS/job_{model_id}/results"
+ value_parse_fn = "parse_int" if self._num_type == "int" else "parse_float"
+ 
+ # Single transaction: Load CSV files and extract/map results
+ # Use inline definitions to avoid needing declared relations
+ load_and_extract_rel = textwrap.dedent(f"""
+ // Define CSV loading inline (no declare needed)
+ // Load ancillary.csv - contains solver metadata (NAME, VALUE columns)
+ def ancillary_config[:path]: "{result_stage_base}/ancillary.csv.gz"
+ def ancillary_config[:syntax, :header_row]: 1
+ def ancillary_config[:schema, :NAME]: "string"
+ def ancillary_config[:schema, :VALUE]: "string"
+ def {prefix_lowercase}solver_ancillary_raw {{load_csv[ancillary_config]}}
+ 
+ // Load objective_values.csv - contains objective values (SOL_INDEX, VALUE columns)
+ def objective_values_config[:path]: "{result_stage_base}/objective_values.csv.gz"
+ def objective_values_config[:syntax, :header_row]: 1
+ def objective_values_config[:schema, :SOL_INDEX]: "string"
+ def objective_values_config[:schema, :VALUE]: "string"
+ def {prefix_lowercase}solver_objective_values_raw {{load_csv[objective_values_config]}}
+ 
+ // Load points.csv.gz - contains solution points (SOL_INDEX, VAR_HASH, VALUE columns)
+ def points_config[:path]: "{result_stage_base}/points.csv.gz"
+ def points_config[:syntax, :header_row]: 1
+ def points_config[:schema, :SOL_INDEX]: "string"
+ def points_config[:schema, :VAR_HASH]: "string"
+ def points_config[:schema, :VALUE]: "string"
+ def {prefix_lowercase}solver_points_raw {{load_csv[points_config]}}
+ 
+ // Clear existing result data
+ def delete[:{self.result_info._name}]: {self.result_info._name}
+ def delete[:{self.point._name}]: {self.point._name}
+ def delete[:{self.points._name}]: {self.points._name}
  
- executor = self._model._to_executor()
- if not isinstance(executor, RelExecutor):
- raise ValueError(f"Expected RelExecutor, got {type(executor).__name__}.")
- prefix_lowercase = f"solvermodel_{self._id}_"
+ // Extract ancillary data (result info) - NAME and VALUE columns
+ def insert(:{self.result_info._name}, key, val): {{
+ exists((row) |
+ {prefix_lowercase}solver_ancillary_raw(:NAME, row, key) and
+ {prefix_lowercase}solver_ancillary_raw(:VALUE, row, val))
+ }}
  
- query_timeout_mins = kwargs.get("query_timeout_mins", None)
- config = self._model._config
- if (
- query_timeout_mins is None
- and (
- timeout_value := config.get(
- "query_timeout_mins", DEFAULT_QUERY_TIMEOUT_MINS
- )
- )
- is not None
- ):
- query_timeout_mins = int(timeout_value)
- config_file_path = getattr(config, "file_path", None)
- start_time = time.monotonic()
- remaining_timeout_minutes = query_timeout_mins
+ // Extract objective value from objective_values CSV (first solution)
+ def insert(:{self.result_info._name}, "objective_value", val): {{
+ exists((row) |
+ {prefix_lowercase}solver_objective_values_raw(:SOL_INDEX, row, "1") and
+ {prefix_lowercase}solver_objective_values_raw(:VALUE, row, val))
+ }}
  
- # Step 1: Materialize the model and store it in Snowflake
- print("export model")
- # TODO(coey): Weird hack to avoid uninitialized properties error
- # This forces evaluation of the Variable concept before export
- b.select(b.count(self.Variable)).to_df()
- export_model_relation = f"""
- // TODO maybe only want to pass names if printing - like in old setup
+ // Extract solution points from points.csv.gz into points property
+ // This file has SOL_INDEX, VAR_HASH, VALUE columns
+ // Convert CSV string index to Int128 for points property signature
+ // Convert value to Int128 (for int) or Float64 (for float)
+ def insert(:{self.points._name}, sol_idx_int128, var, val_converted): {{
+ exists((row, sol_idx_str, var_hash_str, val_str, sol_idx_int, val) |
+ {prefix_lowercase}solver_points_raw(:SOL_INDEX, row, sol_idx_str) and
+ {prefix_lowercase}solver_points_raw(:VAR_HASH, row, var_hash_str) and
+ {prefix_lowercase}solver_points_raw(:VALUE, row, val_str) and
+ parse_int(sol_idx_str, sol_idx_int) and
+ parse_uuid(var_hash_str, var) and
+ {value_parse_fn}(val_str, val) and
+ ::std::mirror::convert(std::mirror::typeof[Int128], sol_idx_int, sol_idx_int128) and
+ {'::std::mirror::convert(std::mirror::typeof[Int128], val, val_converted)' if self._num_type == 'int' else '::std::mirror::convert(std::mirror::typeof[Float64], val, val_converted)'})
+ }}
+ 
+ // Extract first solution into point property (default solution)
+ // Filter to SOL_INDEX = 1
+ def insert(:{self.point._name}, var, val_converted): {{
+ exists((row, var_hash_str, val_str, val) |
+ {prefix_lowercase}solver_points_raw(:SOL_INDEX, row, "1") and
+ {prefix_lowercase}solver_points_raw(:VAR_HASH, row, var_hash_str) and
+ {prefix_lowercase}solver_points_raw(:VALUE, row, val_str) and
+ parse_uuid(var_hash_str, var) and
+ {value_parse_fn}(val_str, val) and
+ {'::std::mirror::convert(std::mirror::typeof[Int128], val, val_converted)' if self._num_type == 'int' else '::std::mirror::convert(std::mirror::typeof[Float64], val, val_converted)'})
+ }}
+ """)
+ 
+ executor.execute_raw(load_and_extract_rel, query_timeout_mins=query_timeout_mins)
+ 
+ def _export_model_to_protobuf(
+ self,
+ model_uri: str,
+ executor: RelExecutor,
+ prefix_lowercase: str,
+ query_timeout_mins: Optional[int] = None
+ ) -> None:
+ """Export model to protobuf format in Snowflake stage.
+ 
+ Args:
+ model_uri: Snowflake URI for the protobuf file.
+ executor: RelExecutor instance.
+ prefix_lowercase: Prefix for relation names.
+ query_timeout_mins: Query timeout in minutes.
+ """
+ export_rel = f"""
  // Collect all model components into a relation for serialization
  def model_relation {{
  (:variable, {self.Variable._name});
@@ -584,31 +733,24 @@ class SolverModelPB:
  def export {{ config }}
  """
  executor.execute_raw(
- textwrap.dedent(export_model_relation),
- query_timeout_mins=remaining_timeout_minutes,
+ textwrap.dedent(export_rel),
+ query_timeout_mins=query_timeout_mins
  )
  
- # Step 2: Execute solver job and wait for completion
- print("execute solver job")
- remaining_timeout_minutes = calc_remaining_timeout_minutes(
- start_time,
- query_timeout_mins,
- config_file_path=config_file_path,
- )
- job_id = solver._exec_job(
- payload,
- log_to_console=log_to_console,
- query_timeout_mins=remaining_timeout_minutes,
- )
+ def _import_solver_results_from_protobuf(
+ self,
+ job_id: str,
+ executor: RelExecutor,
+ query_timeout_mins: Optional[int] = None
+ ) -> None:
+ """Import solver results from protobuf format.
  
- # Step 3: Extract and insert solver results into the model
- print("extract result")
- remaining_timeout_minutes = calc_remaining_timeout_minutes(
- start_time,
- query_timeout_mins,
- config_file_path=config_file_path,
- )
- extract_results_relation = f"""
+ Args:
+ job_id: Job identifier for result location.
+ executor: RelExecutor instance.
+ query_timeout_mins: Query timeout in minutes.
+ """
+ extract_rel = f"""
  def raw_result {{
  load_binary["snowflake://APP_STATE.RAI_INTERNAL_STAGE/job-results/{job_id}/result.binpb"]
  }}
@@ -625,6 +767,7 @@ class SolverModelPB:
  def insert(:{self.result_info._name}, key, value):
  exists((original_key) | string(extracted[original_key], value) and ::std::mirror::lower(original_key, key))
  """
+ 
  if self._num_type == "int":
  insert_points_relation = f"""
  def insert(:{self.point._name}, variable, value):
@@ -645,15 +788,122 @@ class SolverModelPB:
  ::std::mirror::convert(std::mirror::typeof[Int128], float_index, point_index)
  )
  """
+ 
  executor.execute_raw(
- textwrap.dedent(extract_results_relation)
- + textwrap.dedent(insert_points_relation),
- readonly=False,
- query_timeout_mins=remaining_timeout_minutes,
+ textwrap.dedent(extract_rel) + textwrap.dedent(insert_points_relation),
+ query_timeout_mins=query_timeout_mins
  )
  
- print("finished solve")
+ def solve(
+ self, solver: Solver, log_to_console: bool = False, **kwargs: Any
+ ) -> None:
+ """Solve the model.
+ 
+ Args:
+ solver: Solver instance.
+ log_to_console: Whether to show solver output.
+ **kwargs: Solver options and parameters.
+ """
+ 
+ use_csv_store = solver.engine_settings.get("store", {})\
+ .get("csv", {})\
+ .get("enabled", False)
+ 
+ print(f"Using {'csv' if use_csv_store else 'protobuf'} store...")
+ 
+ options = {**kwargs, "version": 1}
+ 
+ # Validate solver options
+ for option_key, option_value in options.items():
+ if not isinstance(option_key, str):
+ raise TypeError(
+ f"Solver option keys must be strings, but got {type(option_key).__name__} for key {option_key!r}."
+ )
+ if not isinstance(option_value, (int, float, str, bool)):
+ raise TypeError(
+ f"Solver option values must be int, float, str, or bool, "
+ f"but got {type(option_value).__name__} for option {option_key!r}."
+ )
+ 
+ executor = self._model._to_executor()
+ if not isinstance(executor, RelExecutor):
+ raise ValueError(f"Expected RelExecutor, got {type(executor).__name__}.")
+ prefix_lowercase = f"solvermodel_{self._id}_"
+ 
+ # Initialize timeout from config
+ query_timeout_mins = kwargs.get("query_timeout_mins", None)
+ config = self._model._config
+ if (
+ query_timeout_mins is None
+ and (
+ timeout_value := config.get(
+ "query_timeout_mins", DEFAULT_QUERY_TIMEOUT_MINS
+ )
+ )
+ is not None
+ ):
+ query_timeout_mins = int(timeout_value)
+ config_file_path = getattr(config, "file_path", None)
+ start_time = time.monotonic()
+ 
+ # Force evaluation of Variable concept before export
+ b.select(b.count(self.Variable)).to_df()
+ 
+ # Prepare payload for solver service
+ payload: dict[str, Any] = {"solver": solver.solver_name.lower(), "options": options}
+ 
+ if use_csv_store:
+ # CSV format: model and results are exchanged via CSV files
+ model_id = str(uuid.uuid4()).upper().replace('-', '_')
+ payload["model_uri"] = f"snowflake://SOLVERS/job_{model_id}/model"
+ 
+ print("Exporting model to CSV...")
+ remaining_timeout_minutes = calc_remaining_timeout_minutes(
+ start_time, query_timeout_mins, config_file_path=config_file_path
+ )
+ self._export_model_to_csv(model_id, executor, prefix_lowercase, remaining_timeout_minutes)
+ print("Model CSV export completed")
+ 
+ print("Execute solver job")
+ remaining_timeout_minutes = calc_remaining_timeout_minutes(
+ start_time, query_timeout_mins, config_file_path=config_file_path
+ )
+ solver._exec_job(payload, log_to_console=log_to_console, query_timeout_mins=remaining_timeout_minutes)
+ 
+ print("Loading and extracting solver results...")
+ remaining_timeout_minutes = calc_remaining_timeout_minutes(
+ start_time, query_timeout_mins, config_file_path=config_file_path
+ )
+ self._import_solver_results_from_csv(model_id, executor, prefix_lowercase, remaining_timeout_minutes)
+ 
+ else: # protobuf format
+ # Protobuf format: model and results are exchanged via binary protobuf
+ input_id = uuid.uuid4()
+ model_uri = f"snowflake://APP_STATE.RAI_INTERNAL_STAGE/job-inputs/solver/{input_id}/model.binpb"
+ sf_input_uri = f"snowflake://job-inputs/solver/{input_id}/model.binpb"
+ payload["model_uri"] = sf_input_uri
+ 
+ print("Export model...")
+ remaining_timeout_minutes = calc_remaining_timeout_minutes(
+ start_time, query_timeout_mins, config_file_path=config_file_path
+ )
+ self._export_model_to_protobuf(model_uri, executor, prefix_lowercase, remaining_timeout_minutes)
+ 
+ print("Execute solver job...")
+ remaining_timeout_minutes = calc_remaining_timeout_minutes(
+ start_time, query_timeout_mins, config_file_path=config_file_path
+ )
+ job_id = solver._exec_job(payload, log_to_console=log_to_console, query_timeout_mins=remaining_timeout_minutes)
+ 
+ print("Extract result...")
+ remaining_timeout_minutes = calc_remaining_timeout_minutes(
+ start_time, query_timeout_mins, config_file_path=config_file_path
+ )
+ self._import_solver_results_from_protobuf(job_id, executor, remaining_timeout_minutes)
+ 
+ print("Finished solve")
  print()
+ return None
  
  def load_point(self, point_index: int) -> None:
  """Load a solution point.
@@ -678,7 +928,7 @@ class SolverModelPB:
  def delete[:{self.point._name}]: {self.point._name}
  def insert(:{self.point._name}, variable, value): {self.points._name}(int128[{point_index}], variable, value)
  """
- executor.execute_raw(textwrap.dedent(load_point_relation), readonly=False)
+ executor.execute_raw(textwrap.dedent(load_point_relation))
  
  def summarize_result(self) -> Any:
  """Print solver result summary.
v0/relationalai/semantics/rel/compiler.py
@@ -1,4 +1,5 @@
  from __future__ import annotations
+ 
  from typing import Any, Iterable, Sequence as PySequence, cast, Tuple, Union
  from dataclasses import dataclass, field
  from decimal import Decimal as PyDecimal
@@ -843,7 +844,7 @@ class ModelToRel:
  
  def _effect_name(self, n: ir.Task):
  """ Return the name to be used for the effect (e.g. the relation name, output, etc). """
- if isinstance(n, ir.Output) and bt.export_annotation in n.annotations:
+ if helpers.is_export(n):
  return "Export_Relation"
  elif isinstance(n, ir.Output):
  return "output"
v0/relationalai/semantics/rel/executor.py
@@ -355,8 +355,9 @@ class RelExecutor(e.Executor):
  
  # NOTE(coey): this is added temporarily to support executing Rel for the solvers library in EA.
  # It can be removed once this is no longer needed by the solvers library.
- def execute_raw(self, raw_rel:str, readonly:bool=True, query_timeout_mins:int|None=None) -> DataFrame:
- raw_results = self.resources.exec_raw(self.database, self.engine, raw_rel, readonly, nowait_durable=True, query_timeout_mins=query_timeout_mins)
+ def execute_raw(self, raw_rel:str, query_timeout_mins:int|None=None) -> DataFrame:
+ # NOTE intentionally hard-coding to read-only=False, because read-only Rel queries are deprecated.
+ raw_results = self.resources.exec_raw(self.database, self.engine, raw_rel, False, nowait_durable=True, query_timeout_mins=query_timeout_mins)
  df, errs = result_helpers.format_results(raw_results, None, generation=Generation.QB) # Pass None for task parameter
  self.report_errors(errs)
  return df
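
execute_raw drops its readonly parameter and now always issues a write transaction, since read-only Rel queries are deprecated; the solvers_pb.py call sites above are updated accordingly (they no longer pass readonly=False). A self-contained sketch of the updated call shape follows; FakeResources and FakeRelExecutor are illustrative stubs, and only the signature and the hard-coded False mirror the diff.

    from typing import Any, Optional

    class FakeResources:
        """Illustrative stub for the resources object used by RelExecutor."""
        def exec_raw(self, database: str, engine: str, raw_rel: str, readonly: bool,
                     nowait_durable: bool, query_timeout_mins: Optional[int]) -> list[Any]:
            print(f"exec_raw(readonly={readonly}, nowait_durable={nowait_durable}, "
                  f"query_timeout_mins={query_timeout_mins})")
            return []

    class FakeRelExecutor:
        """Illustrative stub showing only the new execute_raw shape."""
        def __init__(self) -> None:
            self.resources = FakeResources()
            self.database, self.engine = "my_db", "my_engine"

        def execute_raw(self, raw_rel: str, query_timeout_mins: Optional[int] = None) -> list[Any]:
            # readonly is hard-coded to False, matching the 1.0.0a5 change.
            return self.resources.exec_raw(self.database, self.engine, raw_rel,
                                           False, nowait_durable=True,
                                           query_timeout_mins=query_timeout_mins)

    # Callers that previously passed readonly=False now simply drop the argument.
    FakeRelExecutor().execute_raw("def output { 1 + 1 }", query_timeout_mins=10)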