syntaxmatrix 2.5.6__py3-none-any.whl → 2.5.6.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -425,11 +425,12 @@ def refine_question_agent(raw_question: str, dataset_context: str | None = None)
 
  return "Configure LLM Profiles or contact your administrator."
 
- system_prompt = (
- "You rewrite user questions into specification Machine Learning (ML) job description. "
- "If a dataset summary is provided, use it to respect column and help you redefine the question. "
- "DO NOT write andy prelude or preamble"
- )
+ system_prompt = ("""
+ - You are a Machine Learning (ML) and Data Science (DS) expert.
+ - You rewrite user questions into clear ML job specifications to help an AI assistant generate Python code that solves the user's question when it is run. Most user questions are vague, so your goal is to ensure that your output guards the assistant against potential errors that you anticipate could arise from the nature of the question.
+ - If a dataset summary is provided, use it to respect the available columns and help you rewrite the question properly.
+ - DO NOT write any prelude or preamble
+ """)
 
  user_prompt = f"User question:\n{raw_question}\n\n"
  if dataset_context:
@@ -446,20 +447,7 @@ def refine_question_agent(raw_question: str, dataset_context: str | None = None)
 
 
  def classify_ml_job_agent(refined_question, dataset_profile):
- """
- Instructs an LLM (gemini-2.5-flash) to analyze a task description
- and return a list of associated machine learning job/task types.
- This version uses a highly extensive, generalized list of ML jobs
- to ensure robustness across all domains (NLP, CV, RL, etc.).
-
- Args:
- task_description: The detailed description of the statistical/ML task.
-
- Returns:
- A list of strings identifying the relevant ML jobs. Returns an empty
- list if the API call fails or the output cannot be parsed.
- """
-
+
  def ml_response(user_prompt, system_prompt, profile):
  _profile = profile # _prof.get_profile["admin"]
 
@@ -571,10 +559,13 @@ def classify_ml_job_agent(refined_question, dataset_profile):
 
  return "Configure LLM Profiles or contact your administrator."
 
- system_prompt = (
- "You are a strict machine learning task classifier for an ML workbench.\n"
- "Your job is to label the user's task desc. with all relevant tags from a fixed list.\n\n"
- )
+ system_prompt = ("""
+ You are a strict machine learning task classifier for an ML workbench.
+ Your goal is to correctly label the user's task specifications with the most relevant tags from a fixed list.
+ You must always have 'data_preprocessing' as the 1st tag. Then add up to 4 more, for a maximum of 5, so your list should have 1-5 tags. If you think a task is too complex for the given context, even if relevant, exclude it.
+ If no relevant tag applies, default to "data_preprocessing" and return that alone.
+ You should return only your list of tags, no prelude or preamble.
+ """)
 
  # --- 1. Define the Master List of ML Tasks (Generalized) ---
  ml_task_list = [
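As a quick illustration of the contract this classifier prompt establishes (not code from the package), a caller could normalise the model's reply into the 1-5 tag list described above. The helper name, the allow-list subset, and the JSON-style reply are all hypothetical; the prompt itself does not fix a reply format.

```python
import json
import re

# Illustrative subset of the fixed tag list; the real list lives in ml_task_list.
ALLOWED_TAGS = {"data_preprocessing", "classification", "regression", "clustering", "time_series"}

def parse_tag_reply(reply: str) -> list[str]:
    """Coerce an LLM reply into 1-5 allowed tags, always led by 'data_preprocessing'."""
    try:
        tags = json.loads(reply)                        # reply may already be a JSON list
    except (json.JSONDecodeError, TypeError):
        tags = re.findall(r"[a-z_]+", reply.lower())    # fall back to plucking tag-like tokens
    tags = [t for t in tags if t in ALLOWED_TAGS and t != "data_preprocessing"]
    return ["data_preprocessing"] + tags[:4]            # first tag fixed, 5 tags max

print(parse_tag_reply('["classification", "data_preprocessing", "clustering"]'))
# ['data_preprocessing', 'classification', 'clustering']
```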
syntaxmatrix/core.py CHANGED
@@ -1141,21 +1141,23 @@ class SyntaxMUI:
  tasks = [str(t).strip().lower() for t in tasks if str(t).strip()]
 
  ai_profile = """
- - You are a Python expert specializing in data science and machine learning.
+ - You are a Python expert specializing in Data Science (DS) and Machine Learning (ML).
  - Your task is to generate a single, complete, production-quality, executable Python script for a Jupyter-like Python kernel, based on the given instructions.
  - The dataset is already loaded as a pandas DataFrame named `df` (no file I/O or file uploads).
- - Make a copy of `df` and name it `df_copy`. Make sure `df_copy` is preprocessed and cleaned, named `df_cleaned`, if not already done so. Then use `df_cleaned` to perform the ML tasks described in the given context.
- - Select your features and target, from `df_cleaned`, with care and name it `required_cols`
+ - Make a copy of `df` and name it `df_copy`.
+ - Make sure `df_copy` is preprocessed and cleaned, and name it `df_cleaned`, if not already done so.
+ - Work only with `df_cleaned` to perform the ML tasks described in the given context.
+ - Select your features and targets, from `df_cleaned`, with care and name the list `required_cols`.
  - Create your 'df_filtered by doing: df_filtered = df_cleaned[required_cols].
- - Use the {TEMPLATE_CATALOGUE} below to educate yourself on which visualizations you will implement in the code.
- - The final output MUST be the complete, executable Python code only, enclosed in a single markdown code block (```python ... ```), which is required to fulfill the user's request. See the {tasks} below.
+ - Use the {TEMPLATE_CATALOGUE} below to educate yourself on which visualizations you will implement in the code, and ensure the implementations are in the code you generate.
+ - The final output MUST BE the complete, executable Python code only, enclosed in a single markdown code block (```python ... ```), and MUST BE able to fulfill the user's request: {tasks}.
  - Do not include any explanatory text or markdown outside the code block.
  """
 
  TEMPLATE_CATALOGUE = """
  ### Available SyntaxMatrix templates (use these instead of inventing new helpers)
 
- Visualisation templates (dataset-agnostic):
+ Visualisation templates:
  - viz_pie(df, category_col=None, top_k=8): pie/donut shares within a category.
  - viz_stacked_bar(df, x=None, hue=None, normalise=True): composition across groups.
  - viz_count_bar(df, category_col=None, top_k=12): counts/denominators by category.
@@ -1195,9 +1197,9 @@ class SyntaxMUI:
 
  """
  ### Template rules
- - You MAY call a template if it matches the task.
+ - You MAY call one or more templates if they match the task.
  - Do NOT invent template names.
- - If no template fits, write minimal direct pandas/sklearn/seaborn code instead.
+ - If no template fits, write minimal direct pandas/sklearn/seaborn code for the visualization instead.
  - Keep the solution short: avoid writing wrappers/utilities already handled by SyntaxMatrix hardener.
 
  #### Template selection hint examples:
@@ -1220,8 +1222,7 @@ class SyntaxMUI:
  set `random_state=42` where relevant.
  4) Be defensive, but avoid hard-failing on optional fields:
  - If the primary column, needed to answer the question, is missing, review your copy of the `df` again.
- Make sure that you selected the proper column.
- Never use a column/variable which isn't available or defined.
+ - Make sure that you selected the proper column. Never use a column/variable which isn't available or defined.
  - If a secondary/extra column is missing, show a warning with `show(...)` and continue using available fields.
  - Handle missing values sensibly (drop rows for simple EDA; use `ColumnTransformer` + `SimpleImputer` for modelling).
  - For categorical features in ML, use `OneHotEncoder(handle_unknown="ignore")`
@@ -1253,6 +1254,20 @@ class SyntaxMUI:
  11) You MUST NOT reference any column outside Available columns: {AVAILABLE_COLUMNS}.
  12) If asked to predict/classify, choose the target by matching the task text to Allowed columns
  and never invent a new name.
+ 13) Treat df as the primary dataset you must work with.
+ 14) The dataset is already loaded as df (no file I/O or file uploads).
+ 15) All outputs must be visible to the user via the provided show(...) helper.
+ 16) Never use print(...); use show(...) instead.
+ 17) You MUST NOT read from or write to local files, folders, or external storage.
+ - Do not call open(...), Path(...).write_text/write_bytes, or similar file APIs.
+ - Do not use df.to_csv(...), df.to_excel(...), df.to_parquet(...),
+ df.to_pickle(...), df.to_json(...), df.to_hdf(...), or any other
+ method that writes to disk.
+ - Do not call joblib.dump(...), pickle.dump(...), torch.save(...),
+ numpy.save(...), numpy.savetxt(...), or similar saving functions.
+ - Do not call plt.savefig(..., 'somefile.png') or any variant that
+ writes an image to a filename. Plots must be rendered in-memory only.
+ 18) Keep everything in memory and surface results via show(...) or plots.
 
  #### Cohort rules
  When you generate plots for cohorts or categories, you MUST obey these rules:
syntaxmatrix/preface.py CHANGED
@@ -453,23 +453,24 @@ def _safe_concat(objs, **kwargs):
  except Exception as e:
  smx_show(f'⚠ concat skipped: {e}')
  return _pd.DataFrame()
-
+
 
  def _SMX_OHE(**k):
  # normalise arg name across sklearn versions
- if 'sparse' in k and 'sparse_output' not in k:
- k['sparse_output'] = k.pop('sparse')
- k.setdefault('handle_unknown', 'ignore')
- k.setdefault('sparse_output', False)
+ if "sparse" in k and "sparse_output" not in k:
+ k["sparse_output"] = k.pop("sparse")
+ k.setdefault("handle_unknown", "ignore")
+ k.setdefault("sparse_output", False)
  try:
- sig = inspect.signature(OneHotEncoder)
- if 'sparse_output' not in sig.parameters and 'sparse_output' in k:
- k['sparse'] = k.pop('sparse_output')
- except Exception:
- if 'sparse_output' in k:
- k['sparse'] = k.pop('sparse_output')
- return OneHotEncoder(**k)
-
+ if "sparse_output" not in inspect.signature(OneHotEncoder).parameters:
+ if "sparse_output" in k:
+ k["sparse"] = k.pop("sparse_output")
+ return OneHotEncoder(**k)
+ except TypeError:
+ if "sparse_output" in k:
+ k["sparse"] = k.pop("sparse_output")
+ return OneHotEncoder(**k)
+
 
  def _SMX_mm(a, b):
  try:
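For readability, here is a minimal standalone sketch of the `_SMX_OHE` shim in the new version, with the diff's flattened indentation restored and the imports it relies on added. The sparse/sparse_output translation logic is taken from the hunk above; the trailing usage line is illustrative only.

```python
import inspect
from sklearn.preprocessing import OneHotEncoder

def _SMX_OHE(**k):
    """Build a OneHotEncoder that works on both old (`sparse`) and new (`sparse_output`) sklearn."""
    # Normalise the argument name to the modern spelling first.
    if "sparse" in k and "sparse_output" not in k:
        k["sparse_output"] = k.pop("sparse")
    k.setdefault("handle_unknown", "ignore")
    k.setdefault("sparse_output", False)
    try:
        # Older sklearn (< 1.2) only knows `sparse`, so translate back before constructing.
        if "sparse_output" not in inspect.signature(OneHotEncoder).parameters:
            if "sparse_output" in k:
                k["sparse"] = k.pop("sparse_output")
        return OneHotEncoder(**k)
    except TypeError:
        # Last-resort fallback: retry with the legacy keyword.
        if "sparse_output" in k:
            k["sparse"] = k.pop("sparse_output")
        return OneHotEncoder(**k)

enc = _SMX_OHE(sparse=False)  # legacy keyword is accepted and mapped to the right name
```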
@@ -692,7 +692,7 @@
  </div><br>
  {% if llm_usage %}
  <div class="refined-qblock">
- <b>LLM: </b><span>{{ llm_usage.provider }} | {{ llm_usage.model }}</span><br>
+ <!-- <b>LLM: </b><span>{{ llm_usage.provider }} | {{ llm_usage.model }}</span><br> -->
  <b>Token Usage: </b>
  <li>- Input Tokens: {{ llm_usage.input_tokens }}</li>
  <li>- Output Tokens: {{ llm_usage.output_tokens }}</li>
@@ -704,12 +704,12 @@
  {% if ai_outputs %}
  <div class="d-flex align-items-center justify-content-between" style="margin: 12px;">
  <h3 class="m-0">Result</h3>
+ {% for html_block in ai_outputs %}
+ <div class="ai-output" style="margin-bottom:18px;overflow-x:auto; max-width:100%;">
+ {{ html_block | safe }}
+ </div>
+ {% endfor %}
  </div>
- {% for html_block in ai_outputs %}
- <div class="ai-output" style="margin-bottom:18px;overflow-x:auto; max-width:100%;">
- {{ html_block | safe }}
- </div>
- {% endfor %}
  {% endif %}
  {% if ai_code %}
  <div>
syntaxmatrix/utils.py CHANGED
@@ -155,31 +155,6 @@ def harden_ai_code(code: str) -> str:
  # Remove any LLM-added try/except blocks (hardener adds its own)
  import re
 
- def strip_placeholders(code: str) -> str:
- code = re.sub(r"\bshow\(\s*\.\.\.\s*\)",
- "show('⚠ Block skipped due to an error.')",
- code)
- code = re.sub(r"\breturn\s+\.\.\.", "return None", code)
- return code
-
- def _SMX_OHE(**k):
- # normalise arg name across sklearn versions
- if "sparse" in k and "sparse_output" not in k:
- k["sparse_output"] = k.pop("sparse")
- # default behaviour we want
- k.setdefault("handle_unknown", "ignore")
- k.setdefault("sparse_output", False)
- try:
- # if running on old sklearn without sparse_output, translate back
- if "sparse_output" not in inspect.signature(OneHotEncoder).parameters:
- if "sparse_output" in k:
- k["sparse"] = k.pop("sparse_output")
- return OneHotEncoder(**k)
- except TypeError:
- # final fallback: try legacy name
- if "sparse_output" in k:
- k["sparse"] = k.pop("sparse_output")
- return OneHotEncoder(**k)
 
  def _strip_stray_backrefs(code: str) -> str:
  code = re.sub(r'(?m)^\s*\\\d+\s*', '', code)
@@ -497,6 +472,67 @@ def harden_ai_code(code: str) -> str:
  """
  )
 
+ def _strip_file_io_ops(code: str) -> str:
+ """
+ Remove obvious local file I/O operations in LLM code
+ so nothing writes to the container filesystem.
+ """
+ # 1) Methods like df.to_csv(...), df.to_excel(...), etc.
+ FILE_WRITE_METHODS = (
+ "to_csv", "to_excel", "to_pickle", "to_parquet",
+ "to_json", "to_hdf",
+ )
+
+ for mname in FILE_WRITE_METHODS:
+ pat = re.compile(
+ rf"(?m)^(\s*)([A-Za-z_][A-Za-z0-9_\.]*)\s*\.\s*{mname}\s*\([^)]*\)\s*$"
+ )
+
+ def _repl(match):
+ indent = match.group(1)
+ expr = match.group(2)
+ return f"{indent}# [SMX] stripped file write: {expr}.{mname}(...)"
+
+ code = pat.sub(_repl, code)
+
+ # 2) plt.savefig(...) calls
+ pat_savefig = re.compile(r"(?m)^(\s*)(plt\.savefig\s*\([^)]*\)\s*)$")
+ code = pat_savefig.sub(
+ lambda m: f"{m.group(1)}# [SMX] stripped savefig: {m.group(2).strip()}",
+ code,
+ )
+
+ # 3) with open(..., 'w'/'wb') as f:
+ pat_with_open = re.compile(
+ r"(?m)^(\s*)with\s+open\([^)]*['\"]w[b]?['\"][^)]*\)\s+as\s+([A-Za-z_][A-Za-z0-9_]*)\s*:\s*$"
+ )
+
+ def _with_open_repl(match):
+ indent = match.group(1)
+ var = match.group(2)
+ return f"{indent}if False: # [SMX] file write stripped (was: with open(... as {var}))"
+
+ code = pat_with_open.sub(_with_open_repl, code)
+
+ # 4) joblib.dump(...), pickle.dump(...)
+ for mod in ("joblib", "pickle"):
+ pat = re.compile(rf"(?m)^(\s*){mod}\.dump\s*\([^)]*\)\s*$")
+ code = pat.sub(
+ lambda m: f"{m.group(1)}# [SMX] stripped {mod}.dump(...)",
+ code,
+ )
+
+ # 5) bare open(..., 'w'/'wb') calls
+ pat_open = re.compile(
+ r"(?m)^(\s*)open\([^)]*['\"]w[b]?['\"][^)]*\)\s*$"
+ )
+ code = pat_open.sub(
+ lambda m: f"{m.group(1)}# [SMX] stripped open(..., 'w'/'wb')",
+ code,
+ )
+
+ return code
+
  # Register and run patches once per execution
  for _patch in (
  _smx_patch_mean_squared_error_squared_kw,
@@ -598,6 +634,7 @@ def harden_ai_code(code: str) -> str:
  fixed = _wrap_metric_calls(fixed)
  fixed = _fix_unexpected_indent(fixed)
  fixed = _patch_feature_coef_dataframe(fixed)
+ fixed = _strip_file_io_ops(fixed)
 
  # Import shared preface helpers once and wrap the LLM body safely
  header = "from syntaxmatrix.preface import *\n\n"
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: syntaxmatrix
- Version: 2.5.6
+ Version: 2.5.6.1
  Summary: SyntaxMUI: A customizable framework for Python AI Assistant Projects.
  Author: Bob Nti
  Author-email: bob.nti@syntaxmatrix.net
@@ -2,7 +2,7 @@ syntaxmatrix/__init__.py,sha256=_LnTrYAW2tbYA37Y233Vv4OMOk8NUnoJi-1yzFyHxEI,2573
  syntaxmatrix/auth.py,sha256=SCD6uWojXjj9yjUTKzgV5kBYe6ZkXASEG2VopLFkEtM,18140
  syntaxmatrix/bootstrap.py,sha256=Y7ZNg-Z3ecrr1iYem5EMzPmGstXnEKmO9kqKVoOoljo,817
  syntaxmatrix/commentary.py,sha256=3uSlbaQ1zl-gYtEtEpFbv2M-IH-HSdFdMvhxa7UCNHk,12025
- syntaxmatrix/core.py,sha256=vih5LnpcLMEfEqTZeRHhHNocbjWZCSgcChFV1t45CYs,60179
+ syntaxmatrix/core.py,sha256=eIqstFz0shYggr9jPyLTMJyS42xACz3hJ6V40iT8wDQ,61389
  syntaxmatrix/dataset_preprocessing.py,sha256=wtV4MWzkyfOsBHTsS0H1gqHho77ZQHGDI9skJryyZWA,8732
  syntaxmatrix/db.py,sha256=xkCpyhFxnAwrnZCTd13NkJsahVze0i4egjMcbB7kPfs,5000
  syntaxmatrix/display.py,sha256=TgMrE5WW80VlLcL_XvEz936mekFccJgLTfzbCIozSc8,3728
@@ -15,7 +15,7 @@ syntaxmatrix/kernel_manager.py,sha256=sE9zwuqEZq10Q4ySpGn0ilx-ui7cmZw-LEK8GxK-Hh
  syntaxmatrix/llm_store.py,sha256=c22-ahR_PmZVWB5OAKPVr01YI9rWPWDd_aSEMujhAic,7500
  syntaxmatrix/models.py,sha256=-yGj4fALYqyQqxIiB0Eh6xWSlr9GfwuoDlzAWUikye8,533
  syntaxmatrix/plottings.py,sha256=MjHQ9T1_oC5oyr4_wkM2GJDrpjp0sbvudbs2lGaMyzk,6103
- syntaxmatrix/preface.py,sha256=hAV8t0U87PXzbIOKEz-RkYxKV5a766dOoE71HWloQ_s,16960
+ syntaxmatrix/preface.py,sha256=EOK3lflMJ-0B6SRJtVXhzZjhvu-bfXzw-sy1TbTYOVs,17009
  syntaxmatrix/profiles.py,sha256=0-lky7Wj-WQlP5CbvTyw1tI2M0FiqhhTkLZYLRhD5AU,2251
  syntaxmatrix/project_root.py,sha256=1ckvbFVV1szHtHsfSCoGcImHkRwbfszmPG1kGh9ZZlE,2227
  syntaxmatrix/routes.py,sha256=tGBSccUs9iNuMnjpZmtvn8jDRc1Sy2aAb0t0sKoBUmE,302995
@@ -24,13 +24,13 @@ syntaxmatrix/smiv.py,sha256=1lSN3UYpXvYoVNd6VrkY5iZuF_nDxD6xxvLnTn9wcbQ,1405
  syntaxmatrix/smpv.py,sha256=rrCgYqfjBaK2n5qzfQyXK3bHFMvgNcCIqPaXquOLtDM,3600
  syntaxmatrix/themes.py,sha256=qa90vPZTuNNKB37loZhChQfu5QqkaJG4JxgI_4QgCxw,3576
  syntaxmatrix/ui_modes.py,sha256=5lfKK3AKAB-JQCWfi1GRYp4sQqg4Z0fC3RJ8G3VGCMw,152
- syntaxmatrix/utils.py,sha256=0UG1e9XwU3E2m6xblI6yPavKmEQzSIYsvTzkWiN6xJ4,122458
+ syntaxmatrix/utils.py,sha256=0iTu9XbUN1HsZModWmyexYrXAzjox7gpHyYV7SmW-PM,123555
  syntaxmatrix/vector_db.py,sha256=ozvOcMHt52xFAvcp-vAqT69kECPq9BwL8Rzgq3AJaMs,5824
  syntaxmatrix/vectorizer.py,sha256=5w_UQiUIirm_W-Q9TcaEI8LTcTYIuDBdKfz79T1aZ8g,1366
  syntaxmatrix/workspace_db.py,sha256=Xu9OlW8wo3iaH5Y88ZMdLOf-fiZxF1NBb5rAw3KcbfY,4715
  syntaxmatrix/agentic/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  syntaxmatrix/agentic/agent_tools.py,sha256=yQwavONP23ziMxNQf3j2Y4TVo_LxEsiAWecKuBK8WDg,866
- syntaxmatrix/agentic/agents.py,sha256=gy3XDzydAQdpp-GQyRL8TjIhAAO_ruPnmo4tvJQyARE,30588
+ syntaxmatrix/agentic/agents.py,sha256=SKYoBO-iVPRVpvMP6BOYp7axsiCs1ve9J_cNyf3utCw,30808
  syntaxmatrix/agentic/code_tools_registry.py,sha256=Wp4-KHtp0BUVciqSbmionBsQMVFOnvJPruBJeNiuwkk,1564
  syntaxmatrix/agentic/model_templates.py,sha256=A3ROE3BHkvnU9cxqSGjlCBIw9U15zRaTKgK-WxcZtUI,76033
  syntaxmatrix/settings/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -51,7 +51,7 @@ syntaxmatrix/static/js/sidebar.js,sha256=zHp4skKLY2Dlqx7aLPQ8_cR0iTRT17W0SC2TR38
  syntaxmatrix/static/js/widgets.js,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  syntaxmatrix/templates/change_password.html,sha256=YWEcnwJLccLyKGzQxIrc0xuP-p00BtEIwcYq4oFvJ-0,3332
  syntaxmatrix/templates/code_cell.html,sha256=LOr9VjvNQcOGKKJ1ecpcZh3C3qsUxBHueg2iQtpdxl8,638
- syntaxmatrix/templates/dashboard.html,sha256=1Qf-5IztYOkc0f7QSExV3Fo57Ks0r_BCbYVp7SqvW-4,30996
+ syntaxmatrix/templates/dashboard.html,sha256=MhB8m5EQVuWQ5f6gRECtsoUfeyvew_z5nZz5FTg4Pmo,31015
  syntaxmatrix/templates/docs.html,sha256=KVi5JrZD3gwOduiZhAz7hQrKY9SrQ_bsHOODj0Nj09s,3552
  syntaxmatrix/templates/error.html,sha256=Iu5ykHnhw8jrxVBNn6B95e90W5u9I2hySCiLtaoOJMs,3290
  syntaxmatrix/templates/login.html,sha256=V_bWHozS1xCeHPsvAAfaGG-_2lAE7K8d05IarQN1PS8,2677
@@ -63,8 +63,8 @@ syntaxmatrix/vectordb/adapters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5
  syntaxmatrix/vectordb/adapters/milvus_adapter.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  syntaxmatrix/vectordb/adapters/pgvector_adapter.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  syntaxmatrix/vectordb/adapters/sqlite_adapter.py,sha256=L8M2qHfwZRAFVxWeurUVdHaJXz6F5xTUSWh3uy6TSUs,6035
- syntaxmatrix-2.5.6.dist-info/licenses/LICENSE.txt,sha256=j1P8naTdy1JMxTC80XYQjbyAQnuOlpDusCUhncrvpy8,1083
- syntaxmatrix-2.5.6.dist-info/METADATA,sha256=vel34pxHc1Tf6YwflfaiGnw6TkUW9JnTbWIFiZijZyg,18090
- syntaxmatrix-2.5.6.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- syntaxmatrix-2.5.6.dist-info/top_level.txt,sha256=HKP_zkl4V_nt7osC15DlacoBZktHrbZYOqf_pPkF3T8,13
- syntaxmatrix-2.5.6.dist-info/RECORD,,
+ syntaxmatrix-2.5.6.1.dist-info/licenses/LICENSE.txt,sha256=j1P8naTdy1JMxTC80XYQjbyAQnuOlpDusCUhncrvpy8,1083
+ syntaxmatrix-2.5.6.1.dist-info/METADATA,sha256=KNptOa1RWzCBh139s4QqtGu696Pgm2_ykp_5r7Lkjik,18092
+ syntaxmatrix-2.5.6.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ syntaxmatrix-2.5.6.1.dist-info/top_level.txt,sha256=HKP_zkl4V_nt7osC15DlacoBZktHrbZYOqf_pPkF3T8,13
+ syntaxmatrix-2.5.6.1.dist-info/RECORD,,