npcpy 1.2.21__py3-none-any.whl → 1.2.23__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,163 @@
1
+ from typing import Dict, Callable, Any
2
+
3
class DatabaseAIFunctionMapper:
    """
    Map NPC AI function names to native database AI SQL expressions.

    Each ``get_*_mapping`` method returns a dict of the form::

        {
            'npc_function_name': {
                '<db>_function': 'NATIVE_FUNCTION_NAME',
                'transformer': callable(...) -> SQL expression string
            }
        }
    """

    @staticmethod
    def _sql_quote(value: Any) -> str:
        """
        Render *value* as a SQL single-quoted string literal.

        Embedded single quotes are doubled so user-supplied text cannot
        break out of the literal (fixes malformed SQL / trivial SQL
        injection for inputs containing ``'``).
        """
        return "'" + str(value).replace("'", "''") + "'"

    @staticmethod
    def get_snowflake_cortex_mapping() -> Dict[str, Dict[str, Any]]:
        """
        Map NPC AI functions to Snowflake Cortex functions.

        :return: mapping of NPC function name -> cortex function + transformer
        """
        q = DatabaseAIFunctionMapper._sql_quote
        return {
            # Text Generation Mapping
            'generate_text': {
                'cortex_function': 'COMPLETE',
                'transformer': lambda prompt, **kwargs:
                    f"SNOWFLAKE.CORTEX.COMPLETE({q(prompt)})"
            },

            # Summarization Mapping
            'summarize': {
                'cortex_function': 'SUMMARIZE',
                'transformer': lambda text, **kwargs:
                    f"SNOWFLAKE.CORTEX.SUMMARIZE({q(text)})"
            },

            # Sentiment Analysis Mapping
            'analyze_sentiment': {
                'cortex_function': 'SENTIMENT',
                'transformer': lambda text, **kwargs:
                    f"SNOWFLAKE.CORTEX.SENTIMENT({q(text)})"
            },

            # Translation Mapping
            'translate': {
                'cortex_function': 'TRANSLATE',
                'transformer': lambda text, source_lang='auto', target_lang='en', **kwargs:
                    f"SNOWFLAKE.CORTEX.TRANSLATE({q(text)}, {q(source_lang)}, {q(target_lang)})"
            },

            # Named Entity Recognition
            'extract_entities': {
                'cortex_function': 'EXTRACT_ENTITIES',
                'transformer': lambda text, **kwargs:
                    f"SNOWFLAKE.CORTEX.EXTRACT_ENTITIES({q(text)})"
            },

            # Embedding Generation (Cortex takes the model name first)
            'generate_embedding': {
                'cortex_function': 'EMBED_TEXT',
                'transformer': lambda text, model='snowflake-arctic', **kwargs:
                    f"SNOWFLAKE.CORTEX.EMBED_TEXT({q(model)}, {q(text)})"
            }
        }

    @staticmethod
    def get_databricks_ai_mapping() -> Dict[str, Dict[str, Any]]:
        """
        Map NPC AI functions to Databricks AI functions.
        """
        q = DatabaseAIFunctionMapper._sql_quote
        return {
            # Databricks uses different function names and approaches
            'generate_text': {
                'databricks_function': 'serving.predict',
                'transformer': lambda prompt, model='databricks-dolly', **kwargs:
                    f"serving.predict({q(model)}, {q(prompt)})"
            },
            # Add more Databricks-specific mappings
        }

    @staticmethod
    def get_bigquery_ai_mapping() -> Dict[str, Dict[str, Any]]:
        """
        Map NPC AI functions to BigQuery AI functions.
        """
        q = DatabaseAIFunctionMapper._sql_quote
        return {
            'generate_text': {
                'bigquery_function': 'ML.GENERATE_TEXT',
                # model identifier stays backtick-quoted; only the prompt is
                # a string literal
                'transformer': lambda prompt, model='text-bison', **kwargs:
                    f"ML.GENERATE_TEXT(MODEL `{model}`, {q(prompt)})"
            },
            # Add more BigQuery-specific mappings
        }
85
+
86
class NativeDatabaseAITransformer:
    """
    Translate NPC AI function calls into a database's native AI SQL.

    The mapping table is resolved once from the database type given at
    construction time; unknown database types yield an empty mapping.
    """

    def __init__(self, database_type: str):
        self.database_type = database_type
        self.function_mappings = self._get_database_mappings()

    def _get_database_mappings(self) -> Dict[str, Dict[str, Any]]:
        """
        Look up the AI function mapping for ``self.database_type``.
        """
        mapping_factories = {
            'snowflake': DatabaseAIFunctionMapper.get_snowflake_cortex_mapping,
            'databricks': DatabaseAIFunctionMapper.get_databricks_ai_mapping,
            'bigquery': DatabaseAIFunctionMapper.get_bigquery_ai_mapping,
        }
        factory = mapping_factories.get(self.database_type.lower())
        return factory() if factory is not None else {}

    def transform_ai_function(self, function_name: str, **kwargs) -> str:
        """
        Render an NPC AI function call as native database SQL.

        :param function_name: NPC-level function name to translate
        :param kwargs: forwarded to the mapping's transformer callable
        :return: native SQL expression string
        :raises ValueError: when no mapping or transformer exists
        """
        mapping = self.function_mappings.get(function_name)
        if not mapping:
            raise ValueError(f"No native mapping found for function: {function_name}")

        transformer = mapping.get('transformer')
        if not transformer:
            raise ValueError(f"No transformer found for function: {function_name}")

        return transformer(**kwargs)
115
+
116
+ # Example usage in ModelCompiler
117
+ def _has_native_ai_functions(self, source_name: str) -> bool:
118
+ """Enhanced method to check native AI function support"""
119
+ ai_enabled = {
120
+ 'snowflake': True,
121
+ 'databricks': True,
122
+ 'bigquery': True
123
+ }
124
+ return ai_enabled.get(source_name.lower(), False)
125
+
126
def _execute_ai_model(self, sql: str, model: SQLModel) -> pd.DataFrame:
    """
    Enhanced method to use native AI functions when available.

    Rewrites ``nql.<func>(col)`` calls in *sql* into the source
    database's native AI functions before execution, falling back to
    the base implementation when the source is unrecognized.

    :param sql: SQL text expected to reference tables as ``source.table``
    :param model: model definition carrying ``ai_functions`` to rewrite
    :return: query result as a DataFrame
    """
    # Fix: the previous pattern was r'FROM\s+(\\w+)\\.(\\w+)' — inside a
    # raw string the doubled backslashes match a literal backslash, so
    # it could never match `FROM source.table`.
    source_pattern = r'FROM\s+(\w+)\.(\w+)'
    matches = re.findall(source_pattern, sql)

    if matches:
        source_name, table_name = matches[0]
        engine = self._get_engine(source_name)

        # Check for native AI function support
        if self._has_native_ai_functions(source_name):
            # Use native transformer
            transformer = NativeDatabaseAITransformer(source_name)

            # Modify SQL to use native AI functions
            for func_name, params in model.ai_functions.items():
                try:
                    native_func_call = transformer.transform_ai_function(
                        func_name,
                        text=params.get('column', ''),
                        **{k: v for k, v in params.items() if k != 'column'}
                    )

                    # Replace the NQL function with native function
                    sql = sql.replace(
                        f"nql.{func_name}({params.get('column', '')})",
                        native_func_call
                    )
                except ValueError as e:
                    # Fallback to original method if transformation fails
                    print(f"Warning: {e}. Falling back to default AI function.")

            # The engine is already scoped to the source, so strip the
            # `source.` qualifier before executing.
            return pd.read_sql(sql.replace(f"{source_name}.", ""), engine)

    # Fallback to existing AI model execution.
    # NOTE(review): zero-arg super() only works once this def is placed
    # inside a class, as the "Example usage in ModelCompiler" intends.
    return super()._execute_ai_model(sql, model)
@@ -0,0 +1,156 @@
1
+ import os
2
+ import yaml
3
+ import sqlalchemy
4
+ import pandas as pd
5
+ from typing import Dict, Any, Optional
6
+
7
class SQLModelCompiler:
    """
    Compile and execute SQL models across different database engines.
    """

    def __init__(
        self,
        models_dir: str,
        # forward-ref string keeps the annotation lazy so the class is
        # importable even when sqlalchemy is unavailable
        engine: "Optional[sqlalchemy.engine.base.Engine]" = None,
        engine_type: str = 'sqlite'
    ):
        """
        Initialize SQL Model Compiler

        :param models_dir: Directory containing SQL model files
        :param engine: SQLAlchemy database engine; when None, models are
            evaluated against a throwaway in-memory SQLite database
        :param engine_type: Type of database engine (sqlite, snowflake, bigquery, etc.)
        """
        self.models_dir = models_dir
        self.engine = engine
        self.engine_type = engine_type.lower()
        # model name -> {'name', 'content', 'path'}
        self.models: Dict[str, Dict[str, Any]] = {}

        # Discover models
        self._discover_models()

    def _discover_models(self):
        """
        Discover and load ``*.sql`` model files from ``models_dir``.
        """
        for filename in os.listdir(self.models_dir):
            if not filename.endswith('.sql'):
                continue
            model_name = os.path.splitext(filename)[0]
            model_path = os.path.join(self.models_dir, filename)

            with open(model_path, 'r') as f:
                model_content = f.read()

            self.models[model_name] = {
                'name': model_name,
                'content': model_content,
                'path': model_path
            }

    def _compile_model(self, model_name: str) -> str:
        """
        Compile a SQL model for the specific engine.

        ``{{ ... }}`` placeholders are expanded into the engine's native
        AI-function namespace (``SNOWFLAKE.`` / ``ML.``).

        :param model_name: Name of the model to compile
        :return: Compiled SQL query
        """
        content = self.models[model_name]['content']

        # Engine-specific compilation
        if self.engine_type == 'snowflake':
            content = content.replace('{{', 'SNOWFLAKE.').replace('}}', '')
        elif self.engine_type == 'bigquery':
            content = content.replace('{{', 'ML.').replace('}}', '')

        return content

    def execute_model(
        self,
        model_name: str,
        seed_data: Optional[Dict[str, pd.DataFrame]] = None
    ) -> pd.DataFrame:
        """
        Execute a SQL model.

        :param model_name: Name of the model to execute
        :param seed_data: Optional table name -> DataFrame seed data loaded
            into the target database before execution
        :return: Result DataFrame
        :raises ValueError: if the model was not discovered
        """
        if model_name not in self.models:
            raise ValueError(f"Model {model_name} not found")

        # Compile model for specific engine
        compiled_sql = self._compile_model(model_name)

        if self.engine:
            # If seed data is provided, prepare the database
            if seed_data:
                for table_name, df in seed_data.items():
                    df.to_sql(table_name, self.engine, if_exists='replace', index=False)
            return pd.read_sql(compiled_sql, self.engine)

        # Fallback to pandas evaluation against an in-memory SQLite database
        import sqlite3

        conn = sqlite3.connect(':memory:')
        try:
            # Load seed data if available
            if seed_data:
                for table_name, df in seed_data.items():
                    df.to_sql(table_name, conn, if_exists='replace', index=False)
            return pd.read_sql(compiled_sql, conn)
        finally:
            # Fix: the fallback connection was previously never closed
            conn.close()

    def run_all_models(self, seed_data: Optional[Dict[str, pd.DataFrame]] = None):
        """
        Run all discovered models.

        :param seed_data: Optional seed data shared by all models
        :return: Dictionary of model name -> result DataFrame
        """
        results = {}
        for model_name in self.models:
            results[model_name] = self.execute_model(model_name, seed_data)
        return results
121
+
122
+ # Example usage in a pipeline
123
def create_model_compiler(
    models_dir: str,
    engine_type: str = 'sqlite',
    connection_params: Optional[Dict[str, Any]] = None
) -> "SQLModelCompiler":
    """
    Create a SQL Model Compiler with the specified engine.

    :param models_dir: Directory containing SQL model files
    :param engine_type: Type of database engine
    :param connection_params: Connection parameters for the database
    :return: SQLModelCompiler instance
    :raises ValueError: if a non-sqlite engine is requested without
        connection parameters
    """
    # Fix: previously a missing connection_params crashed with an opaque
    # TypeError when subscripting None.
    if engine_type in ('snowflake', 'bigquery') and not connection_params:
        raise ValueError(
            f"connection_params are required for engine_type={engine_type!r}"
        )

    if engine_type == 'snowflake':
        # NOTE(review): the `snowflake://` dialect is registered by the
        # `snowflake-sqlalchemy` package. The previous
        # `from sqlalchemy.dialects.snowflake import base` does not exist
        # in SQLAlchemy and always raised ImportError.
        engine = sqlalchemy.create_engine(
            f"snowflake://{connection_params['username']}:{connection_params['password']}@"
            f"{connection_params['account']}/{connection_params['database']}/{connection_params['schema']}"
        )
    elif engine_type == 'bigquery':
        # NOTE(review): the `bigquery://` dialect is registered by the
        # `sqlalchemy-bigquery` package; `sqlalchemy.dialects.bigquery`
        # does not exist and always raised ImportError.
        engine = sqlalchemy.create_engine(
            f"bigquery://{connection_params['project_id']}"
        )
    else:
        # Default to SQLite
        engine = sqlalchemy.create_engine('sqlite:///models.db')

    return SQLModelCompiler(
        models_dir=models_dir,
        engine=engine,
        engine_type=engine_type
    )
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: npcpy
3
- Version: 1.2.21
3
+ Version: 1.2.23
4
4
  Summary: npcpy is the premier open-source library for integrating LLMs and Agents into python systems.
5
5
  Home-page: https://github.com/NPC-Worldwide/npcpy
6
6
  Author: Christopher Agostino
@@ -399,6 +399,152 @@ the citizens, being directed by simple and incontestable principles, may tend to
399
399
  maintenance of the Constitution, and the general happiness. ''')
400
400
  # it will play the audio automatically.
401
401
  ```
402
+ ## Fine-Tuning and Evolution
403
+
404
+ `npcpy` provides modular tools for building adaptive AI systems through supervised fine-tuning, reinforcement learning, and genetic algorithms.
405
+
406
+ See examples/fine_tuning_demo.py for a complete working example.
407
+
408
+
409
+ ### Supervised Fine-Tuning (SFT)
410
+
411
+ Train models on specific tasks using simple X, y pairs:
412
+ ```python
413
+ from npcpy.ft.sft import run_sft, load_sft_model, predict_sft
414
+
415
+ X_train = ["translate to french: hello", "translate to french: goodbye"]
416
+ y_train = ["bonjour", "au revoir"]
417
+
418
+ model_path = run_sft(X_train, y_train)
419
+
420
+ model, tokenizer = load_sft_model(model_path)
421
+ response = predict_sft(model, tokenizer, "translate to french: thanks")
422
+ ```
423
+ ### Unsupervised Fine-Tuning (USFT)
424
+ Adapt models to domain-specific text corpora without labels:
425
+ ```python
426
+ from npcpy.ft.usft import run_usft, load_corpus_from_hf
427
+
428
+ texts = load_corpus_from_hf("tiny_shakespeare", split="train[:1000]")
429
+
430
+ model_path = run_usft(
431
+ texts,
432
+ config=USFTConfig(
433
+ output_model_path="models/shakespeare",
434
+ num_train_epochs=3
435
+ )
436
+ )
437
+ ```
+ Train on your own text corpus:
+ ```python
438
+ domain_texts = [
439
+ "Your domain-specific text 1",
440
+ "Your domain-specific text 2",
441
+ ] * 100
442
+
443
+ model_path = run_usft(domain_texts)
444
+ ```
445
+ ### Diffusion Fine-tuning
446
+ ```python
447
+ from npcpy.ft.diff import train_diffusion, generate_image
448
+
449
+ image_paths = ["img1.png", "img2.png", "img3.png"]
450
+ captions = ["a cat", "a dog", "a bird"]
451
+
452
+ model_path = train_diffusion(
453
+ image_paths,
454
+ captions,
455
+ config=DiffusionConfig(
456
+ num_epochs=100,
457
+ batch_size=4
458
+ )
459
+ )
460
+
461
+ generated = generate_image(
462
+ model_path,
463
+ prompt="a white square",
464
+ image_size=128
465
+ )
466
+ ```
+ Resume training from checkpoint:
+ ```python
467
+ model_path = train_diffusion(
468
+ image_paths,
469
+ captions,
470
+ config,
471
+ resume_from="models/diffusion/checkpoints/checkpoint-epoch10-step1000.pt"
472
+ )
473
+ ```
474
+
475
+
476
+ ### Reinforcement Learning (RL)
477
+ Collect agent traces and train with DPO based on reward signals:
478
+ ```python
479
+ from npcpy.ft.rl import collect_traces, run_rl_training
480
+ from npcpy.npc_compiler import NPC
481
+
482
+ tasks = [
483
+ {'prompt': 'Solve 2+2', 'expected': '4'},
484
+ {'prompt': 'Solve 5+3', 'expected': '8'}
485
+ ]
486
+
487
+ agents = [
488
+ NPC(name="farlor", primary_directive="Be concise",
489
+ model="qwen3:0.6b", provider="ollama"),
490
+ NPC(name="tedno", primary_directive="Show your work",
491
+ model="qwen3:0.6b", provider="ollama")
492
+ ]
493
+
494
+ def reward_fn(trace):
495
+ if trace['task_metadata']['expected'] in trace['final_output']:
496
+ return 1.0
497
+ return 0.0
498
+
499
+ adapter_path = run_rl_training(tasks, agents, reward_fn)
500
+ ```
501
+ ### Genetic Evolution
502
+
503
+ Evolve populations of knowledge graphs or model ensembles:
504
+ ```python
505
+ from npcpy.ft.ge import GeneticEvolver, GAConfig
506
+
507
+ config = GAConfig(
508
+ population_size=20,
509
+ generations=50,
510
+ mutation_rate=0.15
511
+ )
512
+
513
+ evolver = GeneticEvolver(
514
+ fitness_fn=your_fitness_function,
515
+ mutate_fn=your_mutation_function,
516
+ crossover_fn=your_crossover_function,
517
+ initialize_fn=your_init_function,
518
+ config=config
519
+ )
520
+
521
+ best_individual = evolver.run()
522
+ ```
523
+
524
+ ### Smart Model Ensembler and response router
525
+ Build fast intuitive responses with fallback to reasoning:
526
+ ```python
527
+ from npcpy.ft.model_ensembler import (
528
+ ResponseRouter,
529
+ create_model_genome
530
+ )
531
+
532
+ genome = create_model_genome(['math', 'code', 'factual'])
533
+ router = ResponseRouter(fast_threshold=0.8)
534
+
535
+ result = router.route_query("What is 2+2?", genome)
536
+
537
+ if result['used_fast_path']:
538
+ print("Fast gut reaction")
539
+ elif result['used_ensemble']:
540
+ print("Ensemble voting")
541
+ else:
542
+ print("Full reasoning")
543
+ ```
544
+ The intention for this model ensembler system is to mimic human cognition: pattern-matched gut reactions (System 1 of Kahneman) for familiar queries, falling back to deliberate reasoning (System 2 of Kahneman) for novel problems. Genetic algorithms evolve both knowledge structures and model specializations over time.
545
+
546
+
547
+
402
548
  ## Serving an NPC Team
403
549
 
404
550
  `npcpy` includes a built-in Flask server that makes it easy to deploy NPC teams for production use. You can serve teams with tools, jinxs, and complex workflows that frontends can interact with via REST APIs.
@@ -1,5 +1,5 @@
1
1
  npcpy/__init__.py,sha256=9imxFtK74_6Rw9rz0kyMnZYl_voPb569tkTlYLt0Urg,131
2
- npcpy/llm_funcs.py,sha256=tvcZuQEcIUJClwEJQXBF6ArEVjSuXt1jAcZOcnYWsVQ,85101
2
+ npcpy/llm_funcs.py,sha256=UkesCnRmclEoqBZPMZa2hKoSTjFzjxDCzPGKgeDegPQ,85101
3
3
  npcpy/main.py,sha256=RWoRIj6VQLxKdOKvdVyaq2kwG35oRpeXPvp1CAAoG-w,81
4
4
  npcpy/npc_compiler.py,sha256=10vu-9WUmlVzaFM_hMJH28iNS1IJXQP3Rb5RT1rZmpA,95326
5
5
  npcpy/npc_sysenv.py,sha256=lPYlKM_TeR4l4-Jcgiqq3CCge8b2oFHdfISD4L_G7eo,30308
@@ -10,39 +10,45 @@ npcpy/data/__init__.py,sha256=1tcoChR-Hjn905JDLqaW9ElRmcISCTJdE7BGXPlym2Q,642
10
10
  npcpy/data/audio.py,sha256=goon4HfsYgx0bI-n1lhkrzWPrJoejJlycXcB0P62pyk,11280
11
11
  npcpy/data/data_models.py,sha256=q7xpI4_nK5HvlOE1XB5u5nFQs4SE5zcgt0kIZJF2dhs,682
12
12
  npcpy/data/image.py,sha256=UQcioNPDd5HYMLL_KStf45SuiIPXDcUY-dEFHwSWUeE,6564
13
- npcpy/data/load.py,sha256=f3-bgKUq_pnfUhqjZdXwfIEZmMbGJpJfGTBjuiYJos8,4258
13
+ npcpy/data/load.py,sha256=7Ay-TYNhCvjJLwdQ5qAgxXSrGwow9ZrazHFVPqMw_cI,4274
14
14
  npcpy/data/text.py,sha256=jP0a1qZZaSJdK-LdZTn2Jjdxqmkd3efxDLEoxflJQeY,5010
15
15
  npcpy/data/video.py,sha256=aPUgj0fA_lFQ7Jf94-PutggCF4695FVCh3q5mnVthvI,574
16
16
  npcpy/data/web.py,sha256=ARGoVKUlQmaiX0zJbSvvFmRCwOv_Z7Pcan9c5GxYObQ,5117
17
17
  npcpy/ft/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
18
- npcpy/ft/diff.py,sha256=R3Qo6v0-6M1iI0wiXhUzyuYI2ja0q_0i9bE0z3coxzU,28
19
- npcpy/ft/ge.py,sha256=my5LtGyVTT40V0i1h9FR-tFFA1FHSga-PeCCgUX1UUI,61
18
+ npcpy/ft/diff.py,sha256=wYFRY_2p-B5xVqO7NDyhJbjQsUt4PrwOfgpE1Icghmk,2906
19
+ npcpy/ft/ge.py,sha256=0VzIiXq2wCzGcK1x0Wd-myJ3xRf-FNaPg0GkHEZegUM,3552
20
20
  npcpy/ft/memory_trainer.py,sha256=QZPznxEEwXbOGroHdMUMa5xpqlNwgV6nqOazI2xgrnQ,6635
21
- npcpy/ft/rl.py,sha256=l3RUkEJe4b2yB6pildveu2LJymtNq0F17COwf_CCq3U,34
22
- npcpy/ft/sft.py,sha256=i4ENygRPArbLWN4XZZuBnPWaehs8M-J68JB_mewGJHI,62
21
+ npcpy/ft/model_ensembler.py,sha256=BRX4hJ_rvF1vKTzjMhlahZqPttUgc3PqmzUJDqIfIps,10038
22
+ npcpy/ft/rl.py,sha256=EcPD8t5MFg0zYWSS-A7KJ9bWd0qCTsL5SSvDxV556Z4,9245
23
+ npcpy/ft/sft.py,sha256=iPCP4sM2Nfri0rif_oR1uFInhqY8HIILwT-iQGk7f10,6064
24
+ npcpy/ft/usft.py,sha256=O025GGYGZQf2ZVLowyAmBwh5bJyuy2dUAM6v03YcboY,3435
23
25
  npcpy/gen/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
24
26
  npcpy/gen/audio_gen.py,sha256=w4toESu7nmli1T5FOwRRCGC_QK9W-SMWknYYkbRv9jE,635
25
27
  npcpy/gen/embeddings.py,sha256=QStTJ2ELiC379OEZsLEgGGIIFD267Y8zQchs7HRn2Zg,2089
26
28
  npcpy/gen/image_gen.py,sha256=ln71jmLoJHekbZYDJpTe5DtOamVte9gjr2BPQ1DzjMQ,14955
27
- npcpy/gen/response.py,sha256=Rrk3rrSSGode94JJkKyL4n9Dpw9CH7R5l9xyjQSDFxE,28954
29
+ npcpy/gen/response.py,sha256=dK0Ux1_0GHo4gOfSHrrp34Ub4YJ-88NjFZfaG3kSrB0,28940
28
30
  npcpy/gen/video_gen.py,sha256=JMp2s2qMp5uy0rOgv6BRZ7nkQI4vdT1hbJ2nSu4s-KA,3243
29
31
  npcpy/memory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
30
- npcpy/memory/command_history.py,sha256=Ww7vZTSjQDuElQXuOjsvu7NTljOLAg07QIFrfKARpVg,45562
32
+ npcpy/memory/command_history.py,sha256=2VdmNW5VRpMrOkbdrMsgn5p3mvuJHNnzGHnIUEM8XMI,46279
31
33
  npcpy/memory/kg_vis.py,sha256=TrQQCRh_E7Pyr-GPAHLSsayubAfGyf4HOEFrPB6W86Q,31280
32
34
  npcpy/memory/knowledge_graph.py,sha256=2XpIlsyPdAOnzQ6kkwP6MWPGwL3P6V33_3suNJYMMJE,48681
33
- npcpy/memory/memory_processor.py,sha256=bLfzT-uDgwNegs1hVBqW3Hl2fYtdmFQbdc5To_f4i5E,2106
35
+ npcpy/memory/memory_processor.py,sha256=6PfVnSBA9ag5EhHJinXoODfEPTlDDoaT0PtCCuZO6HI,2598
34
36
  npcpy/memory/search.py,sha256=glN6WYzaixcoDphTEHAXSMX3vKZGjR12Jx9YVL_gYfE,18433
35
37
  npcpy/mix/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
36
38
  npcpy/mix/debate.py,sha256=lQXxC7nl6Rwyf7HIYrsVQILMUmYYx55Tjt2pkTg56qY,9019
37
39
  npcpy/sql/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
40
+ npcpy/sql/ai_function_tools.py,sha256=ZCpjVHtaMRdL2dXxbQy5NhhjtPrVViGT1wyEl8ADrks,7755
41
+ npcpy/sql/database_ai_adapters.py,sha256=CMlNGOhmJZhGB47RPvLIMqB61m_eYPVg1lwx42_b0jQ,6865
42
+ npcpy/sql/database_ai_functions.py,sha256=XQCmaFOE1lNCnwrLTNpotYOlv6sx41bb8hxZI_sqpy8,6335
38
43
  npcpy/sql/model_runner.py,sha256=hJZ7hx2mwI-8DAh47Q6BwOsRjx30-HzebL4ajEUO4HA,5734
39
44
  npcpy/sql/npcsql.py,sha256=AOffqGK1Jwjf_tgE8qo4KY5KXE21ylm1dhRSKS0Cesw,13587
45
+ npcpy/sql/sql_model_compiler.py,sha256=G-0dpTlgzc-dXy9YEsdWGjO8xaQ3jFNbc6oUja1Ef4M,5364
40
46
  npcpy/work/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
41
47
  npcpy/work/desktop.py,sha256=F3I8mUtJp6LAkXodsh8hGZIncoads6c_2Utty-0EdDA,2986
42
48
  npcpy/work/plan.py,sha256=QyUwg8vElWiHuoS-xK4jXTxxHvkMD3VkaCEsCmrEPQk,8300
43
49
  npcpy/work/trigger.py,sha256=P1Y8u1wQRsS2WACims_2IdkBEar-iBQix-2TDWoW0OM,9948
44
- npcpy-1.2.21.dist-info/licenses/LICENSE,sha256=j0YPvce7Ng9e32zYOu0EmXjXeJ0Nwawd0RA3uSGGH4E,1070
45
- npcpy-1.2.21.dist-info/METADATA,sha256=UoIjUU728VK_7CUbvzVqNnXkK8AjPEPwagfXBIdO1ko,26025
46
- npcpy-1.2.21.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
47
- npcpy-1.2.21.dist-info/top_level.txt,sha256=g1pbSvrOOncB74Bg5-J0Olg4V0A5VzDw-Xz5YObq8BU,6
48
- npcpy-1.2.21.dist-info/RECORD,,
50
+ npcpy-1.2.23.dist-info/licenses/LICENSE,sha256=j0YPvce7Ng9e32zYOu0EmXjXeJ0Nwawd0RA3uSGGH4E,1070
51
+ npcpy-1.2.23.dist-info/METADATA,sha256=-Q2qDZh5Khizw_PcCZnLCyIkMmFyworeU8gPzgEhBpY,29885
52
+ npcpy-1.2.23.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
53
+ npcpy-1.2.23.dist-info/top_level.txt,sha256=g1pbSvrOOncB74Bg5-J0Olg4V0A5VzDw-Xz5YObq8BU,6
54
+ npcpy-1.2.23.dist-info/RECORD,,
File without changes