npcpy 1.0.26__py3-none-any.whl → 1.2.32__py3-none-any.whl

This diff shows the content changes between publicly released versions of this package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (148)
  1. npcpy/__init__.py +0 -7
  2. npcpy/data/audio.py +16 -99
  3. npcpy/data/image.py +43 -42
  4. npcpy/data/load.py +83 -124
  5. npcpy/data/text.py +28 -28
  6. npcpy/data/video.py +8 -32
  7. npcpy/data/web.py +51 -23
  8. npcpy/ft/diff.py +110 -0
  9. npcpy/ft/ge.py +115 -0
  10. npcpy/ft/memory_trainer.py +171 -0
  11. npcpy/ft/model_ensembler.py +357 -0
  12. npcpy/ft/rl.py +360 -0
  13. npcpy/ft/sft.py +248 -0
  14. npcpy/ft/usft.py +128 -0
  15. npcpy/gen/audio_gen.py +24 -0
  16. npcpy/gen/embeddings.py +13 -13
  17. npcpy/gen/image_gen.py +262 -117
  18. npcpy/gen/response.py +615 -415
  19. npcpy/gen/video_gen.py +53 -7
  20. npcpy/llm_funcs.py +1869 -437
  21. npcpy/main.py +1 -1
  22. npcpy/memory/command_history.py +844 -510
  23. npcpy/memory/kg_vis.py +833 -0
  24. npcpy/memory/knowledge_graph.py +892 -1845
  25. npcpy/memory/memory_processor.py +81 -0
  26. npcpy/memory/search.py +188 -90
  27. npcpy/mix/debate.py +192 -3
  28. npcpy/npc_compiler.py +1672 -801
  29. npcpy/npc_sysenv.py +593 -1266
  30. npcpy/serve.py +3120 -0
  31. npcpy/sql/ai_function_tools.py +257 -0
  32. npcpy/sql/database_ai_adapters.py +186 -0
  33. npcpy/sql/database_ai_functions.py +163 -0
  34. npcpy/sql/model_runner.py +19 -19
  35. npcpy/sql/npcsql.py +706 -507
  36. npcpy/sql/sql_model_compiler.py +156 -0
  37. npcpy/tools.py +183 -0
  38. npcpy/work/plan.py +13 -279
  39. npcpy/work/trigger.py +3 -3
  40. npcpy-1.2.32.dist-info/METADATA +803 -0
  41. npcpy-1.2.32.dist-info/RECORD +54 -0
  42. npcpy/data/dataframes.py +0 -171
  43. npcpy/memory/deep_research.py +0 -125
  44. npcpy/memory/sleep.py +0 -557
  45. npcpy/modes/_state.py +0 -78
  46. npcpy/modes/alicanto.py +0 -1075
  47. npcpy/modes/guac.py +0 -785
  48. npcpy/modes/mcp_npcsh.py +0 -822
  49. npcpy/modes/npc.py +0 -213
  50. npcpy/modes/npcsh.py +0 -1158
  51. npcpy/modes/plonk.py +0 -409
  52. npcpy/modes/pti.py +0 -234
  53. npcpy/modes/serve.py +0 -1637
  54. npcpy/modes/spool.py +0 -312
  55. npcpy/modes/wander.py +0 -549
  56. npcpy/modes/yap.py +0 -572
  57. npcpy/npc_team/alicanto.npc +0 -2
  58. npcpy/npc_team/alicanto.png +0 -0
  59. npcpy/npc_team/assembly_lines/test_pipeline.py +0 -181
  60. npcpy/npc_team/corca.npc +0 -13
  61. npcpy/npc_team/foreman.npc +0 -7
  62. npcpy/npc_team/frederic.npc +0 -6
  63. npcpy/npc_team/frederic4.png +0 -0
  64. npcpy/npc_team/guac.png +0 -0
  65. npcpy/npc_team/jinxs/automator.jinx +0 -18
  66. npcpy/npc_team/jinxs/bash_executer.jinx +0 -31
  67. npcpy/npc_team/jinxs/calculator.jinx +0 -11
  68. npcpy/npc_team/jinxs/edit_file.jinx +0 -96
  69. npcpy/npc_team/jinxs/file_chat.jinx +0 -14
  70. npcpy/npc_team/jinxs/gui_controller.jinx +0 -28
  71. npcpy/npc_team/jinxs/image_generation.jinx +0 -29
  72. npcpy/npc_team/jinxs/internet_search.jinx +0 -30
  73. npcpy/npc_team/jinxs/local_search.jinx +0 -152
  74. npcpy/npc_team/jinxs/npcsh_executor.jinx +0 -31
  75. npcpy/npc_team/jinxs/python_executor.jinx +0 -8
  76. npcpy/npc_team/jinxs/screen_cap.jinx +0 -25
  77. npcpy/npc_team/jinxs/sql_executor.jinx +0 -33
  78. npcpy/npc_team/kadiefa.npc +0 -3
  79. npcpy/npc_team/kadiefa.png +0 -0
  80. npcpy/npc_team/npcsh.ctx +0 -9
  81. npcpy/npc_team/npcsh_sibiji.png +0 -0
  82. npcpy/npc_team/plonk.npc +0 -2
  83. npcpy/npc_team/plonk.png +0 -0
  84. npcpy/npc_team/plonkjr.npc +0 -2
  85. npcpy/npc_team/plonkjr.png +0 -0
  86. npcpy/npc_team/sibiji.npc +0 -5
  87. npcpy/npc_team/sibiji.png +0 -0
  88. npcpy/npc_team/spool.png +0 -0
  89. npcpy/npc_team/templates/analytics/celona.npc +0 -0
  90. npcpy/npc_team/templates/hr_support/raone.npc +0 -0
  91. npcpy/npc_team/templates/humanities/eriane.npc +0 -4
  92. npcpy/npc_team/templates/it_support/lineru.npc +0 -0
  93. npcpy/npc_team/templates/marketing/slean.npc +0 -4
  94. npcpy/npc_team/templates/philosophy/maurawa.npc +0 -0
  95. npcpy/npc_team/templates/sales/turnic.npc +0 -4
  96. npcpy/npc_team/templates/software/welxor.npc +0 -0
  97. npcpy/npc_team/yap.png +0 -0
  98. npcpy/routes.py +0 -958
  99. npcpy/work/mcp_helpers.py +0 -357
  100. npcpy/work/mcp_server.py +0 -194
  101. npcpy-1.0.26.data/data/npcpy/npc_team/alicanto.npc +0 -2
  102. npcpy-1.0.26.data/data/npcpy/npc_team/alicanto.png +0 -0
  103. npcpy-1.0.26.data/data/npcpy/npc_team/automator.jinx +0 -18
  104. npcpy-1.0.26.data/data/npcpy/npc_team/bash_executer.jinx +0 -31
  105. npcpy-1.0.26.data/data/npcpy/npc_team/calculator.jinx +0 -11
  106. npcpy-1.0.26.data/data/npcpy/npc_team/celona.npc +0 -0
  107. npcpy-1.0.26.data/data/npcpy/npc_team/corca.npc +0 -13
  108. npcpy-1.0.26.data/data/npcpy/npc_team/edit_file.jinx +0 -96
  109. npcpy-1.0.26.data/data/npcpy/npc_team/eriane.npc +0 -4
  110. npcpy-1.0.26.data/data/npcpy/npc_team/file_chat.jinx +0 -14
  111. npcpy-1.0.26.data/data/npcpy/npc_team/foreman.npc +0 -7
  112. npcpy-1.0.26.data/data/npcpy/npc_team/frederic.npc +0 -6
  113. npcpy-1.0.26.data/data/npcpy/npc_team/frederic4.png +0 -0
  114. npcpy-1.0.26.data/data/npcpy/npc_team/guac.png +0 -0
  115. npcpy-1.0.26.data/data/npcpy/npc_team/gui_controller.jinx +0 -28
  116. npcpy-1.0.26.data/data/npcpy/npc_team/image_generation.jinx +0 -29
  117. npcpy-1.0.26.data/data/npcpy/npc_team/internet_search.jinx +0 -30
  118. npcpy-1.0.26.data/data/npcpy/npc_team/kadiefa.npc +0 -3
  119. npcpy-1.0.26.data/data/npcpy/npc_team/kadiefa.png +0 -0
  120. npcpy-1.0.26.data/data/npcpy/npc_team/lineru.npc +0 -0
  121. npcpy-1.0.26.data/data/npcpy/npc_team/local_search.jinx +0 -152
  122. npcpy-1.0.26.data/data/npcpy/npc_team/maurawa.npc +0 -0
  123. npcpy-1.0.26.data/data/npcpy/npc_team/npcsh.ctx +0 -9
  124. npcpy-1.0.26.data/data/npcpy/npc_team/npcsh_executor.jinx +0 -31
  125. npcpy-1.0.26.data/data/npcpy/npc_team/npcsh_sibiji.png +0 -0
  126. npcpy-1.0.26.data/data/npcpy/npc_team/plonk.npc +0 -2
  127. npcpy-1.0.26.data/data/npcpy/npc_team/plonk.png +0 -0
  128. npcpy-1.0.26.data/data/npcpy/npc_team/plonkjr.npc +0 -2
  129. npcpy-1.0.26.data/data/npcpy/npc_team/plonkjr.png +0 -0
  130. npcpy-1.0.26.data/data/npcpy/npc_team/python_executor.jinx +0 -8
  131. npcpy-1.0.26.data/data/npcpy/npc_team/raone.npc +0 -0
  132. npcpy-1.0.26.data/data/npcpy/npc_team/screen_cap.jinx +0 -25
  133. npcpy-1.0.26.data/data/npcpy/npc_team/sibiji.npc +0 -5
  134. npcpy-1.0.26.data/data/npcpy/npc_team/sibiji.png +0 -0
  135. npcpy-1.0.26.data/data/npcpy/npc_team/slean.npc +0 -4
  136. npcpy-1.0.26.data/data/npcpy/npc_team/spool.png +0 -0
  137. npcpy-1.0.26.data/data/npcpy/npc_team/sql_executor.jinx +0 -33
  138. npcpy-1.0.26.data/data/npcpy/npc_team/test_pipeline.py +0 -181
  139. npcpy-1.0.26.data/data/npcpy/npc_team/turnic.npc +0 -4
  140. npcpy-1.0.26.data/data/npcpy/npc_team/welxor.npc +0 -0
  141. npcpy-1.0.26.data/data/npcpy/npc_team/yap.png +0 -0
  142. npcpy-1.0.26.dist-info/METADATA +0 -827
  143. npcpy-1.0.26.dist-info/RECORD +0 -139
  144. npcpy-1.0.26.dist-info/entry_points.txt +0 -11
  145. /npcpy/{modes → ft}/__init__.py +0 -0
  146. {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/WHEEL +0 -0
  147. {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/licenses/LICENSE +0 -0
  148. {npcpy-1.0.26.dist-info → npcpy-1.2.32.dist-info}/top_level.txt +0 -0
npcpy/memory/kg_vis.py ADDED
@@ -0,0 +1,833 @@
+
+
+
+import json
+from collections import defaultdict
+import pandas as pd
+import math
+from textwrap import fill
+from chroptiks.plotting_utils import *
+
+
+import matplotlib.pyplot as plt
+from matplotlib.patches import ConnectionPatch, Patch
+import numpy as np
+from pyvis.network import Network
+import networkx as nx
+
+def load_kg_with_pandas(generation, path_prefix="kg_state"):
+    """Loads the new graph structure from CSV files."""
+    kg = {
+        "generation": generation, "facts": [], "concepts": [],
+        "concept_links": [], "fact_to_concept_links": {}
+    }
+    try:
+        nodes_df = pd.read_csv(f'{path_prefix}_gen{generation}_nodes.csv')
+        links_df = pd.read_csv(f'{path_prefix}_gen{generation}_links.csv')
+    except FileNotFoundError as e:
+        print(f"Error: Could not find data files for generation {generation}. {e}")
+        return None
+
+    for _, row in nodes_df.iterrows():
+        if row['type'] == 'fact':
+            kg['facts'].append({'statement': row['id'], 'generation': int(row['generation'])})
+        elif row['type'] == 'concept':
+            kg['concepts'].append({'name': row['id'], 'generation': int(row['generation'])})
+
+    fact_links = defaultdict(list)
+    concept_links = []
+    for _, row in links_df.iterrows():
+        if row['type'] == 'fact_to_concept':
+            fact_links[row['source']].append(row['target'])
+        elif row['type'] == 'concept_to_concept':
+            concept_links.append((row['source'], row['target']))
+
+    kg['fact_to_concept_links'] = dict(fact_links)
+    kg['concept_links'] = concept_links
+
+    print(f"Successfully loaded KG Generation {generation} with pandas.")
+    return kg
+
+def load_changelog_from_json(from_gen, to_gen, path_prefix="changelog"):
+    """Loads the detailed changelog JSON file created during a 'kg_sleep_process'."""
+    filename = f"{path_prefix}_gen{from_gen}_to_{to_gen}.json"
+    try:
+        with open(filename, 'r', encoding='utf-8') as f:
+            changelog = json.load(f)
+        print(f"Successfully loaded changelog from {filename}")
+        return changelog
+    except FileNotFoundError as e:
+        print(f"Error: Could not find changelog file: {e}")
+        return None
+
+
+def visualize_knowledge_graph_final_interactive(kg, filename="knowledge_graph.html"):
+    """Updated to work with the new KG structure"""
+    print(f"Generating interactive graph for Gen {kg['generation']} -> {filename}")
+
+
+    facts = kg.get("facts", [])
+    concepts = kg.get("concepts", [])
+    fact_to_concept_links = kg.get("fact_to_concept_links", {})
+    concept_links = kg.get("concept_links", [])
+
+
+    node_map = {}
+    for fact in facts:
+        node_map[fact['statement']] = fact
+    for concept in concepts:
+        node_map[concept['name']] = concept
+
+
+
+    fact_radius = 300
+    concept_radius = 600
+
+    node_positions = {}
+
+
+    if facts:
+        for i, fact in enumerate(facts):
+            angle = (2 * math.pi * i) / len(facts)
+            node_id = fact['statement']
+            node_positions[node_id] = {
+                'x': fact_radius * math.cos(angle),
+                'y': fact_radius * math.sin(angle)
+            }
+
+
+    if concepts:
+        for i, concept in enumerate(concepts):
+            angle = (2 * math.pi * i) / len(concepts)
+            node_id = concept['name']
+            node_positions[node_id] = {
+                'x': concept_radius * math.cos(angle),
+                'y': concept_radius * math.sin(angle)
+            }
+
+
+    net = Network(height="100vh", width="100%", bgcolor="
+
+
+    for fact in facts:
+        node_id = fact['statement']
+        pos = node_positions.get(node_id, {'x': 0, 'y': 0})
+        title_text = f"<strong>Fact (Gen: {fact.get('generation', 'N/A')})</strong><br><em>{fill(node_id, 50)}</em>"
+        net.add_node(
+            node_id,
+            label=fill(node_id, 25),
+            title=title_text,
+            x=pos['x'],
+            y=pos['y'],
+            color='
+            physics=False
+        )
+
+
+    for concept in concepts:
+        node_id = concept['name']
+        pos = node_positions.get(node_id, {'x': 0, 'y': 0})
+        title_text = f"<strong>Concept (Gen: {concept.get('generation', 'N/A')})</strong><br><em>{fill(node_id, 50)}</em>"
+        net.add_node(
+            node_id,
+            label=fill(node_id, 25),
+            title=title_text,
+            x=pos['x'],
+            y=pos['y'],
+            color='
+            physics=False
+        )
+
+
+    for fact_statement, concept_names in fact_to_concept_links.items():
+        for concept_name in concept_names:
+            if fact_statement in node_map and concept_name in node_map:
+                net.add_edge(fact_statement, concept_name, color="
+
+
+def visualize_growth(k_graphs, filename="growth_chart.png"):
+    """
+    Plots Facts and Concepts as separate lines instead of a stacked area.
+    This allows for independent analysis of each component's growth over time.
+    """
+    gens = [kg['generation'] for kg in k_graphs]
+    facts_counts = [len(kg.get('facts', [])) for kg in k_graphs]
+    concepts_counts = [len(kg.get('concepts', [])) for kg in k_graphs]
+    total_nodes = [facts + concepts for facts, concepts in zip(facts_counts, concepts_counts)]
+
+    plt.figure(figsize=(12, 8))
+
+
+    plt.plot(gens, facts_counts, label='Facts', color='
+    plt.plot(gens, concepts_counts, label='Concepts', color='
+    plt.plot(gens, total_nodes, label='Total Nodes', color='
+
+    plt.xlabel("Generation", fontsize=14)
+    plt.ylabel("Number of Nodes", fontsize=14)
+    plt.legend(loc='upper left', fontsize=20, frameon=False)
+    plt.xticks(gens)
+    plt.ylim(bottom=0)
+
+    plt.savefig(filename, dpi=300, bbox_inches='tight')
+    plt.close()
+    print(f"Saved independent growth chart to {filename}")
+
+
+def visualize_fact_concept_ratio(kg_pairs, filename="fact_concept_ratio.png"):
+    """Updated to work with the new KG structure"""
+    labels, before_ratios, after_ratios = [], [], []
+    for kg_before, kg_after in kg_pairs:
+        facts_before = len(kg_before.get('facts', []))
+        concepts_before = len(kg_before.get('concepts', []))
+        before_ratios.append(facts_before / concepts_before if concepts_before > 0 else 0)
+
+        facts_after = len(kg_after.get('facts', []))
+        concepts_after = len(kg_after.get('concepts', []))
+        after_ratios.append(facts_after / concepts_after if concepts_after > 0 else 0)
+
+        labels.append(f"Gen {kg_before['generation']}→{kg_after['generation']}")
+
+    x = np.arange(len(labels))
+    width = 0.35
+    fig, ax = plt.subplots(figsize=(12, 8))
+    rects1 = ax.bar(x - width/2, before_ratios, width, label='Before Sleep', color='
+    rects2 = ax.bar(x + width/2, after_ratios, width, label='After Sleep', color='
+    ax.set_ylabel("Fact-to-Concept Ratio",)
+    ax.set_xticks(x, labels, fontsize=12, rotation=45, ha="right")
+    ax.legend(fontsize=20, frameon=False)
+    ax.bar_label(rects1, padding=3, fmt='%.2f', fontsize=10)
+    ax.bar_label(rects2, padding=3, fmt='%.2f', fontsize=10)
+    ax.set_ylim(bottom=0, top=max(max(before_ratios, default=0), max(after_ratios, default=0)) * 1.5)
+    fig.tight_layout()
+    plt.savefig(filename, dpi=300)
+    plt.close()
+    print(f"Saved fact-to-concept ratio chart to {filename}")
+def visualize_sleep_process(kg_before, kg_after, filename="sleep_process.png"):
+    """Simple visualization of before/after states"""
+    print(f"\n--- Visualizing Sleep Process: Gen {kg_before['generation']} -> Gen {kg_after['generation']} ---")
+
+    fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(16, 8))
+
+
+    facts_before = len(kg_before.get('facts', []))
+    concepts_before = len(kg_before.get('concepts', []))
+    ax1.pie([facts_before, concepts_before], labels=['Facts', 'Concepts'],
+            colors=['
+
+
+
+    facts_after = len(kg_after.get('facts', []))
+    concepts_after = len(kg_after.get('concepts', []))
+    ax2.pie([facts_after, concepts_after], labels=['Facts', 'Concepts'],
+            colors=['
+
+
+
+    plt.savefig(filename, bbox_inches='tight', dpi=300)
+    plt.close()
+    print(f"Saved sleep process visualization to {filename}")
+
+def _create_networkx_graph_full(kg):
+    """helper to build the complete graph including fact-to-fact links."""
+    G = nx.Graph()
+    concepts = [c['name'] for c in kg.get('concepts', [])]
+    facts = [f['statement'] for f in kg.get('facts', [])]
+    G.add_nodes_from(concepts, type='concept')
+    G.add_nodes_from(facts, type='fact')
+    for fact, linked_concepts in kg.get('fact_to_concept_links', {}).items():
+        for concept in linked_concepts:
+            if G.has_node(fact) and G.has_node(concept): G.add_edge(fact, concept)
+    for c1, c2 in kg.get('concept_links', []):
+        if G.has_node(c1) and G.has_node(c2): G.add_edge(c1, c2)
+
+    for f1, f2 in kg.get('fact_to_fact_links', []):
+        if G.has_node(f1) and G.has_node(f2): G.add_edge(f1, f2)
+    return G
+
+
+def visualize_key_experiences(kg, filename="key_experiences.png"):
+    """
+    Visualizes the full network, highlighting the most central "key experience" facts.
+    """
+    print(f"Generating Key Experience network graph for Gen {kg['generation']} -> {filename}")
+    G = _create_networkx_graph_full(kg)
+    if not G.nodes: return
+
+    facts = {n for n, d in G.nodes(data=True) if d['type'] == 'fact'}
+    concepts = {n for n, d in G.nodes(data=True) if d['type'] == 'concept'}
+
+
+    centrality = nx.degree_centrality(G)
+
+
+    top_facts = sorted(facts, key=lambda n: centrality[n], reverse=True)[:5]
+
+
+    node_colors = []
+    for node in G:
+        if node in top_facts:
+            node_colors.append('
+        elif G.nodes[node]['type'] == 'fact':
+            node_colors.append('
+        else:
+            node_colors.append('
+
+    plt.figure(figsize=(24, 24))
+    pos = nx.spring_layout(G, k=1.5/math.sqrt(G.number_of_nodes()), iterations=100, seed=42)
+
+    nx.draw(G, pos, with_labels=False, node_color=node_colors,
+            node_size=[v * 10000 for v in centrality.values()],
+            width=0.5, edge_color='gray', alpha=0.7)
+
+
+    labels = {n: fill(n, 15) for n in top_facts + list(concepts)}
+    nx.draw_networkx_labels(G, pos, labels=labels, font_size=10)
+
+    plt.axis('off')
+    plt.savefig(filename, dpi=300, bbox_inches='tight')
+    plt.close()
+
+def _create_networkx_graph(kg):
+    """Helper function to convert our KG dict into a NetworkX graph for analysis."""
+    G = nx.Graph()
+    concepts = [c['name'] for c in kg.get('concepts', [])]
+    facts = [f['statement'] for f in kg.get('facts', [])]
+
+    G.add_nodes_from(concepts, type='concept')
+    G.add_nodes_from(facts, type='fact')
+
+    for fact, linked_concepts in kg.get('fact_to_concept_links', {}).items():
+        for concept in linked_concepts:
+            if G.has_node(fact) and G.has_node(concept):
+                G.add_edge(fact, concept)
+
+    for c1, c2 in kg.get('concept_links', []):
+        if G.has_node(c1) and G.has_node(c2):
+            G.add_edge(c1, c2)
+
+    return G
+def visualize_concept_trajectories(kg_history, n_pillars=2, n_risers=3, filename="concept_trajectories.png"):
+    """
+    To ensure pillars and risers are distinct sets, telling a clearer story
+    about the stable backbone vs. major new themes.
+    """
+    print(f"Generating Disjoint Concept Trajectories chart -> {filename}")
+    centrality_df = pd.DataFrame()
+
+    gens = [kg['generation'] for kg in kg_history]
+    for i, kg in enumerate(kg_history):
+        G = _create_networkx_graph(kg)
+        if not G.nodes: continue
+        degree_centrality = nx.degree_centrality(G)
+        concept_centrality = {node: cent for node, cent in degree_centrality.items() if G.nodes[node].get('type') == 'concept'}
+        s = pd.Series(concept_centrality, name=kg['generation'])
+        centrality_df = pd.concat([centrality_df, s.to_frame()], axis=1)
+    centrality_df = centrality_df.transpose().sort_index()
+
+
+    pillars = centrality_df.mean().nlargest(n_pillars).index
+
+
+    riser_candidates = centrality_df.drop(columns=pillars, errors='ignore')
+    centrality_diff = riser_candidates.iloc[-1].fillna(0) - riser_candidates.iloc[0].fillna(0)
+    risers = centrality_diff.nlargest(n_risers).index
+
+    concepts_to_plot = pillars.union(risers)
+
+    plt.figure(figsize=(12, 8))
+    for concept_name in concepts_to_plot:
+        trajectory = centrality_df[concept_name]
+        style = '--' if concept_name in pillars else '-'
+        linewidth = 1.5 if concept_name in pillars else 2.5
+        alpha = 0.8 if concept_name in pillars else 1.0
+        plt.plot(trajectory.index, trajectory.values, marker='o', linestyle=style,
+                 label=fill(concept_name, 20), linewidth=linewidth, alpha=alpha)
+    plt.xlabel("Generation", fontsize=14)
+    plt.ylabel("Degree Centrality", fontsize=14)
+
+    plt.xticks(gens)
+    plt.legend(title="Concepts", bbox_to_anchor=(1.05, 1), loc='upper left')
+
+    plt.ylim(bottom=0)
+    plt.tight_layout()
+    plt.savefig(filename, dpi=300, bbox_inches='tight')
+    plt.close()
+def visualize_associative_richness(kg_history, filename="associative_richness.png"):
+    """Plots the Associative Richness Index (ARI): Avg. Concepts per Fact."""
+    print(f"Generating Associative Richness chart -> {filename}")
+    gens = [kg['generation'] for kg in kg_history]
+    ari_scores = []
+    for kg in kg_history:
+        num_facts = len(kg.get('facts', []))
+        total_links = sum(len(links) for links in kg.get('fact_to_concept_links', {}).values())
+        ari_scores.append(total_links / num_facts if num_facts > 0 else 0)
+
+    plt.figure(figsize=(12, 8))
+    plt.plot(gens, ari_scores, marker='o', linestyle='-', color='
+    plt.axhline(y=1, color='gray', linestyle='--', linewidth=2, label='1-to-1 Mapping Baseline (ARI=1.0)')
+    plt.xlabel("Generation")
+    plt.ylabel("Avg. Concepts per Fact (ARI)")
+    plt.xticks(gens)
+    plt.legend(loc='lower right')
+    plt.ylim(bottom=0)
+    plt.savefig(filename, dpi=300, bbox_inches='tight')
+    plt.close()
+
+def visualize_conceptual_support(kg_history, filename="conceptual_support.png"):
+    """Plots the Conceptual Support Index (CSI): Avg. Facts per Concept."""
+    print(f"Generating Conceptual Support chart -> {filename}")
+    gens = [kg['generation'] for kg in kg_history]
+    csi_scores = []
+    for kg in kg_history:
+        num_concepts = len(kg.get('concepts', []))
+        total_links = sum(len(links) for links in kg.get('fact_to_concept_links', {}).values())
+        csi_scores.append(total_links / num_concepts if num_concepts > 0 else 0)
+
+    plt.figure(figsize=(12, 8))
+    plt.plot(gens, csi_scores, marker='o', linestyle='-', color='
+    plt.xlabel("Generation")
+    plt.ylabel("Avg. Facts per Concept (CSI)")
+    plt.xticks(gens)
+    plt.legend(loc='lower right')
+    plt.ylim(bottom=0)
+    plt.savefig(filename, dpi=300, bbox_inches='tight')
+    plt.close()
+
+def visualize_specialist_concepts(kg_history, num_to_show=8, filename="specialist_concepts.png"):
+    """
+    Plots trajectories of interesting 'middling' concepts by finding those with
+    high variance and peak centrality, while excluding the absolute top global hubs.
+    """
+    print(f"Generating Specialist Concept Trajectories chart -> {filename}")
+    centrality_df = pd.DataFrame()
+    gens = [kg['generation'] for kg in kg_history]
+
+    for kg in kg_history:
+        G = _create_networkx_graph(kg)
+        concept_centrality = {n: nx.degree_centrality(G)[n] for n, d in G.nodes(data=True) if d['type'] == 'concept'} if G.nodes else {}
+        centrality_df = pd.concat([centrality_df, pd.Series(concept_centrality, name=kg['generation'])], axis=1)
+    centrality_df = centrality_df.transpose().sort_index()
+
+
+    top_hubs = centrality_df.mean().nlargest(5).index
+    specialist_candidates = centrality_df.drop(columns=top_hubs, errors='ignore')
+
+
+    notability_scores = specialist_candidates.max() + specialist_candidates.var().fillna(0)
+    concepts_to_plot = notability_scores.nlargest(num_to_show).index
+
+    plt.figure(figsize=(12, 8))
+    for name in concepts_to_plot:
+        trajectory = centrality_df[name]
+        plt.plot(trajectory.index, trajectory.values, marker='o', linestyle='-', label=fill(name, 25))
+
+    plt.xlabel("Generation")
+    plt.ylabel("Degree Centrality")
+
+    plt.xticks(gens)
+
+    plt.legend(title="Specialist Concepts", loc=0, fontsize=17, frameon=False)
+    plt.ylim(bottom=0)
+    plt.tight_layout()
+    plt.savefig(filename, dpi=300, bbox_inches='tight')
+    plt.close()
+
+
+def visualize_static_network(kg, top_n_concepts=25, top_n_facts=50, filename="static_network.png"):
+    """
+    Creates a clean, ordered bipartite graph showing ONLY the most central concepts
+    and facts, preventing visual clutter.
+    """
+    print(f"Generating ordered static network for Gen {kg['generation']} -> {filename}")
+    G = _create_networkx_graph(kg)
+    if not G.nodes: return
+
+
+    concepts = {n for n, d in G.nodes(data=True) if d['type'] == 'concept'}
+    facts = {n for n, d in G.nodes(data=True) if d['type'] == 'fact'}
+
+    top_concepts = sorted(concepts, key=G.degree, reverse=True)[:top_n_concepts]
+    top_facts = sorted(facts, key=G.degree, reverse=True)[:top_n_facts]
+
+
+    SubG = G.subgraph(top_concepts + top_facts)
+
+
+    pos = {}
+    for i, node in enumerate(top_concepts): pos[node] = (-1, np.linspace(1, 0, len(top_concepts))[i])
+    for i, node in enumerate(top_facts): pos[node] = (1, np.linspace(1, 0, len(top_facts))[i])
+
+    plt.figure(figsize=(16, 24))
+
+
+    nx.draw_networkx_nodes(SubG, pos, nodelist=top_facts, node_color='
+    nx.draw_networkx_nodes(SubG, pos, nodelist=top_concepts, node_color='
+
+
+    nx.draw_networkx_edges(SubG, pos, alpha=0.25, width=0.6, edge_color='gray')
+
+
+    concept_labels = {name: fill(name, 20) for name in top_concepts}
+    nx.draw_networkx_labels(SubG, pos, labels=concept_labels, font_size=14, font_family='serif', horizontalalignment='right')
+
+    plt.axis('off')
+    plt.tight_layout(pad=0)
+    plt.savefig(filename, dpi=300, bbox_inches='tight', pad_inches=0.1)
+    plt.close()
+def visualize_concept_ontology_graph(kg, filename="concept_ontology.png"):
+    """
+    Creates a 'bubble map' of the CONCEPT ontology.
+    - Nodes are concepts only.
+    - Edges are only concept-to-concept links.
+    - Node size is proportional to its total degree (including fact links),
+      representing its overall importance.
+    """
+    print(f"Generating Concept Ontology Bubble Map for Gen {kg['generation']} -> {filename}")
+
+
+    Full_G = _create_networkx_graph(kg)
+    if not Full_G.nodes:
+        print(f" - KG {kg['generation']} has no nodes. Skipping.")
+        return
+
+
+    Concept_G = nx.Graph()
+    concept_names = [c['name'] for c in kg.get('concepts', [])]
+    Concept_G.add_nodes_from(concept_names)
+    for c1, c2 in kg.get('concept_links', []):
+        if Concept_G.has_node(c1) and Concept_G.has_node(c2):
+            Concept_G.add_edge(c1, c2)
+
+
+
+    node_sizes = [500 + (Full_G.degree(n) * 50) for n in Concept_G.nodes()]
+
+
+    plt.figure(figsize=(24, 24))
+
+    pos = nx.spring_layout(Concept_G, k=1.5/math.sqrt(Concept_G.number_of_nodes()), iterations=100, seed=42)
+
+    nx.draw_networkx_nodes(Concept_G, pos, node_color='
+    nx.draw_networkx_edges(Concept_G, pos, alpha=0.6, width=1.0, edge_color='gray')
+    nx.draw_networkx_labels(Concept_G, pos, font_size=14, font_family='serif')
+
+    plt.axis('off')
+    plt.tight_layout()
+    plt.savefig(filename, dpi=300, bbox_inches='tight')
+    plt.close()
+
+
+
+def visualize_top_concept_centrality(kg_history, top_n=5, filename="concept_centrality.png"):
+    """
+    Tracks the degree centrality of the top N most important concepts over time.
+    This shows how a thematic backbone emerges and solidifies within the KG.
+    """
+    centrality_data = defaultdict(lambda: [np.nan] * len(kg_history))
+
+    for i, kg in enumerate(kg_history):
+        G = _create_networkx_graph(kg)
+        if not G.nodes: continue
+
+        degree_centrality = nx.degree_centrality(G)
+
+
+        concept_centrality = {node: cent for node, cent in degree_centrality.items() if G.nodes[node]['type'] == 'concept'}
+
+        for concept_name, centrality in concept_centrality.items():
+            centrality_data[concept_name][i] = centrality
+
+
+    sorted_concepts = sorted(centrality_data.keys(), key=lambda c: np.nanmax(centrality_data[c]), reverse=True)
+    top_concepts = sorted_concepts[:top_n]
+
+    plt.figure(figsize=(12, 8))
+    gens = [kg['generation'] for kg in kg_history]
+
+    for concept_name in top_concepts:
+
+        s = pd.Series(centrality_data[concept_name])
+        s_interpolated = s.interpolate(method='linear', limit_direction='forward', axis=0)
+        plt.plot(gens, s_interpolated, marker='o', linestyle='-', label=fill(concept_name, 20))
+
+    plt.xlabel("Generation", fontsize=14)
+    plt.ylabel("Degree Centrality", fontsize=14)
+    plt.xticks(gens)
+    plt.legend(title="Top Concepts", loc=0, frameon=False, fontsize=20)
+    plt.ylim(bottom=0)
+    plt.tight_layout()
+    plt.savefig(filename, dpi=300, bbox_inches='tight')
+    plt.close()
+    print(f"Saved Top Concept Centrality chart to {filename}")
+
+def visualize_lorenz_curve(kg_history, filename="lorenz_curve.png"):
+    """
+    Creates a standalone Lorenz curve plot to compare the degree distribution
+    inequality between the first and final generations.
+    """
+    print(f"Generating Lorenz Curve comparison -> {filename}")
+
+    fig, ax = plt.subplots(figsize=(10, 10))
+
+
+    first_gen_kg = next((kg for kg in kg_history if kg.get('facts')), None)
+    if first_gen_kg:
+        G_first = _create_networkx_graph(first_gen_kg)
+        degrees_first = np.array(sorted([d for n, d in G_first.degree()]))
+        if degrees_first.size > 0:
+            cum_degrees_first = np.cumsum(degrees_first)
+            ax.plot(np.linspace(0, 1, len(degrees_first)), cum_degrees_first / cum_degrees_first[-1],
+                    label=f"Gen {first_gen_kg['generation']} (Start)", color='
+
+
+    last_gen_kg = kg_history[-1]
+    G_last = _create_networkx_graph(last_gen_kg)
+    degrees_last = np.array(sorted([d for n, d in G_last.degree()]))
+    if degrees_last.size > 0:
+        cum_degrees_last = np.cumsum(degrees_last)
+        ax.plot(np.linspace(0, 1, len(degrees_last)), cum_degrees_last / cum_degrees_last[-1],
+                label=f"Gen {last_gen_kg['generation']} (End)", color='
+
+
+    ax.plot([0, 1], [0, 1], linestyle='--', color='black', label='Perfect Equality')
+
+    ax.set_xlabel("Cumulative Share of Nodes", fontsize=14)
+    ax.set_ylabel("Cumulative Share of Connections", fontsize=14)
+    ax.legend(fontsize=12)
+    ax.set_aspect('equal', adjustable='box')
+    plt.tight_layout()
+    plt.savefig(filename, dpi=300, bbox_inches='tight')
+    plt.close()
+def visualize_concept_bubble_chart(kg, filename="concept_bubble_chart.png"):
+    """
+    Creates a 'bubble chart' of the concept ontology, arranged like a word cloud.
+    - The most important concept (highest degree) is fixed at the center.
+    - All other concepts are arranged around it using a force-directed layout.
+    - Node size is proportional to its total degree.
+    - No edges are drawn, for maximum clarity.
+    """
+    print(f"Generating CENTRALIZED Concept Bubble Chart for Gen {kg['generation']} -> {filename}")
+
+
+    Full_G = _create_networkx_graph(kg)
+    if not Full_G.nodes:
+        print(f" - KG {kg['generation']} has no nodes. Skipping.")
+        return
+
+
+    concepts = {node: Full_G.degree(node) for node, data in Full_G.nodes(data=True) if data['type'] == 'concept'}
+
+    if not concepts:
+        print(f" - KG {kg['generation']} has no concepts. Skipping.")
+        return
+
+
+    Concept_G = nx.Graph()
+    Concept_G.add_nodes_from(concepts.keys())
+    for c1, c2 in kg.get('concept_links', []):
+        if Concept_G.has_node(c1) and Concept_G.has_node(c2):
+            Concept_G.add_edge(c1, c2)
+
+
+    central_node = max(concepts, key=concepts.get)
+    fixed_nodes = [central_node]
+    pos_initial = {central_node: (0, 0)}
+
+
+    pos = nx.spring_layout(Concept_G, pos=pos_initial, fixed=fixed_nodes,
+                           k=1.8/math.sqrt(Concept_G.number_of_nodes()),
+                           iterations=200, seed=42)
+
+
+    plt.figure(figsize=(20, 20))
+
+
+    node_sizes = [concepts[node] * 200 for node in Concept_G.nodes()]
+
+    nx.draw_networkx_nodes(Concept_G, pos, node_color='
+
+    for node, (x, y) in pos.items():
+        degree = concepts[node]
+        font_size = 8 + 2 * math.log(1 + degree)
+        plt.text(x, y, fill(node, 15), ha='center', va='center', fontsize=font_size, fontfamily='serif')
+
+    plt.axis('off')
+    plt.tight_layout()
+    plt.savefig(filename, dpi=300, bbox_inches='tight')
+    plt.close()
+
+def visualize_centrality_bubble_chart(kg, node_type="concepts", filename="concept_bubble_chart.png"):
+    """
+    Creates a 'bubble chart' where nodes are arranged purely by importance
+    (degree centrality), with the most important nodes in the center.
+
+    Args:
+        kg: Knowledge graph data
+        node_type: "concepts", "facts", or "both" - which nodes to visualize
+        filename: Output filename
+    """
+    print(f"Generating CENTRALITY-BASED Bubble Chart for {node_type} in Gen {kg['generation']} -> {filename}")
+
+    Full_G = _create_networkx_graph(kg)
+    if not Full_G.nodes:
+        print(f" - KG {kg['generation']} has no nodes. Skipping.")
+        return
+
+    all_nodes = {}
+    for node, data in Full_G.nodes(data=True):
+        if node_type == "concepts" and data['type'] == 'concept':
+            all_nodes[node] = Full_G.degree(node)
+        elif node_type == "facts" and data['type'] == 'fact':
+            all_nodes[node] = Full_G.degree(node)
+        elif node_type == "both":
+            all_nodes[node] = {'degree': Full_G.degree(node), 'type': data['type']}
+
+    if not all_nodes:
+        print(f" - KG {kg['generation']} has no {node_type}. Skipping.")
+        return
+
+    if node_type == "both":
+        sorted_nodes = sorted(all_nodes.items(), key=lambda item: item[1]['degree'], reverse=True)
+    else:
+        sorted_nodes = sorted(all_nodes.items(), key=lambda item: item[1], reverse=True)
+
+    pos = {}
+    if sorted_nodes:
+        central_node, _ = sorted_nodes[0]
+        pos[central_node] = (0, 0)
+
+    radius = 0.25
+    nodes_in_ring = 6
+    node_idx = 1
+
+    while node_idx < len(sorted_nodes):
+        angle_step = 2 * np.pi / nodes_in_ring
+        for i in range(nodes_in_ring):
+            if node_idx >= len(sorted_nodes): break
+            angle = i * angle_step
+            node_name, _ = sorted_nodes[node_idx]
+            pos[node_name] = (radius * np.cos(angle), radius * np.sin(angle))
+            node_idx += 1
+
+        radius += 0.20
+        nodes_in_ring = int(nodes_in_ring * 1.5)
+
+
+    plt.figure(figsize=(20, 20))
+
+    if node_type == "both":
+
+        concept_nodes = [(name, data) for name, data in sorted_nodes if data['type'] == 'concept']
+        fact_nodes = [(name, data) for name, data in sorted_nodes if data['type'] == 'fact']
+
+
+        if concept_nodes:
+            concept_names = [item[0] for item in concept_nodes]
+            concept_sizes = [item[1]['degree'] * 200 for item in concept_nodes]
+            concept_pos = {name: pos[name] for name in concept_names if name in pos}
+            nx.draw_networkx_nodes(None, concept_pos, nodelist=concept_names,
+                                   node_color='
+
+
+        if fact_nodes:
+            fact_names = [item[0] for item in fact_nodes]
+            fact_sizes = [item[1]['degree'] * 100 for item in fact_nodes]
+            fact_pos = {name: pos[name] for name in fact_names if name in pos}
+            nx.draw_networkx_nodes(None, fact_pos, nodelist=fact_names,
+                                   node_color='
+
+
+        from matplotlib.patches import Patch
+        legend_elements = [
+            Patch(facecolor='
+            Patch(facecolor='
+        ]
+        plt.legend(handles=legend_elements, loc='upper right', fontsize=14)
+
+    else:
+
+        node_names = [item[0] for item in sorted_nodes]
+        node_sizes = [item[1] * 200 for item in sorted_nodes]
+
+
+        color = '
+
+        nx.draw_networkx_nodes(None, pos, nodelist=node_names,
+                               node_color=color, node_size=node_sizes, alpha=0.9)
+
+
+    top_nodes = sorted_nodes[:min(20, len(sorted_nodes))]
+
+    for item in top_nodes:
+        node_name = item[0]
+        if node_type == "both":
+            degree = item[1]['degree']
+            node_type_actual = item[1]['type']
+        else:
+            degree = item[1]
+            node_type_actual = node_type.rstrip('s')
+
+        if node_name in pos:
+            x, y = pos[node_name]
+            font_size = max(8, 8 + 2 * np.log1p(degree))
+
+
+            if node_type_actual == 'fact':
+                label = fill(node_name, 10)
+            else:
+                label = fill(node_name, 15)
+
+            plt.text(x, y, label, ha='center', va='center',
+                     fontsize=font_size, fontfamily='serif')
+
+    plt.axis('off')
+
+
+    max_coord = radius * 1.1
+    plt.xlim(-max_coord, max_coord)
+    plt.ylim(-max_coord, max_coord)
+    plt.gca().set_aspect('equal', adjustable='box')
+
+    plt.tight_layout()
+    plt.savefig(filename, dpi=300, bbox_inches='tight')
+    plt.close()
+def visualize_dual_richness_metrics(kg_history, filename="dual_richness_metrics.png"):
+    """
+    Creates a two-panel plot showing ARI and CSI, stacked vertically.
+    """
+    print(f"Generating Dual Richness Metrics chart -> {filename}")
+
+    gens = [kg['generation'] for kg in kg_history]
+    ari_scores = []
+    csi_scores = []
+
+    for kg in kg_history:
+        num_facts = len(kg.get('facts', []))
+        num_concepts = len(kg.get('concepts', []))
+
+        total_links = sum(len(links) for links in kg.get('fact_to_concept_links', {}).values())
+
+        ari_scores.append(total_links / num_facts if num_facts > 0 else 0)
+        csi_scores.append(total_links / num_concepts if num_concepts > 0 else 0)
+
+
+    fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(10, 16), sharex=True)
+
+
+    ax1.plot(gens, ari_scores, marker='o', linestyle='-', color='
+    ax1.axhline(y=1, color='gray', linestyle='--', linewidth=2, label='1-to-1 Mapping Baseline (ARI=1.0)')
+    ax1.set_ylabel("Avg. Concepts per Fact (ARI)", fontsize=14)
+    ax1.legend(loc='lower right')
+    ax1.set_ylim(bottom=0)
+
+
+    ax2.plot(gens, csi_scores, marker='o', linestyle='-', color='
+    ax2.set_xlabel("Generation", fontsize=14)
+    ax2.set_ylabel("Avg. Facts per Concept (CSI)", fontsize=14)
+    ax2.legend(loc='lower right')
+    ax2.set_ylim(bottom=0)
+
+    plt.xticks(gens)
+    fig.tight_layout(pad=2.0)
+    plt.savefig(filename, dpi=300, bbox_inches='tight')
+    plt.close()
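
For orientation, the following is a minimal, hypothetical driver script; it does not ship in the wheel. It sketches how the new module's loader and plotters might be chained together, assuming per-generation exports named kg_state_gen{N}_nodes.csv and kg_state_gen{N}_links.csv exist in the working directory (the default path_prefix of load_kg_with_pandas) and that the module's plotting dependencies (pandas, matplotlib, networkx, pyvis, chroptiks) are installed. The generation numbers 0..3 are illustrative.

# Hypothetical usage sketch (not part of npcpy): drive the kg_vis helpers
# over a series of per-generation knowledge-graph CSV exports.
from npcpy.memory.kg_vis import (
    load_kg_with_pandas,
    visualize_growth,
    visualize_dual_richness_metrics,
    visualize_top_concept_centrality,
)

# Assumed generations; load_kg_with_pandas returns None for missing files.
generations = [0, 1, 2, 3]
kg_history = [kg for g in generations if (kg := load_kg_with_pandas(g)) is not None]

if kg_history:
    # Facts, concepts, and total node counts per generation as line plots.
    visualize_growth(kg_history, filename="growth_chart.png")
    # ARI (avg. concepts per fact) and CSI (avg. facts per concept), stacked panels.
    visualize_dual_richness_metrics(kg_history, filename="dual_richness_metrics.png")
    # Degree-centrality trajectories of the most central concepts across generations.
    visualize_top_concept_centrality(kg_history, top_n=5, filename="concept_centrality.png")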