seif-cli 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- seif/__init__.py +1 -0
- seif/__main__.py +3 -0
- seif/analysis/__init__.py +1 -0
- seif/analysis/artifact_analyzer.py +472 -0
- seif/analysis/audio_analyzer.py +197 -0
- seif/analysis/giza_engine.py +287 -0
- seif/analysis/pattern_comparator.py +237 -0
- seif/analysis/phi_damping.py +347 -0
- seif/analysis/physical_constants.py +362 -0
- seif/analysis/qr_decoder.py +337 -0
- seif/analysis/seed_optimizer.py +236 -0
- seif/analysis/stance_detector.py +166 -0
- seif/analysis/transcompiler.py +206 -0
- seif/bridge/__init__.py +1 -0
- seif/bridge/ai_bridge.py +301 -0
- seif/bridge/conversation_fetcher.py +215 -0
- seif/bridge/seif_session.py +203 -0
- seif/bridge/telegram_bot.py +274 -0
- seif/cli/__init__.py +1 -0
- seif/cli/cli.py +242 -0
- seif/cli/main.py +445 -0
- seif/constants.py +189 -0
- seif/context/__init__.py +1 -0
- seif/context/context_bridge.py +179 -0
- seif/context/context_importer.py +178 -0
- seif/context/context_manager.py +386 -0
- seif/context/context_qr.py +417 -0
- seif/context/evolution.py +240 -0
- seif/context/git_context.py +352 -0
- seif/context/telemetry.py +228 -0
- seif/core/__init__.py +1 -0
- seif/core/resonance_encoding.py +283 -0
- seif/core/resonance_gate.py +179 -0
- seif/core/resonance_signal.py +331 -0
- seif/core/transfer_function.py +296 -0
- seif/core/triple_gate.py +200 -0
- seif/generators/__init__.py +1 -0
- seif/generators/circuit_generator.py +310 -0
- seif/generators/composite_renderer.py +371 -0
- seif/generators/dual_qr.py +268 -0
- seif/generators/fractal_qrcode.py +323 -0
- seif/generators/glyph_renderer.py +279 -0
- seif/generators/harmonic_audio.py +341 -0
- seif/generators/kicad_exporter.py +250 -0
- seif/generators/spice_netlist.py +472 -0
- seif_cli-0.1.0.dist-info/METADATA +393 -0
- seif_cli-0.1.0.dist-info/RECORD +51 -0
- seif_cli-0.1.0.dist-info/WHEEL +5 -0
- seif_cli-0.1.0.dist-info/entry_points.txt +2 -0
- seif_cli-0.1.0.dist-info/licenses/LICENSE +33 -0
- seif_cli-0.1.0.dist-info/top_level.txt +2 -0
seif/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"""S.E.I.F. — Spiral Encoding Interoperability Framework"""
|
seif/__main__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"""S.E.I.F. — Spiral Encoding Interoperability Framework"""
|
|
@@ -0,0 +1,472 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Artifact Analyzer — Extract Geometric Patterns from Ancient Sacred Structures
|
|
3
|
+
|
|
4
|
+
Pipeline:
|
|
5
|
+
1. Image preprocessing (grayscale, edge detection)
|
|
6
|
+
2. Line detection (Hough Transform) → dominant angles
|
|
7
|
+
3. Circle detection (Hough Circles) → radii and centers
|
|
8
|
+
4. Symmetry analysis (rotational autocorrelation)
|
|
9
|
+
5. φ-ratio detection (consecutive distance ratios)
|
|
10
|
+
6. Fractal dimension estimation (box-counting)
|
|
11
|
+
7. 3-6-9 classification of detected geometry
|
|
12
|
+
|
|
13
|
+
Input: Image file (JPG/PNG) of ancient artifact
|
|
14
|
+
Output: ArtifactGeometry dataclass with all extracted parameters
|
|
15
|
+
"""
|
|
16
|
+
|
|
17
|
+
import math
|
|
18
|
+
from dataclasses import dataclass, field
|
|
19
|
+
from pathlib import Path
|
|
20
|
+
from typing import Optional
|
|
21
|
+
|
|
22
|
+
import numpy as np
|
|
23
|
+
import cv2
|
|
24
|
+
|
|
25
|
+
from seif.core.resonance_gate import digital_root, classify_phase, HarmonicPhase
|
|
26
|
+
from seif.constants import PHI, PHI_INVERSE
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
@dataclass
class DetectedLine:
    """A line segment found by the Hough transform, in pixel coordinates."""
    x1: float  # segment start point
    y1: float
    x2: float  # segment end point
    y2: float
    angle_deg: float  # orientation folded into 0-90° (see _detect_lines)
    length: float  # Euclidean length in pixels
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
@dataclass
class DetectedCircle:
    """A circle found by the Hough circle transform, in pixel coordinates."""
    cx: float  # center x
    cy: float  # center y
    radius: float  # radius in pixels
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
@dataclass
class SymmetryProfile:
    """Rotational-symmetry scores produced by _analyze_symmetry."""
    dominant_folds: list[int]  # e.g. [3, 6] means 3-fold and 6-fold detected (overlap score > 0.3)
    strongest_fold: int  # fold with the highest overlap score
    rotational_score: float  # 0-1, how rotationally symmetric (score of strongest fold)
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
@dataclass
class PhiAnalysis:
    """Golden-ratio statistics over the longest detected line lengths."""
    ratios: list[float]  # consecutive distance ratios (longest lines, descending)
    mean_ratio: float  # arithmetic mean of the ratios
    phi_deviation: float  # |mean_ratio - φ| / φ
    phi_aligned: bool  # deviation < 0.15 (threshold set in _analyze_phi_ratios)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
@dataclass
class FractalAnalysis:
    """Box-counting estimate of how fractal the edge set is."""
    box_counting_dimension: float  # 1.0 = line, 2.0 = filled plane, 1.5-1.8 = fractal
    is_fractal: bool  # dimension strictly between 1.2 and 1.95 (see _estimate_fractal_dimension)
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
@dataclass
class HarmonicProfile:
    """Classification of the line-angle distribution (hexagonal vs orthogonal)."""
    angles_harmonic_pct: float  # fraction of angles in the 55-65° or 25-35° bands (detected angles are 0-90°)
    angles_entropic_pct: float  # fraction of angles in the 85-95° band
    dominant_phase: HarmonicPhase  # DYNAMICS / STABILIZATION / ENTROPY, per _classify_harmonic thresholds
    harmonic_score: float  # 0-1: harmonic share of (harmonic + entropic) angle mass
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
@dataclass
class ArtifactGeometry:
    """Complete geometric analysis of an ancient artifact image.

    Produced by analyze(); consumed by describe() and generate_overlay().
    """
    source_path: str  # path the image was loaded from
    image_size: tuple[int, int]  # (width, height) in pixels

    # Raw detections
    lines: list[DetectedLine]
    circles: list[DetectedCircle]
    edge_density: float  # ratio of edge pixels to total

    # Higher-level analysis
    angle_histogram: dict[str, int]  # "0-30", "30-60", "60-90" bin counts
    symmetry: SymmetryProfile
    phi: PhiAnalysis
    fractal: FractalAnalysis
    harmonic: HarmonicProfile

    # Circuit-equivalent parameters (suggestions derived from the analysis)
    suggested_trace_angle: float  # dominant harmonic angle for routing (60° or 90°)
    suggested_layer_count: int  # based on symmetry folds, clamped to 3..9
    suggested_node_positions: list[tuple[float, float]]  # convergence points, normalized 0-1
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def _preprocess(image_path: str) -> tuple[np.ndarray, np.ndarray, np.ndarray]:
    """Load an artifact photo and return (color, grayscale, edge) images.

    Raises:
        FileNotFoundError: if OpenCV cannot read the file.
    """
    color = cv2.imread(image_path)
    if color is None:
        raise FileNotFoundError(f"Cannot load image: {image_path}")
    grayscale = cv2.cvtColor(color, cv2.COLOR_BGR2GRAY)
    # Smooth before Canny so edges follow structure rather than sensor
    # noise / uneven lighting common in artifact photos.
    smoothed = cv2.GaussianBlur(grayscale, (5, 5), 0)
    edge_map = cv2.Canny(smoothed, 50, 150)
    return color, grayscale, edge_map
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
def _detect_lines(edges: np.ndarray, min_length: int = 30) -> list[DetectedLine]:
    """Find line segments with the probabilistic Hough transform.

    Each hit is wrapped as a DetectedLine; the angle is folded into the
    0-90° range (orientation only, direction discarded).
    """
    raw = cv2.HoughLinesP(
        edges, 1, np.pi / 180, threshold=50,
        minLineLength=min_length, maxLineGap=10,
    )
    if raw is None:
        return []

    detected: list[DetectedLine] = []
    for segment in raw:
        x1, y1, x2, y2 = segment[0]
        run = x2 - x1
        rise = y2 - y1
        orientation = math.degrees(math.atan2(abs(rise), abs(run)))  # 0-90°
        span = math.sqrt(run * run + rise * rise)
        detected.append(DetectedLine(x1, y1, x2, y2, orientation, span))

    return detected
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def _detect_circles(gray: np.ndarray) -> list[DetectedCircle]:
    """Locate circular features via the Hough gradient method.

    maxRadius=0 means "no upper bound" in OpenCV's API.
    """
    found = cv2.HoughCircles(
        gray, cv2.HOUGH_GRADIENT, dp=1.2, minDist=30,
        param1=100, param2=50, minRadius=10, maxRadius=0,
    )
    if found is None:
        return []
    return [DetectedCircle(cx, cy, radius) for cx, cy, radius in found[0]]
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def _angle_histogram(lines: list[DetectedLine]) -> dict[str, int]:
    """Count line orientations in three 30° buckets ("0-30", "30-60", "60-90")."""
    histogram = {"0-30": 0, "30-60": 0, "60-90": 0}
    for detected in lines:
        if detected.angle_deg < 30:
            bucket = "0-30"
        elif detected.angle_deg < 60:
            bucket = "30-60"
        else:
            bucket = "60-90"
        histogram[bucket] += 1
    return histogram
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
def _analyze_symmetry(edges: np.ndarray) -> SymmetryProfile:
    """Score rotational symmetry by overlapping the edge map with rotated copies.

    For each candidate fold n, the image is rotated by 360/n degrees about its
    center; the fraction of edge mass coinciding with the original is the score.
    """
    rows, cols = edges.shape
    pivot = (cols // 2, rows // 2)
    edge_mass = max(np.sum(edges), 1)  # guard against a blank edge map

    fold_scores: dict[int, float] = {}
    for fold in (2, 3, 4, 5, 6, 8, 9, 12):
        rotation = cv2.getRotationMatrix2D(pivot, 360.0 / fold, 1.0)
        turned = cv2.warpAffine(edges, rotation, (cols, rows))
        # Bitwise AND keeps only edge pixels present in both images.
        fold_scores[fold] = np.sum(edges & turned) / edge_mass

    strongest = max(fold_scores, key=fold_scores.get)
    return SymmetryProfile(
        dominant_folds=sorted(f for f, s in fold_scores.items() if s > 0.3),
        strongest_fold=strongest,
        rotational_score=fold_scores[strongest],
    )
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
def _analyze_phi_ratios(lines: list[DetectedLine]) -> PhiAnalysis:
    """Test whether the longest line lengths step down in golden-ratio (φ) steps.

    Returns a neutral result (deviation 1.0, not aligned) when fewer than
    three lines are available.
    """
    if len(lines) < 3:
        return PhiAnalysis([], 0, 1.0, False)

    # Longest first, top 20 only — each ratio is (longer / next longer).
    lengths = sorted((l.length for l in lines), reverse=True)[:20]
    ratios = [a / b for a, b in zip(lengths, lengths[1:]) if b > 0]

    if not ratios:
        return PhiAnalysis([], 0, 1.0, False)

    mean_ratio = sum(ratios) / len(ratios)
    deviation = abs(mean_ratio - PHI) / PHI

    return PhiAnalysis(
        ratios=ratios[:10],
        mean_ratio=mean_ratio,
        phi_deviation=deviation,
        phi_aligned=deviation < 0.15,
    )
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
def _estimate_fractal_dimension(edges: np.ndarray) -> FractalAnalysis:
    """Estimate the fractal dimension of the edge set via box counting.

    Counts occupied boxes at dyadic box sizes (4, 8, 16, ... up to a quarter
    of the shorter image side) and fits a line to the log-log plot; the
    dimension is the negated slope.

    Returns:
        FractalAnalysis; dimension 1.0 (non-fractal) when there are too few
        edge points or too few box sizes for a meaningful fit.
    """
    points = np.argwhere(edges > 0)
    if len(points) < 10:
        return FractalAnalysis(1.0, False)

    max_size = min(edges.shape) // 4
    sizes: list[int] = []
    counts: list[int] = []

    box_size = 4
    while box_size <= max_size:
        # Occupied boxes = distinct (row, col) cells after integer division.
        # PERF: vectorized replacement for the original per-point Python loop
        # (one np.unique call per scale instead of O(points) set inserts);
        # the counts are identical.
        cells = points // box_size
        sizes.append(box_size)
        counts.append(len(np.unique(cells, axis=0)))
        box_size *= 2

    if len(sizes) < 3:
        return FractalAnalysis(1.0, False)

    # Linear regression on the log-log plot:
    # D = -slope of log(count) vs log(size).
    coeffs = np.polyfit(np.log(sizes), np.log(counts), 1)
    dimension = -coeffs[0]

    return FractalAnalysis(
        box_counting_dimension=round(dimension, 3),
        is_fractal=1.2 < dimension < 1.95,
    )
|
|
248
|
+
|
|
249
|
+
|
|
250
|
+
def _classify_harmonic(lines: list[DetectedLine]) -> HarmonicProfile:
    """Rate the angle distribution: hexagonal (30°/60°) vs orthogonal (90°)."""
    if not lines:
        return HarmonicProfile(0, 0, HarmonicPhase.ENTROPY, 0)

    angles = [l.angle_deg for l in lines]
    # Harmonic: near 60°, or near 30° (half of 60°). Entropic: near 90°.
    harmonic_hits = sum(1 for a in angles if 55 <= a <= 65 or 25 <= a <= 35)
    entropic_hits = sum(1 for a in angles if 85 <= a <= 95)

    total = len(angles)
    harm_pct = harmonic_hits / total
    entr_pct = entropic_hits / total

    if harm_pct > 0.3:
        phase = HarmonicPhase.DYNAMICS        # strong 6-fold tendency
    elif harm_pct > 0.15:
        phase = HarmonicPhase.STABILIZATION   # weaker 3-fold tendency
    else:
        phase = HarmonicPhase.ENTROPY

    # Small epsilon keeps the score defined when both percentages are zero.
    score = harm_pct / (harm_pct + entr_pct + 0.001)

    return HarmonicProfile(harm_pct, entr_pct, phase, round(score, 3))
|
|
281
|
+
|
|
282
|
+
|
|
283
|
+
def _find_convergence_nodes(lines: list[DetectedLine],
                            img_size: tuple[int, int]) -> list[tuple[float, float]]:
    """Find points where multiple lines converge (processing nodes).

    Line endpoints are binned into a coarse grid (~1/10 of the short image
    side); any cell holding 4+ endpoints becomes a node at those endpoints'
    centroid, normalized to 0-1 coordinates.

    Args:
        lines: detected line segments.
        img_size: (width, height) of the source image in pixels.

    Returns:
        Up to 9 (x, y) tuples in [0, 1] (capped per the 3-6-9 principle).
    """
    if len(lines) < 4:
        return []

    # Collect all endpoints
    points = []
    for l in lines:
        points.append((l.x1, l.y1))
        points.append((l.x2, l.y2))

    # Cluster nearby points (simple grid-based)
    w, h = img_size
    # BUGFIX: min(w, h) // 10 is 0 for images narrower/shorter than 10 px,
    # which made the division below raise ZeroDivisionError. Clamp to >= 1.
    grid_size = max(1, min(w, h) // 10)
    clusters: dict[tuple[int, int], list[tuple[float, float]]] = {}
    for px, py in points:
        key = (int(px / grid_size), int(py / grid_size))
        clusters.setdefault(key, []).append((px, py))

    # Nodes = clusters with 4+ line endpoints
    nodes = []
    for pts in clusters.values():
        if len(pts) >= 4:
            cx = sum(p[0] for p in pts) / len(pts)
            cy = sum(p[1] for p in pts) / len(pts)
            nodes.append((cx / w, cy / h))  # normalize to 0-1

    return nodes[:9]  # max 9 nodes (3-6-9 principle)
|
|
316
|
+
|
|
317
|
+
|
|
318
|
+
def analyze(image_path: str) -> ArtifactGeometry:
    """Full geometric analysis of an artifact image.

    Args:
        image_path: Path to JPG/PNG image of ancient artifact

    Returns:
        ArtifactGeometry with all extracted parameters
    """
    img, gray, edges = _preprocess(image_path)
    height, width = gray.shape

    detected_lines = _detect_lines(edges)
    detected_circles = _detect_circles(gray)
    symmetry = _analyze_symmetry(edges)
    harmonic = _classify_harmonic(detected_lines)

    # Hexagonal routing when any harmonic (3/6-fold) tendency is present,
    # otherwise fall back to conventional 90° traces.
    if harmonic.dominant_phase != HarmonicPhase.ENTROPY:
        trace_angle = 60.0
    else:
        trace_angle = 90.0

    # Layers: odd count derived from symmetry folds, clamped to the 3..9 range.
    layer_count = max(3, min(9, 2 * len(symmetry.dominant_folds) + 1))

    return ArtifactGeometry(
        source_path=image_path,
        image_size=(width, height),
        lines=detected_lines,
        circles=detected_circles,
        edge_density=round(np.sum(edges > 0) / (height * width), 4),
        angle_histogram=_angle_histogram(detected_lines),
        symmetry=symmetry,
        phi=_analyze_phi_ratios(detected_lines),
        fractal=_estimate_fractal_dimension(edges),
        harmonic=harmonic,
        suggested_trace_angle=trace_angle,
        suggested_layer_count=layer_count,
        suggested_node_positions=_find_convergence_nodes(detected_lines, (width, height)),
    )
|
|
364
|
+
|
|
365
|
+
|
|
366
|
+
def describe(geo: ArtifactGeometry) -> str:
    """Render the analysis as a multi-line, human-readable report."""
    sym = geo.symmetry
    report = [
        "═══ ARTIFACT GEOMETRIC ANALYSIS ═══",
        f"Source: {geo.source_path}",
        f"Image size: {geo.image_size[0]}×{geo.image_size[1]}",
        f"Edge density: {geo.edge_density:.2%}",
        "",
        "Detections:",
        f" Lines: {len(geo.lines)}",
        f" Circles: {len(geo.circles)}",
        f" Angles: {geo.angle_histogram}",
        "",
        "Symmetry:",
        f" Dominant folds: {sym.dominant_folds}",
        f" Strongest: {sym.strongest_fold}-fold ({sym.rotational_score:.2f})",
        "",
        "φ-Ratio Analysis:",
        f" Mean ratio: {geo.phi.mean_ratio:.3f} (φ = {PHI:.3f})",
        f" Deviation: {geo.phi.phi_deviation:.3f}",
        f" φ-aligned: {'✓' if geo.phi.phi_aligned else '✗'}",
        "",
        "Fractal Analysis:",
        f" Box-counting D: {geo.fractal.box_counting_dimension:.3f}",
        f" Is fractal: {'✓' if geo.fractal.is_fractal else '✗'}",
        "",
        "Harmonic Classification:",
        f" Harmonic angles: {geo.harmonic.angles_harmonic_pct:.1%}",
        f" Entropic angles: {geo.harmonic.angles_entropic_pct:.1%}",
        f" Phase: {geo.harmonic.dominant_phase.name}",
        f" Score: {geo.harmonic.harmonic_score}",
        "",
        "Circuit Suggestions:",
        f" Trace angle: {geo.suggested_trace_angle}°",
        f" Layer count: {geo.suggested_layer_count}",
        f" Conv. nodes: {len(geo.suggested_node_positions)}",
    ]
    return "\n".join(report)
|
|
404
|
+
|
|
405
|
+
|
|
406
|
+
def generate_overlay(image_path: str, geo: ArtifactGeometry,
                     output_path: Optional[str] = None) -> str:
    """Render detected geometry on top of the artifact photo.

    Draws:
    - Detected lines colored by phase (gold=harmonic, gray=entropic)
    - Detected circles in blue
    - Convergence nodes as red dots
    - φ-ratio annotations

    Args:
        image_path: source image to draw on.
        geo: analysis result for that image.
        output_path: where to write the overlay; defaults to
            <image dir>/../../output/analysis/overlay_<stem>.png.

    Returns:
        The path the overlay was written to.

    Raises:
        FileNotFoundError: if the source image cannot be read.
    """
    base = cv2.imread(image_path)
    if base is None:
        raise FileNotFoundError(f"Cannot load: {image_path}")

    canvas = base.copy()

    # Lines, colored by the same angle bands _classify_harmonic uses (BGR).
    for seg in geo.lines:
        a = seg.angle_deg
        if 55 <= a <= 65 or 25 <= a <= 35:
            color = (0, 215, 255)      # gold — harmonic
        elif 85 <= a <= 95:
            color = (128, 128, 128)    # gray — entropic
        else:
            color = (200, 150, 50)     # blue-ish — neutral
        cv2.line(canvas, (int(seg.x1), int(seg.y1)),
                 (int(seg.x2), int(seg.y2)), color, 2)

    # Circles
    for ring in geo.circles:
        cv2.circle(canvas, (int(ring.cx), int(ring.cy)),
                   int(ring.radius), (255, 100, 0), 2)

    # Convergence nodes are stored normalized 0-1; scale back to pixels.
    rows, cols = base.shape[:2]
    for nx, ny in geo.suggested_node_positions:
        px, py = int(nx * cols), int(ny * rows)
        cv2.circle(canvas, (px, py), 8, (0, 0, 255), -1)   # filled dot
        cv2.circle(canvas, (px, py), 12, (0, 0, 255), 2)   # outline ring

    # Metric annotations in the top-left corner.
    font = cv2.FONT_HERSHEY_SIMPLEX
    annotations = (
        (f"D={geo.fractal.box_counting_dimension:.2f}", 30),
        (f"phi-dev={geo.phi.phi_deviation:.3f}", 60),
        (f"harm={geo.harmonic.harmonic_score}", 90),
    )
    for text, row in annotations:
        cv2.putText(canvas, text, (10, row), font, 0.7, (255, 255, 255), 2)

    if output_path is None:
        src = Path(image_path)
        output_path = str(src.parent.parent.parent / "output" / "analysis"
                          / f"overlay_{src.stem}.png")

    Path(output_path).parent.mkdir(parents=True, exist_ok=True)
    cv2.imwrite(output_path, canvas)
    return output_path
|
|
461
|
+
|
|
462
|
+
|
|
463
|
+
if __name__ == "__main__":
    import sys

    if len(sys.argv) < 2:
        # BUGFIX: the module lives at seif/analysis/artifact_analyzer.py
        # (see the package RECORD), not directly under seif.
        print("Usage: python -m seif.analysis.artifact_analyzer <image_path>")
        sys.exit(1)

    geo = analyze(sys.argv[1])
    print(describe(geo))
    overlay_path = generate_overlay(sys.argv[1], geo)
    print(f"\nOverlay saved: {overlay_path}")
|
|
@@ -0,0 +1,197 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Audio Analyzer — Extract Resonance Data from Audio Input
|
|
3
|
+
|
|
4
|
+
Analyzes audio files (WAV) through the SEIF pipeline WITHOUT sending
|
|
5
|
+
the raw audio to the AI. Only the resonance metadata is shared.
|
|
6
|
+
|
|
7
|
+
Extracts:
|
|
8
|
+
- Fundamental frequency (via FFT)
|
|
9
|
+
- Harmonic spectrum (peaks and their digital roots)
|
|
10
|
+
- Proximity to 432/438 Hz (Tesla/Giza alignment)
|
|
11
|
+
- Spectral coherence (organized vs chaotic)
|
|
12
|
+
- 3-6-9 classification of frequency components
|
|
13
|
+
|
|
14
|
+
This is the biological input pathway: the human's VOICE carries
|
|
15
|
+
frequency information that the gate can validate.
|
|
16
|
+
"""
|
|
17
|
+
|
|
18
|
+
import math
|
|
19
|
+
from dataclasses import dataclass
|
|
20
|
+
from pathlib import Path
|
|
21
|
+
from typing import Optional
|
|
22
|
+
|
|
23
|
+
import numpy as np
|
|
24
|
+
from scipy.io import wavfile
|
|
25
|
+
from scipy.signal import find_peaks
|
|
26
|
+
|
|
27
|
+
from seif.constants import FREQ_TESLA, FREQ_GIZA, FREQ_SCHUMANN, PHI_INVERSE
|
|
28
|
+
from seif.core.resonance_gate import digital_root, classify_phase, HarmonicPhase
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
@dataclass
class AudioAnalysis:
    """Resonance analysis of an audio file (metadata only — no raw audio)."""
    duration_s: float  # clip length in seconds
    sample_rate: int  # samples per second from the WAV header
    fundamental_hz: float  # strongest spectral peak in the 20-5000 Hz band
    fundamental_root: int  # digital root of int(fundamental_hz)
    fundamental_phase: HarmonicPhase  # classify_phase() of the fundamental root

    # Top harmonic peaks
    peaks: list[dict]  # [{hz, amplitude, root, phase}], strongest first, max 10
    harmonic_count_369: int  # peaks with root 3/6/9

    # Alignment with known frequencies
    tesla_proximity: float  # how close fundamental is to 432 Hz (0-1)
    giza_proximity: float  # how close to 438 Hz (0-1)
    schumann_proximity: float  # how close to 7.83 Hz or a multiple

    # Coherence
    spectral_coherence: float  # 0-1: ratio of energy in harmonic peaks vs noise
    gate_status: str  # "RESONANT" / "PARTIAL" / "ENTROPIC" / "INSUFFICIENT DATA"
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def analyze_audio(filepath: str) -> AudioAnalysis:
    """Analyze a WAV file through the SEIF resonance pipeline.

    The audio is processed LOCALLY. Only metadata leaves this function.

    Args:
        filepath: path to a WAV file readable by scipy.io.wavfile.

    Returns:
        AudioAnalysis with fundamental, harmonic peaks, alignment metrics and
        a gate verdict ("RESONANT" / "PARTIAL" / "ENTROPIC"), or gate_status
        "INSUFFICIENT DATA" for clips too short to analyze.

    Raises:
        FileNotFoundError: if the file does not exist.
    """
    path = Path(filepath)
    if not path.exists():
        raise FileNotFoundError(f"Audio file not found: {filepath}")

    # Read WAV
    sample_rate, data = wavfile.read(str(path))

    # Convert to mono if stereo
    if len(data.shape) > 1:
        data = data.mean(axis=1)

    # Normalize integer PCM to roughly [-1, 1].
    # BUGFIX: the old gate (data.max() > 1) skipped normalization for signals
    # whose only large excursions are negative; use the absolute peak instead.
    data = data.astype(float)
    peak = float(np.max(np.abs(data))) if len(data) else 0.0
    if peak > 1:
        data = data / peak

    duration = len(data) / sample_rate

    # FFT (real input → one-sided spectrum)
    n = len(data)
    fft = np.fft.rfft(data)
    magnitude = np.abs(fft) / n
    freqs = np.fft.rfftfreq(n, 1 / sample_rate)

    # Restrict the peak search to the 20-5000 Hz band.
    min_freq_idx = np.searchsorted(freqs, 20)    # ignore below 20 Hz
    max_freq_idx = np.searchsorted(freqs, 5000)  # ignore above 5000 Hz

    mag_slice = magnitude[min_freq_idx:max_freq_idx]
    freq_slice = freqs[min_freq_idx:max_freq_idx]

    if len(mag_slice) < 10:
        # Too short to analyze
        return AudioAnalysis(
            duration_s=duration, sample_rate=sample_rate,
            fundamental_hz=0, fundamental_root=0,
            fundamental_phase=HarmonicPhase.ENTROPY,
            peaks=[], harmonic_count_369=0,
            tesla_proximity=0, giza_proximity=0, schumann_proximity=0,
            spectral_coherence=0, gate_status="INSUFFICIENT DATA",
        )

    peak_indices, _ = find_peaks(mag_slice, height=mag_slice.max() * 0.1, distance=5)

    if len(peak_indices) == 0:
        # No prominent peaks — fall back to the single strongest bin.
        peak_indices = [np.argmax(mag_slice)]

    # Strongest first; keep the top 10.
    sorted_peaks = sorted(peak_indices, key=lambda i: mag_slice[i], reverse=True)[:10]

    # Fundamental = strongest peak
    fundamental_hz = float(freq_slice[sorted_peaks[0]])
    fund_root = digital_root(int(fundamental_hz)) if fundamental_hz > 0 else 0
    fund_phase = classify_phase(fund_root)

    # Analyze top peaks
    peaks = []
    harmonic_369 = 0
    for idx in sorted_peaks:
        hz = float(freq_slice[idx])
        amp = float(mag_slice[idx])
        root = digital_root(int(hz)) if hz > 0 else 0
        phase = classify_phase(root)
        if phase != HarmonicPhase.ENTROPY:
            harmonic_369 += 1
        peaks.append({
            "hz": round(hz, 2),
            "amplitude": round(amp, 6),
            "root": root,
            "phase": phase.name,
        })

    # Proximity to known reference frequencies (1 = exact, 0 = far off)
    tesla_prox = max(0, 1 - abs(fundamental_hz - FREQ_TESLA) / FREQ_TESLA) if fundamental_hz > 0 else 0
    giza_prox = max(0, 1 - abs(fundamental_hz - FREQ_GIZA) / FREQ_GIZA) if fundamental_hz > 0 else 0

    # Schumann: any peak within 2 Hz of an integer multiple of 7.83 Hz.
    # BUGFIX: checking only the nearest multiple covers the whole analyzed
    # band; the previous loop over multiples 1..59 stopped near 462 Hz while
    # peaks extend to 5000 Hz.
    schumann_prox = 0
    for p in peaks:
        mult = max(1, round(p["hz"] / FREQ_SCHUMANN))
        target = FREQ_SCHUMANN * mult
        if abs(p["hz"] - target) < 2:  # within 2 Hz
            schumann_prox = max(schumann_prox, 1 - abs(p["hz"] - target) / target)

    # Spectral coherence: energy in peaks vs total energy
    total_energy = np.sum(mag_slice ** 2)
    peak_energy = sum(mag_slice[idx] ** 2 for idx in sorted_peaks)
    coherence = peak_energy / total_energy if total_energy > 0 else 0

    # Gate: based on fundamental + coherence
    if fund_phase != HarmonicPhase.ENTROPY and coherence > PHI_INVERSE:
        gate = "RESONANT"
    elif fund_phase != HarmonicPhase.ENTROPY or coherence > 0.3:
        gate = "PARTIAL"
    else:
        gate = "ENTROPIC"

    return AudioAnalysis(
        duration_s=round(duration, 2),
        sample_rate=sample_rate,
        fundamental_hz=round(fundamental_hz, 2),
        fundamental_root=fund_root,
        fundamental_phase=fund_phase,
        peaks=peaks,
        harmonic_count_369=harmonic_369,
        tesla_proximity=round(tesla_prox, 4),
        giza_proximity=round(giza_prox, 4),
        schumann_proximity=round(schumann_prox, 4),
        spectral_coherence=round(coherence, 4),
        gate_status=gate,
    )
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
def describe(analysis: AudioAnalysis) -> str:
    """Render the audio analysis as a multi-line, human-readable report."""
    out = [
        "═══ AUDIO RESONANCE ANALYSIS ═══",
        f"Duration: {analysis.duration_s}s | Sample rate: {analysis.sample_rate} Hz",
        "",
        f"Fundamental: {analysis.fundamental_hz} Hz → root {analysis.fundamental_root} ({analysis.fundamental_phase.name})",
        "",
        "Top Peaks:",
    ]

    # Check-mark peaks whose digital root landed on 3/6/9.
    for p in analysis.peaks[:5]:
        mark = " " if p["phase"] == "ENTROPY" else "✓"
        out.append(f" {mark} {p['hz']:>8.1f} Hz amp={p['amplitude']:.4f} root={p['root']} {p['phase']}")

    out += [
        "",
        f"Harmonic peaks (3/6/9): {analysis.harmonic_count_369}/{len(analysis.peaks)}",
        f"Tesla proximity (432 Hz): {analysis.tesla_proximity:.2%}",
        f"Giza proximity (438 Hz): {analysis.giza_proximity:.2%}",
        f"Schumann proximity: {analysis.schumann_proximity:.2%}",
        f"Spectral coherence: {analysis.spectral_coherence:.2%}",
        "",
        f"Gate: {analysis.gate_status}",
    ]
    return "\n".join(out)
|