nodeqmindmap 2.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nodeqmindmap/__init__.py +21 -0
- nodeqmindmap/adapter.py +79 -0
- nodeqmindmap/pipeline.py +198 -0
- nodeqmindmap/types.py +82 -0
- nodeqmindmap-2.2.0.dist-info/METADATA +191 -0
- nodeqmindmap-2.2.0.dist-info/RECORD +8 -0
- nodeqmindmap-2.2.0.dist-info/WHEEL +5 -0
- nodeqmindmap-2.2.0.dist-info/top_level.txt +1 -0
nodeqmindmap/__init__.py
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
"""nodeqmindmap — Python port of the nodeq-mindmap npm package.
|
|
2
|
+
|
|
3
|
+
Provides MindMapNode, PipelineEngine, and JsonSchemaAdapter.
|
|
4
|
+
The D3 visualization layer is browser-only; this port covers data modelling and pipeline logic.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from .types import MindMapNode, DataSample, TransformationRule, PipelinePerformance, PipelineConfig, ExecutionResult, PipelineStats
|
|
8
|
+
from .adapter import JsonSchemaAdapter
|
|
9
|
+
from .pipeline import PipelineEngine
|
|
10
|
+
|
|
11
|
+
__all__ = [
|
|
12
|
+
"MindMapNode",
|
|
13
|
+
"DataSample",
|
|
14
|
+
"TransformationRule",
|
|
15
|
+
"PipelinePerformance",
|
|
16
|
+
"PipelineConfig",
|
|
17
|
+
"ExecutionResult",
|
|
18
|
+
"PipelineStats",
|
|
19
|
+
"JsonSchemaAdapter",
|
|
20
|
+
"PipelineEngine",
|
|
21
|
+
]
|
nodeqmindmap/adapter.py
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
"""JsonSchemaAdapter — mirrors json-adapter.ts / JsonSchemaAdapter in index.ts."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from .types import MindMapNode
|
|
8
|
+
|
|
9
|
+
_KNOWN_KEYS = {"topic", "title", "name", "summary", "description", "skills", "children"}
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class JsonSchemaAdapter:
    """Converts arbitrary Python dicts/lists into a MindMapNode tree."""

    @staticmethod
    def convert_to_standard(data: Any) -> MindMapNode:
        """Build a MindMapNode tree from any Python value.

        Non-dict input collapses into a single leaf node. For dicts, the
        topic/summary/skills/children keys map onto the node itself and every
        remaining non-None key becomes an additional child node.
        """
        if not isinstance(data, dict):
            return MindMapNode(topic="Invalid Data" if data is None else str(data))

        # First truthy candidate wins, mirroring the JS `||` fallback chain.
        topic = data.get("topic") or data.get("title") or data.get("name") or "Root"
        summary = data.get("summary") or data.get("description") or ""

        raw_skills = data.get("skills")
        skills: list[str] = []
        if isinstance(raw_skills, list):
            skills = [str(entry) for entry in raw_skills]

        node = MindMapNode(topic=topic, summary=summary, skills=skills)

        # Explicit children first, converted recursively.
        explicit = data.get("children")
        if isinstance(explicit, list):
            node.children.extend(
                JsonSchemaAdapter.convert_to_standard(entry) for entry in explicit
            )

        # Every other non-None key becomes a child node.
        for key, value in data.items():
            if key in _KNOWN_KEYS or value is None:
                continue
            if isinstance(value, dict):
                node.children.append(
                    MindMapNode(
                        topic=key,
                        children=[JsonSchemaAdapter.convert_to_standard(value)],
                    )
                )
            elif isinstance(value, list):
                node.children.append(JsonSchemaAdapter._list_to_node(key, value))
            else:
                node.children.append(MindMapNode(topic=f"{key}: {value}"))

        return node

    @staticmethod
    def _list_to_node(key: str, value: list) -> MindMapNode:
        """Render a list-valued key as one node with up to 20 item children."""
        holder = MindMapNode(topic=key, summary=f"{len(value)} item(s)")
        for index, item in enumerate(value[:20]):
            if isinstance(item, dict):
                holder.children.append(
                    MindMapNode(
                        topic=f"{key}[{index}]",
                        children=[JsonSchemaAdapter.convert_to_standard(item)],
                    )
                )
            else:
                holder.children.append(MindMapNode(topic=str(item)))
        return holder

    @staticmethod
    def extract_schema(data: Any) -> dict[str, str]:
        """Return a flat field→type mapping from a dict or list-of-dict."""
        if isinstance(data, dict):
            return {name: type(val).__name__ for name, val in data.items()}
        if isinstance(data, list) and data:
            return JsonSchemaAdapter.extract_schema(data[0])
        return {}
|
nodeqmindmap/pipeline.py
ADDED
|
@@ -0,0 +1,198 @@
|
|
|
1
|
+
"""PipelineEngine — mirrors pipeline-engine.ts."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import threading
|
|
6
|
+
from datetime import datetime
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
from .types import (
|
|
10
|
+
DataSample,
|
|
11
|
+
ExecutionResult,
|
|
12
|
+
PipelineConfig,
|
|
13
|
+
PipelinePerformance,
|
|
14
|
+
PipelineStats,
|
|
15
|
+
TransformationRule,
|
|
16
|
+
)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class PipelineEngine:
    """Manages in-memory ETL pipelines.

    A single lock guards the pipeline registry and the id counter, so the
    public methods are safe to call from multiple threads.
    """

    def __init__(self) -> None:
        self._pipelines: dict[str, PipelineConfig] = {}  # pipeline id -> config
        self._next_id = 1  # counter backing generated pipeline ids
        self._lock = threading.Lock()

    def create_pipeline(
        self,
        name: str,
        input_sample: DataSample,
        output_sample: DataSample,
        options: dict[str, Any] | None = None,
    ) -> PipelineConfig:
        """Create and register a new pipeline.

        Args:
            name: Human-readable pipeline name.
            input_sample: Sample describing the pipeline's input side.
            output_sample: Sample describing the pipeline's output side.
            options: Optional dict that may carry "model_config",
                "data_sources", and "etl_options".

        Returns:
            The newly registered PipelineConfig.
        """
        options = options or {}
        model_config: dict[str, Any] = options.get("model_config") or {"type": "auto"}
        data_sources: list[dict[str, Any]] = options.get("data_sources") or []
        etl_config: dict[str, Any] = options.get("etl_options") or {}

        # Allocate the id and publish the pipeline in ONE critical section.
        # (Previously the lock was taken twice, leaving a window in which an
        # id was allocated but the pipeline was not yet visible to readers.)
        with self._lock:
            pipeline_id = f"pipeline_{self._next_id}"
            self._next_id += 1
            pipeline = PipelineConfig(
                id=pipeline_id,
                name=name,
                input_sample=input_sample,
                output_sample=output_sample,
                transformation_rules=[],
                model_config=model_config,
                accuracy=0.85,  # default estimate; no model training happens here
                version="1.0.0",
                created_at=datetime.now(),
                data_sources=data_sources,
                etl_config=etl_config,
                performance=PipelinePerformance(throughput=100, latency=50, error_rate=0.01),
            )
            self._pipelines[pipeline_id] = pipeline

        return pipeline

    def update_pipeline(
        self,
        pipeline_id: str,
        input_sample: DataSample | None = None,
        output_sample: DataSample | None = None,
    ) -> PipelineConfig:
        """Replace the input and/or output sample of an existing pipeline.

        Raises:
            ValueError: If the pipeline does not exist.
        """
        with self._lock:
            pipeline = self._pipelines.get(pipeline_id)
            if pipeline is None:
                raise ValueError(f"Pipeline {pipeline_id} not found")
            if input_sample is not None:
                pipeline.input_sample = input_sample
            if output_sample is not None:
                pipeline.output_sample = output_sample
            return pipeline

    def execute_pipeline(self, pipeline_id: str, input_data: Any) -> ExecutionResult:
        """Run input_data through the pipeline.

        Currently a pass-through stub: the data is returned unchanged,
        wrapped in an ExecutionResult.

        Raises:
            ValueError: If the pipeline does not exist.
        """
        with self._lock:
            if pipeline_id not in self._pipelines:
                raise ValueError(f"Pipeline {pipeline_id} not found")
        return ExecutionResult(
            processed=True,
            data=input_data,
            timestamp=datetime.now(),
            pipeline_id=pipeline_id,
        )

    def get_pipeline(self, pipeline_id: str) -> PipelineConfig | None:
        """Return the pipeline with the given id, or None if unknown."""
        with self._lock:
            return self._pipelines.get(pipeline_id)

    def get_all_pipelines(self) -> list[PipelineConfig]:
        """Return a snapshot list of all registered pipelines."""
        with self._lock:
            return list(self._pipelines.values())

    def get_pipeline_stats(self, pipeline_id: str) -> PipelineStats:
        """Return performance stats for the pipeline.

        Raises:
            ValueError: If the pipeline does not exist.
        """
        with self._lock:
            pipeline = self._pipelines.get(pipeline_id)
        if pipeline is None:
            raise ValueError(f"Pipeline {pipeline_id} not found")
        return PipelineStats(
            id=pipeline.id,
            name=pipeline.name,
            performance=pipeline.performance,
            version=pipeline.version,
            last_executed=datetime.now(),
        )

    def generate_pipeline_code(self, pipeline_id: str) -> str:
        """Return a Python function stub for the pipeline.

        The pipeline name is sanitized into a valid identifier: every
        non-alphanumeric character becomes "_", and a leading digit (or an
        empty name) gets a "_" prefix. The previous implementation only
        replaced spaces, which emitted invalid code for names like "My-ETL".

        Raises:
            ValueError: If the pipeline does not exist.
        """
        with self._lock:
            pipeline = self._pipelines.get(pipeline_id)
        if pipeline is None:
            raise ValueError(f"Pipeline {pipeline_id} not found")
        fn_name = "".join(c if c.isalnum() or c == "_" else "_" for c in pipeline.name)
        if not fn_name or fn_name[0].isdigit():
            fn_name = f"_{fn_name}"
        return (
            f"# Generated pipeline code for {pipeline.name}\n"
            f"def {fn_name}(input_data):\n"
            f"    # TODO: add transformation logic\n"
            f"    return input_data\n"
        )

    def get_pipeline_execution_mode(self, _pipeline_id: str) -> str:
        """Return the execution mode; always "static" in this port."""
        return "static"

    def is_pipeline_static(self, _pipeline_id: str) -> bool:
        """Return True: all pipelines in this port are statically compiled."""
        return True

    def pipeline_to_mindmap(self, pipeline: PipelineConfig) -> "MindMapNode":
        """Convert a PipelineConfig to a MindMapNode tree (import delayed to avoid circular)."""
        from .types import MindMapNode

        mode = self.get_pipeline_execution_mode(pipeline.id)
        is_static = self.is_pipeline_static(pipeline.id)
        mode_label = "Static compiled execution" if is_static else "Dynamic execution"

        children = [
            MindMapNode(
                topic="Execution Mode",
                summary=mode_label,
                skills=[f"Mode: {mode.upper()}"],
            )
        ]

        if pipeline.data_sources:
            src_children = []
            for src in pipeline.data_sources:
                t = src.get("type", "")
                conn = src.get("connection", {})
                # Fall back to "Local" when the source has no host/endpoint.
                label = conn.get("host") or conn.get("apiEndpoint") or "Local"
                src_children.append(MindMapNode(topic=t, summary=label))
            children.append(
                MindMapNode(
                    topic="Data Sources",
                    summary=f"{len(pipeline.data_sources)} connected sources",
                    children=src_children,
                )
            )

        children.append(
            MindMapNode(
                topic="Input Schema",
                summary="Data input configuration",
                skills=list(pipeline.input_sample.schema.keys()),
            )
        )

        rule_children = [
            MindMapNode(
                topic=f"{r.source_field} → {r.target_field}",
                summary=f"{r.type} ({int(r.confidence * 100)}%)",
            )
            for r in pipeline.transformation_rules
        ]
        children.append(
            MindMapNode(
                topic="Transformations",
                summary=f"{len(pipeline.transformation_rules)} rules",
                children=rule_children,
            )
        )

        children.append(
            MindMapNode(
                topic="Output Schema",
                summary="Data output configuration",
                skills=list(pipeline.output_sample.schema.keys()),
            )
        )

        return MindMapNode(
            topic=pipeline.name,
            summary=f"ETL Pipeline - Accuracy: {pipeline.accuracy * 100:.1f}%",
            skills=[f"Version: {pipeline.version}"],
            children=children,
        )
|
nodeqmindmap/types.py
ADDED
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
"""Type definitions for nodeqmindmap — mirrors types.ts."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass, field
|
|
6
|
+
from datetime import datetime
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@dataclass
class MindMapNode:
    """One node in a mind-map tree: a label plus optional detail fields."""

    topic: str  # node label (required)
    summary: str = ""  # optional subtitle / description
    skills: list[str] = field(default_factory=list)  # tag list shown with the node
    children: list["MindMapNode"] = field(default_factory=list)  # nested nodes

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a plain dict, omitting empty optional fields."""
        payload: dict[str, Any] = {"topic": self.topic}
        for attr in ("summary", "skills"):
            value = getattr(self, attr)
            if value:
                payload[attr] = value
        if self.children:
            payload["children"] = [node.to_dict() for node in self.children]
        return payload
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
@dataclass
class DataSample:
    """A sample of data plus its declared schema, used as a pipeline endpoint."""

    format: str  # data format label, e.g. "json"
    schema: dict[str, Any]  # field name -> type-name mapping
    data: Any  # representative payload matching the schema
    metadata: dict[str, Any] = field(default_factory=dict)  # free-form extras
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
@dataclass
class TransformationRule:
    """One field-level mapping applied by a pipeline."""

    source_field: str  # field name in the input sample
    target_field: str  # field name in the output sample
    type: str  # direct | rename | divide | multiply | convert | constant
    confidence: float  # 0–1
    factor: float | None = None  # used by divide/multiply rules
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
@dataclass
class PipelinePerformance:
    """Performance metrics reported for a pipeline."""

    throughput: float = 100.0  # records/sec
    latency: float = 50.0  # ms
    error_rate: float = 0.01  # 0–1
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
@dataclass
class PipelineConfig:
    """Full definition of a registered pipeline (see PipelineEngine)."""

    id: str  # engine-assigned identifier, e.g. "pipeline_1"
    name: str  # human-readable name
    input_sample: DataSample  # describes the pipeline's input side
    output_sample: DataSample  # describes the pipeline's output side
    transformation_rules: list[TransformationRule]
    model_config: dict[str, Any]  # model settings; defaults to {"type": "auto"}
    accuracy: float  # 0–1 estimate assigned at creation
    version: str
    created_at: datetime
    data_sources: list[dict[str, Any]] = field(default_factory=list)  # connection descriptors
    etl_config: dict[str, Any] = field(default_factory=dict)
    performance: PipelinePerformance = field(default_factory=PipelinePerformance)
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
@dataclass
class ExecutionResult:
    """Outcome of a single PipelineEngine.execute_pipeline call."""

    processed: bool  # whether the input was processed
    data: Any  # resulting payload
    timestamp: datetime  # when the execution completed
    pipeline_id: str  # id of the pipeline that ran
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
@dataclass
class PipelineStats:
    """Summary returned by PipelineEngine.get_pipeline_stats."""

    id: str  # pipeline identifier
    name: str  # pipeline name
    performance: PipelinePerformance  # current performance metrics
    version: str  # pipeline version string
    last_executed: datetime  # timestamp of the stats snapshot
|
|
@@ -0,0 +1,191 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: nodeqmindmap
|
|
3
|
+
Version: 2.2.0
|
|
4
|
+
Summary: Python port of nodeq-mindmap: MindMapNode data model and ETL pipeline engine
|
|
5
|
+
License: MIT
|
|
6
|
+
Project-URL: Homepage, https://github.com/workflow-builder/nodeq-mindmap
|
|
7
|
+
Keywords: mindmap,etl,pipeline,visualization,career-map
|
|
8
|
+
Requires-Python: >=3.10
|
|
9
|
+
Description-Content-Type: text/markdown
|
|
10
|
+
|
|
11
|
+
# nodeq-mindmap
|
|
12
|
+
|
|
13
|
+
[](https://badge.fury.io/js/nodeq-mindmap)
|
|
14
|
+
[](https://opensource.org/licenses/MIT)
|
|
15
|
+
|
|
16
|
+
Interactive D3.js mind map visualization library with a built-in ETL pipeline engine. Render career maps, org charts, or any hierarchical JSON data in the browser, and define data transformation pipelines in code.
|
|
17
|
+
|
|
18
|
+
## Features
|
|
19
|
+
|
|
20
|
+
- **Universal JSON rendering** — any JSON object is automatically converted to a mind map
|
|
21
|
+
- **Interactive** — click to expand/collapse nodes, zoom, pan
|
|
22
|
+
- **Themeable** — control colors, font, node size
|
|
23
|
+
- **Framework agnostic** — works with React, Vue, Angular, or vanilla JS
|
|
24
|
+
- **ETL pipeline engine** — define input/output schemas, track transformation rules, and execute pipelines in memory
|
|
25
|
+
- **Pipeline visualization** — render an active pipeline as a mind map to explore its structure
|
|
26
|
+
- **CLI** — generate SVG files from JSON on the command line (headless via jsdom)
|
|
27
|
+
|
|
28
|
+
## Installation
|
|
29
|
+
|
|
30
|
+
```bash
|
|
31
|
+
npm install nodeq-mindmap
|
|
32
|
+
```
|
|
33
|
+
|
|
34
|
+
D3 v7 is a peer dependency:
|
|
35
|
+
|
|
36
|
+
```bash
|
|
37
|
+
npm install d3
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+
## Quick Start
|
|
41
|
+
|
|
42
|
+
### Browser / bundler
|
|
43
|
+
|
|
44
|
+
```javascript
|
|
45
|
+
import { NodeQMindMap } from 'nodeq-mindmap';
|
|
46
|
+
|
|
47
|
+
const map = new NodeQMindMap({
|
|
48
|
+
container: '#my-container', // CSS selector or HTMLElement
|
|
49
|
+
data: {
|
|
50
|
+
topic: 'Software Engineer',
|
|
51
|
+
children: [
|
|
52
|
+
{ topic: 'Frontend', skills: ['React', 'TypeScript'] },
|
|
53
|
+
{ topic: 'Backend', skills: ['Node.js', 'PostgreSQL'] },
|
|
54
|
+
]
|
|
55
|
+
},
|
|
56
|
+
width: 900,
|
|
57
|
+
height: 600,
|
|
58
|
+
onNodeClick: (node) => console.log(node.topic),
|
|
59
|
+
});
|
|
60
|
+
|
|
61
|
+
map.render();
|
|
62
|
+
```
|
|
63
|
+
|
|
64
|
+
### Any JSON shape
|
|
65
|
+
|
|
66
|
+
`JsonSchemaAdapter.convertToStandard()` converts arbitrary JSON into the `MindMapNode` tree format before rendering:
|
|
67
|
+
|
|
68
|
+
```javascript
|
|
69
|
+
import { NodeQMindMap, JsonSchemaAdapter } from 'nodeq-mindmap';
|
|
70
|
+
|
|
71
|
+
const raw = { name: 'My API', version: '2.0', routes: ['/users', '/posts'] };
|
|
72
|
+
const data = JsonSchemaAdapter.convertToStandard(raw);
|
|
73
|
+
|
|
74
|
+
new NodeQMindMap({ container: '#root', data }).render();
|
|
75
|
+
```
|
|
76
|
+
|
|
77
|
+
## API
|
|
78
|
+
|
|
79
|
+
### `new NodeQMindMap(config)`
|
|
80
|
+
|
|
81
|
+
| Option | Type | Default | Description |
|
|
82
|
+
|---|---|---|---|
|
|
83
|
+
| `container` | `string \| HTMLElement` | required | CSS selector or DOM element |
|
|
84
|
+
| `data` | `any` | required | Hierarchical data (see `MindMapNode`) |
|
|
85
|
+
| `width` | `number` | `800` | SVG width in px |
|
|
86
|
+
| `height` | `number` | `600` | SVG height in px |
|
|
87
|
+
| `theme` | `Partial<Theme>` | — | Colors, font, fontSize |
|
|
88
|
+
| `interactive` | `boolean` | `true` | Enable click/hover |
|
|
89
|
+
| `zoomable` | `boolean` | `true` | Enable pan/zoom |
|
|
90
|
+
| `collapsible` | `boolean` | `true` | Click nodes to collapse |
|
|
91
|
+
| `onNodeClick` | `(node) => void` | — | Click callback |
|
|
92
|
+
| `onNodeHover` | `(node) => void` | — | Hover callback |
|
|
93
|
+
|
|
94
|
+
### Instance methods
|
|
95
|
+
|
|
96
|
+
```typescript
|
|
97
|
+
map.render() // Draw the mind map
|
|
98
|
+
map.updateData(data) // Replace data and re-render
|
|
99
|
+
map.updateTheme(theme) // Merge theme and re-render
|
|
100
|
+
map.exportSVG() // Return SVG markup string
|
|
101
|
+
map.destroy() // Remove SVG from DOM
|
|
102
|
+
|
|
103
|
+
// Pipeline helpers
|
|
104
|
+
await map.createDataPipeline(name, inputSample, outputSample, options?)
|
|
105
|
+
map.executePipeline(inputData)
|
|
106
|
+
map.getAllPipelines()
|
|
107
|
+
map.switchToPipeline(pipelineId)
|
|
108
|
+
```
|
|
109
|
+
|
|
110
|
+
### `MindMapNode` shape
|
|
111
|
+
|
|
112
|
+
```typescript
|
|
113
|
+
interface MindMapNode {
|
|
114
|
+
topic: string; // Node label (required)
|
|
115
|
+
summary?: string; // Subtitle shown in detail panels
|
|
116
|
+
skills?: string[]; // Tag list
|
|
117
|
+
children?: MindMapNode[];
|
|
118
|
+
}
|
|
119
|
+
```
|
|
120
|
+
|
|
121
|
+
### `Theme` options
|
|
122
|
+
|
|
123
|
+
```typescript
|
|
124
|
+
{
|
|
125
|
+
nodeColor: string; // default '#4299e1'
|
|
126
|
+
textColor: string; // default '#2d3748'
|
|
127
|
+
linkColor: string; // default '#a0aec0'
|
|
128
|
+
backgroundColor: string; // default '#ffffff'
|
|
129
|
+
fontSize: number; // default 14
|
|
130
|
+
fontFamily: string; // default 'Arial, sans-serif'
|
|
131
|
+
}
|
|
132
|
+
```
|
|
133
|
+
|
|
134
|
+
## Pipeline Engine
|
|
135
|
+
|
|
136
|
+
`PipelineEngine` is a standalone class for defining and running in-memory ETL pipelines. It does not require a browser.
|
|
137
|
+
|
|
138
|
+
```javascript
|
|
139
|
+
import { PipelineEngine } from 'nodeq-mindmap';
|
|
140
|
+
|
|
141
|
+
const engine = new PipelineEngine();
|
|
142
|
+
|
|
143
|
+
const pipeline = await engine.createPipeline(
|
|
144
|
+
'User ETL',
|
|
145
|
+
{ format: 'json', schema: { id: 'number', name: 'string' }, data: { id: 1, name: 'Alice' } },
|
|
146
|
+
{ format: 'json', schema: { userId: 'number', displayName: 'string' }, data: { userId: 1, displayName: 'Alice' } }
|
|
147
|
+
);
|
|
148
|
+
|
|
149
|
+
const result = engine.executePipeline(pipeline.id, { id: 2, name: 'Bob' });
|
|
150
|
+
// { processed: true, data: { id: 2, name: 'Bob' }, timestamp, pipelineId }
|
|
151
|
+
|
|
152
|
+
console.log(engine.generatePipelineCode(pipeline.id));
|
|
153
|
+
// Outputs a TypeScript function stub for the pipeline
|
|
154
|
+
```
|
|
155
|
+
|
|
156
|
+
### `PipelineEngine` methods
|
|
157
|
+
|
|
158
|
+
```typescript
|
|
159
|
+
createPipeline(name, inputSample, outputSample, options?) // async, returns PipelineConfig
|
|
160
|
+
updatePipeline(id, inputSample?, outputSample?) // async
|
|
161
|
+
executePipeline(id, inputData) // sync, returns result object
|
|
162
|
+
getPipeline(id) // returns PipelineConfig | undefined
|
|
163
|
+
getAllPipelines() // returns PipelineConfig[]
|
|
164
|
+
getPipelineStats(id) // returns perf stats
|
|
165
|
+
generatePipelineCode(id) // returns TS function stub
|
|
166
|
+
```
|
|
167
|
+
|
|
168
|
+
## CLI
|
|
169
|
+
|
|
170
|
+
```bash
|
|
171
|
+
# Generate an SVG mind map from a JSON file
|
|
172
|
+
npx nodeq-mindmap generate -i data.json -o output.svg
|
|
173
|
+
|
|
174
|
+
# Create a pipeline definition from input/output samples
|
|
175
|
+
npx nodeq-mindmap create-pipeline -n "My ETL" -i input.json -o output.json
|
|
176
|
+
```
|
|
177
|
+
|
|
178
|
+
The `generate` command uses jsdom to run D3 headlessly — no browser required.
|
|
179
|
+
|
|
180
|
+
## Ports
|
|
181
|
+
|
|
182
|
+
Server-side / language-specific ports of the data model and pipeline engine are available:
|
|
183
|
+
|
|
184
|
+
- **Go** — `packages/go/` — `MindMapNode`, `PipelineEngine`, `JsonSchemaAdapter` as a Go module
|
|
185
|
+
- **Python** — `packages/python/` — `MindMapNode`, `PipelineEngine`, `JsonSchemaAdapter` as Python dataclasses
|
|
186
|
+
|
|
187
|
+
These ports implement the same data structures and pipeline logic without the D3 visualization layer.
|
|
188
|
+
|
|
189
|
+
## License
|
|
190
|
+
|
|
191
|
+
MIT
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
nodeqmindmap/__init__.py,sha256=5pA9aS78LWVg15i-6quvlmlERkicjw1qU9D3voOKOHc,655
|
|
2
|
+
nodeqmindmap/adapter.py,sha256=iF0YAektF6PEewIcP4QysAC4wrw3DYMGWDqxHG9CU9g,2891
|
|
3
|
+
nodeqmindmap/pipeline.py,sha256=RKWENf3T9jix9NxfY153jATUdPa9jxycziTKGtCSF24,6551
|
|
4
|
+
nodeqmindmap/types.py,sha256=-pVhHui5w-xuEygR6hTQLArVgH8f3PapEiqtgrGw3RQ,2013
|
|
5
|
+
nodeqmindmap-2.2.0.dist-info/METADATA,sha256=oWe4nQ1OjYIsvq7NNit74rCDrNro1jKC-1MS1iOm0YI,6426
|
|
6
|
+
nodeqmindmap-2.2.0.dist-info/WHEEL,sha256=aeYiig01lYGDzBgS8HxWXOg3uV61G9ijOsup-k9o1sk,91
|
|
7
|
+
nodeqmindmap-2.2.0.dist-info/top_level.txt,sha256=RJ63Cza9EOgvGXojlNqLCjsr2UENA6fykEGwVnLRf7U,13
|
|
8
|
+
nodeqmindmap-2.2.0.dist-info/RECORD,,
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
nodeqmindmap
|