ipman-cli 0.1.73__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ipman/core/package.py ADDED
@@ -0,0 +1,188 @@
1
+ """IP package data models, parsing, and serialization."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import re
6
+ from dataclasses import dataclass, field
7
+ from pathlib import Path
8
+ from typing import Any
9
+
10
+ import yaml
11
+
12
+ # ---------------------------------------------------------------------------
13
+ # Exceptions
14
+ # ---------------------------------------------------------------------------
15
+
16
class ValidationError(Exception):
    """Raised when an IP file fails validation (non-mapping YAML or a
    missing required field)."""
18
+
19
+
20
+ # ---------------------------------------------------------------------------
21
+ # Data models
22
+ # ---------------------------------------------------------------------------
23
+
24
@dataclass
class SkillRef:
    """Reference to a skill in an IP package.

    A skill is either resolved by *name* alone, or carries an inline
    ``source`` mapping pointing directly at its content.
    """

    # Skill identifier (required).
    name: str
    # Inline source mapping; None means "resolve by name".
    # NOTE(review): exact schema of this mapping is not visible here — confirm.
    source: dict[str, Any] | None = None
    # Optional human-readable summary.
    description: str | None = None

    @property
    def is_direct_source(self) -> bool:
        """True when the skill carries an inline source mapping."""
        return self.source is not None
35
+
36
+
37
@dataclass
class DependencyRef:
    """Reference to a dependency IP package."""

    # Dependency package name (required).
    name: str
    # Optional version constraint string; None means "any version".
    version: str | None = None
    # Optional direct source location; None means "resolve via registry".
    # NOTE(review): source format (URL? path?) is not visible here — confirm.
    source: str | None = None

    @property
    def is_direct_source(self) -> bool:
        """True when the dependency carries a direct source location."""
        return self.source is not None
48
+
49
+
50
@dataclass
class IPPackage:
    """Parsed representation of an .ip.yaml file.

    Produced by parse_ip_file and serialized back by dump_ip_file.
    """

    # Package name as read from the YAML "name" field.
    name: str
    # Version, always stored as str (parse_ip_file coerces via str()).
    version: str
    # Human-readable summary; empty string when absent from the file.
    description: str
    # Skills bundled in this package.
    skills: list[SkillRef]
    # Other IP packages this one depends on (may be empty).
    dependencies: list[DependencyRef] = field(default_factory=list)
    # Optional author mapping as read from YAML; keys are not validated here.
    author: dict[str, str] | None = None
    # Optional license identifier string.
    license: str | None = None
61
+
62
+
63
+ # ---------------------------------------------------------------------------
64
+ # Name validation
65
+ # ---------------------------------------------------------------------------
66
+
67
# Valid package/skill names: 3-50 chars of lowercase letters, digits and
# hyphens, starting and ending with a letter or digit.
# NOTE(review): defined here but not applied by parse_ip_file in this
# file — confirm where name validation is meant to happen.
_NAME_RE = re.compile(r"^[a-z0-9][a-z0-9\-]{1,48}[a-z0-9]$")
68
+
69
+
70
+ # ---------------------------------------------------------------------------
71
+ # Parsing
72
+ # ---------------------------------------------------------------------------
73
+
74
def parse_ip_file(
    path: Path | None = None,
    *,
    content: str | None = None,
) -> IPPackage:
    """Parse an .ip.yaml file into an IPPackage.

    Provide either *path* (reads from disk) or *content* (raw YAML string);
    *content* takes precedence when both are given.

    Returns:
        The parsed IPPackage.

    Raises:
        ValueError: If neither *path* nor *content* is provided.
        ValidationError: If the YAML is not a mapping, a required field
            is missing, or a skill/dependency entry is malformed.
    """
    if content is None:
        if path is None:
            msg = "Either path or content must be provided"
            raise ValueError(msg)
        content = path.read_text(encoding="utf-8")

    data = yaml.safe_load(content)
    if not isinstance(data, dict):
        msg = "IP file must be a YAML mapping"
        raise ValidationError(msg)

    # Required top-level fields, checked uniformly.
    for required in ("name", "version", "skills"):
        if required not in data:
            msg = f"Missing required field: {required}"
            raise ValidationError(msg)

    skills = [_parse_skill_entry(s) for s in data.get("skills") or []]
    dependencies = [_parse_dep_entry(d) for d in data.get("dependencies") or []]

    return IPPackage(
        name=data["name"],
        version=str(data["version"]),
        description=data.get("description", ""),
        skills=skills,
        dependencies=dependencies,
        author=data.get("author"),
        license=data.get("license"),
    )


def _parse_skill_entry(raw: Any) -> SkillRef:
    """Convert one raw skills[] item into a SkillRef.

    Previously a non-mapping or name-less entry surfaced as a bare
    KeyError/TypeError; raise the package's own ValidationError instead.
    """
    if not isinstance(raw, dict) or "name" not in raw:
        msg = f"Invalid skill entry: {raw!r}"
        raise ValidationError(msg)
    return SkillRef(
        name=raw["name"],
        source=raw.get("source"),
        description=raw.get("description"),
    )


def _parse_dep_entry(raw: Any) -> DependencyRef:
    """Convert one raw dependencies[] item into a DependencyRef.

    Same hardening as _parse_skill_entry: malformed entries raise
    ValidationError rather than an unrelated exception type.
    """
    if not isinstance(raw, dict) or "name" not in raw:
        msg = f"Invalid dependency entry: {raw!r}"
        raise ValidationError(msg)
    return DependencyRef(
        name=raw["name"],
        version=raw.get("version"),
        source=raw.get("source"),
    )
136
+
137
+
138
+ # ---------------------------------------------------------------------------
139
+ # Serialization
140
+ # ---------------------------------------------------------------------------
141
+
142
# Header template prepended to every serialized .ip.yaml file.
# dump_ip_file fills it via _HEADER.format(filename=path.name); the
# previous literal had no {filename} placeholder, so the formatted
# filename was silently dropped from the install hint.
_HEADER = """\
# IpMan Intelligence Package — https://github.com/twisker/ipman
# Install: ipman install {filename}
"""
146
+
147
+
148
def dump_ip_file(pkg: IPPackage, path: Path) -> None:
    """Serialize an IPPackage to an .ip.yaml file with header comment.

    Output key order is fixed: name, version, description, then
    author/license (only when set), skills, and dependencies (the key
    is omitted entirely when there are none).
    """
    doc: dict[str, Any] = {
        "name": pkg.name,
        "version": pkg.version,
        "description": pkg.description,
    }
    if pkg.author:
        doc["author"] = pkg.author
    if pkg.license:
        doc["license"] = pkg.license

    # One mapping per skill; unset optional keys are omitted.
    doc["skills"] = [_skill_to_mapping(s) for s in pkg.skills]

    if pkg.dependencies:
        doc["dependencies"] = [_dep_to_mapping(d) for d in pkg.dependencies]

    rendered = yaml.dump(
        doc, default_flow_style=False, allow_unicode=True, sort_keys=False,
    )
    header = _HEADER.format(filename=path.name)
    path.write_text(header + "\n" + rendered, encoding="utf-8")


def _skill_to_mapping(skill: SkillRef) -> dict[str, Any]:
    """Render one SkillRef as a plain mapping (name, description?, source?)."""
    out: dict[str, Any] = {"name": skill.name}
    if skill.description:
        out["description"] = skill.description
    if skill.source:
        out["source"] = skill.source
    return out


def _dep_to_mapping(dep: DependencyRef) -> dict[str, Any]:
    """Render one DependencyRef as a plain mapping (name, version?, source?)."""
    out: dict[str, Any] = {"name": dep.name}
    if dep.version:
        out["version"] = dep.version
    if dep.source:
        out["source"] = dep.source
    return out
ipman/core/resolver.py ADDED
@@ -0,0 +1,160 @@
1
+ """Dependency resolver — version matching, recursive resolution."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import re
6
+ from collections.abc import Callable
7
+ from dataclasses import dataclass
8
+ from typing import Any
9
+
10
+ # ---------------------------------------------------------------------------
11
+ # Exceptions
12
+ # ---------------------------------------------------------------------------
13
+
14
class CyclicDependencyError(Exception):
    """Raised when a cyclic dependency is detected during resolution
    (a package is reached again while it is still being visited)."""
16
+
17
+
18
+ # ---------------------------------------------------------------------------
19
+ # Version constraint
20
+ # ---------------------------------------------------------------------------
21
+
22
# Optional operator prefix (==, >=, ^, ~) followed by MAJOR.MINOR[.PATCH].
# A bare version means an exact ("==") match.
_CONSTRAINT_RE = re.compile(
    r"^(?P<op>==|>=|\^|~)?(?P<major>\d+)\.(?P<minor>\d+)(?:\.(?P<patch>\d+))?$"
)


@dataclass(frozen=True)
class VersionConstraint:
    """Parsed version constraint."""

    op: str  # "==", ">=", "^", "~"
    major: int
    minor: int
    patch: int


def parse_constraint(spec: str) -> VersionConstraint:
    """Parse a version constraint string like '>=1.2.0', '^1.3.0', '~1.3.0', '1.2.0'.

    An explicit '==1.2.0' spelling is also accepted: the op field always
    documented "==" as a valid operator, but the previous regex rejected
    it as input (it could only arise as the bare-version default).

    Raises:
        ValueError: If *spec* is not a recognized constraint.
    """
    m = _CONSTRAINT_RE.match(spec.strip())
    if not m:
        msg = f"Invalid version constraint: '{spec}'"
        raise ValueError(msg)
    return VersionConstraint(
        op=m.group("op") or "==",
        major=int(m.group("major")),
        minor=int(m.group("minor")),
        # Patch is optional in the spec; default to 0.
        patch=int(m.group("patch") or 0),
    )
50
+
51
+
52
def _parse_version(version: str) -> tuple[int, int, int]:
    """Split a plain version string into its (major, minor, patch) numbers.

    Only the first three dot-separated components are read; missing
    minor/patch components default to 0.
    """
    components = version.strip().split(".")
    numbers = [int(part) for part in components[:3]]
    numbers += [0] * (3 - len(numbers))
    return numbers[0], numbers[1], numbers[2]
59
+
60
+
61
def version_matches(candidate: str, constraint: str | None) -> bool:
    """Check if *candidate* version satisfies *constraint*.

    Returns True if constraint is None (no restriction).
    """
    if constraint is None:
        return True

    parsed = parse_constraint(constraint)
    cand = _parse_version(candidate)
    cand_major, cand_minor, _ = cand
    floor = (parsed.major, parsed.minor, parsed.patch)

    if parsed.op == "==":
        return cand == floor
    if parsed.op == ">=":
        return cand >= floor

    # Both ^ and ~ require at least the base version.
    if cand < floor:
        return False

    if parsed.op == "^":
        # ^M.N.P => >=M.N.P, <(M+1).0.0 (when M>0)
        #           >=0.N.P, <0.(N+1).0 (when M==0)
        if parsed.major == 0:
            return cand_major == 0 and cand_minor == parsed.minor
        return cand_major == parsed.major

    if parsed.op == "~":
        # ~M.N.P => >=M.N.P, <M.(N+1).0
        return cand_major == parsed.major and cand_minor == parsed.minor

    # Unknown operator: treat as no match.
    return False
96
+
97
+
98
+ # ---------------------------------------------------------------------------
99
+ # Dependency resolution
100
+ # ---------------------------------------------------------------------------
101
+
102
+ # Type alias for the fetcher callback:
103
+ # fetcher(name, version_constraint) -> dict with keys: version, skills, dependencies
104
+ PackageFetcher = Callable[[str, str | None], dict[str, Any]]
105
+
106
+
107
+ def resolve_dependencies(
108
+ name: str,
109
+ version: str | None,
110
+ fetcher: PackageFetcher,
111
+ ) -> list[dict[str, Any]]:
112
+ """Recursively resolve all skills from a package and its dependencies.
113
+
114
+ Args:
115
+ name: Root package name.
116
+ version: Version constraint for the root (or None).
117
+ fetcher: Callback that returns package data given (name, version).
118
+ Must return dict with 'skills' and 'dependencies' keys.
119
+
120
+ Returns:
121
+ Deduplicated list of skill dicts (order: root-first DFS).
122
+
123
+ Raises:
124
+ CyclicDependencyError: If a dependency cycle is detected.
125
+ """
126
+ seen_skills: set[str] = set()
127
+ result: list[dict[str, Any]] = []
128
+ visiting: set[str] = set() # cycle detection (DFS stack)
129
+ visited: set[str] = set() # already fully resolved
130
+
131
+ def _visit(pkg_name: str, pkg_version: str | None) -> None:
132
+ if pkg_name in visiting:
133
+ raise CyclicDependencyError(
134
+ f"Cyclic dependency detected: {pkg_name}"
135
+ )
136
+ if pkg_name in visited:
137
+ return
138
+
139
+ visiting.add(pkg_name)
140
+
141
+ data = fetcher(pkg_name, pkg_version)
142
+
143
+ # Collect skills (deduplicate by name)
144
+ for skill in data.get("skills", []):
145
+ sname = skill["name"]
146
+ if sname not in seen_skills:
147
+ seen_skills.add(sname)
148
+ result.append(skill)
149
+
150
+ # Recurse into dependencies
151
+ for dep in data.get("dependencies", []):
152
+ dep_name = dep.get("name") or dep
153
+ dep_version = dep.get("version") if isinstance(dep, dict) else None
154
+ _visit(dep_name, dep_version)
155
+
156
+ visiting.discard(pkg_name)
157
+ visited.add(pkg_name)
158
+
159
+ _visit(name, version)
160
+ return result
ipman/core/security.py ADDED
@@ -0,0 +1,84 @@
1
+ """Security enforcement — decision matrix and logging."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from datetime import datetime, timezone
6
+ from enum import Enum
7
+ from pathlib import Path
8
+
9
+ from ipman.core.config import SecurityMode
10
+ from ipman.core.vetter import RiskLevel
11
+
12
+
13
class Action(Enum):
    """Install-time action determined by risk level x security mode.

    The string values double as the action names written to the
    security log (see log_security_event, which records action.value).
    """

    INSTALL = "INSTALL"            # proceed with installation
    WARN_INSTALL = "WARN_INSTALL"  # proceed, but surface a warning
    WARN_CONFIRM = "WARN_CONFIRM"  # warn; presumably requires user confirmation — confirm with caller
    BLOCK = "BLOCK"                # refuse the installation
20
+
21
+
22
# Decision matrix: _MATRIX[security_mode][risk_level] -> Action
# Modes grow stricter top to bottom: PERMISSIVE never blocks (warns only
# on EXTREME); DEFAULT blocks only EXTREME; CAUTIOUS blocks HIGH and
# EXTREME; STRICT additionally requires confirmation for MEDIUM.
_MATRIX: dict[SecurityMode, dict[RiskLevel, Action]] = {
    SecurityMode.PERMISSIVE: {
        RiskLevel.LOW: Action.INSTALL,
        RiskLevel.MEDIUM: Action.INSTALL,
        RiskLevel.HIGH: Action.INSTALL,
        RiskLevel.EXTREME: Action.WARN_INSTALL,
    },
    SecurityMode.DEFAULT: {
        RiskLevel.LOW: Action.INSTALL,
        RiskLevel.MEDIUM: Action.INSTALL,
        RiskLevel.HIGH: Action.WARN_INSTALL,
        RiskLevel.EXTREME: Action.BLOCK,
    },
    SecurityMode.CAUTIOUS: {
        RiskLevel.LOW: Action.INSTALL,
        RiskLevel.MEDIUM: Action.WARN_INSTALL,
        RiskLevel.HIGH: Action.BLOCK,
        RiskLevel.EXTREME: Action.BLOCK,
    },
    SecurityMode.STRICT: {
        RiskLevel.LOW: Action.INSTALL,
        RiskLevel.MEDIUM: Action.WARN_CONFIRM,
        RiskLevel.HIGH: Action.BLOCK,
        RiskLevel.EXTREME: Action.BLOCK,
    },
}
49
+
50
+
51
def decide_action(
    risk_level: RiskLevel,
    security_mode: SecurityMode,
) -> Action:
    """Determine the install action for a given risk + mode.

    Pure table lookup into _MATRIX; a KeyError would indicate a mode or
    level missing from the matrix.
    """
    actions_for_mode = _MATRIX[security_mode]
    return actions_for_mode[risk_level]
57
+
58
+
59
+ def log_security_event(
60
+ *,
61
+ log_path: Path,
62
+ skill_name: str,
63
+ source: str,
64
+ risk_level: RiskLevel,
65
+ action: Action,
66
+ details: str = "",
67
+ ) -> None:
68
+ """Append a security event to the log file."""
69
+ log_path.parent.mkdir(parents=True, exist_ok=True)
70
+
71
+ ts = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
72
+ parts = [
73
+ ts,
74
+ action.value,
75
+ skill_name,
76
+ f"source={source}",
77
+ f"risk={risk_level.name}",
78
+ ]
79
+ if details:
80
+ parts.append(f"reason={details}")
81
+
82
+ line = " ".join(parts)
83
+ with log_path.open("a", encoding="utf-8") as f:
84
+ f.write(line + "\n")
ipman/core/vetter.py ADDED
@@ -0,0 +1,193 @@
1
+ """Skill risk assessment engine — red flag detection + risk classification."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import re
6
+ from dataclasses import dataclass, field
7
+ from enum import IntEnum
8
+
9
+
10
class RiskLevel(IntEnum):
    """Risk severity levels (ordered for comparison).

    IntEnum so members compare and sort numerically:
    LOW < MEDIUM < HIGH < EXTREME (used by assess_risk's max()).
    """

    LOW = 0
    MEDIUM = 1
    HIGH = 2
    EXTREME = 3
17
+
18
+
19
@dataclass
class RiskFlag:
    """A single detected risk indicator."""

    # Stable identifier, e.g. "network-exfil"; deduplicated per scan.
    id: str
    # Human-readable explanation (content scans use "Detected: <snippet>").
    description: str
    # Severity contributed by this indicator alone.
    severity: RiskLevel
26
+
27
+
28
@dataclass
class VetReport:
    """Complete risk assessment report."""

    # Skill the report is about.
    skill_name: str
    # Overall level: highest severity among flags; LOW when there are none.
    risk_level: RiskLevel
    # Human-readable verdict string (drawn from _VERDICTS).
    verdict: str
    # All indicators that contributed to the level.
    flags: list[RiskFlag] = field(default_factory=list)
36
+
37
+
38
+ # ---------------------------------------------------------------------------
39
+ # Content scanning patterns
40
+ # ---------------------------------------------------------------------------
41
+
42
# Each entry is (flag id, compiled pattern, severity of a match).
# Several entries may share one flag id (e.g. "obfuscated-code" below);
# vet_skill_content reports each id at most once per scan.
_PATTERNS: list[tuple[str, re.Pattern[str], RiskLevel]] = [
    # Network exfiltration tooling mentioned anywhere in the text
    (
        "network-exfil",
        re.compile(r"\b(curl|wget)\b", re.IGNORECASE),
        RiskLevel.HIGH,
    ),
    # Raw IP address in URLs (bypasses DNS-based reputation)
    (
        "raw-ip",
        re.compile(
            r"https?://\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}",
        ),
        RiskLevel.HIGH,
    ),
    # Credential / sensitive path access (~/.ssh, ~/.aws, ...)
    (
        "credential-access",
        re.compile(
            r"~/?\.(ssh|aws|config|gnupg|kube)/",
            re.IGNORECASE,
        ),
        RiskLevel.HIGH,
    ),
    # Obfuscated code (base64 decode)
    (
        "obfuscated-code",
        re.compile(
            r"\bbase64\b.*\b(decode|--decode|-d)\b",
            re.IGNORECASE,
        ),
        RiskLevel.HIGH,
    ),
    # Dynamic code evaluation — same flag id as the base64 variant above
    (
        "obfuscated-code",
        re.compile(r"\beval\s*\(", re.IGNORECASE),
        RiskLevel.HIGH,
    ),
    # Privilege escalation
    (
        "privilege-escalation",
        re.compile(r"\bsudo\b", re.IGNORECASE),
        RiskLevel.HIGH,
    ),
    # Agent memory file access — only EXTREME-severity pattern in the list
    (
        "memory-access",
        re.compile(
            r"\b(MEMORY\.md|SOUL\.md|IDENTITY\.md|USER\.md)\b",
        ),
        RiskLevel.EXTREME,
    ),
]
96
+
97
+
98
def vet_skill_content(content: str) -> list[RiskFlag]:
    """Scan skill content for red flags.

    Analyzes text (SKILL.md, code files, etc.) for suspicious
    patterns that indicate potential security threats.

    Args:
        content: Raw text of the skill to scan.

    Returns:
        One RiskFlag per distinct flag id whose pattern matched,
        in _PATTERNS order.
    """
    flags: list[RiskFlag] = []
    seen_ids: set[str] = set()

    for flag_id, pattern, severity in _PATTERNS:
        # Several patterns can share one flag id (e.g. the two
        # "obfuscated-code" variants); report each id at most once.
        # Checking seen_ids first also skips the regex entirely for
        # ids that already fired — the old code ran pattern.search()
        # twice per hit (once for the condition, again for the snippet).
        if flag_id in seen_ids:
            continue
        match = pattern.search(content)
        if match is None:
            continue
        seen_ids.add(flag_id)
        flags.append(RiskFlag(
            id=flag_id,
            description=f"Detected: {match.group(0)}",
            severity=severity,
        ))

    return flags
122
+
123
+
124
+ # ---------------------------------------------------------------------------
125
+ # Metadata-based checks
126
+ # ---------------------------------------------------------------------------
127
+
128
# Community-report count at or above which a skill is flagged HIGH.
_REPORTS_THRESHOLD = 3
# Install count below which a skill is flagged MEDIUM (low reputation).
_LOW_INSTALLS_THRESHOLD = 10


def vet_skill_metadata(
    *,
    author: str,
    installs: int,
    reports: int,
) -> list[RiskFlag]:
    """Check metadata-based risk signals.

    Flags a skill with many community reports (HIGH) and/or a low
    install count (MEDIUM); returns an empty list when neither applies.
    """
    findings: list[RiskFlag] = []

    if reports >= _REPORTS_THRESHOLD:
        findings.append(
            RiskFlag(
                id="high-reports",
                description=f"{reports} community reports",
                severity=RiskLevel.HIGH,
            )
        )

    if installs < _LOW_INSTALLS_THRESHOLD:
        findings.append(
            RiskFlag(
                id="low-reputation",
                description=f"Low install count ({installs}) by {author}",
                severity=RiskLevel.MEDIUM,
            )
        )

    return findings
159
+
160
+
161
+ # ---------------------------------------------------------------------------
162
+ # Risk assessment
163
+ # ---------------------------------------------------------------------------
164
+
165
# Human-readable verdict per overall risk level.
_VERDICTS = {
    RiskLevel.LOW: "SAFE TO INSTALL",
    RiskLevel.MEDIUM: "INSTALL WITH CAUTION",
    RiskLevel.HIGH: "DO NOT INSTALL",
    RiskLevel.EXTREME: "DO NOT INSTALL",
}


def assess_risk(
    flags: list[RiskFlag],
    *,
    skill_name: str,
) -> VetReport:
    """Compute overall risk level from collected flags.

    With no flags the skill is LOW risk; otherwise the overall level is
    the highest severity among the flags.

    Args:
        flags: Indicators gathered from content/metadata vetting.
        skill_name: Name recorded on the report.

    Returns:
        A VetReport with the overall level, its verdict, and a copy of
        the flags list.
    """
    # max(..., default=...) folds the empty-flags special case into the
    # general path, removing the duplicated VetReport construction.
    # Severities are already RiskLevel members, so the previous
    # RiskLevel(highest) re-wrapping was redundant. list(flags) stores a
    # defensive copy so later caller mutations don't alter the report.
    highest = max((f.severity for f in flags), default=RiskLevel.LOW)
    return VetReport(
        skill_name=skill_name,
        risk_level=highest,
        verdict=_VERDICTS[highest],
        flags=list(flags),
    )
ipman/hub/__init__.py ADDED
File without changes