agmem 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agmem-0.1.1.dist-info/METADATA +656 -0
- agmem-0.1.1.dist-info/RECORD +67 -0
- agmem-0.1.1.dist-info/WHEEL +5 -0
- agmem-0.1.1.dist-info/entry_points.txt +2 -0
- agmem-0.1.1.dist-info/licenses/LICENSE +21 -0
- agmem-0.1.1.dist-info/top_level.txt +1 -0
- memvcs/__init__.py +9 -0
- memvcs/cli.py +178 -0
- memvcs/commands/__init__.py +23 -0
- memvcs/commands/add.py +258 -0
- memvcs/commands/base.py +23 -0
- memvcs/commands/blame.py +169 -0
- memvcs/commands/branch.py +110 -0
- memvcs/commands/checkout.py +101 -0
- memvcs/commands/clean.py +76 -0
- memvcs/commands/clone.py +91 -0
- memvcs/commands/commit.py +174 -0
- memvcs/commands/daemon.py +267 -0
- memvcs/commands/diff.py +157 -0
- memvcs/commands/fsck.py +203 -0
- memvcs/commands/garden.py +107 -0
- memvcs/commands/graph.py +151 -0
- memvcs/commands/init.py +61 -0
- memvcs/commands/log.py +103 -0
- memvcs/commands/mcp.py +59 -0
- memvcs/commands/merge.py +88 -0
- memvcs/commands/pull.py +65 -0
- memvcs/commands/push.py +143 -0
- memvcs/commands/reflog.py +52 -0
- memvcs/commands/remote.py +51 -0
- memvcs/commands/reset.py +98 -0
- memvcs/commands/search.py +163 -0
- memvcs/commands/serve.py +54 -0
- memvcs/commands/show.py +125 -0
- memvcs/commands/stash.py +97 -0
- memvcs/commands/status.py +112 -0
- memvcs/commands/tag.py +117 -0
- memvcs/commands/test.py +132 -0
- memvcs/commands/tree.py +156 -0
- memvcs/core/__init__.py +21 -0
- memvcs/core/config_loader.py +245 -0
- memvcs/core/constants.py +12 -0
- memvcs/core/diff.py +380 -0
- memvcs/core/gardener.py +466 -0
- memvcs/core/hooks.py +151 -0
- memvcs/core/knowledge_graph.py +381 -0
- memvcs/core/merge.py +474 -0
- memvcs/core/objects.py +323 -0
- memvcs/core/pii_scanner.py +343 -0
- memvcs/core/refs.py +447 -0
- memvcs/core/remote.py +278 -0
- memvcs/core/repository.py +522 -0
- memvcs/core/schema.py +414 -0
- memvcs/core/staging.py +227 -0
- memvcs/core/storage/__init__.py +72 -0
- memvcs/core/storage/base.py +359 -0
- memvcs/core/storage/gcs.py +308 -0
- memvcs/core/storage/local.py +182 -0
- memvcs/core/storage/s3.py +369 -0
- memvcs/core/test_runner.py +371 -0
- memvcs/core/vector_store.py +313 -0
- memvcs/integrations/__init__.py +5 -0
- memvcs/integrations/mcp_server.py +267 -0
- memvcs/integrations/web_ui/__init__.py +1 -0
- memvcs/integrations/web_ui/server.py +352 -0
- memvcs/utils/__init__.py +9 -0
- memvcs/utils/helpers.py +178 -0
memvcs/commands/tree.py
ADDED
|
@@ -0,0 +1,156 @@
|
|
|
1
|
+
"""
|
|
2
|
+
agmem tree - Show working directory or commit tree visually.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import argparse
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Optional
|
|
8
|
+
|
|
9
|
+
from ..commands.base import require_repo
|
|
10
|
+
from ..core.objects import Commit, Tree
|
|
11
|
+
from ..core.repository import Repository
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def _build_tree_lines(
|
|
15
|
+
base_path: Path,
|
|
16
|
+
prefix: str = "",
|
|
17
|
+
is_last: bool = True,
|
|
18
|
+
show_hidden: bool = False,
|
|
19
|
+
depth_limit: Optional[int] = None,
|
|
20
|
+
current_depth: int = 0,
|
|
21
|
+
) -> list[str]:
|
|
22
|
+
"""Build tree lines for a directory."""
|
|
23
|
+
lines = []
|
|
24
|
+
if depth_limit is not None and current_depth >= depth_limit:
|
|
25
|
+
return lines
|
|
26
|
+
try:
|
|
27
|
+
entries = sorted(base_path.iterdir(), key=lambda p: (p.is_file(), p.name.lower()))
|
|
28
|
+
except PermissionError:
|
|
29
|
+
return [f"{prefix}└── [permission denied]"]
|
|
30
|
+
|
|
31
|
+
if not show_hidden:
|
|
32
|
+
entries = [e for e in entries if not e.name.startswith(".")]
|
|
33
|
+
|
|
34
|
+
for i, entry in enumerate(entries):
|
|
35
|
+
is_last_entry = i == len(entries) - 1
|
|
36
|
+
connector = "└── " if is_last_entry else "├── "
|
|
37
|
+
lines.append(f"{prefix}{connector}{entry.name}")
|
|
38
|
+
|
|
39
|
+
if entry.is_dir():
|
|
40
|
+
extension = " " if is_last_entry else "│ "
|
|
41
|
+
sub_prefix = prefix + extension
|
|
42
|
+
lines.extend(
|
|
43
|
+
_build_tree_lines(
|
|
44
|
+
entry, sub_prefix, is_last_entry, show_hidden,
|
|
45
|
+
depth_limit, current_depth + 1
|
|
46
|
+
)
|
|
47
|
+
)
|
|
48
|
+
|
|
49
|
+
return lines
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def _build_tree_from_entries(entries: list) -> list[str]:
|
|
53
|
+
"""Build tree lines from commit tree entries (flat path/name/hash)."""
|
|
54
|
+
# Build nested dict: {dir: {subdir: {file: hash}}}
|
|
55
|
+
root: dict = {}
|
|
56
|
+
|
|
57
|
+
for path, name, hash_id in entries:
|
|
58
|
+
parts = (path.split("/") if path else []) + [name]
|
|
59
|
+
current = root
|
|
60
|
+
for i, part in enumerate(parts):
|
|
61
|
+
is_file = i == len(parts) - 1
|
|
62
|
+
if is_file:
|
|
63
|
+
current[part] = hash_id # Store hash for files
|
|
64
|
+
else:
|
|
65
|
+
if part not in current:
|
|
66
|
+
current[part] = {}
|
|
67
|
+
current = current[part]
|
|
68
|
+
|
|
69
|
+
def _render(node: dict, prefix: str = "") -> list[str]:
|
|
70
|
+
lines = []
|
|
71
|
+
# Directories first, then files; alphabetically within each
|
|
72
|
+
items = sorted(node.items(), key=lambda x: (not isinstance(x[1], dict), x[0].lower()))
|
|
73
|
+
for i, (key, val) in enumerate(items):
|
|
74
|
+
is_last = i == len(items) - 1
|
|
75
|
+
conn = "└── " if is_last else "├── "
|
|
76
|
+
ext = " " if is_last else "│ "
|
|
77
|
+
if isinstance(val, dict):
|
|
78
|
+
lines.append(f"{prefix}{conn}{key}/")
|
|
79
|
+
lines.extend(_render(val, prefix + ext))
|
|
80
|
+
else:
|
|
81
|
+
lines.append(f"{prefix}{conn}{key} ({val[:8]})")
|
|
82
|
+
return lines
|
|
83
|
+
|
|
84
|
+
return _render(root)
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
class TreeCommand:
    """Show a visual tree of the working directory or a commit's stored tree."""

    name = "tree"
    help = "Show working directory or commit tree visually"

    @staticmethod
    def add_arguments(parser: argparse.ArgumentParser):
        """Register CLI arguments for the ``tree`` command."""
        parser.add_argument(
            "ref",
            nargs="?",
            default=None,
            help="Commit/branch to show (default: working directory)",
        )
        parser.add_argument(
            "-a", "--all",
            action="store_true",
            help="Show hidden files",
        )
        parser.add_argument(
            "-L", "--depth",
            type=int,
            default=None,
            help="Limit depth of tree",
        )

    @staticmethod
    def execute(args) -> int:
        """Print the tree; return 0 on success, non-zero on lookup errors.

        With ``args.ref`` set, resolves the ref to a commit and renders its
        stored tree entries; otherwise walks the repository's ``current/``
        working directory on disk.
        """
        repo, code = require_repo()
        if code != 0:
            return code

        if args.ref:
            # Render the tree snapshot recorded in the given commit.
            commit_hash = repo.resolve_ref(args.ref)
            if not commit_hash:
                print(f"Error: Unknown revision: {args.ref}")
                return 1

            commit = Commit.load(repo.object_store, commit_hash)
            if not commit:
                print(f"Error: Commit not found: {args.ref}")
                return 1

            tree = Tree.load(repo.object_store, commit.tree)
            if not tree:
                print(f"Error: Tree not found for {args.ref}")
                return 1

            entries = [(e.path, e.name, e.hash) for e in tree.entries]

            print(f"📁 {args.ref} ({commit_hash[:8]})")
            print("│")
            for line in _build_tree_from_entries(entries):
                print(line)
        else:
            # Render the live working directory.
            current_dir = repo.current_dir
            if not current_dir.exists():
                print("Error: current/ directory not found.")
                return 1

            # Fixed: was an f-string with no placeholders (ruff F541).
            print("📁 current/ (working directory)")
            print("│")
            for line in _build_tree_lines(
                current_dir, "", True, args.all, args.depth, 0
            ):
                print(line)

        return 0
|
memvcs/core/__init__.py
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
"""Core agmem modules for object storage and repository management."""
|
|
2
|
+
|
|
3
|
+
from .constants import MEMORY_TYPES
|
|
4
|
+
from .config_loader import load_agmem_config
|
|
5
|
+
from .objects import Blob, Commit, ObjectStore, Tag, Tree
|
|
6
|
+
from .repository import Repository
|
|
7
|
+
from .staging import StagingArea
|
|
8
|
+
from .refs import RefsManager
|
|
9
|
+
|
|
10
|
+
__all__ = [
|
|
11
|
+
"Blob",
|
|
12
|
+
"Commit",
|
|
13
|
+
"MEMORY_TYPES",
|
|
14
|
+
"ObjectStore",
|
|
15
|
+
"RefsManager",
|
|
16
|
+
"Repository",
|
|
17
|
+
"StagingArea",
|
|
18
|
+
"Tag",
|
|
19
|
+
"Tree",
|
|
20
|
+
"load_agmem_config",
|
|
21
|
+
]
|
|
@@ -0,0 +1,245 @@
|
|
|
1
|
+
"""
|
|
2
|
+
agmem config loader - user and repo config with safe credential handling.
|
|
3
|
+
|
|
4
|
+
Loads config from ~/.config/agmem/config.yaml and optionally repo .agmemrc or
|
|
5
|
+
.mem/config.yaml. Credentials are never stored in config; only env var names
|
|
6
|
+
and non-secret options. Use os.getenv() to resolve secrets.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import json
|
|
10
|
+
import os
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from typing import Any, Dict, List, Optional
|
|
13
|
+
|
|
14
|
+
try:
|
|
15
|
+
import yaml
|
|
16
|
+
YAML_AVAILABLE = True
|
|
17
|
+
except ImportError:
|
|
18
|
+
YAML_AVAILABLE = False
|
|
19
|
+
|
|
20
|
+
# Default env var names for S3 (config may override with different var names)
|
|
21
|
+
DEFAULT_S3_ACCESS_KEY_VAR = "AWS_ACCESS_KEY_ID"
|
|
22
|
+
DEFAULT_S3_SECRET_KEY_VAR = "AWS_SECRET_ACCESS_KEY"
|
|
23
|
+
|
|
24
|
+
# Canonical config keys
|
|
25
|
+
CONFIG_CLOUD = "cloud"
|
|
26
|
+
CONFIG_CLOUD_S3 = "s3"
|
|
27
|
+
CONFIG_CLOUD_GCS = "gcs"
|
|
28
|
+
CONFIG_PII = "pii"
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def _user_config_path() -> Path:
|
|
32
|
+
"""Path to user-level config (XDG or ~/.config/agmem/config.yaml)."""
|
|
33
|
+
xdg = os.environ.get("XDG_CONFIG_HOME")
|
|
34
|
+
if xdg:
|
|
35
|
+
return Path(xdg).expanduser() / "agmem" / "config.yaml"
|
|
36
|
+
return Path.home() / ".config" / "agmem" / "config.yaml"
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def _repo_config_paths(repo_root: Path) -> List[Path]:
|
|
40
|
+
"""Paths to repo-level config (first existing wins)."""
|
|
41
|
+
root = Path(repo_root).resolve()
|
|
42
|
+
return [
|
|
43
|
+
root / ".agmemrc",
|
|
44
|
+
root / ".mem" / "config.yaml",
|
|
45
|
+
]
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def _load_yaml(path: Path) -> Dict[str, Any]:
|
|
49
|
+
"""Load a YAML file with safe_load. Returns {} on missing file or error."""
|
|
50
|
+
if not path.exists() or not path.is_file():
|
|
51
|
+
return {}
|
|
52
|
+
if not YAML_AVAILABLE:
|
|
53
|
+
return {}
|
|
54
|
+
try:
|
|
55
|
+
with open(path, "r", encoding="utf-8") as f:
|
|
56
|
+
data = yaml.safe_load(f)
|
|
57
|
+
return data if isinstance(data, dict) else {}
|
|
58
|
+
except Exception:
|
|
59
|
+
return {}
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def _deep_merge(base: Dict[str, Any], override: Dict[str, Any]) -> Dict[str, Any]:
|
|
63
|
+
"""Merge override into base recursively. Override wins; base is not mutated."""
|
|
64
|
+
out = dict(base)
|
|
65
|
+
for k, v in override.items():
|
|
66
|
+
if k in out and isinstance(out[k], dict) and isinstance(v, dict):
|
|
67
|
+
out[k] = _deep_merge(out[k], v)
|
|
68
|
+
else:
|
|
69
|
+
out[k] = v
|
|
70
|
+
return out
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def _resolve_gcs_credentials_path(
|
|
74
|
+
raw_path: Optional[str],
|
|
75
|
+
repo_root: Optional[Path],
|
|
76
|
+
) -> Optional[str]:
|
|
77
|
+
"""
|
|
78
|
+
Resolve GCS credentials_path to absolute and ensure under allowed root.
|
|
79
|
+
Allowed: repo_root (if given) or user home. Returns None if invalid or missing.
|
|
80
|
+
"""
|
|
81
|
+
if not raw_path or not raw_path.strip():
|
|
82
|
+
return None
|
|
83
|
+
path = Path(raw_path.strip()).expanduser()
|
|
84
|
+
if not path.is_absolute():
|
|
85
|
+
base = Path(repo_root).resolve() if repo_root else Path.home()
|
|
86
|
+
path = (base / path).resolve()
|
|
87
|
+
else:
|
|
88
|
+
path = path.resolve()
|
|
89
|
+
if not path.exists() or not path.is_file():
|
|
90
|
+
return None
|
|
91
|
+
allowed_bases: List[Path] = []
|
|
92
|
+
if repo_root:
|
|
93
|
+
allowed_bases.append(Path(repo_root).resolve())
|
|
94
|
+
allowed_bases.append(Path.home())
|
|
95
|
+
for base in allowed_bases:
|
|
96
|
+
try:
|
|
97
|
+
path.resolve().relative_to(base)
|
|
98
|
+
return str(path)
|
|
99
|
+
except ValueError:
|
|
100
|
+
continue
|
|
101
|
+
return None
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def _apply_gcs_credentials_path(config: Dict[str, Any], repo_root: Optional[Path]) -> None:
    """Resolve and validate cloud.gcs.credentials_path in-place; remove if invalid."""
    gcs_section = config.get(CONFIG_CLOUD, {}).get(CONFIG_CLOUD_GCS)
    if not isinstance(gcs_section, dict):
        return
    raw = gcs_section.get("credentials_path")
    if not raw:
        return
    resolved = _resolve_gcs_credentials_path(raw, repo_root)
    cloud = config.setdefault(CONFIG_CLOUD, {})
    target = cloud.setdefault(CONFIG_CLOUD_GCS, dict(gcs_section))
    if resolved:
        target["credentials_path"] = resolved
    else:
        # Drop the key entirely rather than keep an unvalidated path around.
        cloud[CONFIG_CLOUD_GCS] = {
            key: value for key, value in target.items() if key != "credentials_path"
        }
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def load_agmem_config(repo_root: Optional[Path] = None) -> Dict[str, Any]:
    """
    Load the merged agmem config (user + optional repo). No secrets in config.

    User config: ~/.config/agmem/config.yaml (or XDG_CONFIG_HOME/agmem/config.yaml).
    Repo config: repo_root/.agmemrc or repo_root/.mem/config.yaml (first found).
    Merge order: defaults -> user -> repo (repo overrides user).

    cloud.gcs.credentials_path is resolved to an absolute path and validated
    to live under the repo root or the user's home directory.

    Returns:
        Merged config dict.
    """
    merged: Dict[str, Any] = {}
    user_cfg = _load_yaml(_user_config_path())
    if user_cfg:
        merged = _deep_merge(merged, user_cfg)

    if repo_root:
        # Only the first repo-level config file found is applied.
        for candidate in _repo_config_paths(repo_root):
            repo_cfg = _load_yaml(candidate)
            if repo_cfg:
                merged = _deep_merge(merged, repo_cfg)
                break

    _apply_gcs_credentials_path(merged, repo_root)
    return merged
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
def _get_cloud_section(config: Optional[Dict[str, Any]], section: str) -> Optional[Dict[str, Any]]:
    """Return cloud.<section> when present and a dict, else None."""
    if not config:
        return None
    cloud = config.get(CONFIG_CLOUD, {})
    if not isinstance(cloud, dict):
        return None
    candidate = cloud.get(section)
    if isinstance(candidate, dict):
        return candidate
    return None
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
def get_s3_options_from_config(config: Optional[Dict[str, Any]]) -> Dict[str, Any]:
    """
    Build S3 constructor kwargs from config. Resolves credentials from env only.

    Returns dict with keys: region, endpoint_url, access_key, secret_key (and
    optionally lock_table). access_key/secret_key are set only from os.getenv(...).
    """
    opts: Dict[str, Any] = {}
    s3 = _get_cloud_section(config, CONFIG_CLOUD_S3)
    if not s3:
        return opts
    # Copy only truthy, non-secret options straight through.
    for option in ("region", "endpoint_url", "lock_table"):
        if s3.get(option):
            opts[option] = s3[option]
    # Config names the env vars; the secrets themselves live only in the env.
    access = os.getenv(s3.get("access_key_var") or DEFAULT_S3_ACCESS_KEY_VAR)
    secret = os.getenv(s3.get("secret_key_var") or DEFAULT_S3_SECRET_KEY_VAR)
    if access and secret:
        opts["access_key"] = access
        opts["secret_key"] = secret
    return opts
|
|
191
|
+
|
|
192
|
+
|
|
193
|
+
def get_gcs_options_from_config(
    config: Optional[Dict[str, Any]],
) -> Dict[str, Any]:
    """
    Build GCS constructor kwargs from config. Credentials from env or validated path.

    Returns dict with keys: project, credentials_path, or credentials_info (a
    dict parsed from a JSON string held in an env var). Raw secret values are
    never read from config itself.
    """
    opts: Dict[str, Any] = {}
    gcs = _get_cloud_section(config, CONFIG_CLOUD_GCS)
    if not gcs:
        return opts
    if gcs.get("project"):
        opts["project"] = gcs["project"]
    if gcs.get("credentials_path"):
        opts["credentials_path"] = gcs["credentials_path"]
    env_var = gcs.get("credentials_json_var")
    if env_var:
        raw_json = os.getenv(env_var)
        if raw_json:
            try:
                opts["credentials_info"] = json.loads(raw_json)
            except (ValueError, TypeError):
                # Malformed JSON in the env var: silently omit credentials_info.
                pass
    return opts
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
def _get_pii_section(config: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]:
    """Return the ``pii`` section when present and a dict, else None."""
    if not config:
        return None
    section = config.get(CONFIG_PII)
    if isinstance(section, dict):
        return section
    return None
|
|
227
|
+
|
|
228
|
+
|
|
229
|
+
def pii_enabled(config: Optional[Dict[str, Any]]) -> bool:
    """Return True if PII scanning is enabled (default True when key missing)."""
    section = _get_pii_section(config)
    # Fail safe: scanning stays on unless explicitly disabled.
    if not section or "enabled" not in section:
        return True
    return bool(section["enabled"])
|
|
235
|
+
|
|
236
|
+
|
|
237
|
+
def pii_allowlist(config: Optional[Dict[str, Any]]) -> List[str]:
    """Return list of path globs to skip for PII scanning."""
    section = _get_pii_section(config)
    if not section:
        return []
    entries = section.get("allowlist")
    if not isinstance(entries, list):
        return []
    # Coerce every entry to str so downstream glob matching is uniform.
    return [str(entry) for entry in entries]
|