nia-sync 0.1.7__tar.gz → 0.1.8__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {nia_sync-0.1.7 → nia_sync-0.1.8}/PKG-INFO +1 -1
- nia_sync-0.1.8/api_client.py +104 -0
- {nia_sync-0.1.7 → nia_sync-0.1.8}/auth.py +3 -3
- {nia_sync-0.1.7 → nia_sync-0.1.8}/config.py +99 -116
- {nia_sync-0.1.7 → nia_sync-0.1.8}/extractor.py +30 -4
- nia_sync-0.1.8/main.py +1447 -0
- {nia_sync-0.1.7 → nia_sync-0.1.8}/nia_sync.egg-info/PKG-INFO +1 -1
- {nia_sync-0.1.7 → nia_sync-0.1.8}/nia_sync.egg-info/SOURCES.txt +2 -0
- {nia_sync-0.1.7 → nia_sync-0.1.8}/nia_sync.egg-info/top_level.txt +2 -0
- {nia_sync-0.1.7 → nia_sync-0.1.8}/pyproject.toml +2 -2
- {nia_sync-0.1.7 → nia_sync-0.1.8}/sync.py +3 -3
- nia_sync-0.1.8/ui.py +119 -0
- nia_sync-0.1.7/main.py +0 -745
- {nia_sync-0.1.7 → nia_sync-0.1.8}/nia_sync.egg-info/dependency_links.txt +0 -0
- {nia_sync-0.1.7 → nia_sync-0.1.8}/nia_sync.egg-info/entry_points.txt +0 -0
- {nia_sync-0.1.7 → nia_sync-0.1.8}/nia_sync.egg-info/requires.txt +0 -0
- {nia_sync-0.1.7 → nia_sync-0.1.8}/setup.cfg +0 -0
- {nia_sync-0.1.7 → nia_sync-0.1.8}/watcher.py +0 -0
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
"""
|
|
2
|
+
HTTP client helpers for Nia Local Sync CLI.
|
|
3
|
+
|
|
4
|
+
Centralizes API base URL, auth headers, timeouts, and error handling.
|
|
5
|
+
"""
|
|
6
|
+
import httpx
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
from config import get_api_key, get_api_base_url
|
|
10
|
+
|
|
11
|
+
DEFAULT_TIMEOUT = 30
|
|
12
|
+
CONNECT_TIMEOUT = 10
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def _build_url(path: str) -> str:
    """Join *path* onto the configured API base URL (trailing slash stripped)."""
    return get_api_base_url().rstrip("/") + path
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def _build_headers() -> dict[str, str]:
    """Return the Authorization header, or an empty dict when no key is stored."""
    api_key = get_api_key()
    return {"Authorization": f"Bearer {api_key}"} if api_key else {}
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def request_json(
    method: str,
    path: str,
    *,
    payload: dict[str, Any] | None = None,
    params: dict[str, Any] | None = None,
    timeout: int = DEFAULT_TIMEOUT,
    requires_auth: bool = True,
) -> tuple[Any | None, str | None]:
    """Perform one HTTP request against the Nia API and decode the response.

    Args:
        method: HTTP verb, e.g. "GET" or "POST".
        path: API path appended to the configured base URL.
        payload: Optional JSON request body.
        params: Optional query-string parameters.
        timeout: Read timeout in seconds (connect timeout is CONNECT_TIMEOUT).
        requires_auth: When True, fail fast if no API key is configured.

    Returns:
        ``(data, None)`` on a 2xx response — parsed JSON, or the raw text
        when the body is not JSON; ``(None, error_message)`` on any failure
        (missing auth, network error, 401, or other HTTP error).
    """
    api_key = get_api_key()
    if requires_auth and not api_key:
        return None, "Not authenticated"

    url = _build_url(path)
    headers = _build_headers()

    try:
        with httpx.Client(timeout=httpx.Timeout(timeout, connect=CONNECT_TIMEOUT)) as client:
            response = client.request(
                method,
                url,
                headers=headers,
                json=payload,
                params=params,
            )
    except httpx.RequestError as e:
        return None, f"Network error: {e}"

    if 200 <= response.status_code < 300:
        try:
            return response.json(), None
        except ValueError:
            # 2xx but not JSON — hand back the raw text.
            return response.text, None

    if response.status_code == 401:
        return None, "Authentication failed"

    # Best-effort extraction of a human-readable error detail.
    try:
        body = response.json()
    except ValueError:
        body = None
    if isinstance(body, dict):
        detail = body.get("detail", response.text)
    else:
        # FIX: the error body may be valid JSON that is not an object
        # (a list or bare string); calling .get() on it raised
        # AttributeError, which escaped this function uncaught.
        detail = response.text or f"HTTP {response.status_code}"
    return None, f"API error: {detail}"
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def get_sources() -> list[dict[str, Any]]:
    """Fetch all daemon sync sources; returns an empty list on any error."""
    data, error = request_json("GET", "/v2/daemon/sources")
    if not error and isinstance(data, list):
        return data
    return []
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def add_source(path: str, detected_type: str | None = None) -> dict[str, Any] | None:
    """Register a new daemon sync source; returns the created record or None."""
    body = {"path": path, "detected_type": detected_type}
    data, error = request_json("POST", "/v2/daemon/sources", payload=body)
    return data if not error and isinstance(data, dict) else None
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def remove_source(local_folder_id: str) -> bool:
    """Delete a daemon sync source by ID; True on success."""
    data, error = request_json("DELETE", f"/v2/daemon/sources/{local_folder_id}")
    return False if error else bool(data)
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def enable_source_sync(local_folder_id: str, path: str) -> bool:
    """Enable daemon sync for an existing source located at *path*."""
    data, error = request_json(
        "POST",
        f"/v2/daemon/sources/{local_folder_id}/enable",
        payload={"path": path},
    )
    return False if error else bool(data)
|
|
@@ -18,7 +18,7 @@ from config import (
|
|
|
18
18
|
load_config,
|
|
19
19
|
save_config,
|
|
20
20
|
clear_config,
|
|
21
|
-
|
|
21
|
+
get_api_base_url,
|
|
22
22
|
)
|
|
23
23
|
|
|
24
24
|
console = Console()
|
|
@@ -54,7 +54,7 @@ def login() -> bool:
|
|
|
54
54
|
console.print("Starting authentication...")
|
|
55
55
|
|
|
56
56
|
with httpx.Client(timeout=30) as client:
|
|
57
|
-
response = client.post(f"{
|
|
57
|
+
response = client.post(f"{get_api_base_url()}/public/mcp-device/start")
|
|
58
58
|
response.raise_for_status()
|
|
59
59
|
|
|
60
60
|
data = response.json()
|
|
@@ -108,7 +108,7 @@ def _poll_for_api_key(session_id: str, user_code: str) -> str | None:
|
|
|
108
108
|
for attempt in range(MAX_POLL_ATTEMPTS):
|
|
109
109
|
try:
|
|
110
110
|
response = client.post(
|
|
111
|
-
f"{
|
|
111
|
+
f"{get_api_base_url()}/public/mcp-device/exchange",
|
|
112
112
|
json={
|
|
113
113
|
"authorization_session_id": session_id,
|
|
114
114
|
"user_code": user_code,
|
|
@@ -9,14 +9,12 @@ import os
|
|
|
9
9
|
import json
|
|
10
10
|
from pathlib import Path
|
|
11
11
|
from typing import Any
|
|
12
|
-
import httpx
|
|
13
|
-
|
|
14
12
|
# Configuration paths
|
|
15
13
|
NIA_SYNC_DIR = Path.home() / ".nia-sync"
|
|
16
14
|
CONFIG_FILE = NIA_SYNC_DIR / "config.json"
|
|
17
15
|
|
|
18
16
|
# API configuration
|
|
19
|
-
|
|
17
|
+
DEFAULT_API_BASE_URL = "https://apigcp.trynia.ai"
|
|
20
18
|
|
|
21
19
|
# Default directories to search for folders (no config needed)
|
|
22
20
|
DEFAULT_WATCH_DIRS = [
|
|
@@ -35,6 +33,15 @@ DEFAULT_WATCH_DIRS = [
|
|
|
35
33
|
]
|
|
36
34
|
|
|
37
35
|
|
|
36
|
+
def get_api_base_url() -> str:
    """Get API base URL, preferring the NIA_API_URL env var over config."""
    from_env = os.getenv("NIA_API_URL")
    if from_env:
        return from_env
    return load_config().get("api_url", DEFAULT_API_BASE_URL)
|
|
43
|
+
|
|
44
|
+
|
|
38
45
|
def get_watch_dirs() -> list[str]:
|
|
39
46
|
"""Get directories to search for folders. Uses defaults + any custom ones."""
|
|
40
47
|
config = load_config()
|
|
@@ -44,6 +51,37 @@ def get_watch_dirs() -> list[str]:
|
|
|
44
51
|
return list(dict.fromkeys(all_dirs))
|
|
45
52
|
|
|
46
53
|
|
|
54
|
+
def set_watch_dirs(dirs: list[str]) -> None:
    """Replace the custom watch-dir list (paths expanded, deduped, order kept)."""
    seen: dict[str, None] = {}
    for entry in dirs:
        if entry:
            seen[os.path.abspath(os.path.expanduser(entry))] = None
    save_config({"watch_dirs": list(seen)})
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def add_watch_dir(path: str) -> bool:
    """Append *path* to the custom watch dirs; False if blank or already present."""
    if not path:
        return False
    target = os.path.abspath(os.path.expanduser(path))
    existing = load_config().get("watch_dirs", [])
    if target in existing:
        return False
    save_config({"watch_dirs": existing + [target]})
    return True
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def remove_watch_dir(path: str) -> bool:
    """Drop *path* from the custom watch dirs; False if blank or not present."""
    if not path:
        return False
    target = os.path.abspath(os.path.expanduser(path))
    existing = load_config().get("watch_dirs", [])
    if target not in existing:
        return False
    save_config({"watch_dirs": [d for d in existing if d != target]})
    return True
|
|
83
|
+
|
|
84
|
+
|
|
47
85
|
def find_folder_path(folder_name: str, max_depth: int = 3) -> str | None:
|
|
48
86
|
"""
|
|
49
87
|
Search watch directories recursively for a folder with the given name.
|
|
@@ -141,100 +179,75 @@ def get_api_key() -> str | None:
|
|
|
141
179
|
return config.get("api_key")
|
|
142
180
|
|
|
143
181
|
|
|
144
|
-
def
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
- local_folder_id: UUID of the local folder
|
|
150
|
-
- path: Local path to sync (e.g., ~/Library/Messages/chat.db)
|
|
151
|
-
- detected_type: Type of source (imessage, safari_history, folder, etc.)
|
|
152
|
-
- cursor: Current sync cursor (for incremental sync)
|
|
153
|
-
- last_synced: ISO timestamp of last sync
|
|
154
|
-
"""
|
|
155
|
-
api_key = get_api_key()
|
|
156
|
-
if not api_key:
|
|
157
|
-
return []
|
|
182
|
+
def get_config_value(key: str) -> Any:
    """Return one config value by key, or the whole config dict when key is empty."""
    config = load_config()
    return config.get(key) if key else config
|
|
158
187
|
|
|
159
|
-
try:
|
|
160
|
-
with httpx.Client(timeout=30) as client:
|
|
161
|
-
response = client.get(
|
|
162
|
-
f"{API_BASE_URL}/v2/daemon/sources",
|
|
163
|
-
headers={"Authorization": f"Bearer {api_key}"},
|
|
164
|
-
)
|
|
165
|
-
response.raise_for_status()
|
|
166
|
-
return response.json()
|
|
167
|
-
|
|
168
|
-
except httpx.HTTPStatusError as e:
|
|
169
|
-
if e.response.status_code == 401:
|
|
170
|
-
# Invalid/expired API key
|
|
171
|
-
return []
|
|
172
|
-
raise
|
|
173
|
-
except httpx.RequestError:
|
|
174
|
-
# Network error - return empty for now
|
|
175
|
-
return []
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
def add_source(path: str, detected_type: str | None = None) -> dict[str, Any] | None:
|
|
179
|
-
"""
|
|
180
|
-
Add a new source for daemon sync.
|
|
181
188
|
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
189
|
+
def set_config_value(key: str, value: Any) -> None:
    """Persist a single config key; no-op when key is empty."""
    if key:
        save_config({key: value})
|
|
185
193
|
|
|
186
|
-
Returns:
|
|
187
|
-
Created source info or None on failure
|
|
188
|
-
"""
|
|
189
|
-
api_key = get_api_key()
|
|
190
|
-
if not api_key:
|
|
191
|
-
return None
|
|
192
194
|
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
},
|
|
202
|
-
)
|
|
203
|
-
response.raise_for_status()
|
|
204
|
-
return response.json()
|
|
205
|
-
|
|
206
|
-
except httpx.HTTPStatusError:
|
|
207
|
-
return None
|
|
208
|
-
except httpx.RequestError:
|
|
209
|
-
return None
|
|
195
|
+
def get_ignore_patterns() -> dict[str, list[str]]:
    """Return custom ignore patterns grouped by category (always fresh lists)."""
    config = load_config()
    categories = {
        "dirs": "ignore_dirs",
        "files": "ignore_files",
        "extensions": "ignore_extensions",
        "path_patterns": "ignore_path_patterns",
    }
    return {label: list(config.get(key, [])) for label, key in categories.items()}
|
|
210
203
|
|
|
211
204
|
|
|
212
|
-
def
|
|
213
|
-
|
|
214
|
-
|
|
205
|
+
def add_ignore_pattern(kind: str, value: str) -> bool:
    """Add a custom ignore pattern to the config.

    Args:
        kind: One of "dir", "file", "ext", or "path".
        value: Pattern to add; "ext" values gain a leading dot if missing.

    Returns:
        True when the pattern was added; False for an unknown kind, a
        blank value, or a duplicate.
    """
    if not kind or not value:
        return False
    key_map = {
        "dir": "ignore_dirs",
        "file": "ignore_files",
        "ext": "ignore_extensions",
        "path": "ignore_path_patterns",
    }
    key = key_map.get(kind)
    if not key:
        return False
    normalized = value.strip()
    # FIX: a whitespace-only value passed the `not value` guard, then
    # stripped down to "" (or "." for extensions) and was persisted as a
    # garbage pattern. Reject it before touching the config.
    if not normalized:
        return False
    if kind == "ext" and not normalized.startswith("."):
        normalized = f".{normalized}"
    current = list(load_config().get(key, []))
    if normalized in current:
        return False
    current.append(normalized)
    save_config({key: current})
    return True
|
|
215
227
|
|
|
216
|
-
Args:
|
|
217
|
-
local_folder_id: ID of the source to remove
|
|
218
228
|
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
"""
|
|
222
|
-
api_key = get_api_key()
|
|
223
|
-
if not api_key:
|
|
229
|
+
def remove_ignore_pattern(kind: str, value: str) -> bool:
    """Remove a previously added custom ignore pattern; True when removed."""
    if not kind or not value:
        return False
    key = {
        "dir": "ignore_dirs",
        "file": "ignore_files",
        "ext": "ignore_extensions",
        "path": "ignore_path_patterns",
    }.get(kind)
    if not key:
        return False
    current = list(load_config().get(key, []))
    normalized = value.strip()
    if kind == "ext" and not normalized.startswith("."):
        normalized = f".{normalized}"
    if normalized not in current:
        return False
    save_config({key: [item for item in current if item != normalized]})
    return True
|
|
238
251
|
|
|
239
252
|
|
|
240
253
|
def update_source_cursor(local_folder_id: str, cursor: dict[str, Any]) -> bool:
|
|
@@ -247,33 +260,3 @@ def update_source_cursor(local_folder_id: str, cursor: dict[str, Any]) -> bool:
|
|
|
247
260
|
# Note: Cursor is updated by the /daemon/sync endpoint, not a separate call
|
|
248
261
|
# This function is here for potential future use
|
|
249
262
|
return True
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
def enable_source_sync(local_folder_id: str, path: str) -> bool:
|
|
253
|
-
"""
|
|
254
|
-
Enable daemon sync for a source that exists locally.
|
|
255
|
-
|
|
256
|
-
Args:
|
|
257
|
-
local_folder_id: ID of the source
|
|
258
|
-
path: Local path where the source exists
|
|
259
|
-
|
|
260
|
-
Returns:
|
|
261
|
-
True on success, False on failure
|
|
262
|
-
"""
|
|
263
|
-
api_key = get_api_key()
|
|
264
|
-
if not api_key:
|
|
265
|
-
return False
|
|
266
|
-
|
|
267
|
-
try:
|
|
268
|
-
with httpx.Client(timeout=30) as client:
|
|
269
|
-
response = client.post(
|
|
270
|
-
f"{API_BASE_URL}/v2/daemon/sources/{local_folder_id}/enable",
|
|
271
|
-
headers={"Authorization": f"Bearer {api_key}"},
|
|
272
|
-
json={"path": path},
|
|
273
|
-
)
|
|
274
|
-
return response.status_code == 200
|
|
275
|
-
|
|
276
|
-
except httpx.HTTPStatusError:
|
|
277
|
-
return False
|
|
278
|
-
except httpx.RequestError:
|
|
279
|
-
return False
|
|
@@ -122,6 +122,27 @@ SKIP_PATH_PATTERNS = {
|
|
|
122
122
|
"id_rsa", "id_dsa", "id_ecdsa", "id_ed25519",
|
|
123
123
|
}
|
|
124
124
|
|
|
125
|
+
|
|
126
|
+
def _get_custom_ignore_sets() -> tuple[set[str], set[str], set[str], set[str]]:
|
|
127
|
+
try:
|
|
128
|
+
from config import get_ignore_patterns
|
|
129
|
+
except Exception:
|
|
130
|
+
return set(), set(), set(), set()
|
|
131
|
+
|
|
132
|
+
patterns = get_ignore_patterns()
|
|
133
|
+
custom_dirs = {p for p in patterns.get("dirs", []) if p}
|
|
134
|
+
custom_files = {p for p in patterns.get("files", []) if p}
|
|
135
|
+
custom_exts = set()
|
|
136
|
+
for ext in patterns.get("extensions", []) or []:
|
|
137
|
+
if not ext:
|
|
138
|
+
continue
|
|
139
|
+
normalized = ext.strip().lower()
|
|
140
|
+
if not normalized.startswith("."):
|
|
141
|
+
normalized = f".{normalized}"
|
|
142
|
+
custom_exts.add(normalized)
|
|
143
|
+
custom_path_patterns = {p.lower() for p in patterns.get("path_patterns", []) if p}
|
|
144
|
+
return custom_dirs, custom_files, custom_exts, custom_path_patterns
|
|
145
|
+
|
|
125
146
|
# Type identifiers
|
|
126
147
|
TYPE_IMESSAGE = "imessage"
|
|
127
148
|
TYPE_SAFARI_HISTORY = "safari_history"
|
|
@@ -769,6 +790,11 @@ def _extract_folder(
|
|
|
769
790
|
max_mtime = last_mtime
|
|
770
791
|
max_path = last_path
|
|
771
792
|
extracted_count = 0
|
|
793
|
+
custom_dirs, custom_files, custom_exts, custom_path_patterns = _get_custom_ignore_sets()
|
|
794
|
+
skip_dirs = SKIP_DIRS | custom_dirs
|
|
795
|
+
skip_files = SKIP_FILES | custom_files
|
|
796
|
+
skip_exts = SKIP_EXTENSIONS | custom_exts
|
|
797
|
+
skip_path_patterns = SKIP_PATH_PATTERNS | custom_path_patterns
|
|
772
798
|
|
|
773
799
|
# Allowed text file extensions
|
|
774
800
|
text_extensions = {
|
|
@@ -786,7 +812,7 @@ def _extract_folder(
|
|
|
786
812
|
# Filter out excluded directories IN-PLACE to prevent os.walk from descending
|
|
787
813
|
dirs[:] = [
|
|
788
814
|
d for d in dirs
|
|
789
|
-
if d not in
|
|
815
|
+
if d not in skip_dirs
|
|
790
816
|
and not d.startswith(".")
|
|
791
817
|
and not d.endswith(".egg-info")
|
|
792
818
|
]
|
|
@@ -798,7 +824,7 @@ def _extract_folder(
|
|
|
798
824
|
break
|
|
799
825
|
|
|
800
826
|
# Skip by filename
|
|
801
|
-
if filename in
|
|
827
|
+
if filename in skip_files:
|
|
802
828
|
continue
|
|
803
829
|
|
|
804
830
|
# Skip hidden files
|
|
@@ -807,13 +833,13 @@ def _extract_folder(
|
|
|
807
833
|
|
|
808
834
|
# Skip files matching security patterns (credentials, secrets, keys)
|
|
809
835
|
filename_lower = filename.lower()
|
|
810
|
-
if any(pattern in filename_lower for pattern in
|
|
836
|
+
if any(pattern in filename_lower for pattern in skip_path_patterns):
|
|
811
837
|
continue
|
|
812
838
|
|
|
813
839
|
ext = Path(filename).suffix.lower()
|
|
814
840
|
|
|
815
841
|
# Skip by extension
|
|
816
|
-
if ext in
|
|
842
|
+
if ext in skip_exts:
|
|
817
843
|
continue
|
|
818
844
|
|
|
819
845
|
# Only include known text extensions
|