nogic-0.0.1-py3-none-any.whl → nogic-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
nogic/__init__.py CHANGED
@@ -1,3 +1,3 @@
  """Nogic CLI package."""
 
- __version__ = "0.0.1"
+ __version__ = "0.1.0"
nogic/commands/init.py CHANGED
@@ -17,7 +17,6 @@ def init(
  project_id: Annotated[Optional[str], typer.Option("--project-id", "-p", help="Use existing project ID.")] = None,
  name: Annotated[Optional[str], typer.Option("--name", "-n", help="Project name.")] = None,
  link: Annotated[bool, typer.Option("--link", help="Re-link an existing project.")] = False,
- yes: Annotated[bool, typer.Option("--yes", "-y", help="Accept defaults, skip prompts.")] = False,
  ):
  """Initialize a Nogic project in a directory."""
  directory = directory.resolve()
@@ -58,7 +57,7 @@ def init(
  ui.info(f"Found existing project '{existing_project.name}'")
  ui.kv("Project ID", existing_project.id)
 
- if yes or typer.confirm("Use this project?", default=True):
+ if typer.confirm("Use this project?", default=True):
  config.project_id = existing_project.id
  config.project_name = existing_project.name
  config.directory_hash = dir_hash
@@ -68,7 +67,7 @@ def init(
  ui.console.print(" 1. Wipe graph data and reuse this project")
  ui.console.print(" 2. Abort")
 
- choice = "1" if yes else typer.prompt("Choose option", default="1")
+ choice = typer.prompt("Choose option", default="1")
 
  if choice == "1":
  with ui.status_spinner("Wiping graph data..."):
@@ -82,7 +81,7 @@ def init(
  ui.dim("Aborted.")
  raise typer.Exit(0)
  else:
- project_name = name or (directory.name if yes else typer.prompt("Project name", default=directory.name))
+ project_name = name or typer.prompt("Project name", default=directory.name)
 
  try:
  with ui.status_spinner("Creating project..."):
@@ -96,7 +95,7 @@ def init(
  if e.response.status_code == 409:
  data = e.response.json()
  ui.warn(f"Project already exists: {data.get('project_name')}")
- if yes or typer.confirm("Use this project?", default=True):
+ if typer.confirm("Use this project?", default=True):
  config.project_id = data.get("project_id")
  config.project_name = data.get("project_name")
  config.directory_hash = dir_hash
nogic/commands/reindex.py CHANGED
@@ -1,8 +1,5 @@
  """Reindex command - wipe graph data and re-index from scratch."""
 
- import json as _json
- import sys
- import time
  from pathlib import Path
  from typing import Annotated, Optional
 
@@ -16,24 +13,15 @@ from nogic.api import NogicClient
  from nogic import ui
 
 
- def _emit_json(event: str, **kwargs):
- """Emit a single NDJSON line to stdout."""
- payload = {"event": event, "timestamp": int(time.time()), **kwargs}
- sys.stdout.write(_json.dumps(payload) + "\n")
- sys.stdout.flush()
-
-
  def reindex(
  directory: Annotated[Path, typer.Argument(help="Path to the project directory.")] = Path("."),
  ignore: Annotated[Optional[list[str]], typer.Option("--ignore", help="Patterns to ignore.")] = None,
  yes: Annotated[bool, typer.Option("--yes", "-y", help="Skip confirmation.")] = False,
- format: Annotated[Optional[str], typer.Option("--format", help="Output format: text or json.")] = None,
  ):
  """Wipe graph data and re-index the entire project."""
  directory = directory.resolve()
  nogic_dir = directory / CONFIG_DIR
  ignore = ignore or []
- json_mode = format == "json"
 
  if not nogic_dir.exists():
  ui.error("Not a Nogic project.")
@@ -52,66 +40,50 @@ def reindex(
  ui.dim("Run `nogic init` to initialize your project.")
  raise typer.Exit(1)
 
- if not json_mode:
- if is_dev_mode():
- ui.dev_banner(get_api_url())
-
- if not yes:
- ui.banner("nogic reindex")
- ui.kv("Project", config.project_name or f"{config.project_id[:8]}...")
- ui.kv("Directory", str(directory))
- ui.console.print()
- ui.warn("This will delete all graph data and re-index from scratch.")
- ui.console.print()
- if not typer.confirm("Continue?", default=False):
- ui.dim("Aborted.")
- raise typer.Exit(0)
+ if is_dev_mode():
+ ui.dev_banner(get_api_url())
+
+ if not yes:
+ ui.banner("nogic reindex")
+ ui.kv("Project", config.project_name or f"{config.project_id[:8]}...")
+ ui.kv("Directory", str(directory))
+ ui.console.print()
+ ui.warn("This will delete all graph data and re-index from scratch.")
+ ui.console.print()
+ if not typer.confirm("Continue?", default=False):
+ ui.dim("Aborted.")
+ raise typer.Exit(0)
 
  client = NogicClient(config)
  nodes_deleted = 0
 
  try:
- if json_mode:
- _emit_json("progress", phase="wiping")
- else:
- wipe_ctx = ui.status_spinner("Wiping graph data...")
- wipe_ctx.__enter__()
-
- try:
- result = client.wipe_project_graph(config.project_id)
- nodes_deleted = result.nodes_deleted
- except httpx.HTTPStatusError as e:
- if e.response.status_code == 404:
- pass
- else:
- if json_mode:
- _emit_json("error", message=f"Error wiping graph ({e.response.status_code})")
+ with ui.status_spinner("Wiping graph data..."):
+ try:
+ result = client.wipe_project_graph(config.project_id)
+ nodes_deleted = result.nodes_deleted
+ except httpx.HTTPStatusError as e:
+ if e.response.status_code == 404:
+ pass
  else:
  ui.error(f"Error wiping graph ({e.response.status_code})")
- raise typer.Exit(1)
-
- if not json_mode:
- wipe_ctx.__exit__(None, None, None)
+ raise typer.Exit(1)
 
- if json_mode:
- _emit_json("wiped", nodes_deleted=nodes_deleted)
- elif nodes_deleted:
+ if nodes_deleted:
  ui.info(f"Deleted {nodes_deleted} nodes")
 
  should_ignore = build_ignore_matcher(directory, extra_patterns=ignore)
- log_fn = (lambda msg: None) if json_mode else (lambda msg: ui.dim(f" {msg}"))
- sync_service = SyncService(config, directory, log=log_fn, json_mode=json_mode)
+ sync_service = SyncService(config, directory, log=lambda msg: ui.dim(f" {msg}"))
 
  sync_service.initial_scan(directory, should_ignore)
  sync_service.close()
 
- if not json_mode:
- ui.console.print()
- ui.success("Reindex complete!")
- if nodes_deleted > 0:
- ui.dim(f" Old nodes deleted: {nodes_deleted}")
- ui.console.print()
- ui.dim("Run 'nogic watch' to continue monitoring for changes.")
+ ui.console.print()
+ ui.success("Reindex complete!")
+ if nodes_deleted > 0:
+ ui.dim(f" Old nodes deleted: {nodes_deleted}")
+ ui.console.print()
+ ui.dim("Run 'nogic watch' to continue monitoring for changes.")
 
  finally:
  client.close()
nogic/commands/status.py CHANGED
@@ -1,10 +1,8 @@
  """Status command - show project status and verify configuration."""
 
- import json
- import sys
  from pathlib import Path
  from datetime import datetime
- from typing import Annotated, Optional
+ from typing import Annotated
 
  import httpx
  import typer
@@ -17,28 +15,19 @@ from nogic import ui
 
  def status(
  directory: Annotated[Path, typer.Argument(help="Path to the project directory.")] = Path("."),
- format: Annotated[Optional[str], typer.Option("--format", help="Output format: text or json.")] = None,
  ):
  """Show project status and verify configuration."""
  directory = directory.resolve()
  nogic_dir = directory / CONFIG_DIR
  current_dir_hash = get_directory_hash(str(directory))
- json_mode = format == "json"
 
  if not nogic_dir.exists():
- if json_mode:
- _emit_json_status(error="Not a Nogic project.")
- else:
- ui.error("Not a Nogic project.")
- ui.dim("Run `nogic init` to initialize your project.")
+ ui.error("Not a Nogic project.")
+ ui.dim("Run `nogic init` to initialize your project.")
  raise typer.Exit(1)
 
  config = Config.load(directory)
 
- if json_mode:
- _status_json(directory, config, current_dir_hash)
- return
-
  if is_dev_mode():
  ui.dev_banner(get_api_url())
 
@@ -106,57 +95,6 @@ def status(
  ui.console.print()
 
 
- def _status_json(directory: Path, config: Config, dir_hash: str):
- """Output status as a single JSON object to stdout."""
- result: dict = {
- "project_name": config.project_name or None,
- "project_id": config.project_id or None,
- "directory": str(directory),
- "logged_in": bool(config.api_key),
- "backend": None,
- }
-
- if config.api_key and config.project_id:
- client = NogicClient(config)
- try:
- backend_project = client.get_project_by_directory(dir_hash)
- if backend_project:
- result["backend"] = {
- "status": "connected",
- "project_name": backend_project.name,
- "project_id": backend_project.id,
- "created_at": backend_project.created_at,
- "updated_at": backend_project.updated_at,
- }
- else:
- result["backend"] = {"status": "not_found"}
- except httpx.HTTPStatusError as e:
- result["backend"] = {
- "status": "error",
- "error": f"HTTP {e.response.status_code}",
- }
- except httpx.RequestError as e:
- result["backend"] = {
- "status": "error",
- "error": str(e),
- }
- finally:
- client.close()
- elif not config.api_key:
- result["backend"] = {"status": "not_logged_in"}
- else:
- result["backend"] = {"status": "not_configured"}
-
- sys.stdout.write(json.dumps(result) + "\n")
- sys.stdout.flush()
-
-
- def _emit_json_status(**kwargs):
- """Emit a JSON status object with error info."""
- sys.stdout.write(json.dumps(kwargs) + "\n")
- sys.stdout.flush()
-
-
  def _format_datetime(dt_string: str) -> str:
  try:
  dt = datetime.fromisoformat(dt_string.replace("Z", "+00:00"))
nogic/commands/sync.py CHANGED
@@ -14,13 +14,11 @@ from nogic import telemetry, ui
  def sync(
  directory: Annotated[Path, typer.Argument(help="Path to the directory to sync.")] = Path("."),
  ignore: Annotated[Optional[list[str]], typer.Option("--ignore", help="Patterns to ignore.")] = None,
- format: Annotated[Optional[str], typer.Option("--format", help="Output format: text or json.")] = None,
  ):
  """One-time sync of a directory to backend."""
  directory = directory.resolve()
  nogic_dir = directory / ".nogic"
  ignore = ignore or []
- json_mode = format == "json"
 
  if not nogic_dir.exists():
  ui.error("Not a Nogic project.")
@@ -39,22 +37,22 @@ def sync(
  ui.dim("Run `nogic init` to initialize your project.")
  raise typer.Exit(1)
 
- if not json_mode:
- if is_dev_mode():
- ui.dev_banner(get_api_url())
- ui.banner("nogic sync", str(directory))
- ui.kv("Project", f"{config.project_id[:8]}...")
+ if is_dev_mode():
+ ui.dev_banner(get_api_url())
 
- log_fn = (lambda msg: None) if json_mode else (lambda msg: ui.dim(f" {msg}"))
- sync_service = SyncService(config, directory, log=log_fn, json_mode=json_mode)
+ ui.banner("nogic sync", str(directory))
+ ui.kv("Project", f"{config.project_id[:8]}...")
+
+ should_ignore = build_ignore_matcher(directory, extra_patterns=ignore)
+
+ sync_service = SyncService(config, directory, log=lambda msg: ui.dim(f" {msg}"))
 
  try:
- sync_service.initial_scan(directory, build_ignore_matcher(directory, extra_patterns=ignore))
+ sync_service.initial_scan(directory, should_ignore)
  telemetry.capture("cli_sync", {"status": "success"})
  except KeyboardInterrupt:
- if not json_mode:
- ui.console.print()
- ui.dim("Interrupted. Cleaning up...")
+ ui.console.print()
+ ui.dim("Interrupted. Cleaning up...")
  try:
  sync_service.client.clear_staging(config.project_id)
  except Exception:
@@ -67,6 +65,5 @@ def sync(
  finally:
  sync_service.close()
 
- if not json_mode:
- ui.console.print()
- ui.success("Done.")
+ ui.console.print()
+ ui.success("Done.")
nogic/commands/watch.py CHANGED
@@ -1,7 +1,5 @@
  """Watch command for file syncing."""
 
- import json
- import sys
  import time
  from pathlib import Path
  from typing import Annotated, Optional
@@ -14,71 +12,48 @@ from nogic.watcher import FileMonitor, SyncService
  from nogic import ui
 
 
- def _emit_json(event: str, **kwargs):
- """Emit a single NDJSON line to stdout."""
- payload = {"event": event, "timestamp": int(time.time()), **kwargs}
- sys.stdout.write(json.dumps(payload) + "\n")
- sys.stdout.flush()
-
-
  def watch(
  directory: Annotated[Path, typer.Argument(help="Path to the directory to watch.")] = Path("."),
  ignore: Annotated[Optional[list[str]], typer.Option("--ignore", help="Patterns to ignore.")] = None,
- format: Annotated[Optional[str], typer.Option("--format", help="Output format: text or json.")] = None,
  ):
  """Watch a directory for file changes and sync to backend."""
  directory = directory.resolve()
  nogic_dir = directory / ".nogic"
  ignore = ignore or []
- json_mode = format == "json"
 
  if not nogic_dir.exists():
- if json_mode:
- _emit_json("error", message="Not a Nogic project. Run `nogic init` to initialize.")
- else:
- ui.error("Not a Nogic project.")
- ui.dim("Run `nogic init` to initialize your project.")
+ ui.error("Not a Nogic project.")
+ ui.dim("Run `nogic init` to initialize your project.")
  raise typer.Exit(1)
 
  config = Config.load(directory)
 
  if not config.api_key:
- if json_mode:
- _emit_json("error", message="Not logged in. Run `nogic login` to authenticate.")
- else:
- ui.error("Not logged in.")
- ui.dim("Run `nogic login` to authenticate.")
+ ui.error("Not logged in.")
+ ui.dim("Run `nogic login` to authenticate.")
  raise typer.Exit(1)
 
  if not config.project_id:
- if json_mode:
- _emit_json("error", message="No project configured. Run `nogic init` to initialize.")
- else:
- ui.error("No project configured.")
- ui.dim("Run `nogic init` to initialize your project.")
+ ui.error("No project configured.")
+ ui.dim("Run `nogic init` to initialize your project.")
  raise typer.Exit(1)
 
- if not json_mode:
- if is_dev_mode():
- ui.dev_banner(get_api_url())
- ui.banner("nogic watch", str(directory))
- ui.kv("Project", f"{config.project_id[:8]}...")
+ if is_dev_mode():
+ ui.dev_banner(get_api_url())
 
- log_fn = (lambda msg: None) if json_mode else (lambda msg: ui.dim(f" {msg}"))
- sync_service = SyncService(config, directory, log=log_fn, json_mode=json_mode)
+ ui.banner("nogic watch", str(directory))
+ ui.kv("Project", f"{config.project_id[:8]}...")
+
+ sync_service = SyncService(config, directory, log=lambda msg: ui.dim(f" {msg}"))
 
  should_ignore = build_ignore_matcher(directory, extra_patterns=ignore)
 
  # Initial scan
  try:
  sync_service.initial_scan(directory, should_ignore)
- if json_mode:
- files_indexed = len(sync_service._file_cache)
- _emit_json("initial_scan_complete", files_indexed=files_indexed)
  except KeyboardInterrupt:
- if not json_mode:
- ui.console.print()
- ui.dim("Interrupted during initial scan. Cleaning up...")
+ ui.console.print()
+ ui.dim("Interrupted during initial scan. Cleaning up...")
  try:
  sync_service.client.clear_staging(config.project_id)
  except Exception:
@@ -93,27 +68,15 @@ def watch(
  return
  try:
  if sync_service.sync_file_immediate(path):
- if json_mode:
- _emit_json("synced", path=str(rel))
- else:
- ui.console.print(f" [green]SYNCED[/] {rel}")
+ ui.console.print(f" [green]SYNCED[/] {rel}")
  except Exception as e:
  err_msg = str(e)
  if "413" in err_msg:
- if json_mode:
- _emit_json("skip", path=str(rel), reason="file too large")
- else:
- ui.console.print(f" [yellow]SKIP[/] {rel} (file too large for API)")
+ ui.console.print(f" [yellow]SKIP[/] {rel} (file too large for API)")
  elif "503" in err_msg or "502" in err_msg:
- if json_mode:
- _emit_json("error", path=str(rel), message="backend unavailable")
- else:
- ui.console.print(f" [red]ERROR[/] {rel} (backend unavailable, will sync on next change)")
+ ui.console.print(f" [red]ERROR[/] {rel} (backend unavailable, will sync on next change)")
  else:
- if json_mode:
- _emit_json("error", path=str(rel), message=err_msg[:120])
- else:
- ui.console.print(f" [red]ERROR[/] {rel}: {err_msg[:120]}")
+ ui.console.print(f" [red]ERROR[/] {rel}: {err_msg[:120]}")
 
  def on_delete(path: Path):
  try:
@@ -122,20 +85,11 @@ def watch(
  return
  try:
  if sync_service.delete_file_immediate(path):
- if json_mode:
- _emit_json("deleted", path=str(rel))
- else:
- ui.console.print(f" [red]DELETED[/] {rel}")
+ ui.console.print(f" [red]DELETED[/] {rel}")
  else:
- if json_mode:
- _emit_json("deleted", path=str(rel))
- else:
- ui.console.print(f" [dim]DELETED[/] {rel} (not indexed)")
+ ui.console.print(f" [dim]DELETED[/] {rel} (not indexed)")
  except Exception as e:
- if json_mode:
- _emit_json("error", path=str(rel), message=str(e)[:80])
- else:
- ui.console.print(f" [red]DELETED[/] {rel} (error: {str(e)[:80]})")
+ ui.console.print(f" [red]DELETED[/] {rel} (error: {str(e)[:80]})")
 
  monitor = FileMonitor(
  root_path=directory,
@@ -144,24 +98,19 @@ def watch(
  should_ignore=should_ignore,
  )
 
- if json_mode:
- _emit_json("ready", message="Watching for changes...")
- else:
- ui.console.print()
- ui.info("Watching for changes... (Ctrl+C to stop)")
- ui.console.print()
+ ui.console.print()
+ ui.info("Watching for changes... (Ctrl+C to stop)")
+ ui.console.print()
 
  monitor.start()
  try:
  while monitor.is_alive():
  time.sleep(1)
  except KeyboardInterrupt:
- if not json_mode:
- ui.console.print()
- ui.dim("Stopping...")
+ ui.console.print()
+ ui.dim("Stopping...")
  finally:
  monitor.stop()
  sync_service.close()
 
- if not json_mode:
- ui.success("Done.")
+ ui.success("Done.")
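Note on the removed `--format json` mode: in 0.0.1 these commands could emit NDJSON progress events to stdout, one JSON object per line shaped like the deleted `_emit_json` payload ({"event": ..., "timestamp": ..., ...}). For reference, a rough sketch of how a consumer might have read that stream; the flag and the events no longer exist in 0.1.0, and the parsing loop here is purely illustrative.

```python
# Sketch only: consuming the 0.0.1 NDJSON stream that this release removes.
import json
import subprocess

proc = subprocess.Popen(
    ["nogic", "watch", "--format", "json"],  # --format was removed in 0.1.0
    stdout=subprocess.PIPE,
    text=True,
)
for line in proc.stdout:
    event = json.loads(line)  # one JSON object per line
    if event["event"] == "synced":
        print("synced:", event.get("path"))
    elif event["event"] == "error":
        print("error:", event.get("message"))
proc.wait()
```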
nogic/watcher/sync.py CHANGED
@@ -2,11 +2,8 @@
 
  import asyncio
  import hashlib
- import json as _json
  import logging
  import os
- import sys
- import time
  from pathlib import Path
  from typing import Callable, Optional
 
@@ -76,22 +73,14 @@ class SyncService:
  config: Config,
  root_path: Path,
  log: Optional[Callable[[str], None]] = None,
- json_mode: bool = False,
  ):
  self.config = config
  self.root_path = root_path
  self.log = log or print
- self.json_mode = json_mode
  self._client: Optional[NogicClient] = None
  # In-memory cache of file info for current session
  self._file_cache: dict[str, dict] = {} # path -> {hash, content, language}
 
- def _emit_json(self, event: str, **kwargs):
- """Emit a single NDJSON progress line to stdout (json_mode only)."""
- payload = {"event": event, "timestamp": int(time.time()), **kwargs}
- sys.stdout.write(_json.dumps(payload) + "\n")
- sys.stdout.flush()
-
  @property
  def client(self) -> NogicClient:
  if self._client is None:
@@ -158,37 +147,40 @@ class SyncService:
 
  Returns list of {path, hash, content, language} for supported files.
  """
- # Collect files using os.walk for directory pruning (skips ignored dirs entirely)
- if not self.json_mode:
- console = Console()
- ctx = console.status("[bold blue]Finding files...", spinner="dots")
- ctx.__enter__()
-
- all_files = []
- for dirpath, dirnames, filenames in os.walk(root_path):
- dp = Path(dirpath)
- dirnames[:] = [
- d for d in dirnames
- if not ignore_check(dp / d)
- ]
- for fname in filenames:
- fpath = dp / fname
- if not ignore_check(fpath):
- all_files.append(fpath)
+ console = Console()
 
- if not self.json_mode:
- ctx.__exit__(None, None, None)
+ # Collect files using os.walk for directory pruning (skips ignored dirs entirely)
+ with console.status("[bold blue]Finding files...", spinner="dots"):
+ all_files = []
+ for dirpath, dirnames, filenames in os.walk(root_path):
+ dp = Path(dirpath)
+ # Prune ignored directories in-place so os.walk doesn't descend
+ dirnames[:] = [
+ d for d in dirnames
+ if not ignore_check(dp / d)
+ ]
+ for fname in filenames:
+ fpath = dp / fname
+ if not ignore_check(fpath):
+ all_files.append(fpath)
 
  files_info = []
  supported_count = 0
  skipped_large = 0
- total_files = len(all_files)
 
- if self.json_mode:
- # Emit progress via NDJSON
- for i, path in enumerate(all_files):
- if i % 50 == 0 or i == total_files - 1:
- self._emit_json("progress", phase="scanning", current=i + 1, total=total_files)
+ # Scan with progress bar
+ with Progress(
+ SpinnerColumn(),
+ TextColumn("[progress.description]{task.description}"),
+ BarColumn(),
+ MofNCompleteColumn(),
+ TimeElapsedColumn(),
+ transient=False,
+ ) as progress:
+ task = progress.add_task("Scanning files", total=len(all_files))
+
+ for path in all_files:
+ progress.update(task, advance=1)
 
  language = get_language(path)
  if not language:
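An aside on the directory-pruning pattern kept in the hunk above: assigning to `dirnames[:]` inside an os.walk loop stops the walk from ever descending into ignored directories. A minimal standalone sketch of that pattern follows; `should_ignore` stands in for the CLI's `build_ignore_matcher()` result and the names here are illustrative.

```python
# Sketch: in-place pruning with os.walk so ignored directories are skipped entirely.
import os
from pathlib import Path

def collect_files(root: Path, should_ignore) -> list[Path]:
    found: list[Path] = []
    for dirpath, dirnames, filenames in os.walk(root):
        dp = Path(dirpath)
        # Mutate the list in place; os.walk consults it before descending.
        dirnames[:] = [d for d in dirnames if not should_ignore(dp / d)]
        for fname in filenames:
            fpath = dp / fname
            if not should_ignore(fpath):
                found.append(fpath)
    return found

files = collect_files(Path("."), lambda p: p.name in {".git", "node_modules"})
```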
@@ -218,49 +210,6 @@ class SyncService:
  "language": language,
  })
  supported_count += 1
- else:
- # Rich progress bar
- with Progress(
- SpinnerColumn(),
- TextColumn("[progress.description]{task.description}"),
- BarColumn(),
- MofNCompleteColumn(),
- TimeElapsedColumn(),
- transient=False,
- ) as progress:
- task = progress.add_task("Scanning files", total=total_files)
-
- for path in all_files:
- progress.update(task, advance=1)
-
- language = get_language(path)
- if not language:
- continue
-
- try:
- file_size = path.stat().st_size
- except OSError:
- continue
-
- if file_size > MAX_SINGLE_FILE_BYTES:
- skipped_large += 1
- continue
-
- try:
- content = path.read_text(encoding="utf-8")
- except (OSError, UnicodeDecodeError):
- continue
-
- rel_path = str(path.relative_to(root_path))
- content_hash = self.compute_hash(content)
-
- files_info.append({
- "path": rel_path,
- "hash": content_hash,
- "content": content,
- "language": language,
- })
- supported_count += 1
 
  msg = f"Found {supported_count} supported files ({len(all_files)} total)"
  if skipped_large > 0:
@@ -352,61 +301,47 @@ class SyncService:
  # Phase 1: Parallel upload
  self.log(f"Phase 1: Uploading {total_files} files in {num_batches} batches...")
 
- if self.json_mode:
- self._emit_json("progress", phase="uploading", current=0, total=num_batches)
+ with Progress(
+ SpinnerColumn(),
+ TextColumn("[progress.description]{task.description}"),
+ BarColumn(),
+ MofNCompleteColumn(),
+ TimeElapsedColumn(),
+ transient=False,
+ ) as progress:
+ upload_task = progress.add_task(
+ "Uploading batches",
+ total=num_batches
+ )
 
+ # Run parallel uploads
  try:
  result = asyncio.run(
- self._upload_batches_parallel_json(batches, num_batches)
+ self._upload_batches_parallel(batches, progress, upload_task)
  )
  except Exception as e:
- self._emit_json("error", message=f"Parallel upload failed: {e}")
+ self.log(f"Parallel upload failed: {e}")
+ # Clear staging and fall back to legacy
  try:
  self.client.clear_staging(self.config.project_id)
  except Exception:
  pass
  self.log("Falling back to legacy mode...")
  return self._sync_files_legacy(files_info)
- else:
- with Progress(
- SpinnerColumn(),
- TextColumn("[progress.description]{task.description}"),
- BarColumn(),
- MofNCompleteColumn(),
- TimeElapsedColumn(),
- transient=False,
- ) as progress:
- upload_task = progress.add_task(
- "Uploading batches",
- total=num_batches
- )
-
- try:
- result = asyncio.run(
- self._upload_batches_parallel(batches, progress, upload_task)
- )
- except Exception as e:
- self.log(f"Parallel upload failed: {e}")
- try:
- self.client.clear_staging(self.config.project_id)
- except Exception:
- pass
- self.log("Falling back to legacy mode...")
- return self._sync_files_legacy(files_info)
 
  total_uploaded = result['total_parsed']
  total_staged = result['total_staged']
  total_skipped = result.get('total_skipped', 0)
 
+ # If backend doesn't return files_skipped, calculate from totals
  if total_skipped == 0 and total_staged < total_uploaded:
  total_skipped = total_uploaded - total_staged
 
  self.log(f"Uploaded {total_uploaded} files ({total_staged} staged, {total_skipped} unchanged)")
 
+ # Skip Phase 2 if all files are unchanged
  if total_staged == 0:
  self.log("All files unchanged, nothing to index")
- if self.json_mode:
- self._emit_json("complete", files_indexed=0, nodes_created=0, edges_created=0)
  return True
 
  # Phase 2: Finalize with streaming progress
@@ -418,7 +353,17 @@ class SyncService:
  total_edges = 0
  all_errors = []
 
- if self.json_mode:
+ # Track tasks by stage name
+ stage_tasks: dict = {}
+
+ with Progress(
+ SpinnerColumn(),
+ TextColumn("[progress.description]{task.description:<12}"),
+ BarColumn(),
+ MofNCompleteColumn(),
+ TimeElapsedColumn(),
+ transient=False,
+ ) as progress:
  try:
  for event in self.client.finalize_stream(self.config.project_id):
  if event.stage == "complete":
@@ -428,86 +373,39 @@ class SyncService:
  total_edges = event.edges_created or 0
  if event.errors:
  all_errors.extend(event.errors)
- self._emit_json(
- "complete",
- files_indexed=total_indexed,
- files_skipped=total_skipped,
- nodes_created=total_nodes,
- edges_created=total_edges,
- errors=len(all_errors),
- )
+ for task_id in stage_tasks.values():
+ progress.update(task_id, completed=progress.tasks[task_id].total)
  break
 
  if event.stage == "error":
- self._emit_json("error", message=event.message or "Processing error")
+ all_errors.append({"path": "finalize", "error": event.message})
  break
 
  stage = event.stage
  current = event.current or 0
  total = event.total or 0
- label = STAGE_LABELS.get(stage, stage)
- self._emit_json("progress", phase="processing", stage=stage, label=label, current=current, total=total)
+
+ if stage not in stage_tasks:
+ task_total = total if total > 0 else 100
+ label = STAGE_LABELS.get(stage, stage)
+ stage_tasks[stage] = progress.add_task(
+ label,
+ total=task_total,
+ )
+
+ task_id = stage_tasks[stage]
+
+ if total > 0 and progress.tasks[task_id].total != total:
+ progress.update(task_id, total=total)
+
+ progress.update(task_id, completed=current)
 
  except httpx.HTTPStatusError as e:
- self._emit_json("error", message=f"Finalize error: {e.response.status_code}")
+ self.log(f"Finalize error: {e.response.status_code}")
  raise
  except httpx.RequestError as e:
- self._emit_json("error", message=f"Request error during finalize: {e}")
+ self.log(f"Request error during finalize: {e}")
  raise
- else:
- # Rich progress for terminal
- stage_tasks: dict = {}
-
- with Progress(
- SpinnerColumn(),
- TextColumn("[progress.description]{task.description:<12}"),
- BarColumn(),
- MofNCompleteColumn(),
- TimeElapsedColumn(),
- transient=False,
- ) as progress:
- try:
- for event in self.client.finalize_stream(self.config.project_id):
- if event.stage == "complete":
- total_indexed = event.files_indexed or 0
- total_skipped = event.files_skipped or 0
- total_nodes = event.nodes_created or 0
- total_edges = event.edges_created or 0
- if event.errors:
- all_errors.extend(event.errors)
- for task_id in stage_tasks.values():
- progress.update(task_id, completed=progress.tasks[task_id].total)
- break
-
- if event.stage == "error":
- all_errors.append({"path": "finalize", "error": event.message})
- break
-
- stage = event.stage
- current = event.current or 0
- total = event.total or 0
-
- if stage not in stage_tasks:
- task_total = total if total > 0 else 100
- label = STAGE_LABELS.get(stage, stage)
- stage_tasks[stage] = progress.add_task(
- label,
- total=task_total,
- )
-
- task_id = stage_tasks[stage]
-
- if total > 0 and progress.tasks[task_id].total != total:
- progress.update(task_id, total=total)
-
- progress.update(task_id, completed=current)
-
- except httpx.HTTPStatusError as e:
- self.log(f"Finalize error: {e.response.status_code}")
- raise
- except httpx.RequestError as e:
- self.log(f"Request error during finalize: {e}")
- raise
 
  # Summary
  if total_skipped > 0:
@@ -524,62 +422,6 @@ class SyncService:
 
  return True
 
- async def _upload_batches_parallel_json(
- self,
- batches: list[list[dict]],
- num_batches: int,
- max_concurrent: int = MAX_CONCURRENT,
- ) -> dict:
- """Upload all batches in parallel, emitting NDJSON progress."""
- semaphore = asyncio.Semaphore(max_concurrent)
- results = {"total_parsed": 0, "total_staged": 0, "total_skipped": 0, "errors": []}
- completed = 0
- lock = asyncio.Lock()
-
- async with httpx.AsyncClient(
- timeout=60.0,
- limits=httpx.Limits(max_connections=max_concurrent + 2, max_keepalive_connections=max_concurrent),
- ) as client:
- async def upload_one(batch: list[dict], batch_num: int) -> dict:
- nonlocal completed
- async with semaphore:
- resp = await client.post(
- f"{self.config.api_url}/v1/index/upload",
- json={
- "project_id": self.config.project_id,
- "files": batch,
- },
- headers={
- "Authorization": f"Bearer {self.config.api_key}",
- "Content-Type": "application/json",
- },
- )
- resp.raise_for_status()
- data = resp.json()
-
- async with lock:
- completed += 1
- self._emit_json("progress", phase="uploading", current=completed, total=num_batches)
-
- return data
-
- tasks = [upload_one(batch, i) for i, batch in enumerate(batches)]
- responses = await asyncio.gather(*tasks, return_exceptions=True)
-
- for resp in responses:
- if isinstance(resp, Exception):
- results["errors"].append(str(resp))
- logger.error(f"Batch upload failed: {resp}")
- elif isinstance(resp, dict):
- results["total_parsed"] += resp.get("files_parsed", 0)
- results["total_staged"] = max(results["total_staged"], resp.get("total_staged", 0))
- results["total_skipped"] += resp.get("files_skipped", 0)
-
- if results["errors"]:
- raise Exception(f"{len(results['errors'])} batches failed: {results['errors'][0]}")
-
- return results
-
  async def _upload_batches_parallel(
  self,
  batches: list[list[dict]],
@@ -651,10 +493,9 @@ class SyncService:
  def _upload_files_batched(self, files: list[dict]) -> bool:
  """Upload files in batches with progress."""
  if not files:
- if self.json_mode:
- self._emit_json("complete", files_indexed=0, nodes_created=0, edges_created=0)
  return True
 
+ # Prepare files for upload (include hash for server-side dedup)
  upload_files = [
  {
  "path": f["path"],
@@ -674,14 +515,29 @@ class SyncService:
  total_nodes = 0
  total_edges = 0
  all_errors = []
- files_done = 0
 
- if self.json_mode:
+ with Progress(
+ SpinnerColumn(),
+ TextColumn("[progress.description]{task.description}"),
+ BarColumn(),
+ MofNCompleteColumn(),
+ TimeElapsedColumn(),
+ transient=False,
+ ) as progress:
+ upload_task = progress.add_task(
+ f"Uploading files",
+ total=total_files
+ )
+
+ files_done = 0
  for batch_num, batch in enumerate(batches):
- self._emit_json("progress", phase="uploading", current=batch_num, total=num_batches,
- description=f"Uploading batch {batch_num + 1}/{num_batches}")
+ progress.update(
+ upload_task,
+ description=f"Uploading batch {batch_num + 1}/{num_batches}"
+ )
+
  try:
- result = self._send_batch_stream(batch)
+ result = self._send_batch_stream(batch, progress)
  if result:
  total_indexed += result.files_indexed or 0
  total_skipped += result.files_skipped or 0
@@ -690,52 +546,10 @@ class SyncService:
  if result.errors:
  all_errors.extend(result.errors)
  except Exception as e:
- self._emit_json("error", message=f"Batch {batch_num + 1} failed: {e}")
+ self.log(f" [ERROR] Batch {batch_num + 1} failed: {e}")
 
  files_done += len(batch)
-
- self._emit_json(
- "complete",
- files_indexed=total_indexed,
- files_skipped=total_skipped,
- nodes_created=total_nodes,
- edges_created=total_edges,
- errors=len(all_errors),
- )
- else:
- with Progress(
- SpinnerColumn(),
- TextColumn("[progress.description]{task.description}"),
- BarColumn(),
- MofNCompleteColumn(),
- TimeElapsedColumn(),
- transient=False,
- ) as progress:
- upload_task = progress.add_task(
- "Uploading files",
- total=total_files
- )
-
- for batch_num, batch in enumerate(batches):
- progress.update(
- upload_task,
- description=f"Uploading batch {batch_num + 1}/{num_batches}"
- )
-
- try:
- result = self._send_batch_stream(batch, progress)
- if result:
- total_indexed += result.files_indexed or 0
- total_skipped += result.files_skipped or 0
- total_nodes += result.nodes_created or 0
- total_edges += result.edges_created or 0
- if result.errors:
- all_errors.extend(result.errors)
- except Exception as e:
- self.log(f" [ERROR] Batch {batch_num + 1} failed: {e}")
-
- files_done += len(batch)
- progress.update(upload_task, completed=files_done)
+ progress.update(upload_task, completed=files_done)
 
  # Summary
  if total_skipped > 0:
@@ -744,7 +558,7 @@ class SyncService:
  self.log(f"Indexed {total_indexed} files, {total_nodes} nodes, {total_edges} edges")
  if all_errors:
  self.log(f"Errors ({len(all_errors)}):")
- for err in all_errors[:10]:
+ for err in all_errors[:10]: # Show first 10 errors
  self.log(f" {err.get('path', 'unknown')}: {err.get('error', 'unknown error')}")
  if len(all_errors) > 10:
  self.log(f" ... and {len(all_errors) - 10} more errors")
nogic-0.1.0.dist-info/METADATA CHANGED
@@ -1,11 +1,11 @@
  Metadata-Version: 2.4
  Name: nogic
- Version: 0.0.1
+ Version: 0.1.0
  Summary: Code intelligence CLI for AI agents — index, search, and understand codebases via graph + vector embeddings.
  Project-URL: Homepage, https://nogic.dev
  Project-URL: Repository, https://github.com/nogic-dev/cli
  Project-URL: Documentation, https://docs.nogic.dev
- Author-email: Nogic <support@nogic.dev>
+ Author-email: Nogic <hello@nogic.dev>
  License-Expression: MIT
  License-File: LICENSE
  Keywords: ai-agents,code-graph,code-intelligence,embeddings,mcp,tree-sitter
nogic-0.1.0.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
- nogic/__init__.py,sha256=mrqbp3Uwau7m5t8uqo6Eq7az13oFNOWlx9Q1ETCRBHo,47
+ nogic/__init__.py,sha256=8fFCEtduZTo1cQhZMEh-gMTcaFr38C36_E8Z1NAEZMM,47
  nogic/config.py,sha256=LPjsIPHjhERh0LkFIKe6n2z-RdkibOFvIlMQFTkBaAs,5006
  nogic/ignore.py,sha256=Q2uumbWVAXW201GAahvEEtgipqQmTPf49W_SEpyVyDk,2695
  nogic/main.py,sha256=AdAbbnXlxcbek8nTxL44-ZTNc1sqzWGIlWu-jzXK_9M,1399
@@ -7,14 +7,14 @@ nogic/ui.py,sha256=nCBHktKX2nOwMDv-Duvof-hBvfTkOiCGB41Lx0Rj_QI,1316
  nogic/api/__init__.py,sha256=fK108K0urB8LhB56bY5o1qUq_Pj_TQFP9YEP4xMVH1E,375
  nogic/api/client.py,sha256=BwdC3gyYPEGyY6lFDYXA8c3wrccuIq_pejalogrLeWk,12779
  nogic/commands/__init__.py,sha256=eMuQR40Ye6ei4XbISb4dO1BwFk5RRqKIKGMxrztVu9Y,28
- nogic/commands/init.py,sha256=nXUaO1qC8bFuxUCbeueNiJDyPBbphnfRrpHU2MXFZGo,5217
+ nogic/commands/init.py,sha256=JWbm1AUyry4hAn-cOSFB8fG3uTTHJ5OL7yuQYQcWh_c,5056
  nogic/commands/login.py,sha256=dYR6HeveQej1Rgclz2DsT5Kq7qqoSfOad2f4iFtv4bI,2251
  nogic/commands/projects.py,sha256=I4DTWPJdgUgQDAmFUZT99CYU_ZXlNChU07CptuDAZa0,4422
- nogic/commands/reindex.py,sha256=2A1N2Ze6E064lcnlUa39Jr2xLAQq22-QeV8vRPSz3Io,4027
- nogic/commands/status.py,sha256=5ZdaLDdanCgSBL8ALoVban1vs7pdivyoSvwsoUP6ItI,6071
- nogic/commands/sync.py,sha256=b64uM2oUJDDGzn2Jc5ka9iFxnxafi1pKUYKgQpYANoE,2447
+ nogic/commands/reindex.py,sha256=YY1-B-zJHuGiGqrta0AREWVdLKMJ8AxkcBj7Q0yPurY,2998
+ nogic/commands/status.py,sha256=GtfWV3W28J3N0WBIlIXon7pbnGtXi7dlILQw-RnYR3I,3959
+ nogic/commands/sync.py,sha256=aBt2gEE7t1TZMhAT3hvy2-z0Ox4BZSl4RxTd_ahCLW4,2160
  nogic/commands/telemetry_cmd.py,sha256=zFArvmYN-CTBAE8jgowWE9CMOniFdyufkQZS7lTcIaw,1796
- nogic/commands/watch.py,sha256=mHqUwBy-0zsAN7ZrpJI8aRVW4VReBqLV6epjIyFQki8,5769
+ nogic/commands/watch.py,sha256=xO1CWC2MLBcB7rNX0zrtdxfcd4SzXghCg-hAB6xYlIw,3615
  nogic/parsing/__init__.py,sha256=PdfNe7OlMIx7pXUV73XCEts-gJbKDWl6d37Xgk5Dq1I,417
  nogic/parsing/js_extractor.py,sha256=m9SWP93byQqB-zcKdPqXdVkL2B_iM2kp1dj_6fUnwA4,25580
  nogic/parsing/parser.py,sha256=TgIVWGizdca606MV5gSdbQXFqsuzver5dKR3UMoYPg8,7051
@@ -27,9 +27,9 @@ nogic/storage/symbols.py,sha256=2aBKbqmyyAUCOzKLPJUecp49YkgGz_I3gj0esCZl-r8,6287
  nogic/watcher/__init__.py,sha256=NXX_7l69rhPc0EmNpGuV9uuFrqYqw2g9ulod5280f-c,238
  nogic/watcher/monitor.py,sha256=zSz6YLB0Wzp3u3bXITn6LuTFZ3uZ66l_Dg12S4KDE9s,2289
  nogic/watcher/storage.py,sha256=zrYK81CFxTFDPoFwFazbJi6EKna-bUkX2nofetUxiVU,5924
- nogic/watcher/sync.py,sha256=JZwDzYxbRFEufYDIGF9ND0UWxb0uySr1MWLTcr-R9d4,32703
- nogic-0.0.1.dist-info/METADATA,sha256=rKY5eGXtfCqBrRxzCnvSMvJY1p5e4LC3mfxUw8fus-U,4555
- nogic-0.0.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- nogic-0.0.1.dist-info/entry_points.txt,sha256=_mT_cZ1fXzhogKq9r5T7c_qI7xAgcniKzftL0P6xTuQ,43
- nogic-0.0.1.dist-info/licenses/LICENSE,sha256=x8apHaKgnADt4dR_45yHyL-OBaRP8310XzD2jv8HRWg,1062
- nogic-0.0.1.dist-info/RECORD,,
+ nogic/watcher/sync.py,sha256=uX2V3i4Opp5D3YWYetgS-CGzrCCRld4h2H7SOux9sqI,24543
+ nogic-0.1.0.dist-info/METADATA,sha256=NkdOuP7qTlg5zt7Xp9_Ze-hQzQxEMh3XgfTHAnmR_vM,4553
+ nogic-0.1.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ nogic-0.1.0.dist-info/entry_points.txt,sha256=_mT_cZ1fXzhogKq9r5T7c_qI7xAgcniKzftL0P6xTuQ,43
+ nogic-0.1.0.dist-info/licenses/LICENSE,sha256=x8apHaKgnADt4dR_45yHyL-OBaRP8310XzD2jv8HRWg,1062
+ nogic-0.1.0.dist-info/RECORD,,