affinity-sdk 0.9.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. affinity/__init__.py +139 -0
  2. affinity/cli/__init__.py +7 -0
  3. affinity/cli/click_compat.py +27 -0
  4. affinity/cli/commands/__init__.py +1 -0
  5. affinity/cli/commands/_entity_files_dump.py +219 -0
  6. affinity/cli/commands/_list_entry_fields.py +41 -0
  7. affinity/cli/commands/_v1_parsing.py +77 -0
  8. affinity/cli/commands/company_cmds.py +2139 -0
  9. affinity/cli/commands/completion_cmd.py +33 -0
  10. affinity/cli/commands/config_cmds.py +540 -0
  11. affinity/cli/commands/entry_cmds.py +33 -0
  12. affinity/cli/commands/field_cmds.py +413 -0
  13. affinity/cli/commands/interaction_cmds.py +875 -0
  14. affinity/cli/commands/list_cmds.py +3152 -0
  15. affinity/cli/commands/note_cmds.py +433 -0
  16. affinity/cli/commands/opportunity_cmds.py +1174 -0
  17. affinity/cli/commands/person_cmds.py +1980 -0
  18. affinity/cli/commands/query_cmd.py +444 -0
  19. affinity/cli/commands/relationship_strength_cmds.py +62 -0
  20. affinity/cli/commands/reminder_cmds.py +595 -0
  21. affinity/cli/commands/resolve_url_cmd.py +127 -0
  22. affinity/cli/commands/session_cmds.py +84 -0
  23. affinity/cli/commands/task_cmds.py +110 -0
  24. affinity/cli/commands/version_cmd.py +29 -0
  25. affinity/cli/commands/whoami_cmd.py +36 -0
  26. affinity/cli/config.py +108 -0
  27. affinity/cli/context.py +749 -0
  28. affinity/cli/csv_utils.py +195 -0
  29. affinity/cli/date_utils.py +42 -0
  30. affinity/cli/decorators.py +77 -0
  31. affinity/cli/errors.py +28 -0
  32. affinity/cli/field_utils.py +355 -0
  33. affinity/cli/formatters.py +551 -0
  34. affinity/cli/help_json.py +283 -0
  35. affinity/cli/logging.py +100 -0
  36. affinity/cli/main.py +261 -0
  37. affinity/cli/options.py +53 -0
  38. affinity/cli/paths.py +32 -0
  39. affinity/cli/progress.py +183 -0
  40. affinity/cli/query/__init__.py +163 -0
  41. affinity/cli/query/aggregates.py +357 -0
  42. affinity/cli/query/dates.py +194 -0
  43. affinity/cli/query/exceptions.py +147 -0
  44. affinity/cli/query/executor.py +1236 -0
  45. affinity/cli/query/filters.py +248 -0
  46. affinity/cli/query/models.py +333 -0
  47. affinity/cli/query/output.py +331 -0
  48. affinity/cli/query/parser.py +619 -0
  49. affinity/cli/query/planner.py +430 -0
  50. affinity/cli/query/progress.py +270 -0
  51. affinity/cli/query/schema.py +439 -0
  52. affinity/cli/render.py +1589 -0
  53. affinity/cli/resolve.py +222 -0
  54. affinity/cli/resolvers.py +249 -0
  55. affinity/cli/results.py +308 -0
  56. affinity/cli/runner.py +218 -0
  57. affinity/cli/serialization.py +65 -0
  58. affinity/cli/session_cache.py +276 -0
  59. affinity/cli/types.py +70 -0
  60. affinity/client.py +771 -0
  61. affinity/clients/__init__.py +19 -0
  62. affinity/clients/http.py +3664 -0
  63. affinity/clients/pipeline.py +165 -0
  64. affinity/compare.py +501 -0
  65. affinity/downloads.py +114 -0
  66. affinity/exceptions.py +615 -0
  67. affinity/filters.py +1128 -0
  68. affinity/hooks.py +198 -0
  69. affinity/inbound_webhooks.py +302 -0
  70. affinity/models/__init__.py +163 -0
  71. affinity/models/entities.py +798 -0
  72. affinity/models/pagination.py +513 -0
  73. affinity/models/rate_limit_snapshot.py +48 -0
  74. affinity/models/secondary.py +413 -0
  75. affinity/models/types.py +663 -0
  76. affinity/policies.py +40 -0
  77. affinity/progress.py +22 -0
  78. affinity/py.typed +0 -0
  79. affinity/services/__init__.py +42 -0
  80. affinity/services/companies.py +1286 -0
  81. affinity/services/lists.py +1892 -0
  82. affinity/services/opportunities.py +1330 -0
  83. affinity/services/persons.py +1348 -0
  84. affinity/services/rate_limits.py +173 -0
  85. affinity/services/tasks.py +193 -0
  86. affinity/services/v1_only.py +2445 -0
  87. affinity/types.py +83 -0
  88. affinity_sdk-0.9.5.dist-info/METADATA +622 -0
  89. affinity_sdk-0.9.5.dist-info/RECORD +92 -0
  90. affinity_sdk-0.9.5.dist-info/WHEEL +4 -0
  91. affinity_sdk-0.9.5.dist-info/entry_points.txt +2 -0
  92. affinity_sdk-0.9.5.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,430 @@
1
+ """Query execution planner.
2
+
3
+ Analyzes queries and generates execution plans with cost estimates.
4
+ This module is CLI-only and NOT part of the public SDK API.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ from graphlib import TopologicalSorter
10
+ from typing import TYPE_CHECKING
11
+
12
+ from .exceptions import QueryValidationError
13
+ from .models import ExecutionPlan, PlanStep, Query, WhereClause
14
+ from .schema import RelationshipDef, get_entity_schema, get_relationship
15
+
16
+ if TYPE_CHECKING:
17
+ pass
18
+
19
+
20
# =============================================================================
# Cost Model Constants
# =============================================================================
# All numbers below are rough heuristics used only for plan/cost display
# (e.g. dry-run output); they are not fetched from the API.

# Estimated records per entity type (for planning purposes)
ESTIMATED_ENTITY_COUNTS: dict[str, int] = {
    "persons": 5000,
    "companies": 2000,
    "opportunities": 1000,
    "listEntries": 10000,
    "interactions": 10000,
    "notes": 5000,
}

# Default estimate when entity type unknown
DEFAULT_ENTITY_COUNT = 1000

# Average related entities per record, keyed by relationship name
ESTIMATED_RELATIONSHIPS: dict[str, int] = {
    "companies": 2,  # persons -> companies
    "people": 3,  # companies -> people
    "opportunities": 5,  # persons/companies -> opportunities
    "interactions": 20,
    "notes": 10,
    "listEntries": 5,
}

# Thresholds for warnings (in estimated API calls / fetched records)
EXPENSIVE_OPERATION_THRESHOLD = 100  # API calls
VERY_EXPENSIVE_OPERATION_THRESHOLD = 500
MAX_RECORDS_WARNING_THRESHOLD = 1000

# Memory estimation (bytes per record)
BYTES_PER_RECORD = 2000
54
+
55
+
56
+ # =============================================================================
57
+ # Query Planner
58
+ # =============================================================================
59
+
60
+
61
class QueryPlanner:
    """Generates execution plans from parsed queries.

    Walks a validated :class:`Query` and emits an ordered list of
    :class:`PlanStep` objects (fetch, filter, include, aggregate, sort,
    limit) plus API-call / record / memory estimates, warnings, and
    recommendations. All estimates are heuristics intended for dry-run
    display, not exact accounting.
    """

    def __init__(self, *, max_records: int = 10000, concurrency: int = 10) -> None:
        """Initialize the planner.

        Args:
            max_records: Maximum records to fetch (safety limit)
            concurrency: Concurrency level for N+1 operations
        """
        self.max_records = max_records
        self.concurrency = concurrency

    def plan(self, query: Query) -> ExecutionPlan:
        """Generate an execution plan for a query.

        Args:
            query: Validated Query object

        Returns:
            ExecutionPlan with steps, estimates, and warnings

        Raises:
            QueryPlanError: If plan cannot be generated
            QueryValidationError: If query references unknown entities/relationships
        """
        steps: list[PlanStep] = []
        warnings: list[str] = []
        recommendations: list[str] = []
        step_id = 0

        # Validate entity exists in schema before doing any estimation.
        # entity_schema is guaranteed non-None below this point.
        entity_schema = get_entity_schema(query.from_)
        if entity_schema is None:
            raise QueryValidationError(
                f"Unknown entity type '{query.from_}'",
                field="from",
            )

        # Step 1: Fetch primary entity (paginated list call)
        estimated_records = self._estimate_primary_records(query)
        fetch_step = PlanStep(
            step_id=step_id,
            operation="fetch",
            description=f"Fetch {query.from_} (paginated)",
            entity=query.from_,
            estimated_api_calls=self._estimate_pages(estimated_records),
            estimated_records=estimated_records,
            is_client_side=False,
        )

        # Check for filter pushdown opportunities (currently listEntries only)
        if query.from_ == "listEntries" and query.where is not None:
            pushdown = self._analyze_filter_pushdown(query.where)
            if pushdown:
                fetch_step.filter_pushdown = True
                fetch_step.pushdown_filter = pushdown
                fetch_step.description += " with server-side filter"
            else:
                warnings.append(
                    "No server-side filtering available for this query. "
                    "Consider using Status, Owner, or other dropdown fields for better performance."
                )

        steps.append(fetch_step)
        step_id += 1

        # Step 2: Client-side filter. Runs whenever a WHERE clause exists;
        # even when a pushdown was found, remaining conditions are applied
        # client-side.
        if query.where is not None:
            filter_step = PlanStep(
                step_id=step_id,
                operation="filter",
                description=self._describe_where(query.where),
                entity=query.from_,
                estimated_api_calls=0,
                estimated_records=self._estimate_filtered_records(estimated_records, query.where),
                is_client_side=True,
                depends_on=[0],
            )
            steps.append(filter_step)
            step_id += 1

            # Update estimated records after filter
            estimated_records = filter_step.estimated_records or estimated_records

        # Step 3: Includes (may require N+1 API calls)
        if query.include is not None:
            for include_path in query.include:
                # Validate relationship exists on the (already validated) schema
                rel = get_relationship(query.from_, include_path)
                if rel is None:
                    # List available relationships for a helpful error message
                    available = (
                        sorted(entity_schema.relationships.keys())
                        if entity_schema.relationships
                        else []
                    )
                    if available:
                        raise QueryValidationError(
                            f"Unknown relationship '{include_path}' for entity '{query.from_}'. "
                            f"Available: {', '.join(available)}",
                            field="include",
                        )
                    else:
                        raise QueryValidationError(
                            f"Entity '{query.from_}' does not support includes",
                            field="include",
                        )

                include_calls = self._estimate_include_calls(estimated_records, include_path, rel)
                include_step = PlanStep(
                    step_id=step_id,
                    operation="include",
                    description=f"Include {include_path} (N+1 API calls)",
                    entity=query.from_,
                    relationship=include_path,
                    estimated_api_calls=include_calls,
                    is_client_side=False,
                    depends_on=[step_id - 1],  # Depends on previous step
                )

                if rel.requires_n_plus_1:
                    include_step.warnings.append(
                        f"Fetching {include_path} requires {include_calls} API calls "
                        f"({estimated_records} records x 1 call each)"
                    )

                steps.append(include_step)
                step_id += 1

        # Step 4: Aggregation (client-side, if applicable)
        if query.aggregate is not None:
            agg_step = PlanStep(
                step_id=step_id,
                operation="aggregate",
                description=f"Compute aggregates: {', '.join(query.aggregate.keys())}",
                entity=query.from_,
                estimated_api_calls=0,
                is_client_side=True,
                depends_on=[step_id - 1],
            )
            steps.append(agg_step)
            step_id += 1

        # Step 5: Sort (client-side, if orderBy)
        if query.order_by is not None:
            sort_fields = [ob.field or "expression" for ob in query.order_by]
            sort_step = PlanStep(
                step_id=step_id,
                operation="sort",
                description=f"Sort by: {', '.join(sort_fields)}",
                entity=query.from_,
                estimated_api_calls=0,
                is_client_side=True,
                depends_on=[step_id - 1],
            )
            steps.append(sort_step)
            step_id += 1

        # Step 6: Limit (client-side, if specified)
        if query.limit is not None:
            limit_step = PlanStep(
                step_id=step_id,
                operation="limit",
                description=f"Take first {query.limit} results",
                entity=query.from_,
                estimated_api_calls=0,
                is_client_side=True,
                depends_on=[step_id - 1],
            )
            steps.append(limit_step)
            step_id += 1

        # Calculate totals across all steps
        total_api_calls = sum(s.estimated_api_calls for s in steps)
        estimated_fetched = steps[0].estimated_records

        # Generate warnings and recommendations
        has_expensive = total_api_calls >= EXPENSIVE_OPERATION_THRESHOLD
        requires_full_scan = not fetch_step.filter_pushdown and query.where is not None

        if total_api_calls >= VERY_EXPENSIVE_OPERATION_THRESHOLD:
            warnings.append(
                f"This query will make approximately {total_api_calls} API calls. "
                "Consider adding filters or reducing the scope."
            )

        if estimated_fetched and estimated_fetched > MAX_RECORDS_WARNING_THRESHOLD:
            recommendations.append(
                f"Query may fetch up to {estimated_fetched} records. "
                "Use --dry-run to preview before executing."
            )

        if requires_full_scan:
            recommendations.append(
                "Query requires client-side filtering. For better performance, "
                "consider using saved views or list export with --filter."
            )

        # Estimate memory footprint in MB (rough, based on BYTES_PER_RECORD)
        estimated_memory = None
        if estimated_fetched:
            estimated_memory = (estimated_fetched * BYTES_PER_RECORD) / (1024 * 1024)

        return ExecutionPlan(
            query=query,
            steps=steps,
            total_api_calls=total_api_calls,
            estimated_records_fetched=estimated_fetched,
            estimated_memory_mb=estimated_memory,
            warnings=warnings,
            recommendations=recommendations,
            has_expensive_operations=has_expensive,
            requires_full_scan=requires_full_scan,
            version=query.version or "1.0",
        )

    def get_execution_levels(self, plan: ExecutionPlan) -> list[list[PlanStep]]:
        """Group steps by execution level using topological sort.

        Steps in the same level can be executed in parallel.

        Args:
            plan: Execution plan

        Returns:
            List of levels, each containing steps that can run in parallel
        """
        ts: TopologicalSorter[int] = TopologicalSorter()
        step_map = {s.step_id: s for s in plan.steps}

        for step in plan.steps:
            ts.add(step.step_id, *step.depends_on)

        levels: list[list[PlanStep]] = []
        ts.prepare()

        # Drain the sorter level by level: everything ready at once is
        # mutually independent and forms one parallelizable level.
        while ts.is_active():
            ready_ids = list(ts.get_ready())
            levels.append([step_map[i] for i in ready_ids])
            for node_id in ready_ids:
                ts.done(node_id)

        return levels

    def _estimate_primary_records(self, query: Query) -> int:
        """Estimate number of records for the primary entity.

        Uses the static per-entity table, capped by an explicit LIMIT.
        """
        base_estimate = ESTIMATED_ENTITY_COUNTS.get(query.from_, DEFAULT_ENTITY_COUNT)

        # If limit is set, use it as upper bound
        if query.limit is not None:
            return min(base_estimate, query.limit)

        return base_estimate

    def _estimate_pages(self, records: int, page_size: int = 100) -> int:
        """Estimate number of API pages needed (ceiling division, min 1)."""
        return max(1, (records + page_size - 1) // page_size)

    def _estimate_filtered_records(self, total: int, where: WhereClause) -> int:
        """Estimate records remaining after client-side filter.

        This is a rough heuristic - actual results vary widely.
        """
        # Simple heuristic: each condition reduces by ~50%
        conditions = self._count_conditions(where)
        reduction = 0.5**conditions
        return max(1, int(total * reduction))

    def _count_conditions(self, where: WhereClause) -> int:
        """Count filter conditions, recursing through AND/OR/NOT."""
        count = 0

        # Single condition
        if where.op is not None:
            count = 1

        # Compound conditions
        if where.and_ is not None:
            count += sum(self._count_conditions(c) for c in where.and_)
        if where.or_ is not None:
            count += sum(self._count_conditions(c) for c in where.or_)
        if where.not_ is not None:
            count += self._count_conditions(where.not_)

        return count

    def _estimate_include_calls(
        self, records: int, _include_path: str, rel: RelationshipDef
    ) -> int:
        """Estimate API calls for an include operation."""
        if not rel.requires_n_plus_1:
            # Global service: single filtered call
            return 1

        # N+1: one call per record
        return records

    def _describe_where(self, where: WhereClause) -> str:
        """Generate a human-readable description of a WHERE clause."""
        if where.op is not None:
            path = where.path or "expression"
            # Unary operators don't need a value
            if where.op in ("is_null", "is_not_null"):
                return f"Client-side filter: {path} {where.op}"
            return f"Client-side filter: {path} {where.op} {where.value!r}"

        if where.and_ is not None:
            return f"Client-side filter: {len(where.and_)} conditions (AND)"

        if where.or_ is not None:
            return f"Client-side filter: {len(where.or_)} conditions (OR)"

        if where.not_ is not None:
            return "Client-side filter: NOT condition"

        return "Client-side filter"

    def _analyze_filter_pushdown(self, where: WhereClause) -> str | None:
        """Analyze if filter can be pushed to server-side.

        Currently only supports listEntries with simple eq/neq on dropdown fields.
        Also traverses AND conditions to find pushdown candidates.

        Returns:
            Filter string for server-side, or None if not pushable
        """
        # Handle AND conditions - traverse to find the first pushdown candidate
        if where.and_ is not None:
            for clause in where.and_:
                result = self._analyze_filter_pushdown(clause)
                if result is not None:
                    return result
            return None

        # Simple condition - check if pushable
        if where.op not in ("eq", "neq"):
            return None

        if where.path is None:
            return None

        # Only fields.* can be pushed down for list entries
        if not where.path.startswith("fields."):
            return None

        # Build filter string
        field_name = where.path.removeprefix("fields.")
        op_str = "=" if where.op == "eq" else "!="
        value_str = str(where.value)

        return f"{field_name}{op_str}{value_str}"
414
+
415
+
416
def create_planner(
    *,
    max_records: int = 10000,
    concurrency: int = 10,
) -> QueryPlanner:
    """Build a configured :class:`QueryPlanner`.

    Args:
        max_records: Maximum records safety limit
        concurrency: Concurrency level for N+1 operations

    Returns:
        Configured QueryPlanner
    """
    planner = QueryPlanner(max_records=max_records, concurrency=concurrency)
    return planner
@@ -0,0 +1,270 @@
1
+ """Progress display for query execution.
2
+
3
+ Provides Rich progress bars for TTY and NDJSON progress for non-TTY (MCP).
4
+ This module is CLI-only and NOT part of the public SDK API.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import json
10
+ import sys
11
+ import time
12
+ from typing import TYPE_CHECKING, Any, TextIO
13
+
14
+ from .executor import QueryProgressCallback
15
+ from .models import PlanStep
16
+
17
+ if TYPE_CHECKING:
18
+ from rich.console import Console
19
+ from rich.progress import TaskID
20
+
21
+
22
+ # =============================================================================
23
+ # Rich Progress Display (TTY)
24
+ # =============================================================================
25
+
26
+
27
+ class RichQueryProgress(QueryProgressCallback): # pragma: no cover
28
+ """Rich progress display for terminal output.
29
+
30
+ Shows multi-step progress with:
31
+ - Overall progress bar
32
+ - Per-step progress bars
33
+ - Step descriptions and status
34
+ """
35
+
36
+ def __init__(
37
+ self,
38
+ console: Console | None = None,
39
+ total_steps: int = 1,
40
+ ) -> None:
41
+ """Initialize Rich progress display.
42
+
43
+ Args:
44
+ console: Rich console (defaults to stderr)
45
+ total_steps: Total number of steps in plan
46
+ """
47
+ from rich.console import Console
48
+ from rich.progress import (
49
+ Progress,
50
+ SpinnerColumn,
51
+ TextColumn,
52
+ TimeElapsedColumn,
53
+ )
54
+
55
+ self.console = console or Console(stderr=True)
56
+ self.total_steps = total_steps
57
+ self.completed_steps = 0
58
+
59
+ # Simple progress: spinner + description + elapsed time
60
+ # No percentage/bar since total records are unknown for most operations
61
+ self.progress = Progress(
62
+ SpinnerColumn(),
63
+ TextColumn("[bold blue]{task.description}"),
64
+ TimeElapsedColumn(),
65
+ console=self.console,
66
+ transient=False,
67
+ )
68
+
69
+ self._overall_task: TaskID | None = None
70
+ self._step_tasks: dict[int, TaskID] = {}
71
+ self._started = False
72
+
73
+ def __enter__(self) -> RichQueryProgress:
74
+ """Start progress display."""
75
+ self.progress.start()
76
+ self._started = True
77
+ self._overall_task = self.progress.add_task(
78
+ "[bold]Overall Progress",
79
+ total=self.total_steps,
80
+ )
81
+ return self
82
+
83
+ def __exit__(self, *args: object) -> None:
84
+ """Stop progress display."""
85
+ self.progress.stop()
86
+ self._started = False
87
+
88
+ def on_step_start(self, step: PlanStep) -> None:
89
+ """Called when a step starts."""
90
+ if not self._started:
91
+ return
92
+
93
+ task_id = self.progress.add_task(
94
+ f"[cyan]{step.description}",
95
+ total=None, # Indeterminate
96
+ )
97
+ self._step_tasks[step.step_id] = task_id
98
+
99
+ def on_step_progress(self, step: PlanStep, current: int, total: int | None) -> None:
100
+ """Called during step execution."""
101
+ if not self._started:
102
+ return
103
+
104
+ task_id = self._step_tasks.get(step.step_id)
105
+ if task_id is not None:
106
+ if total is not None:
107
+ self.progress.update(task_id, completed=current, total=total)
108
+ else:
109
+ # No total known - show record count in description instead
110
+ self.progress.update(
111
+ task_id,
112
+ completed=current,
113
+ description=f"[cyan]{step.description} ({current:,} records)",
114
+ )
115
+
116
+ def on_step_complete(self, step: PlanStep, records: int) -> None:
117
+ """Called when a step completes."""
118
+ if not self._started:
119
+ return
120
+
121
+ task_id = self._step_tasks.get(step.step_id)
122
+ if task_id is not None:
123
+ self.progress.update(
124
+ task_id,
125
+ completed=100,
126
+ total=100,
127
+ description=f"[green]✓ {step.description} ({records} records)",
128
+ )
129
+
130
+ self.completed_steps += 1
131
+ if self._overall_task is not None:
132
+ self.progress.update(self._overall_task, completed=self.completed_steps)
133
+
134
+ def on_step_error(self, step: PlanStep, error: Exception) -> None:
135
+ """Called when a step fails."""
136
+ if not self._started:
137
+ return
138
+
139
+ task_id = self._step_tasks.get(step.step_id)
140
+ if task_id is not None:
141
+ self.progress.update(
142
+ task_id,
143
+ description=f"[red]✗ {step.description}: {error}",
144
+ )
145
+
146
+
147
+ # =============================================================================
148
+ # NDJSON Progress (Non-TTY / MCP)
149
+ # =============================================================================
150
+
151
+
152
class NDJSONQueryProgress(QueryProgressCallback):
    """NDJSON progress output for non-TTY environments.

    Emits progress updates as newline-delimited JSON for:
    - MCP tool integration
    - Scripting and automation
    """

    MIN_PROGRESS_INTERVAL = 0.65  # Minimum seconds between progress updates

    def __init__(self, output: TextIO | None = None) -> None:
        """Initialize NDJSON progress.

        Args:
            output: Output stream (defaults to stderr)
        """
        self.output = output or sys.stderr
        # Monotonic timestamp of the last emission, keyed by step id.
        self._last_emit: dict[int, float] = {}

    def _emit(self, data: dict[str, Any], *, force: bool = False) -> None:
        """Emit a progress JSON object, rate-limited per step.

        Args:
            data: JSON-serializable payload; its "stepId" keys the rate limiter.
            force: Bypass rate limiting (used for terminal events like
                completion and errors, which must never be dropped).
        """
        step_id = data.get("stepId", -1)
        # Use the monotonic clock for interval measurement: wall-clock time
        # (time.time) can jump under NTP/manual adjustments, which would
        # either flood or suppress progress output.
        now = time.monotonic()

        # Rate limit per step; the first event for a step always passes.
        if not force:
            last = self._last_emit.get(step_id)
            if last is not None and now - last < self.MIN_PROGRESS_INTERVAL:
                return

        self._last_emit[step_id] = now
        self.output.write(json.dumps(data) + "\n")
        self.output.flush()

    def on_step_start(self, step: PlanStep) -> None:
        """Called when a step starts."""
        self._emit(
            {
                "type": "progress",
                "event": "step_start",
                "stepId": step.step_id,
                "operation": step.operation,
                "description": step.description,
            }
        )

    def on_step_progress(self, step: PlanStep, current: int, total: int | None) -> None:
        """Called during step execution; includes a percentage when total is known."""
        progress = None
        if total is not None and total > 0:
            progress = round((current / total) * 100)

        self._emit(
            {
                "type": "progress",
                "event": "step_progress",
                "stepId": step.step_id,
                "current": current,
                "total": total,
                "progress": progress,
            }
        )

    def on_step_complete(self, step: PlanStep, records: int) -> None:
        """Called when a step completes. Always emitted (force=True)."""
        self._emit(
            {
                "type": "progress",
                "event": "step_complete",
                "stepId": step.step_id,
                "records": records,
                "progress": 100,
            },
            force=True,
        )

    def on_step_error(self, step: PlanStep, error: Exception) -> None:
        """Called when a step fails. Always emitted (force=True)."""
        self._emit(
            {
                "type": "progress",
                "event": "step_error",
                "stepId": step.step_id,
                "error": str(error),
            },
            force=True,
        )
238
+ )
239
+
240
+
241
+ # =============================================================================
242
+ # Factory Function
243
+ # =============================================================================
244
+
245
+
246
def create_progress_callback(  # pragma: no cover
    *,
    total_steps: int = 1,
    quiet: bool = False,
    force_ndjson: bool = False,
) -> QueryProgressCallback:
    """Select a progress callback suited to the current environment.

    Args:
        total_steps: Total number of steps in plan
        quiet: If True, return null callback
        force_ndjson: If True, use NDJSON even on TTY

    Returns:
        Appropriate progress callback
    """
    from .executor import NullProgressCallback

    if quiet:
        return NullProgressCallback()

    # Non-interactive stderr (pipes, MCP) gets machine-readable NDJSON.
    use_ndjson = force_ndjson or not sys.stderr.isatty()
    if use_ndjson:
        return NDJSONQueryProgress()
    return RichQueryProgress(total_steps=total_steps)