affinity-sdk 0.9.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. affinity/__init__.py +139 -0
  2. affinity/cli/__init__.py +7 -0
  3. affinity/cli/click_compat.py +27 -0
  4. affinity/cli/commands/__init__.py +1 -0
  5. affinity/cli/commands/_entity_files_dump.py +219 -0
  6. affinity/cli/commands/_list_entry_fields.py +41 -0
  7. affinity/cli/commands/_v1_parsing.py +77 -0
  8. affinity/cli/commands/company_cmds.py +2139 -0
  9. affinity/cli/commands/completion_cmd.py +33 -0
  10. affinity/cli/commands/config_cmds.py +540 -0
  11. affinity/cli/commands/entry_cmds.py +33 -0
  12. affinity/cli/commands/field_cmds.py +413 -0
  13. affinity/cli/commands/interaction_cmds.py +875 -0
  14. affinity/cli/commands/list_cmds.py +3152 -0
  15. affinity/cli/commands/note_cmds.py +433 -0
  16. affinity/cli/commands/opportunity_cmds.py +1174 -0
  17. affinity/cli/commands/person_cmds.py +1980 -0
  18. affinity/cli/commands/query_cmd.py +444 -0
  19. affinity/cli/commands/relationship_strength_cmds.py +62 -0
  20. affinity/cli/commands/reminder_cmds.py +595 -0
  21. affinity/cli/commands/resolve_url_cmd.py +127 -0
  22. affinity/cli/commands/session_cmds.py +84 -0
  23. affinity/cli/commands/task_cmds.py +110 -0
  24. affinity/cli/commands/version_cmd.py +29 -0
  25. affinity/cli/commands/whoami_cmd.py +36 -0
  26. affinity/cli/config.py +108 -0
  27. affinity/cli/context.py +749 -0
  28. affinity/cli/csv_utils.py +195 -0
  29. affinity/cli/date_utils.py +42 -0
  30. affinity/cli/decorators.py +77 -0
  31. affinity/cli/errors.py +28 -0
  32. affinity/cli/field_utils.py +355 -0
  33. affinity/cli/formatters.py +551 -0
  34. affinity/cli/help_json.py +283 -0
  35. affinity/cli/logging.py +100 -0
  36. affinity/cli/main.py +261 -0
  37. affinity/cli/options.py +53 -0
  38. affinity/cli/paths.py +32 -0
  39. affinity/cli/progress.py +183 -0
  40. affinity/cli/query/__init__.py +163 -0
  41. affinity/cli/query/aggregates.py +357 -0
  42. affinity/cli/query/dates.py +194 -0
  43. affinity/cli/query/exceptions.py +147 -0
  44. affinity/cli/query/executor.py +1236 -0
  45. affinity/cli/query/filters.py +248 -0
  46. affinity/cli/query/models.py +333 -0
  47. affinity/cli/query/output.py +331 -0
  48. affinity/cli/query/parser.py +619 -0
  49. affinity/cli/query/planner.py +430 -0
  50. affinity/cli/query/progress.py +270 -0
  51. affinity/cli/query/schema.py +439 -0
  52. affinity/cli/render.py +1589 -0
  53. affinity/cli/resolve.py +222 -0
  54. affinity/cli/resolvers.py +249 -0
  55. affinity/cli/results.py +308 -0
  56. affinity/cli/runner.py +218 -0
  57. affinity/cli/serialization.py +65 -0
  58. affinity/cli/session_cache.py +276 -0
  59. affinity/cli/types.py +70 -0
  60. affinity/client.py +771 -0
  61. affinity/clients/__init__.py +19 -0
  62. affinity/clients/http.py +3664 -0
  63. affinity/clients/pipeline.py +165 -0
  64. affinity/compare.py +501 -0
  65. affinity/downloads.py +114 -0
  66. affinity/exceptions.py +615 -0
  67. affinity/filters.py +1128 -0
  68. affinity/hooks.py +198 -0
  69. affinity/inbound_webhooks.py +302 -0
  70. affinity/models/__init__.py +163 -0
  71. affinity/models/entities.py +798 -0
  72. affinity/models/pagination.py +513 -0
  73. affinity/models/rate_limit_snapshot.py +48 -0
  74. affinity/models/secondary.py +413 -0
  75. affinity/models/types.py +663 -0
  76. affinity/policies.py +40 -0
  77. affinity/progress.py +22 -0
  78. affinity/py.typed +0 -0
  79. affinity/services/__init__.py +42 -0
  80. affinity/services/companies.py +1286 -0
  81. affinity/services/lists.py +1892 -0
  82. affinity/services/opportunities.py +1330 -0
  83. affinity/services/persons.py +1348 -0
  84. affinity/services/rate_limits.py +173 -0
  85. affinity/services/tasks.py +193 -0
  86. affinity/services/v1_only.py +2445 -0
  87. affinity/types.py +83 -0
  88. affinity_sdk-0.9.5.dist-info/METADATA +622 -0
  89. affinity_sdk-0.9.5.dist-info/RECORD +92 -0
  90. affinity_sdk-0.9.5.dist-info/WHEEL +4 -0
  91. affinity_sdk-0.9.5.dist-info/entry_points.txt +2 -0
  92. affinity_sdk-0.9.5.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,2139 @@
1
+ from __future__ import annotations
2
+
3
+ import asyncio
4
+ import sys
5
+ from collections.abc import Callable
6
+ from contextlib import ExitStack
7
+ from pathlib import Path
8
+ from typing import Any, cast
9
+
10
+ from rich.console import Console
11
+ from rich.progress import BarColumn, Progress, SpinnerColumn, TaskID, TextColumn, TimeElapsedColumn
12
+
13
+ from affinity.models.entities import Company, CompanyCreate, CompanyUpdate
14
+ from affinity.models.types import EnrichedFieldId, FieldId
15
+ from affinity.types import CompanyId, FieldType, ListId, PersonId
16
+
17
+ from ..click_compat import RichCommand, RichGroup, click
18
+ from ..context import CLIContext
19
+ from ..csv_utils import write_csv_to_stdout
20
+ from ..decorators import category, destructive, progress_capable
21
+ from ..errors import CLIError
22
+ from ..options import output_options
23
+ from ..progress import ProgressManager, ProgressSettings
24
+ from ..resolve import resolve_list_selector
25
+ from ..resolvers import ResolvedEntity
26
+ from ..results import CommandContext
27
+ from ..runner import CommandOutput, run_command
28
+ from ..serialization import serialize_model_for_cli
29
+ from ._entity_files_dump import dump_entity_files_bundle
30
+ from ._list_entry_fields import (
31
+ ListEntryFieldsScope,
32
+ build_list_entry_field_rows,
33
+ filter_list_entry_fields,
34
+ )
35
+ from .resolve_url_cmd import _parse_affinity_url
36
+
37
+
38
+ def _fetch_v2_collection(
39
+ *,
40
+ client: Any,
41
+ path: str,
42
+ section: str,
43
+ default_limit: int,
44
+ default_cap: int | None,
45
+ allow_unbounded: bool,
46
+ max_results: int | None,
47
+ all_pages: bool,
48
+ warnings: list[str],
49
+ pagination: dict[str, Any],
50
+ keep_item: Callable[[Any], bool] | None = None,
51
+ ) -> list[Any]:
52
+ """Fetch a paginated V2 collection with configurable limits.
53
+
54
+ This helper centralizes the pagination logic for fetching lists, list-entries,
55
+ and other V2 collections. It handles:
56
+ - Page size limits
57
+ - Max result caps
58
+ - Pagination cursor tracking
59
+ - Optional item filtering
60
+
61
+ Args:
62
+ client: The Affinity client instance.
63
+ path: The API path to fetch (e.g., "/companies/123/lists").
64
+ section: Name for this section (used in warnings and pagination keys).
65
+ default_limit: Default page size for API requests.
66
+ default_cap: Default max items if no explicit cap and not fetching all pages.
67
+ allow_unbounded: Whether unbounded fetching is allowed without --all.
68
+ max_results: Explicit max results limit (from --max-results).
69
+ all_pages: Whether to fetch all pages (from --all flag).
70
+ warnings: List to append warnings to (mutated in place).
71
+ pagination: Dict to store pagination cursors (mutated in place).
72
+ keep_item: Optional filter function to keep only matching items.
73
+
74
+ Returns:
75
+ List of fetched items.
76
+ """
77
+ effective_cap = max_results
78
+ if effective_cap is None and default_cap is not None and not all_pages:
79
+ effective_cap = default_cap
80
+ if effective_cap is not None and effective_cap <= 0:
81
+ return []
82
+
83
+ should_paginate = all_pages or allow_unbounded or effective_cap is not None
84
+ limit = default_limit
85
+ if effective_cap is not None:
86
+ limit = min(default_limit, effective_cap)
87
+
88
+ truncated_mid_page = False
89
+ payload = client._http.get(path, params={"limit": limit} if limit else None)
90
+ rows = payload.get("data", [])
91
+ if not isinstance(rows, list):
92
+ rows = []
93
+ page_items = list(rows)
94
+ if keep_item is not None:
95
+ page_items = [r for r in page_items if keep_item(r)]
96
+ items: list[Any] = page_items
97
+
98
+ page_pagination = payload.get("pagination", {})
99
+ if not isinstance(page_pagination, dict):
100
+ page_pagination = {}
101
+ next_url = page_pagination.get("nextUrl")
102
+ prev_url = page_pagination.get("prevUrl")
103
+
104
+ if effective_cap is not None and len(items) > effective_cap:
105
+ truncated_mid_page = True
106
+ items = items[:effective_cap]
107
+ next_url = None
108
+
109
+ while (
110
+ should_paginate
111
+ and isinstance(next_url, str)
112
+ and next_url
113
+ and (effective_cap is None or len(items) < effective_cap)
114
+ ):
115
+ payload = client._http.get_url(next_url)
116
+ rows = payload.get("data", [])
117
+ if isinstance(rows, list):
118
+ page_items = list(rows)
119
+ if keep_item is not None:
120
+ page_items = [r for r in page_items if keep_item(r)]
121
+ items.extend(page_items)
122
+ page_pagination = payload.get("pagination", {})
123
+ if not isinstance(page_pagination, dict):
124
+ page_pagination = {}
125
+ next_url = page_pagination.get("nextUrl")
126
+ prev_url = page_pagination.get("prevUrl")
127
+
128
+ if effective_cap is not None and len(items) > effective_cap:
129
+ truncated_mid_page = True
130
+ items = items[:effective_cap]
131
+ next_url = None
132
+ break
133
+
134
+ if truncated_mid_page and effective_cap is not None:
135
+ warnings.append(
136
+ f"{section} limited to {effective_cap:,} items. Use --all to fetch all results."
137
+ )
138
+ elif isinstance(next_url, str) and next_url:
139
+ pagination[section] = {"nextCursor": next_url, "prevCursor": prev_url}
140
+
141
+ return items
142
+
143
+
144
+ def _normalize_sdk_item(item: Any) -> Any:
145
+ """Normalize an SDK model or dict to a plain dict for JSON output."""
146
+ if isinstance(item, dict):
147
+ return item
148
+ dump = getattr(item, "model_dump", None)
149
+ if callable(dump):
150
+ payload = dump(by_alias=True, mode="json")
151
+ fields_raw = getattr(item, "fields_raw", None)
152
+ if isinstance(fields_raw, list):
153
+ payload["fields"] = fields_raw
154
+ return payload
155
+ return item
156
+
157
+
158
def _fetch_v2_collection_sdk(
    *,
    fetch_page: Callable[[int | None, str | None], Any],
    section: str,
    default_limit: int,
    default_cap: int | None,
    allow_unbounded: bool,
    max_results: int | None,
    all_pages: bool,
    warnings: list[str],
    pagination: dict[str, Any],
    keep_item: Callable[[Any], bool] | None = None,
) -> list[Any]:
    """Fetch a paginated V2 collection via SDK page methods.

    Same pagination contract as ``_fetch_v2_collection`` (page-size limits,
    max-result caps, cursor tracking, optional filtering), but pages come
    from an SDK callback and items are normalized from SDK models to dicts.

    Args:
        fetch_page: Callback ``(limit, cursor) -> Page`` returning an SDK page.
        section: Section name, used in warnings and pagination keys.
        default_limit: Default page size for API requests.
        default_cap: Default max items when no explicit cap and not --all.
        allow_unbounded: Whether pagination may continue without --all.
        max_results: Explicit cap from --max-results (or None).
        all_pages: Whether --all was passed.
        warnings: Warning sink, appended to in place.
        pagination: Cursor sink, updated in place under ``section``.
        keep_item: Optional predicate; only matching items are kept.

    Returns:
        The fetched items, as plain dicts (possibly truncated to the cap).
    """
    cap = max_results
    if cap is None and not all_pages and default_cap is not None:
        cap = default_cap
    if cap is not None and cap <= 0:
        return []

    paginate = all_pages or allow_unbounded or cap is not None
    page_limit = default_limit if cap is None else min(default_limit, cap)

    def _page_rows(page: Any) -> list[Any]:
        # Normalize one SDK page's data to dicts and apply the item filter.
        data = page.data if isinstance(page.data, list) else []
        batch = [_normalize_sdk_item(entry) for entry in data]
        if keep_item is not None:
            batch = [entry for entry in batch if keep_item(entry)]
        return batch

    def _cursors(page: Any) -> tuple[Any, Any]:
        # Defensive attribute access: pagination metadata may be absent.
        meta = getattr(page, "pagination", None)
        if not meta:
            return None, None
        return getattr(meta, "next_cursor", None), getattr(meta, "prev_cursor", None)

    current = fetch_page(page_limit, None)
    collected: list[Any] = _page_rows(current)
    next_url, prev_url = _cursors(current)

    clipped = False
    if cap is not None and len(collected) > cap:
        clipped = True
        collected = collected[:cap]
        next_url = None

    while (
        paginate
        and isinstance(next_url, str)
        and next_url
        and (cap is None or len(collected) < cap)
    ):
        current = fetch_page(None, next_url)
        collected.extend(_page_rows(current))
        next_url, prev_url = _cursors(current)
        if cap is not None and len(collected) > cap:
            clipped = True
            collected = collected[:cap]
            next_url = None
            break

    if clipped and cap is not None:
        warnings.append(
            f"{section} limited to {cap:,} items. Use --all to fetch all results."
        )
    elif isinstance(next_url, str) and next_url:
        # Stopped cleanly on a page boundary with more data available:
        # expose resume cursors to the caller.
        pagination[section] = {"nextCursor": next_url, "prevCursor": prev_url}

    return collected
255
+
256
+
257
# Root click group for the `company` CLI namespace; subcommands register
# themselves below via @company_group.command(...) / @company_group.group(...).
# The docstring doubles as the group's help text, so it is left unchanged.
@click.group(name="company", cls=RichGroup)
def company_group() -> None:
    """Company commands."""
260
+
261
+
262
def _parse_field_types(values: tuple[str, ...]) -> list[FieldType] | None:
    """Convert raw --field-type option strings into FieldType members.

    Returns None when no values were supplied. Raises CLIError (exit code 2,
    usage_error) for any name that is not a known field type, with a hint
    listing the valid values.
    """
    if not values:
        return None
    lookup = {member.value.lower(): member for member in FieldType}
    parsed: list[FieldType] = []
    for raw in values:
        member = lookup.get(raw.lower())
        if member is None:
            raise CLIError(
                f"Unknown field type: {raw}",
                exit_code=2,
                error_type="usage_error",
                hint=f"Valid types: {', '.join(sorted(lookup.keys()))}",
            )
        parsed.append(member)
    return parsed
279
+
280
+
281
@category("read")
@company_group.command(name="ls", cls=RichCommand)
@click.option("--page-size", "-s", type=int, default=None, help="Page size (limit).")
@click.option(
    "--cursor", type=str, default=None, help="Resume from cursor (incompatible with --page-size)."
)
@click.option(
    "--max-results", "--limit", "-n", type=int, default=None, help="Stop after N items total."
)
@click.option("--all", "-A", "all_pages", is_flag=True, help="Fetch all pages.")
@click.option(
    "--field",
    "field_ids",
    type=str,
    multiple=True,
    help="Field ID or name to include (repeatable).",
)
@click.option(
    "--field-type",
    "field_types",
    type=str,
    multiple=True,
    help="Field type to include (repeatable). Values: global, enriched, relationship-intelligence.",
)
@click.option(
    "--filter",
    "filter_expr",
    type=str,
    default=None,
    help="Filter: 'field op value'. Ops: = != =~ =^ =$ > < >= <=. E.g., 'name =~ \"Acme\"'.",
)
@click.option(
    "--query",
    "-q",
    type=str,
    default=None,
    help="Fuzzy text search (simple matching). Use --filter for structured queries.",
)
@click.option("--csv", "csv_flag", is_flag=True, help="Output as CSV (to stdout).")
@click.option(
    "--csv-bom",
    is_flag=True,
    help="Add UTF-8 BOM for Excel (use with redirection: --csv --csv-bom > file.csv).",
)
@output_options
@click.pass_obj
def company_ls(
    ctx: CLIContext,
    *,
    page_size: int | None,
    cursor: str | None,
    max_results: int | None,
    all_pages: bool,
    field_ids: tuple[str, ...],
    field_types: tuple[str, ...],
    filter_expr: str | None,
    query: str | None,
    csv_flag: bool,
    csv_bom: bool,
) -> None:
    """
    List companies.

    Supports field selection, field types, and filter expressions.
    Use --query for free-text search.

    Examples:

    - `xaffinity company ls`
    - `xaffinity company ls --page-size 50`
    - `xaffinity company ls --field-type enriched --all`
    - `xaffinity company ls --filter 'Industry = "Software"'`
    - `xaffinity company ls --query "Acme" --all`
    - `xaffinity company ls --all --csv > companies.csv`
    - `xaffinity company ls --all --csv --csv-bom > companies.csv`
    """

    # fn is handed to run_command at the bottom; it closes over all the CLI
    # options above and receives a warnings list to append non-fatal notices to.
    def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
        # Check mutual exclusivity: --csv and --json
        if csv_flag and ctx.output == "json":
            raise CLIError(
                "--csv and --json are mutually exclusive.",
                exit_code=2,
                error_type="usage_error",
            )

        client = ctx.get_client(warnings=warnings)

        if cursor is not None and page_size is not None:
            raise CLIError(
                "--cursor cannot be combined with --page-size.",
                exit_code=2,
                error_type="usage_error",
            )

        if query is not None and filter_expr is not None:
            raise CLIError(
                "--query cannot be combined with --filter (different APIs).",
                exit_code=2,
                error_type="usage_error",
                hint="Use --query for free-text search or --filter for structured filtering.",
            )

        # Build CommandContext upfront for all return paths
        ctx_modifiers: dict[str, object] = {}
        if page_size is not None:
            ctx_modifiers["pageSize"] = page_size
        if cursor is not None:
            ctx_modifiers["cursor"] = cursor
        if max_results is not None:
            ctx_modifiers["maxResults"] = max_results
        if all_pages:
            ctx_modifiers["allPages"] = True
        if field_ids:
            ctx_modifiers["fieldIds"] = list(field_ids)
        if field_types:
            ctx_modifiers["fieldTypes"] = list(field_types)
        if filter_expr:
            ctx_modifiers["filter"] = filter_expr
        if query:
            ctx_modifiers["query"] = query
        if csv_flag:
            ctx_modifiers["csv"] = True
        if csv_bom:
            ctx_modifiers["csvBom"] = True

        cmd_context = CommandContext(
            name="company ls",
            inputs={},
            modifiers=ctx_modifiers,
        )

        parsed_field_types = _parse_field_types(field_types)
        parsed_field_ids: list[FieldId] | None = (
            [FieldId(fid) for fid in field_ids] if field_ids else None
        )

        rows: list[dict[str, object]] = []
        first_page = True
        use_v1_search = query is not None
        wants_fields = bool(field_ids or field_types)

        # Progress is shown on stderr only when allowed by --progress/--quiet
        # and (unless forced with "always") stderr is a TTY.
        show_progress = (
            ctx.progress != "never"
            and not ctx.quiet
            and (ctx.progress == "always" or sys.stderr.isatty())
        )

        # Progress description based on operation type
        task_description = "Searching" if use_v1_search else "Fetching"

        with ExitStack() as stack:
            progress: Progress | None = None
            task_id: TaskID | None = None
            if show_progress:
                progress = stack.enter_context(
                    Progress(
                        TextColumn("{task.description}"),
                        BarColumn(),
                        TextColumn("{task.completed} rows"),
                        TimeElapsedColumn(),
                        console=Console(file=sys.stderr),
                        transient=True,
                    )
                )
                # total=max_results may be None, leaving the bar indeterminate.
                task_id = progress.add_task(task_description, total=max_results)

            # Helper to check max_results and return early if needed.
            # idx/page_len let it detect a mid-page stop, which suppresses the
            # resume cursor (it would skip the rest of the current page).
            def _check_max_results(
                rows: list[dict[str, object]],
                idx: int,
                page_len: int,
                next_cursor: str | None,
                prev_cursor: str | None,
            ) -> CommandOutput | None:
                if max_results is not None and len(rows) >= max_results:
                    stopped_mid_page = idx < (page_len - 1)
                    if stopped_mid_page:
                        warnings.append(
                            "Results limited by --max-results. Use --all to fetch all results."
                        )
                    pagination = None
                    if next_cursor and not stopped_mid_page and next_cursor != cursor:
                        pagination = {
                            "companies": {
                                "nextCursor": next_cursor,
                                "prevCursor": prev_cursor,
                            }
                        }
                    return CommandOutput(
                        data={"companies": rows[:max_results]},
                        context=cmd_context,
                        pagination=pagination,
                        api_called=True,
                    )
                return None

            # Three paths: V2-only, V1-only, or Hybrid (V1 search + V2 batch fetch)
            if use_v1_search and wants_fields:
                # Hybrid: V1 search for IDs, then V2 batch fetch with field data
                assert query is not None
                for v1_page in client.companies.search_pages(
                    query,
                    page_size=page_size,
                    page_token=cursor,
                ):
                    next_cursor = v1_page.next_page_token
                    prev_cursor = None  # V1 doesn't have prev cursor

                    if v1_page.data:
                        # Batch fetch from V2 with field data
                        company_ids = [CompanyId(c.id) for c in v1_page.data]
                        v2_response = client.companies.list(
                            ids=company_ids,
                            field_ids=parsed_field_ids,
                            field_types=parsed_field_types,
                        )
                        for idx, company in enumerate(v2_response.data):
                            rows.append(_company_ls_row(company))
                            if progress and task_id is not None:
                                progress.update(task_id, completed=len(rows))
                            result = _check_max_results(
                                rows, idx, len(v2_response.data), next_cursor, prev_cursor
                            )
                            if result is not None:
                                return result

                    # Without --all or --max-results, return just the first
                    # page along with its resume cursor.
                    if first_page and not all_pages and max_results is None:
                        return CommandOutput(
                            data={"companies": rows},
                            context=cmd_context,
                            pagination=(
                                {"companies": {"nextCursor": next_cursor, "prevCursor": None}}
                                if next_cursor
                                else None
                            ),
                            api_called=True,
                        )
                    first_page = False

            elif use_v1_search:
                # Search without field data
                assert query is not None
                for search_page in client.companies.search_pages(
                    query,
                    page_size=page_size,
                    page_token=cursor,
                ):
                    next_cursor = search_page.next_page_token
                    prev_cursor = None  # Search doesn't have prev cursor

                    for idx, company in enumerate(search_page.data):
                        rows.append(_company_ls_row(company))
                        if progress and task_id is not None:
                            progress.update(task_id, completed=len(rows))
                        result = _check_max_results(
                            rows, idx, len(search_page.data), next_cursor, prev_cursor
                        )
                        if result is not None:
                            return result

                    if first_page and not all_pages and max_results is None:
                        return CommandOutput(
                            data={"companies": rows},
                            context=cmd_context,
                            pagination=(
                                {"companies": {"nextCursor": next_cursor, "prevCursor": None}}
                                if next_cursor
                                else None
                            ),
                            api_called=True,
                        )
                    first_page = False

            else:
                # List with optional field data
                for page in client.companies.pages(
                    field_ids=parsed_field_ids,
                    field_types=parsed_field_types,
                    filter=filter_expr,
                    limit=page_size,
                    cursor=cursor,
                ):
                    next_cursor = page.pagination.next_cursor
                    prev_cursor = page.pagination.prev_cursor

                    for idx, company in enumerate(page.data):
                        rows.append(_company_ls_row(company))
                        if progress and task_id is not None:
                            progress.update(task_id, completed=len(rows))
                        result = _check_max_results(
                            rows, idx, len(page.data), next_cursor, prev_cursor
                        )
                        if result is not None:
                            return result

                    if first_page and not all_pages and max_results is None:
                        return CommandOutput(
                            data={"companies": rows},
                            context=cmd_context,
                            pagination=(
                                {
                                    "companies": {
                                        "nextCursor": next_cursor,
                                        "prevCursor": prev_cursor,
                                    }
                                }
                                if next_cursor
                                else None
                            ),
                            api_called=True,
                        )
                    first_page = False

        # CSV output to stdout
        if csv_flag:
            fieldnames = list(rows[0].keys()) if rows else []
            write_csv_to_stdout(
                rows=rows,
                fieldnames=fieldnames,
                bom=csv_bom,
            )
            # CSV bypasses the normal CommandOutput rendering entirely.
            sys.exit(0)

        return CommandOutput(
            data={"companies": rows},
            context=cmd_context,
            pagination=None,
            api_called=True,
        )

    run_command(ctx, command="company ls", fn=fn)
613
+
614
+
615
+ def _company_ls_row(company: Company) -> dict[str, object]:
616
+ """Build a row for company ls output."""
617
+ return {
618
+ "id": int(company.id),
619
+ "name": company.name,
620
+ "domain": company.domain,
621
+ "domains": company.domains,
622
+ }
623
+
624
+
625
# String values of every company FieldType, in a fixed order.
# NOTE(review): the consumers of this constant are outside this chunk —
# presumably it expands an "all field types" request; confirm at the call sites.
_COMPANY_FIELDS_ALL_TYPES: tuple[str, ...] = (
    FieldType.GLOBAL.value,
    FieldType.ENRICHED.value,
    FieldType.RELATIONSHIP_INTELLIGENCE.value,
)
630
+
631
+
632
+ def _strip_wrapping_quotes(value: str) -> str:
633
+ value = value.strip()
634
+ if len(value) >= 2 and value[0] == value[-1] and value[0] in {"'", '"'}:
635
+ return value[1:-1]
636
+ return value
637
+
638
+
639
def _resolve_company_selector(*, client: Any, selector: str) -> tuple[CompanyId, dict[str, Any]]:
    """Resolve a company selector to a CompanyId plus resolution metadata.

    Accepted forms: a numeric id, an Affinity company URL, "domain:<x>",
    or "name:<x>". Returns ``(company_id, {"company": <resolution dict>})``.
    Raises CLIError (usage_error) for anything unrecognized.
    """
    text = selector.strip()

    def _result(
        company_id: CompanyId, source: str, canonical_url: str | None = None
    ) -> tuple[CompanyId, dict[str, Any]]:
        # canonical_url is only attached for URL-based resolution.
        kwargs: dict[str, Any] = {
            "input": selector,
            "entity_id": int(company_id),
            "entity_type": "company",
            "source": source,
        }
        if canonical_url is not None:
            kwargs["canonical_url"] = canonical_url
        return company_id, {"company": ResolvedEntity(**kwargs).to_dict()}

    if text.isdigit():
        return _result(CompanyId(int(text)), "id")

    if text.startswith(("http://", "https://")):
        parsed = _parse_affinity_url(text)
        if parsed.type != "company" or parsed.company_id is None:
            raise CLIError(
                "Expected a company URL like https://<tenant>.affinity.(co|com)/companies/<id>",
                exit_code=2,
                error_type="usage_error",
                details={"input": selector, "resolvedType": parsed.type},
            )
        company_id = CompanyId(int(parsed.company_id))
        return _result(
            company_id,
            "url",
            canonical_url=f"https://app.affinity.co/companies/{int(company_id)}",
        )

    # "domain:<x>" / "name:<x>" selectors — prefix is case-insensitive,
    # value keeps its original case (minus optional wrapping quotes).
    head, sep, tail = text.partition(":")
    if sep:
        kind = head.lower()
        if kind == "domain":
            value = _strip_wrapping_quotes(tail)
            return _result(_resolve_company_by_domain(client=client, domain=value), "domain")
        if kind == "name":
            value = _strip_wrapping_quotes(tail)
            return _result(_resolve_company_by_name(client=client, name=value), "name")

    raise CLIError(
        "Unrecognized company selector.",
        exit_code=2,
        error_type="usage_error",
        hint='Use a numeric id, an Affinity URL, or "domain:<x>" / "name:<x>".',
        details={"input": selector},
    )
700
+
701
+
702
def _resolve_company_by_domain(*, client: Any, domain: str) -> CompanyId:
    """Find exactly one company whose domain matches *domain* (case-insensitive).

    Scans search results for companies whose primary or secondary domains
    match, stopping after 20 candidates. Raises CLIError: usage_error for an
    empty domain, not_found for no match, ambiguous_resolution for several.
    """
    domain = domain.strip()
    if not domain:
        raise CLIError("Domain cannot be empty.", exit_code=2, error_type="usage_error")

    wanted = domain.lower()
    matches: list[Company] = []

    for page in client.companies.search_pages(domain, page_size=500):
        for candidate in page.data:
            candidate_domains: list[str] = []
            if candidate.domain:
                candidate_domains.append(candidate.domain)
            candidate_domains.extend(candidate.domains or [])
            if any(d.lower() == wanted for d in candidate_domains):
                matches.append(candidate)
                if len(matches) >= 20:
                    break
        if len(matches) >= 20 or not page.next_page_token:
            break

    if not matches:
        raise CLIError(
            f'Company not found for domain "{domain}"',
            exit_code=4,
            error_type="not_found",
            hint=f'Run `xaffinity company ls --query "{domain}"` to explore matches.',
            details={"domain": domain},
        )
    if len(matches) > 1:
        raise CLIError(
            f'Ambiguous company domain "{domain}" ({len(matches)} matches)',
            exit_code=2,
            error_type="ambiguous_resolution",
            details={
                "domain": domain,
                "matches": [
                    {"companyId": int(c.id), "name": c.name, "domain": c.domain}
                    for c in matches[:20]
                ],
            },
        )
    return CompanyId(int(matches[0].id))
745
+
746
+
747
def _resolve_company_by_name(*, client: Any, name: str) -> CompanyId:
    """Find exactly one company whose name equals *name* (case-insensitive).

    Scans search results for exact (case-folded) name matches, stopping after
    20 candidates. Raises CLIError: usage_error for an empty name, not_found
    for no match, ambiguous_resolution for several.
    """
    name = name.strip()
    if not name:
        raise CLIError("Name cannot be empty.", exit_code=2, error_type="usage_error")

    target = name.lower()
    matches: list[Company] = []

    for page in client.companies.search_pages(name, page_size=500):
        for candidate in page.data:
            if candidate.name.lower() == target:
                matches.append(candidate)
                if len(matches) >= 20:
                    break
        if len(matches) >= 20 or not page.next_page_token:
            break

    if not matches:
        raise CLIError(
            f'Company not found for name "{name}"',
            exit_code=4,
            error_type="not_found",
            hint=f'Run `xaffinity company ls --query "{name}"` to explore matches.',
            details={"name": name},
        )
    if len(matches) > 1:
        raise CLIError(
            f'Ambiguous company name "{name}" ({len(matches)} matches)',
            exit_code=2,
            error_type="ambiguous_resolution",
            details={
                "name": name,
                "matches": [
                    {"companyId": int(c.id), "name": c.name, "domain": c.domain}
                    for c in matches[:20]
                ],
            },
        )
    return CompanyId(int(matches[0].id))
786
+
787
+
788
def _resolve_company_field_ids(
    *,
    client: Any,
    fields: tuple[str, ...],
    field_types: list[str],
) -> tuple[list[str], dict[str, Any]]:
    """Resolve explicit --field values and --field-type expansions to field ids.

    Explicit entries may be exact field ids or (unique) field names. Each
    requested field type expands to every field of that type, sorted by
    (lower-cased name, id). The combined list is de-duplicated preserving
    order. Raises CLIError for unknown or ambiguous field names.

    Returns:
        (ordered field ids, resolution-info dict for command output).
    """
    meta = client.companies.get_fields()
    field_by_id: dict[str, Any] = {str(field.id): field for field in meta}
    ids_by_name: dict[str, list[str]] = {}
    for field in meta:
        ids_by_name.setdefault(str(field.name).lower(), []).append(str(field.id))

    def _resolve_one(text: str) -> str:
        # One explicit --field value: an exact id wins, then a unique name match.
        if text in field_by_id:
            return text
        candidates = ids_by_name.get(text.lower(), [])
        if len(candidates) == 1:
            return candidates[0]
        if candidates:
            raise CLIError(
                f'Ambiguous field name "{text}" ({len(candidates)} matches)',
                exit_code=2,
                error_type="ambiguous_resolution",
                details={
                    "name": text,
                    "matches": [
                        {
                            "fieldId": fid,
                            "name": getattr(field_by_id.get(fid), "name", None),
                            "type": getattr(field_by_id.get(fid), "type", None),
                            "valueType": getattr(field_by_id.get(fid), "value_type", None),
                        }
                        for fid in candidates[:20]
                    ],
                },
            )
        raise CLIError(
            f'Unknown field: "{text}"',
            exit_code=2,
            error_type="usage_error",
            hint="Tip: run `xaffinity company get <id> --all-fields --json` and inspect "
            "`data.company.fields[*].id` / `data.company.fields[*].name`.",
            details={"field": text},
        )

    explicit_ids = [
        _resolve_one(text)
        for text in (_strip_wrapping_quotes(str(raw)).strip() for raw in fields)
        if text
    ]

    type_expanded: list[str] = []
    for requested in field_types:
        type_name = requested.strip()
        if not type_name:
            continue
        of_type = sorted(
            (field for field in meta if field.type == type_name),
            key=lambda field: (str(field.name).lower(), str(field.id)),
        )
        type_expanded.extend(str(field.id) for field in of_type)

    # Explicit fields first, then type expansions, de-duplicated in order.
    seen: set[str] = set()
    ordered: list[str] = []
    for fid in explicit_ids + type_expanded:
        if fid not in seen:
            seen.add(fid)
            ordered.append(fid)

    return ordered, {
        "fieldIds": ordered,
        "fieldTypes": field_types,
        "explicitFields": list(fields),
    }
868
+
869
+
870
@company_group.group(name="files", cls=RichGroup)
def company_files_group() -> None:
    """Company files."""
    # Click group container for `company files ...` subcommands (dump, upload).
    # Intentionally empty: subcommands attach via @company_files_group.command.
873
+
874
+
875
@category("read")
@company_files_group.command(name="dump", cls=RichCommand)
@click.argument("company_id", type=int)
@click.option(
    "--out",
    "out_dir",
    type=click.Path(),
    default=None,
    help="Output directory for downloaded files.",
)
@click.option("--overwrite", is_flag=True, help="Overwrite existing files.")
@click.option(
    "--concurrency", type=int, default=3, show_default=True, help="Number of concurrent downloads."
)
@click.option(
    "--page-size",
    type=int,
    default=100,
    show_default=True,
    help="Page size for file listing (max 100).",
)
@click.option("--max-files", type=int, default=None, help="Stop after N files.")
@output_options
@click.pass_obj
def company_files_dump(
    ctx: CLIContext,
    company_id: int,
    *,
    out_dir: str | None,
    overwrite: bool,
    concurrency: int,
    page_size: int,
    max_files: int | None,
) -> None:
    """Download all files attached to a company."""

    def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
        # Record only non-default option values as modifiers so command
        # metadata reflects what the user explicitly requested.
        ctx_modifiers: dict[str, object] = {}
        if out_dir:
            ctx_modifiers["outDir"] = out_dir
        if overwrite:
            ctx_modifiers["overwrite"] = True
        # BUG FIX: the option default is 3 (declared above), but this check
        # previously compared against 4 — recording the default value as a
        # modifier while silently dropping an explicit `--concurrency 4`.
        if concurrency != 3:
            ctx_modifiers["concurrency"] = concurrency
        if page_size != 100:
            ctx_modifiers["pageSize"] = page_size
        if max_files is not None:
            ctx_modifiers["maxFiles"] = max_files

        cmd_context = CommandContext(
            name="company files dump",
            inputs={"companyId": company_id},
            modifiers=ctx_modifiers,
        )

        # The bundle dump is async (concurrent downloads); drive it to
        # completion synchronously from this click handler.
        return asyncio.run(
            dump_entity_files_bundle(
                ctx=ctx,
                warnings=warnings,
                out_dir=out_dir,
                overwrite=overwrite,
                concurrency=concurrency,
                page_size=page_size,
                max_files=max_files,
                default_dirname=f"affinity-company-{company_id}-files",
                manifest_entity={"type": "company", "companyId": company_id},
                files_list_kwargs={"company_id": CompanyId(company_id)},
                context=cmd_context,
            )
        )

    run_command(ctx, command="company files dump", fn=fn)
948
+
949
+
950
@category("write")
@progress_capable
@company_files_group.command(name="upload", cls=RichCommand)
@click.argument("company_id", type=int)
@click.option(
    "--file",
    "file_paths",
    # exists=False: existence is validated manually below so we can raise a
    # structured CLIError (with hint/error_type) instead of click's default.
    type=click.Path(exists=False),
    multiple=True,
    required=True,
    help="File path to upload (repeatable).",
)
@output_options
@click.pass_obj
def company_files_upload(
    ctx: CLIContext,
    company_id: int,
    *,
    file_paths: tuple[str, ...],
) -> None:
    """
    Upload files to a company.

    Examples:

    - `xaffinity company files upload 123 --file doc.pdf`
    - `xaffinity company files upload 123 --file a.pdf --file b.pdf`
    """

    def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
        client = ctx.get_client(warnings=warnings)

        # Validate all file paths first, so no upload starts if any path is bad.
        paths: list[Path] = []
        for fp in file_paths:
            p = Path(fp)
            if not p.exists():
                raise CLIError(
                    f"File not found: {fp}",
                    exit_code=2,
                    error_type="usage_error",
                    hint="Check the file path and try again.",
                )
            if not p.is_file():
                raise CLIError(
                    f"Not a regular file: {fp}",
                    exit_code=2,
                    error_type="usage_error",
                    hint="Only regular files can be uploaded, not directories.",
                )
            paths.append(p)

        results: list[dict[str, object]] = []
        settings = ProgressSettings(mode=ctx.progress, quiet=ctx.quiet)

        # Uploads run sequentially; each gets its own progress task sized by
        # the file's on-disk byte count.
        with ProgressManager(settings=settings) as pm:
            for p in paths:
                file_size = p.stat().st_size
                _task_id, cb = pm.task(
                    description=f"upload {p.name}",
                    total_bytes=file_size,
                )
                success = client.files.upload_path(
                    p,
                    company_id=CompanyId(company_id),
                    on_progress=cb,
                )
                # Per-file result row; `success` comes from the SDK call.
                results.append(
                    {
                        "file": str(p),
                        "filename": p.name,
                        "size": file_size,
                        "success": success,
                    }
                )

        cmd_context = CommandContext(
            name="company files upload",
            inputs={"companyId": company_id},
            modifiers={"files": list(file_paths)},
        )

        return CommandOutput(
            data={"uploads": results, "companyId": company_id},
            context=cmd_context,
            api_called=True,
        )

    run_command(ctx, command="company files upload", fn=fn)
1039
+
1040
+
1041
@category("read")
@company_group.command(name="get", cls=RichCommand)
@click.argument("company_selector", type=str)
@click.option(
    "-f",
    "--field",
    "fields",
    multiple=True,
    help="Field id or exact field name (repeatable).",
)
@click.option(
    "-t",
    "--field-type",
    "field_types",
    multiple=True,
    type=click.Choice(list(_COMPANY_FIELDS_ALL_TYPES)),
    help="Include all fields of this type (repeatable).",
)
@click.option(
    "--all-fields",
    is_flag=True,
    help="Include all supported (non-list-specific) field data.",
)
@click.option("--no-fields", is_flag=True, help="Do not request field data.")
@click.option(
    "--expand",
    "expand",
    multiple=True,
    type=click.Choice(["lists", "list-entries", "people"]),
    help="Include related data (repeatable).",
)
@click.option(
    "--list",
    "list_selector",
    type=str,
    default=None,
    help=(
        "Filter list-entries expansion to a list id or exact list name "
        "(implies --expand list-entries)."
    ),
)
@click.option(
    "--list-entry-field",
    "list_entry_fields",
    multiple=True,
    help=(
        "Project a list-entry field into its own column (repeatable; implies --expand "
        "list-entries)."
    ),
)
@click.option(
    "--show-list-entry-fields",
    "show_list_entry_fields",
    is_flag=True,
    help=(
        "Render per-list-entry Fields tables in human output (implies --expand list-entries; "
        "requires --max-results <= 3)."
    ),
)
@click.option(
    "--list-entry-fields-scope",
    "list_entry_fields_scope",
    type=click.Choice(["list-only", "all"]),
    default="list-only",
    show_default=True,
    help="Control which fields appear in list entry tables (human output only).",
)
@click.option(
    "--max-results",
    "--limit",
    "-n",
    type=int,
    default=None,
    help="Maximum items to fetch per expansion section (applies to --expand).",
)
@click.option(
    "--all",
    "all_pages",
    is_flag=True,
    help="Fetch all pages for expansions (still capped by --max-results if set).",
)
@output_options
@click.pass_obj
def company_get(
    ctx: CLIContext,
    company_selector: str,
    *,
    fields: tuple[str, ...],
    field_types: tuple[str, ...],
    all_fields: bool,
    no_fields: bool,
    expand: tuple[str, ...],
    list_selector: str | None,
    list_entry_fields: tuple[str, ...],
    show_list_entry_fields: bool,
    list_entry_fields_scope: ListEntryFieldsScope,
    max_results: int | None,
    all_pages: bool,
) -> None:
    """
    Get a company by id, URL, domain, or name.

    The COMPANY_SELECTOR can be:

    - Company ID (e.g., `12345`)
    - Company URL (e.g., `https://app.affinity.co/companies/12345`)
    - Domain (e.g., `domain:acme.com`)
    - Name (e.g., `name:"Acme Inc"`)

    List Entry Fields:

    Use --list-entry-field and related flags to customize which list-entry
    fields are shown in table output. These flags are ignored in JSON mode
    to ensure full-fidelity output.

    JSON Output:

    When using --json, all list-entry fields are included regardless of
    --list-entry-field flags. Use table output for selective field display.

    Examples:

    - `xaffinity company get 223384905`
    - `xaffinity company get https://mydomain.affinity.com/companies/223384905`
    - `xaffinity company get domain:acme.com`
    - `xaffinity company get name:"Acme Inc"`
    - `xaffinity company get 223384905 --expand list-entries --list "Portfolio"`
    - `xaffinity company get 223384905 --json # Full data, ignores field filters`
    """

    def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
        client = ctx.get_client(warnings=warnings)
        cache = ctx.session_cache
        # Resolve id/URL/domain:/name: selector into a concrete company id plus
        # a "resolved" dict that is threaded through the command output.
        company_id, resolved = _resolve_company_selector(client=client, selector=company_selector)

        # Build CommandContext for company get
        ctx_modifiers: dict[str, object] = {}
        if fields:
            ctx_modifiers["fields"] = list(fields)
        if field_types:
            ctx_modifiers["fieldTypes"] = list(field_types)
        if all_fields:
            ctx_modifiers["allFields"] = True
        if no_fields:
            ctx_modifiers["noFields"] = True
        if expand:
            ctx_modifiers["expand"] = list(expand)
        if list_selector:
            ctx_modifiers["list"] = list_selector
        if list_entry_fields:
            ctx_modifiers["listEntryFields"] = list(list_entry_fields)
        if show_list_entry_fields:
            ctx_modifiers["showListEntryFields"] = True
        if list_entry_fields_scope != "list-only":
            ctx_modifiers["listEntryFieldsScope"] = list_entry_fields_scope
        if max_results is not None:
            ctx_modifiers["maxResults"] = max_results
        if all_pages:
            ctx_modifiers["allPages"] = True

        # Extract resolved company name if available for context
        ctx_resolved: dict[str, str] | None = None
        company_resolved = resolved.get("company", {})
        if isinstance(company_resolved, dict):
            entity_name = company_resolved.get("entityName")
            if entity_name:
                ctx_resolved = {"selector": str(entity_name)}

        cmd_context = CommandContext(
            name="company get",
            inputs={"selector": company_selector},
            modifiers=ctx_modifiers,
            resolved=ctx_resolved,
        )

        expand_set = {e.strip() for e in expand if e and e.strip()}

        # Auto-imply --expand list-entries when list-entry-related flags are used.
        # This improves DX by removing a redundant flag requirement.
        if (list_selector or list_entry_fields or show_list_entry_fields) and (
            "list-entries" not in expand_set
        ):
            expand_set.add("list-entries")

        # "effective_*" copies are what the rendering path actually uses; in
        # JSON mode the presentation flags are neutralized below.
        effective_list_entry_fields = tuple(list_entry_fields)
        effective_show_list_entry_fields = bool(show_list_entry_fields)
        effective_list_entry_fields_scope: ListEntryFieldsScope = list_entry_fields_scope
        if ctx.output == "json":
            # These flags are human/table presentation features; keep JSON stable and full-fidelity.
            effective_list_entry_fields = ()
            effective_show_list_entry_fields = False
            effective_list_entry_fields_scope = "all"

        # Detect whether --list-entry-fields-scope was explicitly provided
        # (vs. click's default) using get_parameter_source, guarded for older
        # click versions where the API may be missing.
        scope_source = None
        click_ctx = click.get_current_context(silent=True)
        if click_ctx is not None:
            get_source = getattr(cast(Any, click_ctx), "get_parameter_source", None)
            if callable(get_source):
                scope_source = get_source("list_entry_fields_scope")
        source_enum = getattr(cast(Any, click.core), "ParameterSource", None)
        default_source = getattr(source_enum, "DEFAULT", None) if source_enum else None
        if (
            ctx.output != "json"
            and scope_source is not None
            and default_source is not None
            and scope_source != default_source
            and not show_list_entry_fields
        ):
            raise CLIError(
                "--list-entry-fields-scope requires --show-list-entry-fields.",
                exit_code=2,
                error_type="usage_error",
            )

        # Note: --list now auto-implies --expand list-entries (handled above)

        if no_fields and (fields or field_types or all_fields):
            raise CLIError(
                "--no-fields cannot be combined with --field/--field-type/--all-fields.",
                exit_code=2,
                error_type="usage_error",
            )

        # Note: --list-entry-field/--show-list-entry-fields now auto-imply --expand list-entries

        if effective_list_entry_fields and effective_show_list_entry_fields:
            raise CLIError(
                "--list-entry-field and --show-list-entry-fields are mutually exclusive.",
                exit_code=2,
                error_type="usage_error",
            )

        # --show-list-entry-fields renders one table per entry, so it is
        # capped at 3 entries to keep human output readable.
        if effective_show_list_entry_fields:
            if max_results is None:
                raise CLIError(
                    "--show-list-entry-fields requires --max-results N (N <= 3).",
                    exit_code=2,
                    error_type="usage_error",
                    hint=(
                        "Add --max-results 3 to limit output, or use --json / --list-entry-field "
                        "for large outputs."
                    ),
                )
            if max_results <= 0:
                raise CLIError(
                    "--max-results must be >= 1 when used with --show-list-entry-fields.",
                    exit_code=2,
                    error_type="usage_error",
                )
            if max_results > 3:
                raise CLIError(
                    f"--show-list-entry-fields is limited to --max-results 3 (got {max_results}).",
                    exit_code=2,
                    error_type="usage_error",
                    hint=(
                        "Options: set --max-results 3, use --json for full structured data, or "
                        "use --list-entry-field <field> to project specific fields."
                    ),
                )

        # Without --list, a field *name* (detected via whitespace) is ambiguous
        # across lists, so only ids are accepted.
        if effective_list_entry_fields and not list_selector:
            for spec in effective_list_entry_fields:
                if any(ch.isspace() for ch in spec):
                    raise CLIError(
                        (
                            "Field names are only allowed with --list because names aren't "
                            "unique across lists."
                        ),
                        exit_code=2,
                        error_type="usage_error",
                        hint=(
                            "Tip: run `xaffinity list get <list>` to discover list-entry field IDs."
                        ),
                        details={"field": spec},
                    )

        # --all-fields expands to every known type, then explicit -t values are
        # appended; duplicates are removed while preserving first-seen order.
        requested_types: list[str] = []
        if all_fields:
            requested_types.extend(list(_COMPANY_FIELDS_ALL_TYPES))
        requested_types.extend([t for t in field_types if t])

        seen_types: set[str] = set()
        deduped_types: list[str] = []
        for t in requested_types:
            if t in seen_types:
                continue
            deduped_types.append(t)
            seen_types.add(t)
        requested_types = deduped_types

        selected_field_ids: list[str] = []
        selection_resolved: dict[str, Any] = {}
        if not no_fields and (fields or requested_types):
            if fields:
                selected_field_ids, selection_resolved = _resolve_company_field_ids(
                    client=client,
                    fields=fields,
                    field_types=requested_types,
                )
            else:
                selection_resolved = {"fieldTypes": requested_types}

        # Coerce string ids into typed ids; ids rejected by FieldId fall back
        # to EnrichedFieldId.
        field_ids: list[FieldId | EnrichedFieldId] | None = None
        if selected_field_ids:
            coerced_field_ids: list[FieldId | EnrichedFieldId] = []
            for field_id in selected_field_ids:
                text = field_id.strip()
                if not text:
                    continue
                try:
                    coerced_field_ids.append(FieldId(text))
                except Exception:
                    coerced_field_ids.append(EnrichedFieldId(text))
            field_ids = coerced_field_ids or None
        # field_types is only passed through when no explicit -f selectors were
        # given (explicit selectors already folded the types in above).
        field_types_param = (
            [FieldType(t) for t in requested_types] if not fields and requested_types else None
        )
        company = client.companies.get(
            company_id,
            field_ids=field_ids,
            field_types=field_types_param,
        )
        company_payload = serialize_model_for_cli(company)
        # Prefer the raw field payload from the API when available.
        fields_raw = getattr(company, "fields_raw", None)
        if isinstance(fields_raw, list):
            company_payload["fields"] = fields_raw

        data: dict[str, Any] = {"company": company_payload}
        pagination: dict[str, Any] = {}

        # Show spinner for expansion operations
        show_expand_progress = (
            expand_set
            and ctx.progress != "never"
            and not ctx.quiet
            and (ctx.progress == "always" or sys.stderr.isatty())
        )

        # Variables needed for list-entries expansion
        list_id: ListId | None = None
        entries_items: list[Any] = []

        with ExitStack() as stack:
            if show_expand_progress:
                progress = stack.enter_context(
                    Progress(
                        SpinnerColumn(),
                        TextColumn("Fetching expanded data..."),
                        console=Console(file=sys.stderr),
                        transient=True,
                    )
                )
                progress.add_task("expand", total=None)

            if "lists" in expand_set:

                def fetch_lists_page(limit: int | None, cursor: str | None) -> Any:
                    return client.companies.get_lists(
                        company_id,
                        limit=limit,
                        cursor=cursor,
                    )

                data["lists"] = _fetch_v2_collection_sdk(
                    fetch_page=fetch_lists_page,
                    section="lists",
                    default_limit=100,
                    default_cap=100,
                    allow_unbounded=True,
                    max_results=max_results,
                    all_pages=all_pages,
                    warnings=warnings,
                    pagination=pagination,
                )

            if "list-entries" in expand_set:
                # Resolve --list to a concrete list id (numeric passthrough or
                # name lookup) so entries can be filtered client-side.
                if list_selector:
                    raw_list_selector = list_selector.strip()
                    if raw_list_selector.isdigit():
                        list_id = ListId(int(raw_list_selector))
                        resolved.update({"list": {"input": list_selector, "listId": int(list_id)}})
                    else:
                        resolved_list_obj = resolve_list_selector(
                            client=client, selector=list_selector, cache=cache
                        )
                        list_id = ListId(int(resolved_list_obj.list.id))
                        resolved.update(resolved_list_obj.resolved)

                def keep_entry(item: Any) -> bool:
                    if list_id is None:
                        return True
                    return isinstance(item, dict) and item.get("listId") == int(list_id)

                def fetch_list_entries_page(limit: int | None, cursor: str | None) -> Any:
                    return client.companies.get_list_entries(
                        company_id,
                        limit=limit,
                        cursor=cursor,
                    )

                entries_items = _fetch_v2_collection_sdk(
                    fetch_page=fetch_list_entries_page,
                    section="listEntries",
                    default_limit=100,
                    default_cap=None,
                    allow_unbounded=False,
                    max_results=max_results,
                    all_pages=all_pages,
                    warnings=warnings,
                    pagination=pagination,
                    keep_item=keep_entry if list_id is not None else None,
                )
                data["listEntries"] = entries_items

            if "people" in expand_set:
                # Default cap of 100 unless --all or an explicit --max-results.
                people_cap = max_results
                if people_cap is None and not all_pages:
                    people_cap = 100
                if people_cap is not None and people_cap <= 0:
                    data["people"] = []
                else:
                    person_ids = client.companies.get_associated_person_ids(company_id)
                    total_people = len(person_ids)
                    if people_cap is not None and total_people > people_cap:
                        warnings.append(
                            f"People truncated at {people_cap:,} items; re-run with --all "
                            "or a higher --max-results to fetch more."
                        )

                    people = client.companies.get_associated_people(
                        company_id,
                        max_results=people_cap,
                    )
                    data["people"] = [
                        {
                            "id": int(person.id),
                            "name": person.full_name,
                            "primaryEmail": person.primary_email,
                            "type": (
                                person.type.value
                                if hasattr(person.type, "value")
                                else person.type
                                if person.type
                                else None
                            ),
                        }
                        for person in people
                    ]

        # Human/table output only: replace the raw listEntries payload with
        # summary rows (plus optional per-entry field tables).
        if "list-entries" in expand_set and entries_items and ctx.output != "json":
            # Map list ids to display names from the lists expansion if present.
            list_name_by_id: dict[int, str] = {}
            if isinstance(data.get("lists"), list):
                for item in data.get("lists", []):
                    if not isinstance(item, dict):
                        continue
                    lid = item.get("id")
                    name = item.get("name")
                    if isinstance(lid, int) and isinstance(name, str) and name.strip():
                        list_name_by_id[lid] = name.strip()
            if effective_show_list_entry_fields:
                # Best-effort lookup of any missing list names; failures are
                # ignored (the id is shown instead).
                needed_list_ids: set[int] = set()
                for entry in entries_items:
                    if not isinstance(entry, dict):
                        continue
                    lid = entry.get("listId")
                    if isinstance(lid, int) and lid not in list_name_by_id:
                        needed_list_ids.add(lid)
                for lid in sorted(needed_list_ids):
                    try:
                        list_obj = client.lists.get(ListId(lid))
                    except Exception:
                        continue
                    if getattr(list_obj, "name", None):
                        list_name_by_id[lid] = str(list_obj.name)

            # Resolve --list-entry-field specs to (fieldId, display label).
            resolved_list_entry_fields: list[tuple[str, str]] = []
            if effective_list_entry_fields:
                if list_id is not None:
                    fields_meta = client.lists.get_fields(list_id)
                    by_id: dict[str, str] = {}
                    by_name: dict[str, list[str]] = {}
                    for f in fields_meta:
                        fid = str(getattr(f, "id", "")).strip()
                        name = str(getattr(f, "name", "")).strip()
                        if fid:
                            by_id[fid] = name or fid
                        if name:
                            by_name.setdefault(name.lower(), []).append(fid or name)

                    for spec in effective_list_entry_fields:
                        raw = spec.strip()
                        if not raw:
                            continue
                        if raw in by_id:
                            resolved_list_entry_fields.append((raw, by_id[raw]))
                            continue
                        matches = by_name.get(raw.lower(), [])
                        if len(matches) == 1:
                            fid = matches[0]
                            resolved_list_entry_fields.append((fid, by_id.get(fid, raw)))
                            continue
                        if len(matches) > 1:
                            raise CLIError(
                                (
                                    f'Ambiguous list-entry field name "{raw}" '
                                    f"({len(matches)} matches)"
                                ),
                                exit_code=2,
                                error_type="ambiguous_resolution",
                                details={"name": raw, "matches": matches[:20]},
                            )
                        raise CLIError(
                            f'Unknown list-entry field: "{raw}"',
                            exit_code=2,
                            error_type="usage_error",
                            hint=(
                                "Tip: run `xaffinity list get <list>` and inspect "
                                "`data.fields[*].id` / `data.fields[*].name`."
                            ),
                            details={"field": raw},
                        )
                else:
                    # No --list: specs were already validated to be ids; use
                    # them verbatim as both id and label.
                    for spec in effective_list_entry_fields:
                        raw = spec.strip()
                        if raw:
                            resolved_list_entry_fields.append((raw, raw))

            def unique_label(label: str, *, used: set[str], fallback: str) -> str:
                # Disambiguate duplicate column labels as "Label (2)", "Label (3)", ...
                base = (label or "").strip() or fallback
                if base not in used:
                    used.add(base)
                    return base
                idx = 2
                while f"{base} ({idx})" in used:
                    idx += 1
                final = f"{base} ({idx})"
                used.add(final)
                return final

            # Reserved summary-row column names that projections must not clash with.
            used_labels: set[str] = {
                "list",
                "listId",
                "listEntryId",
                "createdAt",
                "fieldsCount",
            }
            projected: list[tuple[str, str]] = []
            for fid, label in resolved_list_entry_fields:
                projected.append((fid, unique_label(label, used=used_labels, fallback=fid)))

            summary_rows: list[dict[str, Any]] = []
            for entry in entries_items:
                if not isinstance(entry, dict):
                    continue
                list_id_value = entry.get("listId")
                list_name = (
                    list_name_by_id.get(list_id_value) if isinstance(list_id_value, int) else None
                )
                list_label = list_name or (str(list_id_value) if list_id_value is not None else "")
                fields_payload = entry.get("fields", [])
                fields_list = fields_payload if isinstance(fields_payload, list) else []
                row: dict[str, Any] = {}
                row["list"] = list_label
                row["listId"] = list_id_value if isinstance(list_id_value, int) else None
                row["listEntryId"] = entry.get("id")
                row["createdAt"] = entry.get("createdAt")
                fields_count = len(fields_list)
                if effective_show_list_entry_fields:
                    # Count only the fields that the chosen scope will display.
                    _filtered, list_only_count, total_count = filter_list_entry_fields(
                        fields_list,
                        scope=effective_list_entry_fields_scope,
                    )
                    if effective_list_entry_fields_scope == "list-only":
                        fields_count = list_only_count
                    else:
                        fields_count = total_count
                row["fieldsCount"] = fields_count

                field_by_id: dict[str, dict[str, Any]] = {}
                for f in fields_list:
                    if not isinstance(f, dict):
                        continue
                    fld_id = f.get("id")
                    if isinstance(fld_id, str) and fld_id:
                        field_by_id[fld_id] = f

                # Project requested fields into their own columns (None when absent).
                for fid, label in projected:
                    field_obj = field_by_id.get(fid)
                    value_obj = field_obj.get("value") if isinstance(field_obj, dict) else None
                    row[label] = value_obj

                summary_rows.append(row)

            data["listEntries"] = summary_rows

            if effective_show_list_entry_fields:
                # One titled section per entry; the formatter renders dict
                # values with _text/_rows/_hint keys specially.
                for entry in entries_items:
                    if not isinstance(entry, dict):
                        continue
                    list_entry_id = entry.get("id")
                    list_id_value = entry.get("listId")
                    list_name = (
                        list_name_by_id.get(list_id_value)
                        if isinstance(list_id_value, int)
                        else None
                    )
                    if list_name:
                        list_hint = (
                            f"{list_name} (listId={list_id_value})"
                            if list_id_value is not None
                            else str(list_name)
                        )
                    else:
                        list_hint = (
                            f"listId={list_id_value}"
                            if list_id_value is not None
                            else "listId=unknown"
                        )
                    title = f"List Entry {list_entry_id} ({list_hint}) Fields"

                    fields_payload = entry.get("fields", [])
                    fields_list = fields_payload if isinstance(fields_payload, list) else []
                    filtered_fields, list_only_count, total_count = filter_list_entry_fields(
                        fields_list,
                        scope=effective_list_entry_fields_scope,
                    )
                    if total_count == 0:
                        data[title] = {"_text": "(no fields)"}
                        continue
                    if effective_list_entry_fields_scope == "list-only" and list_only_count == 0:
                        data[title] = {
                            "_text": (
                                f"(no list-specific fields; {total_count} non-list fields "
                                "available with --list-entry-fields-scope all)"
                            )
                        }
                        continue

                    field_rows = build_list_entry_field_rows(filtered_fields)
                    if (
                        effective_list_entry_fields_scope == "list-only"
                        and list_only_count < total_count
                    ):
                        data[title] = {
                            "_rows": field_rows,
                            "_hint": (
                                "Some non-list fields hidden — use "
                                "--list-entry-fields-scope all to include them"
                            ),
                        }
                    else:
                        data[title] = field_rows

        if selection_resolved:
            resolved["fieldSelection"] = selection_resolved
        if expand_set:
            resolved["expand"] = sorted(expand_set)

        # Fetch field metadata if fields were requested and present in response
        company_fields = (
            company_payload.get("fields") if isinstance(company_payload, dict) else None
        )
        if isinstance(company_fields, list) and company_fields:
            try:
                from ..field_utils import build_field_id_to_name_map

                field_metadata = client.companies.get_fields()
                resolved["fieldMetadata"] = build_field_id_to_name_map(field_metadata)
            except Exception:
                # Field metadata is optional - continue without names if fetch fails
                pass

        return CommandOutput(
            data=data,
            context=cmd_context,
            pagination=pagination or None,
            resolved=resolved,
            api_called=True,
        )

    run_command(ctx, command="company get", fn=fn)
1722
+
1723
+
1724
@category("write")
@company_group.command(name="create", cls=RichCommand)
@click.option("--name", required=True, help="Company name.")
@click.option("--domain", default=None, help="Primary domain.")
@click.option(
    "--person-id",
    "person_ids",
    multiple=True,
    type=int,
    help="Associated person id (repeatable).",
)
@output_options
@click.pass_obj
def company_create(
    ctx: CLIContext,
    *,
    name: str,
    domain: str | None,
    person_ids: tuple[int, ...],
) -> None:
    """Create a company."""

    def execute(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
        client = ctx.get_client(warnings=warnings)
        # Create through the SDK, then serialize the model for CLI rendering.
        company = client.companies.create(
            CompanyCreate(
                name=name,
                domain=domain,
                person_ids=[PersonId(pid) for pid in person_ids],
            )
        )

        # Record optional inputs as modifiers only when they were provided.
        modifiers: dict[str, object] = {"name": name}
        if domain:
            modifiers["domain"] = domain
        if person_ids:
            modifiers["personIds"] = list(person_ids)

        return CommandOutput(
            data={"company": serialize_model_for_cli(company)},
            context=CommandContext(
                name="company create",
                inputs={},
                modifiers=modifiers,
            ),
            api_called=True,
        )

    run_command(ctx, command="company create", fn=execute)
1776
+
1777
+
1778
@category("write")
@company_group.command(name="update", cls=RichCommand)
@click.argument("company_id", type=int)
@click.option("--name", default=None, help="Updated company name.")
@click.option("--domain", default=None, help="Updated primary domain.")
@click.option(
    "--person-id",
    "person_ids",
    multiple=True,
    type=int,
    help="Replace associated person ids (repeatable).",
)
@output_options
@click.pass_obj
def company_update(
    ctx: CLIContext,
    company_id: int,
    *,
    name: str | None,
    domain: str | None,
    person_ids: tuple[int, ...],
) -> None:
    """Update a company."""

    def execute(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
        # Guard clause: an update that changes nothing is a usage error.
        if not (name or domain or person_ids):
            raise CLIError(
                "Provide at least one field to update.",
                exit_code=2,
                error_type="usage_error",
                hint="Use --name, --domain, or --person-id.",
            )
        client = ctx.get_client(warnings=warnings)
        company = client.companies.update(
            CompanyId(company_id),
            CompanyUpdate(
                name=name,
                domain=domain,
                person_ids=[PersonId(pid) for pid in person_ids] if person_ids else None,
            ),
        )

        # Record only the options that were actually supplied.
        modifiers: dict[str, object] = {
            key: value
            for key, value in (
                ("name", name),
                ("domain", domain),
                ("personIds", list(person_ids)),
            )
            if value
        }

        return CommandOutput(
            data={"company": serialize_model_for_cli(company)},
            context=CommandContext(
                name="company update",
                inputs={"companyId": company_id},
                modifiers=modifiers,
            ),
            api_called=True,
        )

    run_command(ctx, command="company update", fn=execute)
1842
+
1843
+
1844
+ @category("write")
1845
+ @destructive
1846
+ @company_group.command(name="delete", cls=RichCommand)
1847
+ @click.argument("company_id", type=int)
1848
+ @click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt.")
1849
+ @output_options
1850
+ @click.pass_obj
1851
+ def company_delete(ctx: CLIContext, company_id: int, yes: bool) -> None:
1852
+ """Delete a company."""
1853
+ if not yes:
1854
+ click.confirm(f"Delete company {company_id}?", abort=True)
1855
+
1856
+ def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
1857
+ client = ctx.get_client(warnings=warnings)
1858
+ success = client.companies.delete(CompanyId(company_id))
1859
+
1860
+ cmd_context = CommandContext(
1861
+ name="company delete",
1862
+ inputs={"companyId": company_id},
1863
+ modifiers={},
1864
+ )
1865
+
1866
+ return CommandOutput(
1867
+ data={"success": success},
1868
+ context=cmd_context,
1869
+ api_called=True,
1870
+ )
1871
+
1872
+ run_command(ctx, command="company delete", fn=fn)
1873
+
1874
+
1875
+ @category("write")
1876
+ @company_group.command(name="merge", cls=RichCommand)
1877
+ @click.argument("primary_id", type=int)
1878
+ @click.argument("duplicate_id", type=int)
1879
+ @output_options
1880
+ @click.pass_obj
1881
+ def company_merge(
1882
+ ctx: CLIContext,
1883
+ primary_id: int,
1884
+ duplicate_id: int,
1885
+ ) -> None:
1886
+ """Merge a duplicate company into a primary company (beta)."""
1887
+
1888
+ def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
1889
+ client = ctx.get_client(warnings=warnings)
1890
+ task_url = client.companies.merge(
1891
+ CompanyId(primary_id),
1892
+ CompanyId(duplicate_id),
1893
+ )
1894
+
1895
+ cmd_context = CommandContext(
1896
+ name="company merge",
1897
+ inputs={"primaryId": primary_id, "duplicateId": duplicate_id},
1898
+ modifiers={},
1899
+ )
1900
+
1901
+ return CommandOutput(
1902
+ data={
1903
+ "survivingId": primary_id,
1904
+ "mergedId": duplicate_id,
1905
+ "affinityUrl": f"https://app.affinity.co/companies/{primary_id}",
1906
+ "taskUrl": task_url,
1907
+ },
1908
+ context=cmd_context,
1909
+ api_called=True,
1910
+ )
1911
+
1912
+ run_command(ctx, command="company merge", fn=fn)
1913
+
1914
+
1915
+ @category("write")
1916
+ @company_group.command(name="field", cls=RichCommand)
1917
+ @click.argument("company_id", type=int)
1918
+ @click.option(
1919
+ "--set",
1920
+ "set_values",
1921
+ nargs=2,
1922
+ multiple=True,
1923
+ metavar="FIELD VALUE",
1924
+ help="Set field value (repeatable). Use two args: FIELD VALUE.",
1925
+ )
1926
+ @click.option(
1927
+ "--unset",
1928
+ "unset_fields",
1929
+ multiple=True,
1930
+ metavar="FIELD",
1931
+ help="Unset field (repeatable). Removes all values for the field.",
1932
+ )
1933
+ @click.option(
1934
+ "--set-json",
1935
+ "json_input",
1936
+ type=str,
1937
+ help="JSON object of field:value pairs to set.",
1938
+ )
1939
+ @click.option(
1940
+ "--get",
1941
+ "get_fields",
1942
+ multiple=True,
1943
+ metavar="FIELD",
1944
+ help="Get specific field values (repeatable).",
1945
+ )
1946
+ @output_options
1947
+ @click.pass_obj
1948
+ def company_field(
1949
+ ctx: CLIContext,
1950
+ company_id: int,
1951
+ *,
1952
+ set_values: tuple[tuple[str, str], ...],
1953
+ unset_fields: tuple[str, ...],
1954
+ json_input: str | None,
1955
+ get_fields: tuple[str, ...],
1956
+ ) -> None:
1957
+ """
1958
+ Manage company field values.
1959
+
1960
+ Unified command for getting, setting, and unsetting field values.
1961
+ For field names with spaces, use quotes.
1962
+
1963
+ Examples:
1964
+
1965
+ - `xaffinity company field 123 --set Industry "Technology"`
1966
+ - `xaffinity company field 123 --set Industry "Tech" --set Size "Large"`
1967
+ - `xaffinity company field 123 --unset Industry`
1968
+ - `xaffinity company field 123 --set-json '{"Industry": "Tech", "Size": "Large"}'`
1969
+ - `xaffinity company field 123 --get Industry --get Size`
1970
+ """
1971
+ import json as json_module
1972
+
1973
+ def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
1974
+ from affinity.models.entities import FieldValueCreate
1975
+ from affinity.types import FieldId as FieldIdType
1976
+
1977
+ from ..field_utils import (
1978
+ FieldResolver,
1979
+ fetch_field_metadata,
1980
+ find_field_values_for_field,
1981
+ )
1982
+
1983
+ # Validate: at least one operation must be specified
1984
+ has_set = bool(set_values) or bool(json_input)
1985
+ has_unset = bool(unset_fields)
1986
+ has_get = bool(get_fields)
1987
+
1988
+ if not has_set and not has_unset and not has_get:
1989
+ raise CLIError(
1990
+ "Provide at least one of --set, --unset, --set-json, or --get.",
1991
+ exit_code=2,
1992
+ error_type="usage_error",
1993
+ )
1994
+
1995
+ # Validate: --get is exclusive (can't mix read with write)
1996
+ if has_get and (has_set or has_unset):
1997
+ raise CLIError(
1998
+ "--get cannot be combined with --set, --unset, or --set-json.",
1999
+ exit_code=2,
2000
+ error_type="usage_error",
2001
+ )
2002
+
2003
+ client = ctx.get_client(warnings=warnings)
2004
+ field_metadata = fetch_field_metadata(client=client, entity_type="company")
2005
+ resolver = FieldResolver(field_metadata)
2006
+
2007
+ results: dict[str, Any] = {}
2008
+
2009
+ # Build modifiers for CommandContext
2010
+ ctx_modifiers: dict[str, object] = {}
2011
+ if set_values:
2012
+ ctx_modifiers["set"] = [list(sv) for sv in set_values]
2013
+ if unset_fields:
2014
+ ctx_modifiers["unset"] = list(unset_fields)
2015
+ if json_input:
2016
+ ctx_modifiers["json"] = json_input
2017
+ if get_fields:
2018
+ ctx_modifiers["get"] = list(get_fields)
2019
+
2020
+ # Handle --get: read field values
2021
+ if has_get:
2022
+ existing_values = client.field_values.list(company_id=CompanyId(company_id))
2023
+ field_results: dict[str, Any] = {}
2024
+
2025
+ for field_name in get_fields:
2026
+ target_field_id = resolver.resolve_field_name_or_id(field_name, context="field")
2027
+ field_values = find_field_values_for_field(
2028
+ field_values=[serialize_model_for_cli(v) for v in existing_values],
2029
+ field_id=target_field_id,
2030
+ )
2031
+ resolved_name = resolver.get_field_name(target_field_id) or field_name
2032
+ if field_values:
2033
+ if len(field_values) == 1:
2034
+ field_results[resolved_name] = field_values[0].get("value")
2035
+ else:
2036
+ field_results[resolved_name] = [fv.get("value") for fv in field_values]
2037
+ else:
2038
+ field_results[resolved_name] = None
2039
+
2040
+ results["fields"] = field_results
2041
+
2042
+ cmd_context = CommandContext(
2043
+ name="company field",
2044
+ inputs={"companyId": company_id},
2045
+ modifiers=ctx_modifiers,
2046
+ )
2047
+
2048
+ return CommandOutput(
2049
+ data=results,
2050
+ context=cmd_context,
2051
+ api_called=True,
2052
+ )
2053
+
2054
+ # Handle --set and --json: set field values
2055
+ set_operations: list[tuple[str, Any]] = []
2056
+
2057
+ # Collect from --set options
2058
+ for field_name, value in set_values:
2059
+ set_operations.append((field_name, value))
2060
+
2061
+ # Collect from --json
2062
+ if json_input:
2063
+ try:
2064
+ json_data = json_module.loads(json_input)
2065
+ if not isinstance(json_data, dict):
2066
+ raise CLIError(
2067
+ "--json must be a JSON object.",
2068
+ exit_code=2,
2069
+ error_type="usage_error",
2070
+ )
2071
+ for field_name, value in json_data.items():
2072
+ set_operations.append((field_name, value))
2073
+ except json_module.JSONDecodeError as e:
2074
+ raise CLIError(
2075
+ f"Invalid JSON: {e}",
2076
+ exit_code=2,
2077
+ error_type="usage_error",
2078
+ ) from e
2079
+
2080
+ # Execute set operations
2081
+ created_values: list[dict[str, Any]] = []
2082
+ for field_name, value in set_operations:
2083
+ target_field_id = resolver.resolve_field_name_or_id(field_name, context="field")
2084
+
2085
+ # Check for existing values and delete them first (replace behavior)
2086
+ existing_values = client.field_values.list(company_id=CompanyId(company_id))
2087
+ existing_for_field = find_field_values_for_field(
2088
+ field_values=[serialize_model_for_cli(v) for v in existing_values],
2089
+ field_id=target_field_id,
2090
+ )
2091
+ for fv in existing_for_field:
2092
+ fv_id = fv.get("id")
2093
+ if fv_id:
2094
+ client.field_values.delete(fv_id)
2095
+
2096
+ # Create new value
2097
+ created = client.field_values.create(
2098
+ FieldValueCreate(
2099
+ field_id=FieldIdType(target_field_id),
2100
+ entity_id=company_id,
2101
+ value=value,
2102
+ )
2103
+ )
2104
+ created_values.append(serialize_model_for_cli(created))
2105
+
2106
+ # Handle --unset: remove field values
2107
+ deleted_count = 0
2108
+ for field_name in unset_fields:
2109
+ target_field_id = resolver.resolve_field_name_or_id(field_name, context="field")
2110
+ existing_values = client.field_values.list(company_id=CompanyId(company_id))
2111
+ existing_for_field = find_field_values_for_field(
2112
+ field_values=[serialize_model_for_cli(v) for v in existing_values],
2113
+ field_id=target_field_id,
2114
+ )
2115
+ for fv in existing_for_field:
2116
+ fv_id = fv.get("id")
2117
+ if fv_id:
2118
+ client.field_values.delete(fv_id)
2119
+ deleted_count += 1
2120
+
2121
+ # Build result
2122
+ if created_values:
2123
+ results["created"] = created_values
2124
+ if deleted_count > 0:
2125
+ results["deleted"] = deleted_count
2126
+
2127
+ cmd_context = CommandContext(
2128
+ name="company field",
2129
+ inputs={"companyId": company_id},
2130
+ modifiers=ctx_modifiers,
2131
+ )
2132
+
2133
+ return CommandOutput(
2134
+ data=results,
2135
+ context=cmd_context,
2136
+ api_called=True,
2137
+ )
2138
+
2139
+ run_command(ctx, command="company field", fn=fn)