affinity-sdk 0.9.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. affinity/__init__.py +139 -0
  2. affinity/cli/__init__.py +7 -0
  3. affinity/cli/click_compat.py +27 -0
  4. affinity/cli/commands/__init__.py +1 -0
  5. affinity/cli/commands/_entity_files_dump.py +219 -0
  6. affinity/cli/commands/_list_entry_fields.py +41 -0
  7. affinity/cli/commands/_v1_parsing.py +77 -0
  8. affinity/cli/commands/company_cmds.py +2139 -0
  9. affinity/cli/commands/completion_cmd.py +33 -0
  10. affinity/cli/commands/config_cmds.py +540 -0
  11. affinity/cli/commands/entry_cmds.py +33 -0
  12. affinity/cli/commands/field_cmds.py +413 -0
  13. affinity/cli/commands/interaction_cmds.py +875 -0
  14. affinity/cli/commands/list_cmds.py +3152 -0
  15. affinity/cli/commands/note_cmds.py +433 -0
  16. affinity/cli/commands/opportunity_cmds.py +1174 -0
  17. affinity/cli/commands/person_cmds.py +1980 -0
  18. affinity/cli/commands/query_cmd.py +444 -0
  19. affinity/cli/commands/relationship_strength_cmds.py +62 -0
  20. affinity/cli/commands/reminder_cmds.py +595 -0
  21. affinity/cli/commands/resolve_url_cmd.py +127 -0
  22. affinity/cli/commands/session_cmds.py +84 -0
  23. affinity/cli/commands/task_cmds.py +110 -0
  24. affinity/cli/commands/version_cmd.py +29 -0
  25. affinity/cli/commands/whoami_cmd.py +36 -0
  26. affinity/cli/config.py +108 -0
  27. affinity/cli/context.py +749 -0
  28. affinity/cli/csv_utils.py +195 -0
  29. affinity/cli/date_utils.py +42 -0
  30. affinity/cli/decorators.py +77 -0
  31. affinity/cli/errors.py +28 -0
  32. affinity/cli/field_utils.py +355 -0
  33. affinity/cli/formatters.py +551 -0
  34. affinity/cli/help_json.py +283 -0
  35. affinity/cli/logging.py +100 -0
  36. affinity/cli/main.py +261 -0
  37. affinity/cli/options.py +53 -0
  38. affinity/cli/paths.py +32 -0
  39. affinity/cli/progress.py +183 -0
  40. affinity/cli/query/__init__.py +163 -0
  41. affinity/cli/query/aggregates.py +357 -0
  42. affinity/cli/query/dates.py +194 -0
  43. affinity/cli/query/exceptions.py +147 -0
  44. affinity/cli/query/executor.py +1236 -0
  45. affinity/cli/query/filters.py +248 -0
  46. affinity/cli/query/models.py +333 -0
  47. affinity/cli/query/output.py +331 -0
  48. affinity/cli/query/parser.py +619 -0
  49. affinity/cli/query/planner.py +430 -0
  50. affinity/cli/query/progress.py +270 -0
  51. affinity/cli/query/schema.py +439 -0
  52. affinity/cli/render.py +1589 -0
  53. affinity/cli/resolve.py +222 -0
  54. affinity/cli/resolvers.py +249 -0
  55. affinity/cli/results.py +308 -0
  56. affinity/cli/runner.py +218 -0
  57. affinity/cli/serialization.py +65 -0
  58. affinity/cli/session_cache.py +276 -0
  59. affinity/cli/types.py +70 -0
  60. affinity/client.py +771 -0
  61. affinity/clients/__init__.py +19 -0
  62. affinity/clients/http.py +3664 -0
  63. affinity/clients/pipeline.py +165 -0
  64. affinity/compare.py +501 -0
  65. affinity/downloads.py +114 -0
  66. affinity/exceptions.py +615 -0
  67. affinity/filters.py +1128 -0
  68. affinity/hooks.py +198 -0
  69. affinity/inbound_webhooks.py +302 -0
  70. affinity/models/__init__.py +163 -0
  71. affinity/models/entities.py +798 -0
  72. affinity/models/pagination.py +513 -0
  73. affinity/models/rate_limit_snapshot.py +48 -0
  74. affinity/models/secondary.py +413 -0
  75. affinity/models/types.py +663 -0
  76. affinity/policies.py +40 -0
  77. affinity/progress.py +22 -0
  78. affinity/py.typed +0 -0
  79. affinity/services/__init__.py +42 -0
  80. affinity/services/companies.py +1286 -0
  81. affinity/services/lists.py +1892 -0
  82. affinity/services/opportunities.py +1330 -0
  83. affinity/services/persons.py +1348 -0
  84. affinity/services/rate_limits.py +173 -0
  85. affinity/services/tasks.py +193 -0
  86. affinity/services/v1_only.py +2445 -0
  87. affinity/types.py +83 -0
  88. affinity_sdk-0.9.5.dist-info/METADATA +622 -0
  89. affinity_sdk-0.9.5.dist-info/RECORD +92 -0
  90. affinity_sdk-0.9.5.dist-info/WHEEL +4 -0
  91. affinity_sdk-0.9.5.dist-info/entry_points.txt +2 -0
  92. affinity_sdk-0.9.5.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,1980 @@
1
+ from __future__ import annotations
2
+
3
+ import asyncio
4
+ import sys
5
+ from collections.abc import Callable
6
+ from contextlib import ExitStack
7
+ from pathlib import Path
8
+ from typing import Any, cast
9
+
10
+ from rich.console import Console
11
+ from rich.progress import BarColumn, Progress, SpinnerColumn, TaskID, TextColumn, TimeElapsedColumn
12
+
13
+ from affinity.models.entities import Person, PersonCreate, PersonUpdate
14
+ from affinity.models.types import FieldId
15
+ from affinity.types import CompanyId, FieldType, ListId, PersonId
16
+
17
+ from ..click_compat import RichCommand, RichGroup, click
18
+ from ..context import CLIContext
19
+ from ..csv_utils import write_csv_to_stdout
20
+ from ..decorators import category, destructive, progress_capable
21
+ from ..errors import CLIError
22
+ from ..options import output_options
23
+ from ..progress import ProgressManager, ProgressSettings
24
+ from ..resolve import resolve_list_selector
25
+ from ..resolvers import ResolvedEntity
26
+ from ..results import CommandContext
27
+ from ..runner import CommandOutput, run_command
28
+ from ..serialization import serialize_model_for_cli
29
+ from ._entity_files_dump import dump_entity_files_bundle
30
+ from ._list_entry_fields import (
31
+ ListEntryFieldsScope,
32
+ build_list_entry_field_rows,
33
+ filter_list_entry_fields,
34
+ )
35
+ from .resolve_url_cmd import _parse_affinity_url
36
+
37
+
38
def _fetch_v2_collection(
    *,
    client: Any,
    path: str,
    section: str,
    default_limit: int,
    default_cap: int | None,
    allow_unbounded: bool,
    max_results: int | None,
    all_pages: bool,
    warnings: list[str],
    pagination: dict[str, Any],
    keep_item: Callable[[Any], bool] | None = None,
) -> list[Any]:
    """Fetch a paginated V2 collection with configurable limits.

    This helper centralizes the pagination logic for fetching lists, list-entries,
    and other V2 collections. It handles:
    - Page size limits
    - Max result caps
    - Pagination cursor tracking
    - Optional item filtering

    Args:
        client: The Affinity client instance.
        path: The API path to fetch (e.g., "/persons/123/lists").
        section: Name for this section (used in warnings and pagination keys).
        default_limit: Default page size for API requests.
        default_cap: Default max items if no explicit cap and not fetching all pages.
        allow_unbounded: Whether unbounded fetching is allowed without --all.
        max_results: Explicit max results limit (from --max-results).
        all_pages: Whether to fetch all pages (from --all flag).
        warnings: List to append warnings to (mutated in place).
        pagination: Dict to store pagination cursors (mutated in place).
        keep_item: Optional filter function to keep only matching items.

    Returns:
        List of fetched items.
    """
    # Explicit --max-results wins; otherwise fall back to the section default,
    # unless --all was given (which removes the default cap entirely).
    effective_cap = max_results
    if effective_cap is None and default_cap is not None and not all_pages:
        effective_cap = default_cap
    if effective_cap is not None and effective_cap <= 0:
        # A zero/negative cap means "fetch nothing" — skip the API call.
        return []

    should_paginate = all_pages or allow_unbounded or effective_cap is not None
    limit = default_limit
    if effective_cap is not None:
        # No point asking the API for more items per page than the overall cap.
        limit = min(default_limit, effective_cap)

    truncated_mid_page = False
    # First page. NOTE(review): goes through the client's private HTTP layer
    # (client._http) rather than a public service method.
    payload = client._http.get(path, params={"limit": limit} if limit else None)
    rows = payload.get("data", [])
    if not isinstance(rows, list):
        # Defensive: malformed payloads are treated as an empty page.
        rows = []
    page_items = list(rows)
    if keep_item is not None:
        page_items = [r for r in page_items if keep_item(r)]
    items: list[Any] = page_items

    page_pagination = payload.get("pagination", {})
    if not isinstance(page_pagination, dict):
        page_pagination = {}
    next_url = page_pagination.get("nextUrl")
    prev_url = page_pagination.get("prevUrl")

    if effective_cap is not None and len(items) > effective_cap:
        # The first page alone exceeded the cap: trim and stop paginating.
        truncated_mid_page = True
        items = items[:effective_cap]
        next_url = None

    while (
        should_paginate
        and isinstance(next_url, str)
        and next_url
        and (effective_cap is None or len(items) < effective_cap)
    ):
        payload = client._http.get_url(next_url)
        rows = payload.get("data", [])
        if isinstance(rows, list):
            page_items = list(rows)
            if keep_item is not None:
                page_items = [r for r in page_items if keep_item(r)]
            items.extend(page_items)
        page_pagination = payload.get("pagination", {})
        if not isinstance(page_pagination, dict):
            page_pagination = {}
        next_url = page_pagination.get("nextUrl")
        prev_url = page_pagination.get("prevUrl")

        if effective_cap is not None and len(items) > effective_cap:
            # Cap hit mid-page: trim, clear the cursor, and stop.
            truncated_mid_page = True
            items = items[:effective_cap]
            next_url = None
            break

    if truncated_mid_page and effective_cap is not None:
        # NOTE(review): this hint is shown even when the cap came from an
        # explicit --max-results (where --all would not lift it) — confirm
        # whether the wording should distinguish the two cases.
        warnings.append(
            f"{section} limited to {effective_cap:,} items. Use --all to fetch all results."
        )
    elif isinstance(next_url, str) and next_url:
        # Not truncated, but more pages remain: surface cursors to the caller.
        pagination[section] = {"nextCursor": next_url, "prevCursor": prev_url}

    return items
143
+
144
@click.group(name="person", cls=RichGroup)
def person_group() -> None:
    """Person commands."""
    # Intentionally empty: this is only the Click group container.
    # Subcommands register themselves via @person_group.command(...).
147
+
148
+
149
def _parse_field_types(values: tuple[str, ...]) -> list[FieldType] | None:
    """Convert raw --field-type option strings into FieldType enum members.

    Returns None when no values were supplied; otherwise a list of parsed
    enums in input order. Raises CLIError (usage_error, exit code 2) for
    any value that is not a known field type.
    """
    if not values:
        return None
    by_name = {member.value.lower(): member for member in FieldType}
    parsed: list[FieldType] = []
    for raw in values:
        member = by_name.get(raw.lower())
        if member is None:
            raise CLIError(
                f"Unknown field type: {raw}",
                exit_code=2,
                error_type="usage_error",
                hint=f"Valid types: {', '.join(sorted(by_name.keys()))}",
            )
        parsed.append(member)
    return parsed
166
+
167
+
168
+ @category("read")
169
+ @person_group.command(name="ls", cls=RichCommand)
170
+ @click.option("--page-size", "-s", type=int, default=None, help="Page size (limit).")
171
+ @click.option(
172
+ "--cursor", type=str, default=None, help="Resume from cursor (incompatible with --page-size)."
173
+ )
174
+ @click.option(
175
+ "--max-results", "--limit", "-n", type=int, default=None, help="Stop after N items total."
176
+ )
177
+ @click.option("--all", "-A", "all_pages", is_flag=True, help="Fetch all pages.")
178
+ @click.option(
179
+ "--field",
180
+ "field_ids",
181
+ type=str,
182
+ multiple=True,
183
+ help="Field ID or name to include (repeatable).",
184
+ )
185
+ @click.option(
186
+ "--field-type",
187
+ "field_types",
188
+ type=str,
189
+ multiple=True,
190
+ help="Field type to include (repeatable). Values: global, enriched, relationship-intelligence.",
191
+ )
192
+ @click.option(
193
+ "--filter",
194
+ "filter_expr",
195
+ type=str,
196
+ default=None,
197
+ help="Filter: 'field op value'. Ops: = != =~ =^ =$ > < >= <=. E.g., 'Email =~ \"@acme\"'.",
198
+ )
199
+ @click.option(
200
+ "--query",
201
+ "-q",
202
+ type=str,
203
+ default=None,
204
+ help="Fuzzy text search (simple matching). Use --filter for structured queries.",
205
+ )
206
+ @click.option("--csv", "csv_flag", is_flag=True, help="Output as CSV (to stdout).")
207
+ @click.option(
208
+ "--csv-bom",
209
+ is_flag=True,
210
+ help="Add UTF-8 BOM for Excel (use with redirection: --csv --csv-bom > file.csv).",
211
+ )
212
+ @output_options
213
+ @click.pass_obj
214
+ def person_ls(
215
+ ctx: CLIContext,
216
+ *,
217
+ page_size: int | None,
218
+ cursor: str | None,
219
+ max_results: int | None,
220
+ all_pages: bool,
221
+ field_ids: tuple[str, ...],
222
+ field_types: tuple[str, ...],
223
+ filter_expr: str | None,
224
+ query: str | None,
225
+ csv_flag: bool,
226
+ csv_bom: bool,
227
+ ) -> None:
228
+ """
229
+ List persons.
230
+
231
+ Supports field selection, field types, and filter expressions.
232
+ Use --query for free-text search.
233
+
234
+ Examples:
235
+
236
+ - `xaffinity person ls`
237
+ - `xaffinity person ls --page-size 50`
238
+ - `xaffinity person ls --field-type enriched --all`
239
+ - `xaffinity person ls --filter 'Email =~ "@acme.com"'`
240
+ - `xaffinity person ls --query "alice@example.com" --all`
241
+ - `xaffinity person ls --all --csv > people.csv`
242
+ - `xaffinity person ls --all --csv --csv-bom > people.csv`
243
+ """
244
+
245
+ def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
246
+ # Check mutual exclusivity: --csv and --json
247
+ if csv_flag and ctx.output == "json":
248
+ raise CLIError(
249
+ "--csv and --json are mutually exclusive.",
250
+ exit_code=2,
251
+ error_type="usage_error",
252
+ )
253
+
254
+ client = ctx.get_client(warnings=warnings)
255
+
256
+ if cursor is not None and page_size is not None:
257
+ raise CLIError(
258
+ "--cursor cannot be combined with --page-size.",
259
+ exit_code=2,
260
+ error_type="usage_error",
261
+ )
262
+
263
+ if query is not None and filter_expr is not None:
264
+ raise CLIError(
265
+ "--query cannot be combined with --filter (different APIs).",
266
+ exit_code=2,
267
+ error_type="usage_error",
268
+ hint="Use --query for free-text search or --filter for structured filtering.",
269
+ )
270
+
271
+ # Build CommandContext upfront for all return paths
272
+ ctx_modifiers: dict[str, object] = {}
273
+ if page_size is not None:
274
+ ctx_modifiers["pageSize"] = page_size
275
+ if cursor is not None:
276
+ ctx_modifiers["cursor"] = cursor
277
+ if max_results is not None:
278
+ ctx_modifiers["maxResults"] = max_results
279
+ if all_pages:
280
+ ctx_modifiers["allPages"] = True
281
+ if field_ids:
282
+ ctx_modifiers["fieldIds"] = list(field_ids)
283
+ if field_types:
284
+ ctx_modifiers["fieldTypes"] = list(field_types)
285
+ if filter_expr:
286
+ ctx_modifiers["filter"] = filter_expr
287
+ if query:
288
+ ctx_modifiers["query"] = query
289
+
290
+ cmd_context = CommandContext(
291
+ name="person ls",
292
+ inputs={},
293
+ modifiers=ctx_modifiers,
294
+ )
295
+
296
+ parsed_field_types = _parse_field_types(field_types)
297
+ parsed_field_ids: list[FieldId] | None = (
298
+ [FieldId(fid) for fid in field_ids] if field_ids else None
299
+ )
300
+
301
+ rows: list[dict[str, object]] = []
302
+ first_page = True
303
+ use_v1_search = query is not None
304
+ wants_fields = bool(field_ids or field_types)
305
+
306
+ show_progress = (
307
+ ctx.progress != "never"
308
+ and not ctx.quiet
309
+ and (ctx.progress == "always" or sys.stderr.isatty())
310
+ )
311
+
312
+ # Progress description based on operation type
313
+ task_description = "Searching" if use_v1_search else "Fetching"
314
+
315
+ with ExitStack() as stack:
316
+ progress: Progress | None = None
317
+ task_id: TaskID | None = None
318
+ if show_progress:
319
+ progress = stack.enter_context(
320
+ Progress(
321
+ TextColumn("{task.description}"),
322
+ BarColumn(),
323
+ TextColumn("{task.completed} rows"),
324
+ TimeElapsedColumn(),
325
+ console=Console(file=sys.stderr),
326
+ transient=True,
327
+ )
328
+ )
329
+ task_id = progress.add_task(task_description, total=max_results)
330
+
331
+ # Helper to check max_results and return early if needed
332
+ def _check_max_results(
333
+ rows: list[dict[str, object]],
334
+ idx: int,
335
+ page_len: int,
336
+ next_cursor: str | None,
337
+ prev_cursor: str | None,
338
+ ) -> CommandOutput | None:
339
+ if max_results is not None and len(rows) >= max_results:
340
+ stopped_mid_page = idx < (page_len - 1)
341
+ if stopped_mid_page:
342
+ warnings.append(
343
+ "Results limited by --max-results. Use --all to fetch all results."
344
+ )
345
+ pagination = None
346
+ if next_cursor and not stopped_mid_page and next_cursor != cursor:
347
+ pagination = {
348
+ "persons": {
349
+ "nextCursor": next_cursor,
350
+ "prevCursor": prev_cursor,
351
+ }
352
+ }
353
+ return CommandOutput(
354
+ data={"persons": rows[:max_results]},
355
+ context=cmd_context,
356
+ pagination=pagination,
357
+ api_called=True,
358
+ )
359
+ return None
360
+
361
+ # Three paths: V2-only, V1-only, or Hybrid (V1 search + V2 batch fetch)
362
+ if use_v1_search and wants_fields:
363
+ # Hybrid: V1 search for IDs, then V2 batch fetch with field data
364
+ assert query is not None
365
+ for v1_page in client.persons.search_pages(
366
+ query,
367
+ page_size=page_size,
368
+ page_token=cursor,
369
+ ):
370
+ next_cursor = v1_page.next_page_token
371
+ prev_cursor = None # V1 doesn't have prev cursor
372
+
373
+ if v1_page.data:
374
+ # Batch fetch from V2 with field data
375
+ person_ids = [PersonId(p.id) for p in v1_page.data]
376
+ v2_response = client.persons.list(
377
+ ids=person_ids,
378
+ field_ids=parsed_field_ids,
379
+ field_types=parsed_field_types,
380
+ )
381
+ for idx, person in enumerate(v2_response.data):
382
+ rows.append(_person_ls_row(person))
383
+ if progress and task_id is not None:
384
+ progress.update(task_id, completed=len(rows))
385
+ result = _check_max_results(
386
+ rows, idx, len(v2_response.data), next_cursor, prev_cursor
387
+ )
388
+ if result is not None:
389
+ return result
390
+
391
+ if first_page and not all_pages and max_results is None:
392
+ return CommandOutput(
393
+ data={"persons": rows},
394
+ context=cmd_context,
395
+ pagination=(
396
+ {"persons": {"nextCursor": next_cursor, "prevCursor": None}}
397
+ if next_cursor
398
+ else None
399
+ ),
400
+ api_called=True,
401
+ )
402
+ first_page = False
403
+
404
+ elif use_v1_search:
405
+ # Search without field data
406
+ assert query is not None
407
+ for search_page in client.persons.search_pages(
408
+ query,
409
+ page_size=page_size,
410
+ page_token=cursor,
411
+ ):
412
+ next_cursor = search_page.next_page_token
413
+ prev_cursor = None # Search doesn't have prev cursor
414
+
415
+ for idx, person in enumerate(search_page.data):
416
+ rows.append(_person_ls_row(person))
417
+ if progress and task_id is not None:
418
+ progress.update(task_id, completed=len(rows))
419
+ result = _check_max_results(
420
+ rows, idx, len(search_page.data), next_cursor, prev_cursor
421
+ )
422
+ if result is not None:
423
+ return result
424
+
425
+ if first_page and not all_pages and max_results is None:
426
+ return CommandOutput(
427
+ data={"persons": rows},
428
+ context=cmd_context,
429
+ pagination=(
430
+ {"persons": {"nextCursor": next_cursor, "prevCursor": None}}
431
+ if next_cursor
432
+ else None
433
+ ),
434
+ api_called=True,
435
+ )
436
+ first_page = False
437
+
438
+ else:
439
+ # List with optional field data
440
+ for page in client.persons.pages(
441
+ field_ids=parsed_field_ids,
442
+ field_types=parsed_field_types,
443
+ filter=filter_expr,
444
+ limit=page_size,
445
+ cursor=cursor,
446
+ ):
447
+ next_cursor = page.pagination.next_cursor
448
+ prev_cursor = page.pagination.prev_cursor
449
+
450
+ for idx, person in enumerate(page.data):
451
+ rows.append(_person_ls_row(person))
452
+ if progress and task_id is not None:
453
+ progress.update(task_id, completed=len(rows))
454
+ result = _check_max_results(
455
+ rows, idx, len(page.data), next_cursor, prev_cursor
456
+ )
457
+ if result is not None:
458
+ return result
459
+
460
+ if first_page and not all_pages and max_results is None:
461
+ return CommandOutput(
462
+ data={"persons": rows},
463
+ context=cmd_context,
464
+ pagination=(
465
+ {
466
+ "persons": {
467
+ "nextCursor": next_cursor,
468
+ "prevCursor": prev_cursor,
469
+ }
470
+ }
471
+ if next_cursor
472
+ else None
473
+ ),
474
+ api_called=True,
475
+ )
476
+ first_page = False
477
+
478
+ # CSV output to stdout
479
+ if csv_flag:
480
+ fieldnames = list(rows[0].keys()) if rows else []
481
+ write_csv_to_stdout(
482
+ rows=rows,
483
+ fieldnames=fieldnames,
484
+ bom=csv_bom,
485
+ )
486
+ sys.exit(0)
487
+
488
+ return CommandOutput(
489
+ data={"persons": rows},
490
+ context=cmd_context,
491
+ pagination=None,
492
+ api_called=True,
493
+ )
494
+
495
+ run_command(ctx, command="person ls", fn=fn)
496
+
497
+
498
def _person_ls_row(person: Person) -> dict[str, object]:
    """Flatten a Person model into the columns shown by `person ls`."""
    row: dict[str, object] = {}
    row["id"] = int(person.id)
    row["name"] = person.full_name
    row["primaryEmail"] = person.primary_email
    row["emails"] = person.emails
    return row
506
+
507
+
508
# Every field type the person commands accept; used as the choices for the
# `person get --field-type` option below.
_PERSON_FIELDS_ALL_TYPES: tuple[str, ...] = (
    FieldType.GLOBAL.value,
    FieldType.ENRICHED.value,
    FieldType.RELATIONSHIP_INTELLIGENCE.value,
)
513
+
514
+
515
def _strip_wrapping_quotes(value: str) -> str:
    """Return *value* trimmed, with one matching pair of wrapping quotes removed.

    Only a single, same-character pair of single or double quotes is stripped;
    mismatched or unpaired quotes are left untouched.
    """
    trimmed = value.strip()
    for quote in ("'", '"'):
        if len(trimmed) >= 2 and trimmed.startswith(quote) and trimmed.endswith(quote):
            return trimmed[1:-1]
    return trimmed
520
+
521
+
522
def _resolve_person_selector(*, client: Any, selector: str) -> tuple[PersonId, dict[str, Any]]:
    """Resolve a person selector (numeric id, Affinity URL, "email:<x>", or "name:<x>").

    Returns the PersonId plus a metadata dict describing how the selector was
    interpreted (under the "person" key). Raises CLIError for unrecognized
    selectors, or (via the email/name helpers) for missing/ambiguous matches.
    """
    raw = selector.strip()
    # Bare digits are treated as a person id directly — no API call needed.
    if raw.isdigit():
        person_id = PersonId(int(raw))
        resolved = ResolvedEntity(
            input=selector,
            entity_id=int(person_id),
            entity_type="person",
            source="id",
        )
        return person_id, {"person": resolved.to_dict()}

    # Affinity web URLs, e.g. https://<tenant>.affinity.co/persons/<id>.
    if raw.startswith(("http://", "https://")):
        url_resolved = _parse_affinity_url(raw)
        if url_resolved.type != "person" or url_resolved.person_id is None:
            raise CLIError(
                "Expected a person URL like https://<tenant>.affinity.(co|com)/persons/<id>",
                exit_code=2,
                error_type="usage_error",
                details={"input": selector, "resolvedType": url_resolved.type},
            )
        person_id = PersonId(int(url_resolved.person_id))
        resolved = ResolvedEntity(
            input=selector,
            entity_id=int(person_id),
            entity_type="person",
            source="url",
            canonical_url=f"https://app.affinity.co/persons/{int(person_id)}",
        )
        return person_id, {"person": resolved.to_dict()}

    lowered = raw.lower()
    # "email:<address>" — exact (case-insensitive) email lookup via search.
    if lowered.startswith("email:"):
        email = _strip_wrapping_quotes(raw.split(":", 1)[1])
        person_id = _resolve_person_by_email(client=client, email=email)
        resolved = ResolvedEntity(
            input=selector,
            entity_id=int(person_id),
            entity_type="person",
            source="email",
        )
        return person_id, {"person": resolved.to_dict()}

    # "name:<full name>" — exact (case-insensitive) full-name lookup via search.
    if lowered.startswith("name:"):
        name = _strip_wrapping_quotes(raw.split(":", 1)[1])
        person_id = _resolve_person_by_name(client=client, name=name)
        resolved = ResolvedEntity(
            input=selector,
            entity_id=int(person_id),
            entity_type="person",
            source="name",
        )
        return person_id, {"person": resolved.to_dict()}

    raise CLIError(
        "Unrecognized person selector.",
        exit_code=2,
        error_type="usage_error",
        hint='Use a numeric id, an Affinity URL, or "email:<x>" / "name:<x>".',
        details={"input": selector},
    )
583
+
584
+
585
def _resolve_person_by_email(*, client: Any, email: str) -> PersonId:
    """Resolve a unique person by exact (case-insensitive) email address.

    Scans search results in pages of 500, collecting at most 20 matches.
    Raises CLIError when the email is empty (usage_error), matches nothing
    (not_found, exit 4), or matches more than one person (ambiguous, exit 2).
    """
    email = email.strip()
    if not email:
        raise CLIError("Email cannot be empty.", exit_code=2, error_type="usage_error")

    target = email.lower()
    matches: list[Person] = []

    def _owns_address(candidate: Person) -> bool:
        # Compare against the primary email plus any secondary addresses.
        addresses: list[str] = []
        if candidate.primary_email:
            addresses.append(candidate.primary_email)
        addresses.extend(candidate.emails or [])
        return any(a.lower() == target for a in addresses if a)

    for page in client.persons.search_pages(email, page_size=500):
        for person in page.data:
            if _owns_address(person):
                matches.append(person)
                if len(matches) >= 20:
                    break
        if len(matches) >= 20 or not page.next_page_token:
            break

    if not matches:
        raise CLIError(
            f'Person not found for email "{email}"',
            exit_code=4,
            error_type="not_found",
            hint=f'Run `xaffinity person ls --query "{email}"` to explore matches.',
            details={"email": email},
        )
    if len(matches) > 1:
        raise CLIError(
            f'Ambiguous person email "{email}" ({len(matches)} matches)',
            exit_code=2,
            error_type="ambiguous_resolution",
            details={
                "email": email,
                "matches": [
                    {
                        "personId": int(p.id),
                        "name": p.full_name,
                        "primaryEmail": p.primary_email,
                    }
                    for p in matches[:20]
                ],
            },
        )
    return PersonId(int(matches[0].id))
631
+
632
+
633
def _resolve_person_by_name(*, client: Any, name: str) -> PersonId:
    """Resolve a unique person by exact (case-insensitive) full name.

    Scans search results in pages of 500, collecting at most 20 matches.
    Raises CLIError when the name is empty (usage_error), matches nothing
    (not_found, exit 4), or matches more than one person (ambiguous, exit 2).
    """
    name = name.strip()
    if not name:
        raise CLIError("Name cannot be empty.", exit_code=2, error_type="usage_error")

    wanted = name.lower()
    matches: list[Person] = []
    for page in client.persons.search_pages(name, page_size=500):
        for candidate in page.data:
            if candidate.full_name.lower() == wanted:
                matches.append(candidate)
                if len(matches) >= 20:
                    break
        # Stop once the match cap is reached or there are no further pages.
        if len(matches) >= 20 or not page.next_page_token:
            break

    if not matches:
        raise CLIError(
            f'Person not found for name "{name}"',
            exit_code=4,
            error_type="not_found",
            hint=f'Run `xaffinity person ls --query "{name}"` to explore matches.',
            details={"name": name},
        )
    if len(matches) > 1:
        raise CLIError(
            f'Ambiguous person name "{name}" ({len(matches)} matches)',
            exit_code=2,
            error_type="ambiguous_resolution",
            details={
                "name": name,
                "matches": [
                    {"personId": int(p.id), "name": p.full_name, "primaryEmail": p.primary_email}
                    for p in matches[:20]
                ],
            },
        )
    return PersonId(int(matches[0].id))
671
+
672
+
673
def _resolve_person_field_ids(
    *,
    client: Any,
    fields: tuple[str, ...],
    field_types: list[str],
) -> tuple[list[str], dict[str, Any]]:
    """Resolve explicit field selectors plus field-type expansions to field ids.

    Each ``fields`` entry may be a field id or an exact (case-insensitive)
    field name; each ``field_types`` entry expands to every field of that
    type. Returns the ordered, de-duplicated id list and a metadata dict for
    output. Raises CLIError for unknown or ambiguous field names.
    """
    meta = client.persons.get_fields()
    field_by_id: dict[str, Any] = {str(f.id): f for f in meta}
    # Lower-cased name -> list of ids; names are not guaranteed unique.
    by_name: dict[str, list[str]] = {}
    for f in meta:
        by_name.setdefault(str(f.name).lower(), []).append(str(f.id))

    resolved_fields: list[str] = []
    for raw in fields:
        text = _strip_wrapping_quotes(str(raw)).strip()
        if not text:
            continue
        # An exact id match takes priority over name matching.
        if text in field_by_id:
            resolved_fields.append(text)
            continue
        name_matches = by_name.get(text.lower(), [])
        if len(name_matches) == 1:
            resolved_fields.append(name_matches[0])
            continue
        if len(name_matches) > 1:
            raise CLIError(
                f'Ambiguous field name "{text}" ({len(name_matches)} matches)',
                exit_code=2,
                error_type="ambiguous_resolution",
                details={
                    "name": text,
                    "matches": [
                        {
                            "fieldId": fid,
                            "name": getattr(field_by_id.get(fid), "name", None),
                            "type": getattr(field_by_id.get(fid), "type", None),
                            "valueType": getattr(field_by_id.get(fid), "value_type", None),
                        }
                        for fid in name_matches[:20]
                    ],
                },
            )

        # Neither an id nor a known name.
        raise CLIError(
            f'Unknown field: "{text}"',
            exit_code=2,
            error_type="usage_error",
            hint="Tip: run `xaffinity person get <id> --all-fields --json` and inspect "
            "`data.person.fields[*].id` / `data.person.fields[*].name`.",
            details={"field": text},
        )

    # Expand each requested field type to all of its fields, in a stable
    # (name, id) sort order.
    expanded: list[str] = []
    for field_type in field_types:
        wanted = field_type.strip()
        if not wanted:
            continue
        candidates = [f for f in meta if f.type == wanted]
        candidates.sort(
            key=lambda f: (
                str(f.name).lower(),
                str(f.id),
            )
        )
        expanded.extend([str(f.id) for f in candidates])

    # De-duplicate preserving order: explicit fields first, then expansions.
    ordered: list[str] = []
    seen: set[str] = set()
    for fid in [*resolved_fields, *expanded]:
        if fid in seen:
            continue
        ordered.append(fid)
        seen.add(fid)

    resolved_info = {
        "fieldIds": ordered,
        "fieldTypes": field_types,
        "explicitFields": list(fields),
    }
    return ordered, resolved_info
753
+
754
+
755
+ @category("read")
756
+ @person_group.command(name="get", cls=RichCommand)
757
+ @click.argument("person_selector", type=str)
758
+ @click.option(
759
+ "-f",
760
+ "--field",
761
+ "fields",
762
+ multiple=True,
763
+ help="Field id or exact field name (repeatable).",
764
+ )
765
+ @click.option(
766
+ "-t",
767
+ "--field-type",
768
+ "field_types",
769
+ multiple=True,
770
+ type=click.Choice(list(_PERSON_FIELDS_ALL_TYPES)),
771
+ help="Include all fields of this type (repeatable).",
772
+ )
773
+ @click.option(
774
+ "--all-fields",
775
+ is_flag=True,
776
+ help="Include all supported (non-list-specific) field data.",
777
+ )
778
+ @click.option("--no-fields", is_flag=True, help="Do not request field data.")
779
+ @click.option(
780
+ "--expand",
781
+ "expand",
782
+ multiple=True,
783
+ type=click.Choice(["lists", "list-entries"]),
784
+ help="Include related data (repeatable).",
785
+ )
786
+ @click.option(
787
+ "--list",
788
+ "list_selector",
789
+ type=str,
790
+ default=None,
791
+ help=(
792
+ "Filter list-entries expansion to a list id or exact list name "
793
+ "(implies --expand list-entries)."
794
+ ),
795
+ )
796
+ @click.option(
797
+ "--list-entry-field",
798
+ "list_entry_fields",
799
+ multiple=True,
800
+ help=(
801
+ "Project a list-entry field into its own column (repeatable; implies --expand "
802
+ "list-entries)."
803
+ ),
804
+ )
805
+ @click.option(
806
+ "--show-list-entry-fields",
807
+ "show_list_entry_fields",
808
+ is_flag=True,
809
+ help=(
810
+ "Render per-list-entry Fields tables in human output (implies --expand list-entries; "
811
+ "requires --max-results <= 3)."
812
+ ),
813
+ )
814
+ @click.option(
815
+ "--list-entry-fields-scope",
816
+ "list_entry_fields_scope",
817
+ type=click.Choice(["list-only", "all"]),
818
+ default="list-only",
819
+ show_default=True,
820
+ help="Control which fields appear in list entry tables (human output only).",
821
+ )
822
+ @click.option(
823
+ "--max-results",
824
+ "--limit",
825
+ "-n",
826
+ type=int,
827
+ default=None,
828
+ help="Maximum items to fetch per expansion section (applies to --expand).",
829
+ )
830
+ @click.option(
831
+ "--all",
832
+ "all_pages",
833
+ is_flag=True,
834
+ help="Fetch all pages for expansions (still capped by --max-results if set).",
835
+ )
836
+ @output_options
837
+ @click.pass_obj
838
+ def person_get(
839
+ ctx: CLIContext,
840
+ person_selector: str,
841
+ *,
842
+ fields: tuple[str, ...],
843
+ field_types: tuple[str, ...],
844
+ all_fields: bool,
845
+ no_fields: bool,
846
+ expand: tuple[str, ...],
847
+ list_selector: str | None,
848
+ list_entry_fields: tuple[str, ...],
849
+ show_list_entry_fields: bool,
850
+ list_entry_fields_scope: ListEntryFieldsScope,
851
+ max_results: int | None,
852
+ all_pages: bool,
853
+ ) -> None:
854
+ """
855
+ Get a person by id, URL, email, or name.
856
+
857
+ The PERSON_SELECTOR can be:
858
+
859
+ - Person ID (e.g., `12345`)
860
+ - Person URL (e.g., `https://app.affinity.co/persons/12345`)
861
+ - Email (e.g., `email:john@example.com`)
862
+ - Name (e.g., `name:"John Smith"`)
863
+
864
+ List Entry Fields:
865
+
866
+ Use --list-entry-field and related flags to customize which list-entry
867
+ fields are shown in table output. These flags are ignored in JSON mode
868
+ to ensure full-fidelity output.
869
+
870
+ JSON Output:
871
+
872
+ When using --json, all list-entry fields are included regardless of
873
+ --list-entry-field flags. Use table output for selective field display.
874
+
875
+ Examples:
876
+
877
+ - `xaffinity person get 223384905`
878
+ - `xaffinity person get https://mydomain.affinity.com/persons/223384905`
879
+ - `xaffinity person get email:alice@example.com`
880
+ - `xaffinity person get name:"Alice Smith"`
881
+ - `xaffinity person get 223384905 --expand list-entries --list "Sales Pipeline"`
882
+ - `xaffinity person get 223384905 --json # Full data, ignores field filters`
883
+ """
884
+
885
+ def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
886
+ client = ctx.get_client(warnings=warnings)
887
+ cache = ctx.session_cache
888
+ person_id, resolved = _resolve_person_selector(client=client, selector=person_selector)
889
+
890
+ expand_set = {e.strip() for e in expand if e and e.strip()}
891
+
892
+ # Auto-imply --expand list-entries when list-entry-related flags are used.
893
+ # This improves DX by removing a redundant flag requirement.
894
+ if (list_selector or list_entry_fields or show_list_entry_fields) and (
895
+ "list-entries" not in expand_set
896
+ ):
897
+ expand_set.add("list-entries")
898
+
899
+ effective_list_entry_fields = tuple(list_entry_fields)
900
+ effective_show_list_entry_fields = bool(show_list_entry_fields)
901
+ effective_list_entry_fields_scope: ListEntryFieldsScope = list_entry_fields_scope
902
+ if ctx.output == "json":
903
+ effective_list_entry_fields = ()
904
+ effective_show_list_entry_fields = False
905
+ effective_list_entry_fields_scope = "all"
906
+
907
+ scope_source = None
908
+ click_ctx = click.get_current_context(silent=True)
909
+ if click_ctx is not None:
910
+ get_source = getattr(cast(Any, click_ctx), "get_parameter_source", None)
911
+ if callable(get_source):
912
+ scope_source = get_source("list_entry_fields_scope")
913
+ source_enum = getattr(cast(Any, click.core), "ParameterSource", None)
914
+ default_source = getattr(source_enum, "DEFAULT", None) if source_enum else None
915
+ if (
916
+ ctx.output != "json"
917
+ and scope_source is not None
918
+ and default_source is not None
919
+ and scope_source != default_source
920
+ and not show_list_entry_fields
921
+ ):
922
+ raise CLIError(
923
+ "--list-entry-fields-scope requires --show-list-entry-fields.",
924
+ exit_code=2,
925
+ error_type="usage_error",
926
+ )
927
+
928
+ # Note: --list now auto-implies --expand list-entries (handled above)
929
+
930
+ if no_fields and (fields or field_types or all_fields):
931
+ raise CLIError(
932
+ "--no-fields cannot be combined with --field/--field-type/--all-fields.",
933
+ exit_code=2,
934
+ error_type="usage_error",
935
+ )
936
+
937
+ # Note: --list-entry-field/--show-list-entry-fields now auto-imply --expand list-entries
938
+
939
+ if effective_list_entry_fields and effective_show_list_entry_fields:
940
+ raise CLIError(
941
+ "--list-entry-field and --show-list-entry-fields are mutually exclusive.",
942
+ exit_code=2,
943
+ error_type="usage_error",
944
+ )
945
+
946
+ if effective_show_list_entry_fields:
947
+ if max_results is None:
948
+ raise CLIError(
949
+ "--show-list-entry-fields requires --max-results N (N <= 3).",
950
+ exit_code=2,
951
+ error_type="usage_error",
952
+ hint=(
953
+ "Add --max-results 3 to limit output, or use --json / --list-entry-field "
954
+ "for large outputs."
955
+ ),
956
+ )
957
+ if max_results <= 0:
958
+ raise CLIError(
959
+ "--max-results must be >= 1 when used with --show-list-entry-fields.",
960
+ exit_code=2,
961
+ error_type="usage_error",
962
+ )
963
+ if max_results > 3:
964
+ raise CLIError(
965
+ f"--show-list-entry-fields is limited to --max-results 3 (got {max_results}).",
966
+ exit_code=2,
967
+ error_type="usage_error",
968
+ hint=(
969
+ "Options: set --max-results 3, use --json for full structured data, or "
970
+ "use --list-entry-field <field> to project specific fields."
971
+ ),
972
+ )
973
+
974
+ if effective_list_entry_fields and not list_selector:
975
+ for spec in effective_list_entry_fields:
976
+ if any(ch.isspace() for ch in spec):
977
+ raise CLIError(
978
+ (
979
+ "Field names are only allowed with --list because names aren't "
980
+ "unique across lists."
981
+ ),
982
+ exit_code=2,
983
+ error_type="usage_error",
984
+ hint=(
985
+ "Tip: run `xaffinity list get <list>` to discover list-entry field IDs."
986
+ ),
987
+ details={"field": spec},
988
+ )
989
+
990
+ requested_types: list[str] = []
991
+ if all_fields:
992
+ requested_types.extend(list(_PERSON_FIELDS_ALL_TYPES))
993
+ requested_types.extend([t for t in field_types if t])
994
+
995
+ seen_types: set[str] = set()
996
+ deduped_types: list[str] = []
997
+ for t in requested_types:
998
+ if t in seen_types:
999
+ continue
1000
+ deduped_types.append(t)
1001
+ seen_types.add(t)
1002
+ requested_types = deduped_types
1003
+
1004
+ params: dict[str, Any] = {}
1005
+ selection_resolved: dict[str, Any] = {}
1006
+ if not no_fields and (fields or requested_types):
1007
+ if fields:
1008
+ selected_field_ids, selection_resolved = _resolve_person_field_ids(
1009
+ client=client,
1010
+ fields=fields,
1011
+ field_types=requested_types,
1012
+ )
1013
+ if selected_field_ids:
1014
+ params["fieldIds"] = selected_field_ids
1015
+ else:
1016
+ params["fieldTypes"] = requested_types
1017
+ selection_resolved = {"fieldTypes": requested_types}
1018
+
1019
+ person_payload = client._http.get(f"/persons/{int(person_id)}", params=params or None)
1020
+
1021
+ data: dict[str, Any] = {"person": person_payload}
1022
+ pagination: dict[str, Any] = {}
1023
+
1024
+ # Show spinner for expansion operations
1025
+ show_expand_progress = (
1026
+ expand_set
1027
+ and ctx.progress != "never"
1028
+ and not ctx.quiet
1029
+ and (ctx.progress == "always" or sys.stderr.isatty())
1030
+ )
1031
+
1032
+ # Variables needed for list-entries expansion
1033
+ list_id: ListId | None = None
1034
+ entries_items: list[Any] = []
1035
+
1036
+ with ExitStack() as stack:
1037
+ if show_expand_progress:
1038
+ progress = stack.enter_context(
1039
+ Progress(
1040
+ SpinnerColumn(),
1041
+ TextColumn("Fetching expanded data..."),
1042
+ console=Console(file=sys.stderr),
1043
+ transient=True,
1044
+ )
1045
+ )
1046
+ progress.add_task("expand", total=None)
1047
+
1048
+ if "lists" in expand_set:
1049
+ data["lists"] = _fetch_v2_collection(
1050
+ client=client,
1051
+ path=f"/persons/{int(person_id)}/lists",
1052
+ section="lists",
1053
+ default_limit=100,
1054
+ default_cap=100,
1055
+ allow_unbounded=True,
1056
+ max_results=max_results,
1057
+ all_pages=all_pages,
1058
+ warnings=warnings,
1059
+ pagination=pagination,
1060
+ )
1061
+
1062
+ if "list-entries" in expand_set:
1063
+ if list_selector:
1064
+ raw_list_selector = list_selector.strip()
1065
+ if raw_list_selector.isdigit():
1066
+ list_id = ListId(int(raw_list_selector))
1067
+ resolved.update({"list": {"input": list_selector, "listId": int(list_id)}})
1068
+ else:
1069
+ resolved_list_obj = resolve_list_selector(
1070
+ client=client, selector=list_selector, cache=cache
1071
+ )
1072
+ list_id = ListId(int(resolved_list_obj.list.id))
1073
+ resolved.update(resolved_list_obj.resolved)
1074
+
1075
+ def keep_entry(item: Any) -> bool:
1076
+ if list_id is None:
1077
+ return True
1078
+ return isinstance(item, dict) and item.get("listId") == int(list_id)
1079
+
1080
+ entries_items = _fetch_v2_collection(
1081
+ client=client,
1082
+ path=f"/persons/{int(person_id)}/list-entries",
1083
+ section="listEntries",
1084
+ default_limit=100,
1085
+ default_cap=None,
1086
+ allow_unbounded=False,
1087
+ max_results=max_results,
1088
+ all_pages=all_pages,
1089
+ warnings=warnings,
1090
+ pagination=pagination,
1091
+ keep_item=keep_entry if list_id is not None else None,
1092
+ )
1093
+ data["listEntries"] = entries_items
1094
+
1095
+ if "list-entries" in expand_set and entries_items and ctx.output != "json":
1096
+ list_name_by_id: dict[int, str] = {}
1097
+ if isinstance(data.get("lists"), list):
1098
+ for item in data.get("lists", []):
1099
+ if not isinstance(item, dict):
1100
+ continue
1101
+ lid = item.get("id")
1102
+ name = item.get("name")
1103
+ if isinstance(lid, int) and isinstance(name, str) and name.strip():
1104
+ list_name_by_id[lid] = name.strip()
1105
+ if effective_show_list_entry_fields:
1106
+ needed_list_ids: set[int] = set()
1107
+ for entry in entries_items:
1108
+ if not isinstance(entry, dict):
1109
+ continue
1110
+ lid = entry.get("listId")
1111
+ if isinstance(lid, int) and lid not in list_name_by_id:
1112
+ needed_list_ids.add(lid)
1113
+ for lid in sorted(needed_list_ids):
1114
+ try:
1115
+ list_obj = client.lists.get(ListId(lid))
1116
+ except Exception:
1117
+ continue
1118
+ if getattr(list_obj, "name", None):
1119
+ list_name_by_id[lid] = str(list_obj.name)
1120
+
1121
+ resolved_list_entry_fields: list[tuple[str, str]] = []
1122
+ if effective_list_entry_fields:
1123
+ if list_id is not None:
1124
+ fields_meta = client.lists.get_fields(list_id)
1125
+ by_id: dict[str, str] = {}
1126
+ by_name: dict[str, list[str]] = {}
1127
+ for f in fields_meta:
1128
+ fid = str(getattr(f, "id", "")).strip()
1129
+ name = str(getattr(f, "name", "")).strip()
1130
+ if fid:
1131
+ by_id[fid] = name or fid
1132
+ if name:
1133
+ by_name.setdefault(name.lower(), []).append(fid or name)
1134
+
1135
+ for spec in effective_list_entry_fields:
1136
+ raw = spec.strip()
1137
+ if not raw:
1138
+ continue
1139
+ if raw in by_id:
1140
+ resolved_list_entry_fields.append((raw, by_id[raw]))
1141
+ continue
1142
+ matches = by_name.get(raw.lower(), [])
1143
+ if len(matches) == 1:
1144
+ fid = matches[0]
1145
+ resolved_list_entry_fields.append((fid, by_id.get(fid, raw)))
1146
+ continue
1147
+ if len(matches) > 1:
1148
+ raise CLIError(
1149
+ (
1150
+ f'Ambiguous list-entry field name "{raw}" '
1151
+ f"({len(matches)} matches)"
1152
+ ),
1153
+ exit_code=2,
1154
+ error_type="ambiguous_resolution",
1155
+ details={"name": raw, "matches": matches[:20]},
1156
+ )
1157
+ raise CLIError(
1158
+ f'Unknown list-entry field: "{raw}"',
1159
+ exit_code=2,
1160
+ error_type="usage_error",
1161
+ hint=(
1162
+ "Tip: run `xaffinity list get <list>` and inspect "
1163
+ "`data.fields[*].id` / `data.fields[*].name`."
1164
+ ),
1165
+ details={"field": raw},
1166
+ )
1167
+ else:
1168
+ for spec in effective_list_entry_fields:
1169
+ raw = spec.strip()
1170
+ if raw:
1171
+ resolved_list_entry_fields.append((raw, raw))
1172
+
1173
+ def unique_label(label: str, *, used: set[str], fallback: str) -> str:
1174
+ base = (label or "").strip() or fallback
1175
+ if base not in used:
1176
+ used.add(base)
1177
+ return base
1178
+ idx = 2
1179
+ while f"{base} ({idx})" in used:
1180
+ idx += 1
1181
+ final = f"{base} ({idx})"
1182
+ used.add(final)
1183
+ return final
1184
+
1185
+ used_labels: set[str] = {
1186
+ "list",
1187
+ "listId",
1188
+ "listEntryId",
1189
+ "createdAt",
1190
+ "fieldsCount",
1191
+ }
1192
+ projected: list[tuple[str, str]] = []
1193
+ for fid, label in resolved_list_entry_fields:
1194
+ projected.append((fid, unique_label(label, used=used_labels, fallback=fid)))
1195
+
1196
+ summary_rows: list[dict[str, Any]] = []
1197
+ for entry in entries_items:
1198
+ if not isinstance(entry, dict):
1199
+ continue
1200
+ list_id_value = entry.get("listId")
1201
+ list_name = (
1202
+ list_name_by_id.get(list_id_value) if isinstance(list_id_value, int) else None
1203
+ )
1204
+ list_label = list_name or (str(list_id_value) if list_id_value is not None else "")
1205
+ fields_payload = entry.get("fields", [])
1206
+ fields_list = fields_payload if isinstance(fields_payload, list) else []
1207
+ row: dict[str, Any] = {}
1208
+ row["list"] = list_label
1209
+ row["listId"] = list_id_value if isinstance(list_id_value, int) else None
1210
+ row["listEntryId"] = entry.get("id")
1211
+ row["createdAt"] = entry.get("createdAt")
1212
+ fields_count = len(fields_list)
1213
+ if effective_show_list_entry_fields:
1214
+ _filtered, list_only_count, total_count = filter_list_entry_fields(
1215
+ fields_list,
1216
+ scope=effective_list_entry_fields_scope,
1217
+ )
1218
+ if effective_list_entry_fields_scope == "list-only":
1219
+ fields_count = list_only_count
1220
+ else:
1221
+ fields_count = total_count
1222
+ row["fieldsCount"] = fields_count
1223
+
1224
+ field_by_id: dict[str, dict[str, Any]] = {}
1225
+ for f in fields_list:
1226
+ if not isinstance(f, dict):
1227
+ continue
1228
+ field_id = f.get("id")
1229
+ if isinstance(field_id, str) and field_id:
1230
+ field_by_id[field_id] = f
1231
+
1232
+ for fid, label in projected:
1233
+ field_obj = field_by_id.get(fid)
1234
+ value_obj = field_obj.get("value") if isinstance(field_obj, dict) else None
1235
+ row[label] = value_obj
1236
+
1237
+ summary_rows.append(row)
1238
+
1239
+ data["listEntries"] = summary_rows
1240
+
1241
+ if effective_show_list_entry_fields:
1242
+ for entry in entries_items:
1243
+ if not isinstance(entry, dict):
1244
+ continue
1245
+ list_entry_id = entry.get("id")
1246
+ list_id_value = entry.get("listId")
1247
+ list_name = (
1248
+ list_name_by_id.get(list_id_value)
1249
+ if isinstance(list_id_value, int)
1250
+ else None
1251
+ )
1252
+ if list_name:
1253
+ list_hint = (
1254
+ f"{list_name} (listId={list_id_value})"
1255
+ if list_id_value is not None
1256
+ else str(list_name)
1257
+ )
1258
+ else:
1259
+ list_hint = (
1260
+ f"listId={list_id_value}"
1261
+ if list_id_value is not None
1262
+ else "listId=unknown"
1263
+ )
1264
+ title = f"List Entry {list_entry_id} ({list_hint}) Fields"
1265
+
1266
+ fields_payload = entry.get("fields", [])
1267
+ fields_list = fields_payload if isinstance(fields_payload, list) else []
1268
+ filtered_fields, list_only_count, total_count = filter_list_entry_fields(
1269
+ fields_list,
1270
+ scope=effective_list_entry_fields_scope,
1271
+ )
1272
+ if total_count == 0:
1273
+ data[title] = {"_text": "(no fields)"}
1274
+ continue
1275
+ if effective_list_entry_fields_scope == "list-only" and list_only_count == 0:
1276
+ data[title] = {
1277
+ "_text": (
1278
+ f"(no list-specific fields; {total_count} non-list fields "
1279
+ "available with --list-entry-fields-scope all)"
1280
+ )
1281
+ }
1282
+ continue
1283
+
1284
+ field_rows = build_list_entry_field_rows(filtered_fields)
1285
+ if (
1286
+ effective_list_entry_fields_scope == "list-only"
1287
+ and list_only_count < total_count
1288
+ ):
1289
+ data[title] = {
1290
+ "_rows": field_rows,
1291
+ "_hint": (
1292
+ "Some non-list fields hidden — use "
1293
+ "--list-entry-fields-scope all to include them"
1294
+ ),
1295
+ }
1296
+ else:
1297
+ data[title] = field_rows
1298
+
1299
+ if selection_resolved:
1300
+ resolved["fieldSelection"] = selection_resolved
1301
+ if expand_set:
1302
+ resolved["expand"] = sorted(expand_set)
1303
+
1304
+ # Fetch field metadata if fields were requested and present in response
1305
+ person_fields = person_payload.get("fields") if isinstance(person_payload, dict) else None
1306
+ if isinstance(person_fields, list) and person_fields:
1307
+ try:
1308
+ from ..field_utils import build_field_id_to_name_map
1309
+
1310
+ field_metadata = client.persons.get_fields()
1311
+ resolved["fieldMetadata"] = build_field_id_to_name_map(field_metadata)
1312
+ except Exception:
1313
+ # Field metadata is optional - continue without names if fetch fails
1314
+ pass
1315
+
1316
+ # Build CommandContext for structured output
1317
+ ctx_inputs: dict[str, Any] = {}
1318
+ ctx_modifiers: dict[str, Any] = {}
1319
+ ctx_resolved: dict[str, str] = {}
1320
+
1321
+ # Determine if selector or ID was used
1322
+ raw_selector = person_selector.strip()
1323
+ if raw_selector.isdigit():
1324
+ ctx_inputs["personId"] = int(person_id)
1325
+ else:
1326
+ ctx_inputs["selector"] = raw_selector
1327
+
1328
+ # Build modifiers from non-default options
1329
+ if expand_set:
1330
+ ctx_modifiers["expand"] = sorted(expand_set)
1331
+ if fields:
1332
+ ctx_modifiers["fields"] = list(fields)
1333
+ if field_types:
1334
+ ctx_modifiers["fieldTypes"] = list(field_types)
1335
+ if all_fields:
1336
+ ctx_modifiers["allFields"] = True
1337
+ if no_fields:
1338
+ ctx_modifiers["noFields"] = True
1339
+ if list_selector:
1340
+ ctx_modifiers["list"] = list_selector
1341
+ if max_results is not None:
1342
+ ctx_modifiers["maxResults"] = max_results
1343
+ if all_pages:
1344
+ ctx_modifiers["allPages"] = True
1345
+
1346
+ # Resolve person name from response
1347
+ if isinstance(person_payload, dict):
1348
+ first = person_payload.get("firstName", "")
1349
+ last = person_payload.get("lastName", "")
1350
+ name = f"{first} {last}".strip()
1351
+ if name:
1352
+ if "personId" in ctx_inputs:
1353
+ ctx_resolved["personId"] = name
1354
+ elif "selector" in ctx_inputs:
1355
+ ctx_resolved["selector"] = name
1356
+
1357
+ context = CommandContext(
1358
+ name="person get",
1359
+ inputs=ctx_inputs,
1360
+ modifiers=ctx_modifiers,
1361
+ resolved=ctx_resolved if ctx_resolved else None,
1362
+ )
1363
+
1364
+ return CommandOutput(
1365
+ data=data,
1366
+ context=context,
1367
+ pagination=pagination or None,
1368
+ resolved=resolved,
1369
+ api_called=True,
1370
+ )
1371
+
1372
+ run_command(ctx, command="person get", fn=fn)
1373
+
1374
+
1375
# Subcommand group: `xaffinity person files ...` (hosts `dump` and `upload`).
# The docstring doubles as the group's help text in CLI output.
@person_group.group(name="files", cls=RichGroup)
def person_files_group() -> None:
    """Person files."""
1378
+
1379
+
1380
@category("read")
@person_files_group.command(name="dump", cls=RichCommand)
@click.argument("person_id", type=int)
@click.option(
    "--out",
    "out_dir",
    type=click.Path(),
    default=None,
    help="Output directory for downloaded files.",
)
@click.option("--overwrite", is_flag=True, help="Overwrite existing files.")
@click.option(
    "--concurrency", type=int, default=3, show_default=True, help="Number of concurrent downloads."
)
@click.option(
    "--page-size",
    type=int,
    default=100,
    show_default=True,
    help="Page size for file listing (max 100).",
)
@click.option("--max-files", type=int, default=None, help="Stop after N files.")
@output_options
@click.pass_obj
def person_files_dump(
    ctx: CLIContext,
    person_id: int,
    *,
    out_dir: str | None,
    overwrite: bool,
    concurrency: int,
    page_size: int,
    max_files: int | None,
) -> None:
    """Download all files attached to a person."""

    def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
        # Record only non-default options in the command context so the
        # output metadata reflects what the user explicitly requested.
        ctx_modifiers: dict[str, object] = {}
        if out_dir:
            ctx_modifiers["outDir"] = out_dir
        if overwrite:
            ctx_modifiers["overwrite"] = True
        # BUG FIX: the --concurrency default is 3 (see the option above), so
        # the non-default check must compare against 3. The previous `!= 4`
        # recorded the default value as a modifier and silently dropped an
        # explicit `--concurrency 4`.
        if concurrency != 3:
            ctx_modifiers["concurrency"] = concurrency
        if page_size != 100:
            ctx_modifiers["pageSize"] = page_size
        if max_files is not None:
            ctx_modifiers["maxFiles"] = max_files

        cmd_context = CommandContext(
            name="person files dump",
            inputs={"personId": person_id},
            modifiers=ctx_modifiers,
        )

        # The bundle helper is async (concurrent downloads); drive it to
        # completion from this synchronous CLI entry point.
        return asyncio.run(
            dump_entity_files_bundle(
                ctx=ctx,
                warnings=warnings,
                out_dir=out_dir,
                overwrite=overwrite,
                concurrency=concurrency,
                page_size=page_size,
                max_files=max_files,
                default_dirname=f"affinity-person-{person_id}-files",
                manifest_entity={"type": "person", "personId": person_id},
                files_list_kwargs={"person_id": PersonId(person_id)},
                context=cmd_context,
            )
        )

    run_command(ctx, command="person files dump", fn=fn)
1453
+
1454
+
1455
@category("write")
@progress_capable
@person_files_group.command(name="upload", cls=RichCommand)
@click.argument("person_id", type=int)
@click.option(
    "--file",
    "file_paths",
    type=click.Path(exists=False),
    multiple=True,
    required=True,
    help="File path to upload (repeatable).",
)
@output_options
@click.pass_obj
def person_files_upload(
    ctx: CLIContext,
    person_id: int,
    *,
    file_paths: tuple[str, ...],
) -> None:
    """
    Upload files to a person.

    Examples:

    - `xaffinity person files upload 123 --file doc.pdf`
    - `xaffinity person files upload 123 --file a.pdf --file b.pdf`
    """

    def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
        client = ctx.get_client(warnings=warnings)

        def validated_path(raw: str) -> Path:
            # Reject missing paths and non-regular files with a usage error.
            candidate = Path(raw)
            if not candidate.exists():
                raise CLIError(
                    f"File not found: {raw}",
                    exit_code=2,
                    error_type="usage_error",
                    hint="Check the file path and try again.",
                )
            if not candidate.is_file():
                raise CLIError(
                    f"Not a regular file: {raw}",
                    exit_code=2,
                    error_type="usage_error",
                    hint="Only regular files can be uploaded, not directories.",
                )
            return candidate

        # Validate every path up front so one bad argument aborts the whole
        # batch before any bytes are transferred.
        validated = [validated_path(raw) for raw in file_paths]

        upload_results: list[dict[str, object]] = []
        progress_settings = ProgressSettings(mode=ctx.progress, quiet=ctx.quiet)

        with ProgressManager(settings=progress_settings) as pm:
            for path in validated:
                size_bytes = path.stat().st_size
                # One progress task per file; the callback streams byte counts.
                _task_id, progress_cb = pm.task(
                    description=f"upload {path.name}",
                    total_bytes=size_bytes,
                )
                ok = client.files.upload_path(
                    path,
                    person_id=PersonId(person_id),
                    on_progress=progress_cb,
                )
                upload_results.append(
                    {
                        "file": str(path),
                        "filename": path.name,
                        "size": size_bytes,
                        "success": ok,
                    }
                )

        cmd_context = CommandContext(
            name="person files upload",
            inputs={"personId": person_id},
            modifiers={"files": list(file_paths)},
        )

        return CommandOutput(
            data={"uploads": upload_results, "personId": person_id},
            context=cmd_context,
            api_called=True,
        )

    run_command(ctx, command="person files upload", fn=fn)
1544
+
1545
+
1546
@category("write")
@person_group.command(name="create", cls=RichCommand)
@click.option("--first-name", required=True, help="First name.")
@click.option("--last-name", required=True, help="Last name.")
@click.option(
    "--email",
    "emails",
    multiple=True,
    help="Email address (repeatable).",
)
@click.option(
    "--company-id",
    "company_ids",
    multiple=True,
    type=int,
    help="Associated company id (repeatable).",
)
@output_options
@click.pass_obj
def person_create(
    ctx: CLIContext,
    *,
    first_name: str,
    last_name: str,
    emails: tuple[str, ...],
    company_ids: tuple[int, ...],
) -> None:
    """Create a person."""

    def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
        client = ctx.get_client(warnings=warnings)
        # Create via the typed SDK model, then serialize for CLI output.
        new_person = client.persons.create(
            PersonCreate(
                first_name=first_name,
                last_name=last_name,
                emails=list(emails),
                company_ids=[CompanyId(cid) for cid in company_ids],
            )
        )
        serialized = serialize_model_for_cli(new_person)

        # Required options always appear in the context; repeatable options
        # only when at least one value was supplied.
        modifiers: dict[str, object] = {
            "firstName": first_name,
            "lastName": last_name,
        }
        if emails:
            modifiers["emails"] = list(emails)
        if company_ids:
            modifiers["companyIds"] = list(company_ids)

        return CommandOutput(
            data={"person": serialized},
            context=CommandContext(
                name="person create",
                inputs={},
                modifiers=modifiers,
            ),
            api_called=True,
        )

    run_command(ctx, command="person create", fn=fn)
1609
+
1610
+
1611
@category("write")
@person_group.command(name="update", cls=RichCommand)
@click.argument("person_id", type=int)
@click.option("--first-name", default=None, help="Updated first name.")
@click.option("--last-name", default=None, help="Updated last name.")
@click.option(
    "--email",
    "emails",
    multiple=True,
    help="Replace emails (repeatable).",
)
@click.option(
    "--company-id",
    "company_ids",
    multiple=True,
    type=int,
    help="Replace associated company ids (repeatable).",
)
@output_options
@click.pass_obj
def person_update(
    ctx: CLIContext,
    person_id: int,
    *,
    first_name: str | None,
    last_name: str | None,
    emails: tuple[str, ...],
    company_ids: tuple[int, ...],
) -> None:
    """Update a person."""

    def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
        # At least one field must be supplied; otherwise there is nothing
        # to send to the API.
        has_changes = bool(first_name or last_name or emails or company_ids)
        if not has_changes:
            raise CLIError(
                "Provide at least one field to update.",
                exit_code=2,
                error_type="usage_error",
                hint="Use --first-name, --last-name, --email, or --company-id.",
            )
        client = ctx.get_client(warnings=warnings)
        # Empty tuples become None so omitted repeatables are not replaced.
        patch = PersonUpdate(
            first_name=first_name,
            last_name=last_name,
            emails=list(emails) if emails else None,
            company_ids=[CompanyId(cid) for cid in company_ids] if company_ids else None,
        )
        updated_person = client.persons.update(PersonId(person_id), patch)
        serialized = serialize_model_for_cli(updated_person)

        # Only record the options the caller actually provided.
        modifiers: dict[str, object] = {}
        if first_name:
            modifiers["firstName"] = first_name
        if last_name:
            modifiers["lastName"] = last_name
        if emails:
            modifiers["emails"] = list(emails)
        if company_ids:
            modifiers["companyIds"] = list(company_ids)

        return CommandOutput(
            data={"person": serialized},
            context=CommandContext(
                name="person update",
                inputs={"personId": person_id},
                modifiers=modifiers,
            ),
            api_called=True,
        )

    run_command(ctx, command="person update", fn=fn)
1685
+
1686
+
1687
@category("write")
@destructive
@person_group.command(name="delete", cls=RichCommand)
@click.argument("person_id", type=int)
@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt.")
@output_options
@click.pass_obj
def person_delete(ctx: CLIContext, person_id: int, yes: bool) -> None:
    """Delete a person."""
    # Prompt before doing anything unless --yes was given; a declined
    # confirmation aborts the command via click.
    if not yes:
        click.confirm(f"Delete person {person_id}?", abort=True)

    def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
        client = ctx.get_client(warnings=warnings)
        deleted = client.persons.delete(PersonId(person_id))

        return CommandOutput(
            data={"success": deleted},
            context=CommandContext(
                name="person delete",
                inputs={"personId": person_id},
                modifiers={},
            ),
            api_called=True,
        )

    run_command(ctx, command="person delete", fn=fn)
1716
+
1717
+
1718
@category("write")
@person_group.command(name="merge", cls=RichCommand)
@click.argument("primary_id", type=int)
@click.argument("duplicate_id", type=int)
@output_options
@click.pass_obj
def person_merge(
    ctx: CLIContext,
    primary_id: int,
    duplicate_id: int,
) -> None:
    """Merge a duplicate person into a primary person (beta)."""

    def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
        client = ctx.get_client(warnings=warnings)
        # The merge endpoint is asynchronous server-side; it returns a task
        # URL that can be polled for completion.
        task_url = client.persons.merge(PersonId(primary_id), PersonId(duplicate_id))

        result = {
            "survivingId": primary_id,
            "mergedId": duplicate_id,
            "affinityUrl": f"https://app.affinity.co/persons/{primary_id}",
            "taskUrl": task_url,
        }

        return CommandOutput(
            data=result,
            context=CommandContext(
                name="person merge",
                inputs={"primaryId": primary_id, "duplicateId": duplicate_id},
                modifiers={},
            ),
            api_called=True,
        )

    run_command(ctx, command="person merge", fn=fn)
1753
+
1754
+
1755
+ @category("write")
1756
+ @person_group.command(name="field", cls=RichCommand)
1757
+ @click.argument("person_id", type=int)
1758
+ @click.option(
1759
+ "--set",
1760
+ "set_values",
1761
+ nargs=2,
1762
+ multiple=True,
1763
+ metavar="FIELD VALUE",
1764
+ help="Set field value (repeatable). Use two args: FIELD VALUE.",
1765
+ )
1766
+ @click.option(
1767
+ "--unset",
1768
+ "unset_fields",
1769
+ multiple=True,
1770
+ metavar="FIELD",
1771
+ help="Unset field (repeatable). Removes all values for the field.",
1772
+ )
1773
+ @click.option(
1774
+ "--set-json",
1775
+ "json_input",
1776
+ type=str,
1777
+ help="JSON object of field:value pairs to set.",
1778
+ )
1779
+ @click.option(
1780
+ "--get",
1781
+ "get_fields",
1782
+ multiple=True,
1783
+ metavar="FIELD",
1784
+ help="Get specific field values (repeatable).",
1785
+ )
1786
+ @output_options
1787
+ @click.pass_obj
1788
+ def person_field(
1789
+ ctx: CLIContext,
1790
+ person_id: int,
1791
+ *,
1792
+ set_values: tuple[tuple[str, str], ...],
1793
+ unset_fields: tuple[str, ...],
1794
+ json_input: str | None,
1795
+ get_fields: tuple[str, ...],
1796
+ ) -> None:
1797
+ """
1798
+ Manage person field values.
1799
+
1800
+ Unified command for getting, setting, and unsetting field values.
1801
+ For field names with spaces, use quotes.
1802
+
1803
+ Examples:
1804
+
1805
+ - `xaffinity person field 123 --set Phone "+1-555-0123"`
1806
+ - `xaffinity person field 123 --set Phone "+1..." --set Title "CEO"`
1807
+ - `xaffinity person field 123 --unset Phone`
1808
+ - `xaffinity person field 123 --set-json '{"Phone": "+1...", "Title": "CEO"}'`
1809
+ - `xaffinity person field 123 --get Phone --get Email`
1810
+ """
1811
+ import json as json_module
1812
+
1813
def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
    """Read, set, or unset field values on a person.

    Exactly one mode runs per invocation:

    * ``--get``        read current value(s) for the named fields (read-only,
                       cannot be combined with write options).
    * ``--set`` / ``--set-json``
                       replace each field's value(s): existing values are
                       deleted, then the new value is created.
    * ``--unset``      delete all value(s) for the named fields.

    Closes over CLI options from the enclosing command: ``person_id``,
    ``set_values``, ``unset_fields``, ``json_input``, ``get_fields``.

    Raises:
        CLIError: on invalid option combinations or malformed ``--set-json``
            input (exit code 2, ``usage_error``).
    """
    from affinity.models.entities import FieldValueCreate
    from affinity.types import FieldId as FieldIdType

    from ..field_utils import (
        FieldResolver,
        fetch_field_metadata,
        find_field_values_for_field,
    )

    has_set = bool(set_values) or bool(json_input)
    has_unset = bool(unset_fields)
    has_get = bool(get_fields)

    # Validate: at least one operation must be specified.
    if not (has_set or has_unset or has_get):
        raise CLIError(
            "Provide at least one of --set, --unset, --set-json, or --get.",
            exit_code=2,
            error_type="usage_error",
        )

    # Validate: --get is exclusive (can't mix read with write).
    if has_get and (has_set or has_unset):
        raise CLIError(
            "--get cannot be combined with --set, --unset, or --set-json.",
            exit_code=2,
            error_type="usage_error",
        )

    client = ctx.get_client(warnings=warnings)
    field_metadata = fetch_field_metadata(client=client, entity_type="person")
    resolver = FieldResolver(field_metadata)

    def _current_values_for(field_id: Any) -> list[dict[str, Any]]:
        """Fetch this person's field values, filtered to *field_id*.

        Re-fetched on every call on purpose: set operations mutate state,
        and a field repeated in the input must observe values created by
        earlier iterations (so replace semantics still hold).
        """
        existing = client.field_values.list(person_id=PersonId(person_id))
        return find_field_values_for_field(
            field_values=[serialize_model_for_cli(v) for v in existing],
            field_id=field_id,
        )

    def _delete_values(field_id: Any) -> int:
        """Delete every existing value for *field_id*; return count deleted."""
        deleted = 0
        for fv in _current_values_for(field_id):
            fv_id = fv.get("id")
            # NOTE(review): falsy check also skips id == 0 — assumed the API
            # never issues 0 as a field-value id; confirm before changing.
            if fv_id:
                client.field_values.delete(fv_id)
                deleted += 1
        return deleted

    results: dict[str, Any] = {}

    # Echo the requested operations back through the CommandContext.
    ctx_modifiers: dict[str, object] = {}
    if set_values:
        ctx_modifiers["set"] = [list(sv) for sv in set_values]
    if unset_fields:
        ctx_modifiers["unset"] = list(unset_fields)
    if json_input:
        ctx_modifiers["json"] = json_input
    if get_fields:
        ctx_modifiers["get"] = list(get_fields)

    # Identical in both return paths, so build it once up front.
    cmd_context = CommandContext(
        name="person field",
        inputs={"personId": person_id},
        modifiers=ctx_modifiers,
    )

    # Handle --get: read field values and return early.
    if has_get:
        existing_values = client.field_values.list(person_id=PersonId(person_id))
        # Serialize once; the per-field loop below only filters this list.
        serialized = [serialize_model_for_cli(v) for v in existing_values]
        field_results: dict[str, Any] = {}

        for field_name in get_fields:
            target_field_id = resolver.resolve_field_name_or_id(field_name, context="field")
            matches = find_field_values_for_field(
                field_values=serialized,
                field_id=target_field_id,
            )
            resolved_name = resolver.get_field_name(target_field_id) or field_name
            if not matches:
                field_results[resolved_name] = None
            elif len(matches) == 1:
                # Single value: unwrap for convenience.
                field_results[resolved_name] = matches[0].get("value")
            else:
                # Multi-value field: return the full list of values.
                field_results[resolved_name] = [m.get("value") for m in matches]

        results["fields"] = field_results
        return CommandOutput(
            data=results,
            context=cmd_context,
            api_called=True,
        )

    # Collect set operations from --set options and --set-json.
    set_operations: list[tuple[str, Any]] = [
        (field_name, value) for field_name, value in set_values
    ]

    if json_input:
        try:
            json_data = json_module.loads(json_input)
        except json_module.JSONDecodeError as e:
            raise CLIError(
                f"Invalid JSON: {e}",
                exit_code=2,
                error_type="usage_error",
            ) from e
        # Validate outside the try so this CLIError can never be confused
        # with a parse failure. Flag name matches the usage errors above.
        if not isinstance(json_data, dict):
            raise CLIError(
                "--set-json must be a JSON object.",
                exit_code=2,
                error_type="usage_error",
            )
        set_operations.extend(json_data.items())

    # Execute set operations: delete existing values, then create the new one.
    created_values: list[dict[str, Any]] = []
    for field_name, value in set_operations:
        target_field_id = resolver.resolve_field_name_or_id(field_name, context="field")
        _delete_values(target_field_id)  # replace behavior
        created = client.field_values.create(
            FieldValueCreate(
                field_id=FieldIdType(target_field_id),
                entity_id=person_id,
                value=value,
            )
        )
        created_values.append(serialize_model_for_cli(created))

    # Handle --unset: remove all values for each named field.
    deleted_count = 0
    for field_name in unset_fields:
        target_field_id = resolver.resolve_field_name_or_id(field_name, context="field")
        deleted_count += _delete_values(target_field_id)

    # Build result; omit empty sections.
    if created_values:
        results["created"] = created_values
    if deleted_count > 0:
        results["deleted"] = deleted_count

    return CommandOutput(
        data=results,
        context=cmd_context,
        api_called=True,
    )
1979
+
1980
+ run_command(ctx, command="person field", fn=fn)