affinity-sdk 0.9.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. affinity/__init__.py +139 -0
  2. affinity/cli/__init__.py +7 -0
  3. affinity/cli/click_compat.py +27 -0
  4. affinity/cli/commands/__init__.py +1 -0
  5. affinity/cli/commands/_entity_files_dump.py +219 -0
  6. affinity/cli/commands/_list_entry_fields.py +41 -0
  7. affinity/cli/commands/_v1_parsing.py +77 -0
  8. affinity/cli/commands/company_cmds.py +2139 -0
  9. affinity/cli/commands/completion_cmd.py +33 -0
  10. affinity/cli/commands/config_cmds.py +540 -0
  11. affinity/cli/commands/entry_cmds.py +33 -0
  12. affinity/cli/commands/field_cmds.py +413 -0
  13. affinity/cli/commands/interaction_cmds.py +875 -0
  14. affinity/cli/commands/list_cmds.py +3152 -0
  15. affinity/cli/commands/note_cmds.py +433 -0
  16. affinity/cli/commands/opportunity_cmds.py +1174 -0
  17. affinity/cli/commands/person_cmds.py +1980 -0
  18. affinity/cli/commands/query_cmd.py +444 -0
  19. affinity/cli/commands/relationship_strength_cmds.py +62 -0
  20. affinity/cli/commands/reminder_cmds.py +595 -0
  21. affinity/cli/commands/resolve_url_cmd.py +127 -0
  22. affinity/cli/commands/session_cmds.py +84 -0
  23. affinity/cli/commands/task_cmds.py +110 -0
  24. affinity/cli/commands/version_cmd.py +29 -0
  25. affinity/cli/commands/whoami_cmd.py +36 -0
  26. affinity/cli/config.py +108 -0
  27. affinity/cli/context.py +749 -0
  28. affinity/cli/csv_utils.py +195 -0
  29. affinity/cli/date_utils.py +42 -0
  30. affinity/cli/decorators.py +77 -0
  31. affinity/cli/errors.py +28 -0
  32. affinity/cli/field_utils.py +355 -0
  33. affinity/cli/formatters.py +551 -0
  34. affinity/cli/help_json.py +283 -0
  35. affinity/cli/logging.py +100 -0
  36. affinity/cli/main.py +261 -0
  37. affinity/cli/options.py +53 -0
  38. affinity/cli/paths.py +32 -0
  39. affinity/cli/progress.py +183 -0
  40. affinity/cli/query/__init__.py +163 -0
  41. affinity/cli/query/aggregates.py +357 -0
  42. affinity/cli/query/dates.py +194 -0
  43. affinity/cli/query/exceptions.py +147 -0
  44. affinity/cli/query/executor.py +1236 -0
  45. affinity/cli/query/filters.py +248 -0
  46. affinity/cli/query/models.py +333 -0
  47. affinity/cli/query/output.py +331 -0
  48. affinity/cli/query/parser.py +619 -0
  49. affinity/cli/query/planner.py +430 -0
  50. affinity/cli/query/progress.py +270 -0
  51. affinity/cli/query/schema.py +439 -0
  52. affinity/cli/render.py +1589 -0
  53. affinity/cli/resolve.py +222 -0
  54. affinity/cli/resolvers.py +249 -0
  55. affinity/cli/results.py +308 -0
  56. affinity/cli/runner.py +218 -0
  57. affinity/cli/serialization.py +65 -0
  58. affinity/cli/session_cache.py +276 -0
  59. affinity/cli/types.py +70 -0
  60. affinity/client.py +771 -0
  61. affinity/clients/__init__.py +19 -0
  62. affinity/clients/http.py +3664 -0
  63. affinity/clients/pipeline.py +165 -0
  64. affinity/compare.py +501 -0
  65. affinity/downloads.py +114 -0
  66. affinity/exceptions.py +615 -0
  67. affinity/filters.py +1128 -0
  68. affinity/hooks.py +198 -0
  69. affinity/inbound_webhooks.py +302 -0
  70. affinity/models/__init__.py +163 -0
  71. affinity/models/entities.py +798 -0
  72. affinity/models/pagination.py +513 -0
  73. affinity/models/rate_limit_snapshot.py +48 -0
  74. affinity/models/secondary.py +413 -0
  75. affinity/models/types.py +663 -0
  76. affinity/policies.py +40 -0
  77. affinity/progress.py +22 -0
  78. affinity/py.typed +0 -0
  79. affinity/services/__init__.py +42 -0
  80. affinity/services/companies.py +1286 -0
  81. affinity/services/lists.py +1892 -0
  82. affinity/services/opportunities.py +1330 -0
  83. affinity/services/persons.py +1348 -0
  84. affinity/services/rate_limits.py +173 -0
  85. affinity/services/tasks.py +193 -0
  86. affinity/services/v1_only.py +2445 -0
  87. affinity/types.py +83 -0
  88. affinity_sdk-0.9.5.dist-info/METADATA +622 -0
  89. affinity_sdk-0.9.5.dist-info/RECORD +92 -0
  90. affinity_sdk-0.9.5.dist-info/WHEEL +4 -0
  91. affinity_sdk-0.9.5.dist-info/entry_points.txt +2 -0
  92. affinity_sdk-0.9.5.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,195 @@
1
+ from __future__ import annotations
2
+
3
+ import csv
4
+ import io
5
+ import logging
6
+ import re
7
+ import sys
8
+ from collections.abc import Iterable
9
+ from dataclasses import dataclass
10
+ from datetime import datetime
11
+ from pathlib import Path
12
+ from typing import Any
13
+
14
+ # Import to_cell from formatters (single source of truth)
15
+ # Re-exported here for backwards compatibility
16
+ from .formatters import to_cell
17
+
18
+ logger = logging.getLogger(__name__)
19
+
20
+ # Re-export to_cell for any external consumers
21
+ __all__ = ["to_cell", "CsvWriteResult", "write_csv", "write_csv_from_rows", "write_csv_to_stdout"]
22
+
23
+
24
+ @dataclass(frozen=True, slots=True)
25
+ class CsvWriteResult:
26
+ rows_written: int
27
+ bytes_written: int
28
+
29
+
30
# Runs of characters NOT allowed in a sanitized filename collapse to "_".
_FILENAME_SAFE = re.compile(r"[^A-Za-z0-9._-]+")


def sanitize_filename(name: str, *, max_len: int = 180) -> str:
    """Reduce *name* to a filesystem-safe filename.

    Runs of characters outside ``[A-Za-z0-9._-]`` are collapsed to a single
    underscore, leading/trailing separator characters are stripped, and the
    result is capped at *max_len* characters.

    Args:
        name: Arbitrary input string (e.g. an entity name).
        max_len: Maximum length of the returned filename.

    Returns:
        A non-empty safe filename; "file" when nothing usable remains.
    """
    cleaned = _FILENAME_SAFE.sub("_", name).strip("._- ")
    if len(cleaned) > max_len:
        # Re-strip after truncation: cutting mid-name can leave a trailing
        # "." or "_", and a trailing dot is invalid on Windows.
        cleaned = cleaned[:max_len].rstrip("._- ")
    if not cleaned:
        cleaned = "file"
    return cleaned
40
+
41
+
42
def write_csv(
    *,
    path: Path,
    rows: Iterable[dict[str, Any]],
    fieldnames: list[str],
    bom: bool,
) -> CsvWriteResult:
    """Write *rows* to *path* as CSV and report row/byte counts.

    Args:
        path: Output file; parent directories are created as needed.
        rows: Row dictionaries (keys not in *fieldnames* are ignored).
        fieldnames: Column names, in output order.
        bom: When True, prefix the file with a UTF-8 BOM (Excel-friendly).

    Returns:
        CsvWriteResult with the number of data rows and the file size.
    """
    path.parent.mkdir(parents=True, exist_ok=True)

    count = 0
    # newline="" lets the csv module control line endings itself.
    with path.open("w", newline="", encoding="utf-8-sig" if bom else "utf-8") as handle:
        writer = csv.DictWriter(handle, fieldnames=fieldnames, extrasaction="ignore")
        writer.writeheader()
        for record in rows:
            writer.writerow({key: to_cell(val) for key, val in record.items()})
            count += 1

    return CsvWriteResult(rows_written=count, bytes_written=path.stat().st_size)
62
+
63
+
64
def artifact_path(path: Path) -> tuple[str, bool]:
    """
    Resolve artifact path to relative or absolute string.

    Prefers a path relative to the current working directory and falls
    back to the absolute path when *path* lies outside it.

    Returns:
        Tuple of (path_string, is_relative)
    """
    try:
        rel = path.resolve().relative_to(Path.cwd().resolve())
        return str(rel), True
    except (ValueError, OSError):
        # ValueError: path is not under cwd; OSError: cwd lookup/resolution
        # failed. Narrowed from a bare ``except Exception`` so genuine
        # programming errors (e.g. TypeError) are no longer swallowed.
        return str(path.resolve()), False
76
+
77
+
78
def write_csv_from_rows(
    *,
    path: Path,
    rows: Iterable[dict[str, Any]],
    bom: bool = False,
) -> CsvWriteResult:
    """
    Write CSV from row dictionaries with auto-detected columns.

    Detects column names from first row. Handles empty row lists gracefully.

    Args:
        path: Output CSV file path
        rows: Iterable of dictionaries (must all have same keys)
        bom: Whether to write UTF-8 BOM for Excel compatibility

    Returns:
        CsvWriteResult with row/byte counts

    Example:
        >>> rows = [{"id": 1, "name": "Alice"}, {"id": 2, "name": "Bob"}]
        >>> write_csv_from_rows(path=Path("out.csv"), rows=rows)
        CsvWriteResult(rows_written=2, bytes_written=42)
    """
    rows_list = list(rows)
    if not rows_list:
        # Write an empty file (no headers - column names are unknown
        # without data). Use write_bytes rather than touch(): touch()
        # leaves stale content from a previous run in place, which would
        # contradict the reported bytes_written=0.
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_bytes(b"")
        return CsvWriteResult(rows_written=0, bytes_written=0)

    # Column order follows the first row's key order.
    return write_csv(
        path=path,
        rows=rows_list,
        fieldnames=list(rows_list[0].keys()),
        bom=bom,
    )
117
+
118
+
119
def write_csv_to_stdout(
    *,
    rows: Iterable[dict[str, Any]],
    fieldnames: list[str],
    bom: bool,
) -> int:
    """
    Write CSV data to stdout.

    Uses TextIOWrapper around stdout.buffer for proper UTF-8 encoding on all platforms.
    BOM is written when bom=True (useful for Excel compatibility when redirecting to file).

    Args:
        rows: Iterable of dictionaries to write
        fieldnames: Column names for CSV header
        bom: Whether to write UTF-8 BOM

    Returns:
        Number of rows written
    """
    encoding = "utf-8-sig" if bom else "utf-8"
    stream = io.TextIOWrapper(sys.stdout.buffer, encoding=encoding, newline="")

    rows_written = 0
    try:
        writer = csv.DictWriter(stream, fieldnames=fieldnames, extrasaction="ignore")
        writer.writeheader()
        for row in rows:
            writer.writerow({k: to_cell(v) for k, v in row.items()})
            rows_written += 1
    finally:
        # Always flush and detach, even when writing raises: if the wrapper
        # were garbage-collected while still attached, it would close
        # sys.stdout.buffer for the rest of the process.
        stream.flush()
        stream.detach()  # Don't close stdout.buffer
    return rows_written
152
+
153
+
154
def localize_iso_string(value: str) -> str:
    """
    Convert ISO datetime string from UTC to local time.

    Used for CSV output where human-readable local time is preferred.

    Args:
        value: ISO datetime string (e.g., "2024-01-01T05:00:00+00:00")

    Returns:
        Local time ISO string (e.g., "2024-01-01T00:00:00-05:00" for EST)
        Returns input unchanged if not a valid datetime string.
    """
    try:
        # Normalize a trailing "Z" for datetime.fromisoformat compatibility,
        # then shift to the local timezone.
        normalized = value.replace("Z", "+00:00")
        return datetime.fromisoformat(normalized).astimezone().isoformat()
    except (ValueError, AttributeError):
        # Expected for non-datetime fields; log at debug level only.
        logger.debug("Could not localize value as datetime: %r", value)
        return value  # Return unchanged if not a valid datetime
175
+
176
+
177
def localize_row_datetimes(
    row: dict[str, Any],
    datetime_fields: set[str],
) -> dict[str, Any]:
    """
    Localize datetime fields in a row dictionary for CSV output.

    Args:
        row: Dictionary with field values
        datetime_fields: Set of field names that contain datetime values

    Returns:
        New dictionary with datetime fields localized
    """
    # Build a fresh dict so the caller's row is never mutated; only string
    # values of the listed datetime fields are rewritten.
    return {
        key: localize_iso_string(val)
        if key in datetime_fields and isinstance(val, str)
        else val
        for key, val in row.items()
    }
@@ -0,0 +1,42 @@
1
+ """Date utilities for CLI commands."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from collections.abc import Iterator
6
+ from dataclasses import dataclass
7
+ from datetime import datetime, timedelta
8
+ from typing import TYPE_CHECKING
9
+
10
+ if TYPE_CHECKING:
11
+ from affinity.models.secondary import Interaction
12
+
13
# Default maximum chunk length, in days, for date-range splitting.
MAX_CHUNK_DAYS = 365


@dataclass
class ChunkedFetchResult:
    """Result from chunked interaction fetching."""

    # All interactions collected across the fetched chunks.
    interactions: list[Interaction]
    # How many date-range chunks were processed.
    chunks_processed: int


def chunk_date_range(
    start: datetime,
    end: datetime,
    max_days: int = MAX_CHUNK_DAYS,
) -> Iterator[tuple[datetime, datetime]]:
    """
    Split a date range into chunks of max_days.

    Yields (chunk_start, chunk_end) tuples; yields nothing when
    start >= end.

    Args:
        start: Start of the range.
        end: End of the range.
        max_days: Maximum chunk length in days; must be positive.

    Raises:
        ValueError: If max_days is not positive (it would never advance
            the cursor, looping forever on zero-length chunks).

    Note: Relies on API using exclusive end_time boundary.
    If an interaction has timestamp exactly at chunk boundary,
    it will appear in the later chunk (not both).
    """
    if max_days <= 0:
        raise ValueError(f"max_days must be positive, got {max_days}")
    current = start
    while current < end:
        chunk_end = min(current + timedelta(days=max_days), end)
        yield (current, chunk_end)
        current = chunk_end
@@ -0,0 +1,77 @@
1
+ """Command decorators for CLI metadata.
2
+
3
+ These decorators mark commands with metadata used by the JSON help generator
4
+ for MCP tools and automation.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ from collections.abc import Callable
10
+ from typing import TypeVar
11
+
12
+ from .click_compat import click
13
+
14
+ F = TypeVar("F", bound=Callable[..., object])
15
+
16
+
17
def destructive(cmd: click.Command) -> click.Command:
    """Mark a command as destructive (data loss possible).

    Destructive commands require explicit confirmation via --yes flag.

    Usage:
        @person_group.command(name="delete")
        @destructive
        @click.argument("person_id", type=int)
        def person_delete(person_id: int) -> None:
            ...
    """
    # Metadata flag consumed by the JSON help generator.
    setattr(cmd, "destructive", True)
    return cmd
31
+
32
+
33
def category(cat: str) -> Callable[[click.Command], click.Command]:
    """Tag command category ('read', 'write', or 'local').

    Categories:
        - read: Reads from Affinity API (safe, idempotent)
        - write: Modifies Affinity data (requires caution)
        - local: No API interaction (version, config, completion, etc.)

    Usage:
        @category("read")
        @person_group.command(name="get")
        def person_get(...) -> None:
            ...

    Args:
        cat: One of "read", "write", or "local"

    Raises:
        ValueError: If *cat* is not one of the known categories.
    """
    # Validate eagerly so a typo fails at import/definition time.
    if cat not in {"read", "write", "local"}:
        raise ValueError(f"category must be 'read', 'write', or 'local', got {cat!r}")

    def decorator(cmd: click.Command) -> click.Command:
        setattr(cmd, "category", cat)
        return cmd

    return decorator
58
+
59
+
60
def progress_capable(cmd: click.Command) -> click.Command:
    """Mark a command as supporting progress reporting.

    Commands marked with this decorator emit NDJSON progress to stderr
    when not connected to a TTY, enabling MCP tools to forward progress.

    Usage:
        @person_group.command(name="files-upload")
        @progress_capable
        @click.argument("person_id", type=int)
        @click.option("--file", required=True)
        def files_upload(person_id: int, file: str) -> None:
            ...

    Note: Decorator order (bottom-up): def → options → progress_capable → command
    """
    # Metadata flag consumed by the JSON help generator.
    setattr(cmd, "progress_capable", True)
    return cmd
affinity/cli/errors.py ADDED
@@ -0,0 +1,28 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import Any
4
+
5
+
6
class CLIError(Exception):
    """Structured CLI failure carrying an exit code and rendering metadata.

    All constructor arguments are kept as plain attributes so error
    formatters can emit structured output (exit code, hint, docs link)
    without parsing the message text.
    """

    def __init__(
        self,
        message: str,
        *,
        exit_code: int = 1,
        error_type: str = "error",
        details: dict[str, Any] | None = None,
        hint: str | None = None,
        docs_url: str | None = None,
        cause: Exception | None = None,
    ) -> None:
        # Pass the message to Exception so tracebacks display it too.
        super().__init__(message)
        self.message = message
        self.exit_code = exit_code
        self.error_type = error_type
        self.details = details
        self.hint = hint
        self.docs_url = docs_url
        self.cause = cause

    def __str__(self) -> str:  # pragma: no cover
        return self.message
@@ -0,0 +1,355 @@
1
+ """Utilities for field name resolution and field metadata management.
2
+
3
+ This module provides shared helpers for resolving human-readable field names
4
+ to field IDs across person/company/opportunity/list-entry commands.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ from typing import TYPE_CHECKING, Any, Literal, cast
10
+
11
+ from .errors import CLIError
12
+
13
+ if TYPE_CHECKING:
14
+ from affinity.models.entities import FieldMetadata
15
+
16
+
17
+ EntityType = Literal["person", "company", "opportunity", "list-entry"]
18
+
19
+
20
def fetch_field_metadata(
    *,
    client: Any,
    entity_type: EntityType,
    list_id: int | None = None,
) -> list[FieldMetadata]:
    """Fetch field metadata for an entity type.

    Args:
        client: The Affinity client instance.
        entity_type: Type of entity ("person", "company", "opportunity", "list-entry").
        list_id: Required for opportunity and list-entry entity types.

    Returns:
        List of FieldMetadata objects.

    Raises:
        CLIError: If list_id is required but not provided.
    """
    # Imported lazily to avoid a circular import at module load time.
    from affinity.models.entities import FieldMetadata as FM

    if entity_type == "person":
        return cast(list[FM], client.persons.get_fields())

    if entity_type == "company":
        return cast(list[FM], client.companies.get_fields())

    if entity_type in ("opportunity", "list-entry"):
        # Opportunity/list-entry fields are scoped to a specific list.
        if list_id is None:
            raise CLIError(
                f"list_id is required for {entity_type} field metadata.",
                exit_code=2,
                error_type="internal_error",
            )
        from affinity.types import ListId

        return cast(list[FM], client.lists.get_fields(ListId(list_id)))

    raise CLIError(
        f"Unknown entity type: {entity_type}",
        exit_code=2,
        error_type="internal_error",
    )
61
+
62
+
63
def build_field_id_to_name_map(fields: list[FieldMetadata]) -> dict[str, str]:
    """Build a mapping from field ID to field name.

    Args:
        fields: List of FieldMetadata objects.

    Returns:
        Dictionary mapping field_id -> field_name (empty string for
        fields without a name).
    """
    return {
        str(field.id): (str(field.name) if field.name else "")
        for field in fields
    }
78
+
79
+
80
def build_field_name_to_id_map(fields: list[FieldMetadata]) -> dict[str, list[str]]:
    """Build a mapping from lowercase field name to field IDs.

    Multiple fields can have the same name (case-insensitive), so this returns
    a list of field IDs for each name.

    Args:
        fields: List of FieldMetadata objects.

    Returns:
        Dictionary mapping lowercase_name -> [field_id, ...].
    """
    mapping: dict[str, list[str]] = {}
    for item in fields:
        label = str(item.name) if item.name else ""
        if not label:
            # Nameless fields cannot be looked up by name; skip them.
            continue
        mapping.setdefault(label.lower(), []).append(str(item.id))
    return mapping
99
+
100
+
101
class FieldResolver:
    """Case-insensitive resolver from human field names to field IDs.

    Wraps the field metadata for one entity type and answers "which field
    ID does this name or ID string refer to?", raising structured CLI
    errors for unknown or ambiguous inputs.
    """

    def __init__(self, fields: list[FieldMetadata]) -> None:
        """Build the lookup tables from the given field metadata.

        Args:
            fields: List of FieldMetadata objects.
        """
        self._fields = fields
        # field_id -> display name ("" for unnamed fields).
        self._by_id = build_field_id_to_name_map(fields)
        # lowercase name -> [field_id, ...]; names may collide.
        self._by_name = build_field_name_to_id_map(fields)

    @property
    def available_names(self) -> list[str]:
        """Unique field names (deduped case-insensitively), sorted for display."""
        unique: list[str] = []
        seen: set[str] = set()
        for item in self._fields:
            label = str(item.name) if item.name else ""
            if label and label.lower() not in seen:
                seen.add(label.lower())
                unique.append(label)
        return sorted(unique, key=str.lower)

    def _available_hint(self) -> str:
        """Render the 'Available fields: ...' hint used in error messages."""
        names = self.available_names
        listed = ", ".join(names[:10])
        more = "..." if len(names) > 10 else ""
        return f"Available fields: {listed}{more}"

    def resolve_field_name_or_id(
        self,
        value: str,
        *,
        context: str = "field",
    ) -> str:
        """Resolve a single field name or explicit field ID to a field ID.

        Values starting with "field-" are treated as IDs and validated;
        anything else is matched against field names case-insensitively.

        Args:
            value: Field name or field ID (e.g., "Phone" or "field-260415").
            context: Noun used in error messages (e.g., "list-entry field").

        Returns:
            The resolved field ID.

        Raises:
            CLIError: If the field is not found or the name is ambiguous.
        """
        value = value.strip()
        if not value:
            raise CLIError(
                f"Empty {context} name.",
                exit_code=2,
                error_type="usage_error",
            )

        if value.startswith("field-"):
            # Explicit ID: accept only if it exists in the metadata.
            if value in self._by_id:
                return value
            raise CLIError(
                f"Field ID '{value}' not found.",
                exit_code=2,
                error_type="not_found",
                hint=self._available_hint(),
            )

        matches = self._by_name.get(value.lower(), [])
        if len(matches) == 1:
            return matches[0]

        if matches:
            # More than one field shares this name; the caller must pick an ID.
            detail_rows: list[dict[str, Any]] = [
                {"fieldId": fid, "name": self._by_id.get(fid, "")}
                for fid in matches[:10]
            ]
            raise CLIError(
                f"Ambiguous {context} name '{value}' matches {len(matches)} fields.",
                exit_code=2,
                error_type="ambiguous_resolution",
                details={"name": value, "matches": detail_rows},
                hint="Use --field-id with the specific field ID instead.",
            )

        raise CLIError(
            f"Field '{value}' not found.",
            exit_code=2,
            error_type="not_found",
            hint=self._available_hint(),
        )

    def resolve_all_field_names_or_ids(
        self,
        updates: dict[str, Any],
        *,
        context: str = "field",
    ) -> tuple[dict[str, Any], list[str]]:
        """Resolve every key of *updates* to a field ID, all-or-nothing.

        Every key is validated before reporting, so one error message lists
        all invalid names instead of failing one at a time.

        Args:
            updates: Dictionary of field_name_or_id -> value.
            context: Noun used in error messages.

        Returns:
            Tuple of (resolved_updates, errors). resolved_updates maps
            field_id -> value; the error list is always empty because any
            invalid key raises instead.

        Raises:
            CLIError: If any field names are invalid (lists all of them).
        """
        resolved: dict[str, Any] = {}
        invalid: list[str] = []

        for raw_key, value in updates.items():
            key = raw_key.strip()
            if not key:
                # Blank keys are silently ignored.
                continue

            if key.startswith("field-"):
                # Explicit field ID.
                if key in self._by_id:
                    resolved[key] = value
                else:
                    invalid.append(key)
                continue

            matches = self._by_name.get(key.lower(), [])
            if len(matches) == 1:
                resolved[matches[0]] = value
            elif matches:
                # Ambiguous names count as invalid in batch mode.
                invalid.append(f"{key} (ambiguous: {', '.join(matches[:3])})")
            else:
                invalid.append(key)

        if invalid:
            raise CLIError(
                f"Invalid {context}s: {', '.join(repr(n) for n in invalid)}.",
                exit_code=2,
                error_type="not_found",
                hint=self._available_hint(),
            )

        return resolved, []

    def get_field_name(self, field_id: str) -> str:
        """Return the display name for *field_id*, or "" if unknown."""
        return self._by_id.get(field_id, "")
273
+
274
+
275
def validate_field_option_mutual_exclusion(
    *,
    field: str | None,
    field_id: str | None,
) -> None:
    """Validate that exactly one of --field or --field-id is provided.

    Args:
        field: The --field option value.
        field_id: The --field-id option value.

    Raises:
        CLIError: If neither or both options are provided.
    """
    provided = sum(option is not None for option in (field, field_id))
    if provided == 0:
        raise CLIError(
            "Must specify either --field or --field-id.",
            exit_code=2,
            error_type="usage_error",
        )
    if provided == 2:
        raise CLIError(
            "Use only one of --field or --field-id.",
            exit_code=2,
            error_type="usage_error",
        )
301
+
302
+
303
def find_field_values_for_field(
    *,
    field_values: list[dict[str, Any]],
    field_id: str,
) -> list[dict[str, Any]]:
    """Find all field values matching a specific field ID.

    Args:
        field_values: List of field value dicts from the API.
        field_id: The field ID to match.

    Returns:
        List of matching field value dicts.
    """
    # Payloads may carry the id under camelCase or snake_case; compare as
    # strings since ids can arrive as ints.
    return [
        fv
        for fv in field_values
        if str(fv.get("fieldId") or fv.get("field_id")) == field_id
    ]
323
+
324
+
325
def format_value_for_comparison(value: Any) -> str:
    """Format a field value for string comparison.

    Non-string values are serialized to their string representation.

    Args:
        value: The field value.

    Returns:
        String representation for comparison.
    """
    if value is None:
        return ""
    if isinstance(value, str):
        return value
    # bool must be tested before int/float: bool is a subclass of int.
    if isinstance(value, bool):
        return str(value).lower()
    if isinstance(value, (int, float)):
        return str(value)
    if isinstance(value, dict):
        # Typed values look like {"type": ..., "data": ...}: compare the payload.
        payload = value.get("data")
        if payload is not None:
            return format_value_for_comparison(payload)
        label = value.get("text") or value.get("name")
        if label is not None:
            return str(label)
        # Unrecognized dict shape: fall through to the generic str() below.
    if isinstance(value, list):
        # Lists compare as a comma-joined rendering of their items.
        return ", ".join(format_value_for_comparison(item) for item in value)
    return str(value)