affinity-sdk 0.9.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- affinity/__init__.py +139 -0
- affinity/cli/__init__.py +7 -0
- affinity/cli/click_compat.py +27 -0
- affinity/cli/commands/__init__.py +1 -0
- affinity/cli/commands/_entity_files_dump.py +219 -0
- affinity/cli/commands/_list_entry_fields.py +41 -0
- affinity/cli/commands/_v1_parsing.py +77 -0
- affinity/cli/commands/company_cmds.py +2139 -0
- affinity/cli/commands/completion_cmd.py +33 -0
- affinity/cli/commands/config_cmds.py +540 -0
- affinity/cli/commands/entry_cmds.py +33 -0
- affinity/cli/commands/field_cmds.py +413 -0
- affinity/cli/commands/interaction_cmds.py +875 -0
- affinity/cli/commands/list_cmds.py +3152 -0
- affinity/cli/commands/note_cmds.py +433 -0
- affinity/cli/commands/opportunity_cmds.py +1174 -0
- affinity/cli/commands/person_cmds.py +1980 -0
- affinity/cli/commands/query_cmd.py +444 -0
- affinity/cli/commands/relationship_strength_cmds.py +62 -0
- affinity/cli/commands/reminder_cmds.py +595 -0
- affinity/cli/commands/resolve_url_cmd.py +127 -0
- affinity/cli/commands/session_cmds.py +84 -0
- affinity/cli/commands/task_cmds.py +110 -0
- affinity/cli/commands/version_cmd.py +29 -0
- affinity/cli/commands/whoami_cmd.py +36 -0
- affinity/cli/config.py +108 -0
- affinity/cli/context.py +749 -0
- affinity/cli/csv_utils.py +195 -0
- affinity/cli/date_utils.py +42 -0
- affinity/cli/decorators.py +77 -0
- affinity/cli/errors.py +28 -0
- affinity/cli/field_utils.py +355 -0
- affinity/cli/formatters.py +551 -0
- affinity/cli/help_json.py +283 -0
- affinity/cli/logging.py +100 -0
- affinity/cli/main.py +261 -0
- affinity/cli/options.py +53 -0
- affinity/cli/paths.py +32 -0
- affinity/cli/progress.py +183 -0
- affinity/cli/query/__init__.py +163 -0
- affinity/cli/query/aggregates.py +357 -0
- affinity/cli/query/dates.py +194 -0
- affinity/cli/query/exceptions.py +147 -0
- affinity/cli/query/executor.py +1236 -0
- affinity/cli/query/filters.py +248 -0
- affinity/cli/query/models.py +333 -0
- affinity/cli/query/output.py +331 -0
- affinity/cli/query/parser.py +619 -0
- affinity/cli/query/planner.py +430 -0
- affinity/cli/query/progress.py +270 -0
- affinity/cli/query/schema.py +439 -0
- affinity/cli/render.py +1589 -0
- affinity/cli/resolve.py +222 -0
- affinity/cli/resolvers.py +249 -0
- affinity/cli/results.py +308 -0
- affinity/cli/runner.py +218 -0
- affinity/cli/serialization.py +65 -0
- affinity/cli/session_cache.py +276 -0
- affinity/cli/types.py +70 -0
- affinity/client.py +771 -0
- affinity/clients/__init__.py +19 -0
- affinity/clients/http.py +3664 -0
- affinity/clients/pipeline.py +165 -0
- affinity/compare.py +501 -0
- affinity/downloads.py +114 -0
- affinity/exceptions.py +615 -0
- affinity/filters.py +1128 -0
- affinity/hooks.py +198 -0
- affinity/inbound_webhooks.py +302 -0
- affinity/models/__init__.py +163 -0
- affinity/models/entities.py +798 -0
- affinity/models/pagination.py +513 -0
- affinity/models/rate_limit_snapshot.py +48 -0
- affinity/models/secondary.py +413 -0
- affinity/models/types.py +663 -0
- affinity/policies.py +40 -0
- affinity/progress.py +22 -0
- affinity/py.typed +0 -0
- affinity/services/__init__.py +42 -0
- affinity/services/companies.py +1286 -0
- affinity/services/lists.py +1892 -0
- affinity/services/opportunities.py +1330 -0
- affinity/services/persons.py +1348 -0
- affinity/services/rate_limits.py +173 -0
- affinity/services/tasks.py +193 -0
- affinity/services/v1_only.py +2445 -0
- affinity/types.py +83 -0
- affinity_sdk-0.9.5.dist-info/METADATA +622 -0
- affinity_sdk-0.9.5.dist-info/RECORD +92 -0
- affinity_sdk-0.9.5.dist-info/WHEEL +4 -0
- affinity_sdk-0.9.5.dist-info/entry_points.txt +2 -0
- affinity_sdk-0.9.5.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,1892 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Lists and List Entries service.
|
|
3
|
+
|
|
4
|
+
Provides operations for managing lists (spreadsheets) and their entries (rows).
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import builtins
|
|
10
|
+
import re
|
|
11
|
+
import warnings
|
|
12
|
+
from collections.abc import AsyncIterator, Callable, Iterator, Sequence
|
|
13
|
+
from typing import TYPE_CHECKING, Any, TypeVar
|
|
14
|
+
from urllib.parse import urlsplit
|
|
15
|
+
|
|
16
|
+
from pydantic import BaseModel
|
|
17
|
+
from pydantic import ValidationError as PydanticValidationError
|
|
18
|
+
|
|
19
|
+
from ..compare import normalize_value
|
|
20
|
+
from ..exceptions import AffinityError, FilterParseError
|
|
21
|
+
from ..filters import FilterExpression
|
|
22
|
+
from ..filters import parse as parse_filter
|
|
23
|
+
from ..models.entities import (
|
|
24
|
+
AffinityList,
|
|
25
|
+
FieldMetadata,
|
|
26
|
+
FieldValues,
|
|
27
|
+
ListCreate,
|
|
28
|
+
ListEntry,
|
|
29
|
+
ListEntryWithEntity,
|
|
30
|
+
SavedView,
|
|
31
|
+
)
|
|
32
|
+
from ..models.pagination import (
|
|
33
|
+
AsyncPageIterator,
|
|
34
|
+
BatchOperationResponse,
|
|
35
|
+
FilterStats,
|
|
36
|
+
PageIterator,
|
|
37
|
+
PaginatedResponse,
|
|
38
|
+
PaginationInfo,
|
|
39
|
+
)
|
|
40
|
+
from ..models.types import (
|
|
41
|
+
AnyFieldId,
|
|
42
|
+
CompanyId,
|
|
43
|
+
FieldType,
|
|
44
|
+
ListEntryId,
|
|
45
|
+
ListId,
|
|
46
|
+
ListType,
|
|
47
|
+
OpportunityId,
|
|
48
|
+
PersonId,
|
|
49
|
+
SavedViewId,
|
|
50
|
+
)
|
|
51
|
+
|
|
52
|
+
if TYPE_CHECKING:
|
|
53
|
+
from ..clients.http import AsyncHTTPClient, HTTPClient
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
# Matches V2 cursor URLs of the form ".../lists/<id>/saved-views[/...]" so a
# resumed saved-views cursor can be checked against the requested list id.
_LIST_SAVED_VIEWS_CURSOR_RE = re.compile(r"/lists/(?P<list_id>\d+)/saved-views(?:/|$)")

# Generic pydantic model type parameter used by _safe_model_validate().
T = TypeVar("T", bound=BaseModel)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def _parse_filter_with_hint(filter_string: str) -> FilterExpression:
    """Parse ``filter_string`` into a FilterExpression, enriching failures.

    Raises:
        FilterParseError: If the filter cannot be parsed; the message carries
            a hint about quoting multi-word values.
    """
    try:
        parsed = parse_filter(filter_string)
    except ValueError as err:
        # Augment the raw parser error with the most common user mistake.
        message = (
            f"Invalid filter: {err}\n"
            "Hint: Multi-word values must be quoted. "
            "Example: --filter 'Status=\"Intro Meeting\"'"
        )
        raise FilterParseError(message) from err
    return parsed
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def _safe_model_validate(model: type[T], payload: Any, *, context: str | None = None) -> T:
    """Validate ``payload`` against ``model``, wrapping pydantic failures.

    A pydantic ValidationError is converted into an AffinityError that carries
    the offending payload, labelled by ``context`` (or the model's name).
    """
    label = context or getattr(model, "__name__", "response")
    try:
        return model.model_validate(payload)
    except PydanticValidationError as exc:
        raise AffinityError(
            f"Invalid API response while parsing {label}.",
            response_body=payload,
        ) from exc
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def _saved_views_list_id_from_cursor(cursor: str) -> int | None:
    """Extract the list id embedded in a saved-views cursor URL, if any.

    Returns None when the cursor cannot be parsed as a URL, does not contain
    a saved-views path, or the captured id is not an integer.
    """
    try:
        path = urlsplit(cursor).path or ""
    except Exception:
        # Deliberately broad: an unparseable cursor just means "unknown".
        return None
    match = _LIST_SAVED_VIEWS_CURSOR_RE.search(path)
    if match:
        try:
            return int(match.group("list_id"))
        except ValueError:
            pass
    return None
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
class ListService:
|
|
106
|
+
"""
|
|
107
|
+
Service for managing lists.
|
|
108
|
+
|
|
109
|
+
Lists are spreadsheet-like collections of people, companies, or opportunities.
|
|
110
|
+
"""
|
|
111
|
+
|
|
112
|
+
    def __init__(self, client: HTTPClient):
        """Initialize the service with a configured HTTP client."""
        self._client = client
        # Memoizes resolve() lookups: (lowercased name, list type) -> match or None.
        self._resolve_cache: dict[tuple[str, ListType | None], AffinityList | None] = {}
|
|
115
|
+
|
|
116
|
+
def entries(self, list_id: ListId) -> ListEntryService:
|
|
117
|
+
"""
|
|
118
|
+
Get a ListEntryService for a specific list.
|
|
119
|
+
|
|
120
|
+
This is the explicit path for retrieving "full row" data via list entries.
|
|
121
|
+
"""
|
|
122
|
+
return ListEntryService(self._client, list_id)
|
|
123
|
+
|
|
124
|
+
# =========================================================================
|
|
125
|
+
# List Operations (V2 for read, V1 for write)
|
|
126
|
+
# =========================================================================
|
|
127
|
+
|
|
128
|
+
def list(
|
|
129
|
+
self,
|
|
130
|
+
*,
|
|
131
|
+
limit: int | None = None,
|
|
132
|
+
cursor: str | None = None,
|
|
133
|
+
) -> PaginatedResponse[AffinityList]:
|
|
134
|
+
"""
|
|
135
|
+
Get all lists accessible to you.
|
|
136
|
+
|
|
137
|
+
Args:
|
|
138
|
+
limit: Maximum results per page.
|
|
139
|
+
cursor: Cursor to resume pagination (opaque; obtained from prior responses).
|
|
140
|
+
|
|
141
|
+
Returns:
|
|
142
|
+
Paginated list of lists (without field metadata)
|
|
143
|
+
"""
|
|
144
|
+
if cursor is not None:
|
|
145
|
+
if limit is not None:
|
|
146
|
+
raise ValueError(
|
|
147
|
+
"Cannot combine 'cursor' with other parameters; cursor encodes all query "
|
|
148
|
+
"context. Start a new pagination sequence without a cursor to change "
|
|
149
|
+
"parameters."
|
|
150
|
+
)
|
|
151
|
+
data = self._client.get_url(cursor)
|
|
152
|
+
else:
|
|
153
|
+
if limit is not None and limit <= 0:
|
|
154
|
+
raise ValueError("'limit' must be > 0")
|
|
155
|
+
params: dict[str, Any] = {}
|
|
156
|
+
if limit is not None:
|
|
157
|
+
params["limit"] = limit
|
|
158
|
+
data = self._client.get("/lists", params=params or None)
|
|
159
|
+
|
|
160
|
+
return PaginatedResponse[AffinityList](
|
|
161
|
+
data=[
|
|
162
|
+
_safe_model_validate(AffinityList, list_item) for list_item in data.get("data", [])
|
|
163
|
+
],
|
|
164
|
+
pagination=_safe_model_validate(PaginationInfo, data.get("pagination", {})),
|
|
165
|
+
)
|
|
166
|
+
|
|
167
|
+
def pages(
|
|
168
|
+
self,
|
|
169
|
+
*,
|
|
170
|
+
limit: int | None = None,
|
|
171
|
+
cursor: str | None = None,
|
|
172
|
+
) -> Iterator[PaginatedResponse[AffinityList]]:
|
|
173
|
+
"""
|
|
174
|
+
Iterate list pages (not items), yielding `PaginatedResponse[AffinityList]`.
|
|
175
|
+
|
|
176
|
+
This is useful for ETL scripts that want checkpoint/resume via `page.next_cursor`.
|
|
177
|
+
"""
|
|
178
|
+
if cursor is not None and limit is not None:
|
|
179
|
+
raise ValueError(
|
|
180
|
+
"Cannot combine 'cursor' with other parameters; cursor encodes all query context. "
|
|
181
|
+
"Start a new pagination sequence without a cursor to change parameters."
|
|
182
|
+
)
|
|
183
|
+
requested_cursor = cursor
|
|
184
|
+
page = self.list(limit=limit) if cursor is None else self.list(cursor=cursor)
|
|
185
|
+
while True:
|
|
186
|
+
yield page
|
|
187
|
+
if not page.has_next:
|
|
188
|
+
return
|
|
189
|
+
next_cursor = page.next_cursor
|
|
190
|
+
if next_cursor is None or next_cursor == requested_cursor:
|
|
191
|
+
return
|
|
192
|
+
requested_cursor = next_cursor
|
|
193
|
+
page = self.list(cursor=next_cursor)
|
|
194
|
+
|
|
195
|
+
def all(self) -> Iterator[AffinityList]:
|
|
196
|
+
"""Iterate through all accessible lists."""
|
|
197
|
+
|
|
198
|
+
def fetch_page(next_url: str | None) -> PaginatedResponse[AffinityList]:
|
|
199
|
+
if next_url:
|
|
200
|
+
data = self._client.get_url(next_url)
|
|
201
|
+
return PaginatedResponse[AffinityList](
|
|
202
|
+
data=[
|
|
203
|
+
_safe_model_validate(AffinityList, list_item)
|
|
204
|
+
for list_item in data.get("data", [])
|
|
205
|
+
],
|
|
206
|
+
pagination=_safe_model_validate(PaginationInfo, data.get("pagination", {})),
|
|
207
|
+
)
|
|
208
|
+
return self.list()
|
|
209
|
+
|
|
210
|
+
return PageIterator(fetch_page)
|
|
211
|
+
|
|
212
|
+
    def iter(self) -> Iterator[AffinityList]:
        """
        Auto-paginate all lists.

        Alias for `all()` (FR-006 public contract); returns the same iterator
        object that `all()` produces.
        """
        return self.all()
|
|
219
|
+
|
|
220
|
+
def get(self, list_id: ListId) -> AffinityList:
|
|
221
|
+
"""
|
|
222
|
+
Get a single list by ID.
|
|
223
|
+
|
|
224
|
+
Includes field metadata for the list.
|
|
225
|
+
|
|
226
|
+
Note: Uses V1 API because V2's listSize field is undocumented and
|
|
227
|
+
returns incorrect values (often 0 for non-empty lists).
|
|
228
|
+
"""
|
|
229
|
+
data = self._client.get(f"/lists/{list_id}", v1=True)
|
|
230
|
+
return _safe_model_validate(AffinityList, data)
|
|
231
|
+
|
|
232
|
+
def resolve(
|
|
233
|
+
self,
|
|
234
|
+
*,
|
|
235
|
+
name: str,
|
|
236
|
+
list_type: ListType | None = None,
|
|
237
|
+
) -> AffinityList | None:
|
|
238
|
+
"""
|
|
239
|
+
Find a single list by name (optionally filtered by type).
|
|
240
|
+
|
|
241
|
+
Notes:
|
|
242
|
+
- This iterates list pages client-side (the API does not expose a list-search endpoint).
|
|
243
|
+
- Results are cached in-memory on this service instance. If you call this frequently,
|
|
244
|
+
reuse the client, or persist the resolved `ListId` in your own configuration.
|
|
245
|
+
|
|
246
|
+
If multiple matches exist, returns the first match in server-provided order.
|
|
247
|
+
"""
|
|
248
|
+
key = (name.lower(), list_type)
|
|
249
|
+
if key in self._resolve_cache:
|
|
250
|
+
return self._resolve_cache[key]
|
|
251
|
+
|
|
252
|
+
for item in self.all():
|
|
253
|
+
if item.name.lower() == name.lower() and (list_type is None or item.type == list_type):
|
|
254
|
+
self._resolve_cache[key] = item
|
|
255
|
+
return item
|
|
256
|
+
|
|
257
|
+
self._resolve_cache[key] = None
|
|
258
|
+
return None
|
|
259
|
+
|
|
260
|
+
def resolve_all(
|
|
261
|
+
self,
|
|
262
|
+
*,
|
|
263
|
+
name: str,
|
|
264
|
+
list_type: ListType | None = None,
|
|
265
|
+
) -> builtins.list[AffinityList]:
|
|
266
|
+
"""
|
|
267
|
+
Find all lists matching a name (optionally filtered by type).
|
|
268
|
+
|
|
269
|
+
Notes:
|
|
270
|
+
- This iterates list pages client-side (the API does not expose a list-search endpoint).
|
|
271
|
+
- Unlike `resolve()`, this does not cache results.
|
|
272
|
+
"""
|
|
273
|
+
matches: builtins.list[AffinityList] = []
|
|
274
|
+
name_lower = name.lower()
|
|
275
|
+
for item in self.all():
|
|
276
|
+
if item.name.lower() != name_lower:
|
|
277
|
+
continue
|
|
278
|
+
if list_type is not None and item.type != list_type:
|
|
279
|
+
continue
|
|
280
|
+
matches.append(item)
|
|
281
|
+
return matches
|
|
282
|
+
|
|
283
|
+
def create(self, data: ListCreate) -> AffinityList:
|
|
284
|
+
"""
|
|
285
|
+
Create a new list.
|
|
286
|
+
|
|
287
|
+
Uses V1 API.
|
|
288
|
+
"""
|
|
289
|
+
payload = data.model_dump(mode="json", exclude_none=True, exclude_unset=True)
|
|
290
|
+
if not data.additional_permissions:
|
|
291
|
+
payload.pop("additional_permissions", None)
|
|
292
|
+
|
|
293
|
+
result = self._client.post("/lists", json=payload, v1=True)
|
|
294
|
+
|
|
295
|
+
# Invalidate cache
|
|
296
|
+
if self._client.cache:
|
|
297
|
+
self._client.cache.invalidate_prefix("list")
|
|
298
|
+
self._resolve_cache.clear()
|
|
299
|
+
|
|
300
|
+
return _safe_model_validate(AffinityList, result)
|
|
301
|
+
|
|
302
|
+
# =========================================================================
|
|
303
|
+
# Field Operations
|
|
304
|
+
# =========================================================================
|
|
305
|
+
|
|
306
|
+
def get_fields(
|
|
307
|
+
self,
|
|
308
|
+
list_id: ListId,
|
|
309
|
+
*,
|
|
310
|
+
field_types: Sequence[FieldType] | None = None,
|
|
311
|
+
) -> builtins.list[FieldMetadata]:
|
|
312
|
+
"""
|
|
313
|
+
Get fields (columns) for a list.
|
|
314
|
+
|
|
315
|
+
Includes list-specific, global, enriched, and relationship intelligence fields.
|
|
316
|
+
Cached for performance.
|
|
317
|
+
"""
|
|
318
|
+
params: dict[str, Any] = {}
|
|
319
|
+
if field_types:
|
|
320
|
+
params["fieldTypes"] = [field_type.value for field_type in field_types]
|
|
321
|
+
|
|
322
|
+
data = self._client.get(
|
|
323
|
+
f"/lists/{list_id}/fields",
|
|
324
|
+
params=params or None,
|
|
325
|
+
cache_key=f"list_{list_id}_fields:{','.join(field_types or [])}",
|
|
326
|
+
cache_ttl=300,
|
|
327
|
+
)
|
|
328
|
+
|
|
329
|
+
return [_safe_model_validate(FieldMetadata, f) for f in data.get("data", [])]
|
|
330
|
+
|
|
331
|
+
# =========================================================================
|
|
332
|
+
# Saved View Operations
|
|
333
|
+
# =========================================================================
|
|
334
|
+
|
|
335
|
+
def get_saved_views(
|
|
336
|
+
self,
|
|
337
|
+
list_id: ListId,
|
|
338
|
+
*,
|
|
339
|
+
limit: int | None = None,
|
|
340
|
+
cursor: str | None = None,
|
|
341
|
+
) -> PaginatedResponse[SavedView]:
|
|
342
|
+
"""
|
|
343
|
+
Get saved views for a list.
|
|
344
|
+
|
|
345
|
+
Args:
|
|
346
|
+
list_id: List id for the initial request.
|
|
347
|
+
limit: Maximum results per page.
|
|
348
|
+
cursor: Cursor to resume pagination (opaque; obtained from prior responses).
|
|
349
|
+
"""
|
|
350
|
+
if cursor is not None:
|
|
351
|
+
if limit is not None:
|
|
352
|
+
raise ValueError(
|
|
353
|
+
"Cannot combine 'cursor' with other parameters; cursor encodes all query "
|
|
354
|
+
"context. Start a new pagination sequence without a cursor to change "
|
|
355
|
+
"parameters."
|
|
356
|
+
)
|
|
357
|
+
cursor_list_id = _saved_views_list_id_from_cursor(cursor)
|
|
358
|
+
if cursor_list_id is not None and int(list_id) != cursor_list_id:
|
|
359
|
+
raise ValueError(
|
|
360
|
+
f"Cursor does not match list_id: cursor is for list {cursor_list_id}, "
|
|
361
|
+
f"requested list_id is {int(list_id)}"
|
|
362
|
+
)
|
|
363
|
+
data = self._client.get_url(cursor)
|
|
364
|
+
else:
|
|
365
|
+
if limit is not None and limit <= 0:
|
|
366
|
+
raise ValueError("'limit' must be > 0")
|
|
367
|
+
params: dict[str, Any] = {}
|
|
368
|
+
if limit is not None:
|
|
369
|
+
params["limit"] = limit
|
|
370
|
+
data = self._client.get(f"/lists/{list_id}/saved-views", params=params or None)
|
|
371
|
+
|
|
372
|
+
return PaginatedResponse[SavedView](
|
|
373
|
+
data=[_safe_model_validate(SavedView, v) for v in data.get("data", [])],
|
|
374
|
+
pagination=_safe_model_validate(PaginationInfo, data.get("pagination", {})),
|
|
375
|
+
)
|
|
376
|
+
|
|
377
|
+
def saved_views_pages(
|
|
378
|
+
self,
|
|
379
|
+
list_id: ListId,
|
|
380
|
+
*,
|
|
381
|
+
limit: int | None = None,
|
|
382
|
+
cursor: str | None = None,
|
|
383
|
+
) -> Iterator[PaginatedResponse[SavedView]]:
|
|
384
|
+
"""Iterate saved view pages, yielding `PaginatedResponse[SavedView]`."""
|
|
385
|
+
if cursor is not None and limit is not None:
|
|
386
|
+
raise ValueError(
|
|
387
|
+
"Cannot combine 'cursor' with other parameters; cursor encodes all query context. "
|
|
388
|
+
"Start a new pagination sequence without a cursor to change parameters."
|
|
389
|
+
)
|
|
390
|
+
requested_cursor = cursor
|
|
391
|
+
page = (
|
|
392
|
+
self.get_saved_views(list_id, limit=limit)
|
|
393
|
+
if cursor is None
|
|
394
|
+
else self.get_saved_views(list_id, cursor=cursor)
|
|
395
|
+
)
|
|
396
|
+
while True:
|
|
397
|
+
yield page
|
|
398
|
+
if not page.has_next:
|
|
399
|
+
return
|
|
400
|
+
next_cursor = page.next_cursor
|
|
401
|
+
if next_cursor is None or next_cursor == requested_cursor:
|
|
402
|
+
return
|
|
403
|
+
requested_cursor = next_cursor
|
|
404
|
+
page = self.get_saved_views(list_id, cursor=next_cursor)
|
|
405
|
+
|
|
406
|
+
def saved_views_all(self, list_id: ListId) -> Iterator[SavedView]:
|
|
407
|
+
"""Iterate all saved views for a list."""
|
|
408
|
+
for page in self.saved_views_pages(list_id):
|
|
409
|
+
yield from page.data
|
|
410
|
+
|
|
411
|
+
def get_saved_view(self, list_id: ListId, view_id: SavedViewId) -> SavedView:
|
|
412
|
+
"""Get a single saved view."""
|
|
413
|
+
data = self._client.get(f"/lists/{list_id}/saved-views/{view_id}")
|
|
414
|
+
return _safe_model_validate(SavedView, data)
|
|
415
|
+
|
|
416
|
+
|
|
417
|
+
def _entry_to_filter_dict(entry: ListEntryWithEntity) -> dict[str, Any]:
|
|
418
|
+
"""
|
|
419
|
+
Convert a ListEntryWithEntity to a dict for client-side filter matching.
|
|
420
|
+
|
|
421
|
+
Extracts field values by name from the entity's fields_raw (V2 API format).
|
|
422
|
+
This allows FilterExpression.matches() to evaluate against field values.
|
|
423
|
+
|
|
424
|
+
Uses normalize_value() from compare.py to extract text values from
|
|
425
|
+
dropdown dicts and multi-select arrays - single source of truth for normalization.
|
|
426
|
+
"""
|
|
427
|
+
result: dict[str, Any] = {}
|
|
428
|
+
|
|
429
|
+
# Extract field values from entity.fields_raw (V2 API format)
|
|
430
|
+
if entry.entity is not None:
|
|
431
|
+
fields_raw = getattr(entry.entity, "fields_raw", None)
|
|
432
|
+
if isinstance(fields_raw, builtins.list):
|
|
433
|
+
for field_obj in fields_raw:
|
|
434
|
+
if isinstance(field_obj, dict):
|
|
435
|
+
field_name = field_obj.get("name")
|
|
436
|
+
if field_name:
|
|
437
|
+
value_wrapper = field_obj.get("value")
|
|
438
|
+
if isinstance(value_wrapper, dict):
|
|
439
|
+
data = value_wrapper.get("data")
|
|
440
|
+
# Use normalize_value() to extract text from dropdowns/multi-select
|
|
441
|
+
result[field_name] = normalize_value(data)
|
|
442
|
+
else:
|
|
443
|
+
result[field_name] = value_wrapper
|
|
444
|
+
|
|
445
|
+
# Also add basic entity properties for filtering
|
|
446
|
+
if entry.entity is not None:
|
|
447
|
+
if hasattr(entry.entity, "name"):
|
|
448
|
+
result["name"] = entry.entity.name
|
|
449
|
+
if hasattr(entry.entity, "domain"):
|
|
450
|
+
result["domain"] = entry.entity.domain
|
|
451
|
+
if hasattr(entry.entity, "primary_email"):
|
|
452
|
+
result["primary_email"] = entry.entity.primary_email
|
|
453
|
+
|
|
454
|
+
return result
|
|
455
|
+
|
|
456
|
+
|
|
457
|
+
# Warning message emitted (as a UserWarning) whenever a filter has to be
# applied client-side because the V2 API cannot filter list entries.
_CLIENT_SIDE_FILTER_WARNING = (
    "The Affinity V2 API does not support server-side filtering on list entries. "
    "Filtering is being applied client-side after fetching data. "
    "For large lists, consider using saved views instead (--saved-view)."
)
|
|
463
|
+
|
|
464
|
+
|
|
465
|
+
class ListEntryService:
|
|
466
|
+
"""
|
|
467
|
+
Service for managing list entries (rows).
|
|
468
|
+
|
|
469
|
+
List entries connect entities (people, companies, opportunities) to lists
|
|
470
|
+
and hold list-specific field values.
|
|
471
|
+
"""
|
|
472
|
+
|
|
473
|
+
    def __init__(self, client: HTTPClient, list_id: ListId):
        """Bind the service to one list; all operations target ``list_id``."""
        self._client = client
        self._list_id = list_id
|
476
|
+
|
|
477
|
+
def _all_entity_list_entries_v2(self, path: str) -> builtins.list[ListEntry]:
|
|
478
|
+
"""
|
|
479
|
+
Fetch all list entries for a single entity across all lists (V2 API).
|
|
480
|
+
|
|
481
|
+
Used for list membership helpers to avoid enumerating an entire list.
|
|
482
|
+
"""
|
|
483
|
+
entries: builtins.list[ListEntry] = []
|
|
484
|
+
data = self._client.get(path)
|
|
485
|
+
|
|
486
|
+
while True:
|
|
487
|
+
entries.extend(_safe_model_validate(ListEntry, item) for item in data.get("data", []))
|
|
488
|
+
pagination = _safe_model_validate(PaginationInfo, data.get("pagination", {}))
|
|
489
|
+
if not pagination.next_cursor:
|
|
490
|
+
break
|
|
491
|
+
data = self._client.get_url(pagination.next_cursor)
|
|
492
|
+
|
|
493
|
+
return entries
|
|
494
|
+
|
|
495
|
+
# =========================================================================
|
|
496
|
+
# Read Operations (V2 API)
|
|
497
|
+
# =========================================================================
|
|
498
|
+
|
|
499
|
+
def list(
|
|
500
|
+
self,
|
|
501
|
+
*,
|
|
502
|
+
field_ids: Sequence[AnyFieldId] | None = None,
|
|
503
|
+
field_types: Sequence[FieldType] | None = None,
|
|
504
|
+
filter: str | FilterExpression | None = None,
|
|
505
|
+
limit: int | None = None,
|
|
506
|
+
cursor: str | None = None,
|
|
507
|
+
) -> PaginatedResponse[ListEntryWithEntity]:
|
|
508
|
+
"""
|
|
509
|
+
Get list entries with entity data and field values.
|
|
510
|
+
|
|
511
|
+
Args:
|
|
512
|
+
field_ids: Specific field IDs to include
|
|
513
|
+
field_types: Field types to include
|
|
514
|
+
filter: Filter expression (applied client-side; API doesn't support it)
|
|
515
|
+
limit: Maximum results per page
|
|
516
|
+
cursor: Cursor to resume pagination (opaque; obtained from prior responses).
|
|
517
|
+
|
|
518
|
+
Returns:
|
|
519
|
+
Paginated list entries with entity data
|
|
520
|
+
|
|
521
|
+
Note:
|
|
522
|
+
The Affinity V2 API does not support server-side filtering on list entries.
|
|
523
|
+
When a filter is provided, it is applied client-side after fetching data.
|
|
524
|
+
For large lists, consider using saved views for server-side filtering.
|
|
525
|
+
"""
|
|
526
|
+
# Parse filter expression if provided
|
|
527
|
+
filter_expr: FilterExpression | None = None
|
|
528
|
+
if filter is not None:
|
|
529
|
+
if isinstance(filter, str):
|
|
530
|
+
# Treat whitespace-only strings as no filter
|
|
531
|
+
stripped = filter.strip()
|
|
532
|
+
if stripped:
|
|
533
|
+
filter_expr = _parse_filter_with_hint(stripped)
|
|
534
|
+
else:
|
|
535
|
+
filter_expr = filter
|
|
536
|
+
# Emit warning about client-side filtering
|
|
537
|
+
warnings.warn(_CLIENT_SIDE_FILTER_WARNING, UserWarning, stacklevel=2)
|
|
538
|
+
|
|
539
|
+
if cursor is not None:
|
|
540
|
+
if field_ids or field_types or filter_expr is not None or limit is not None:
|
|
541
|
+
raise ValueError(
|
|
542
|
+
"Cannot combine 'cursor' with other parameters; cursor encodes all query "
|
|
543
|
+
"context. Start a new pagination sequence without a cursor to change "
|
|
544
|
+
"parameters."
|
|
545
|
+
)
|
|
546
|
+
data = self._client.get_url(cursor)
|
|
547
|
+
else:
|
|
548
|
+
if limit is not None and limit <= 0:
|
|
549
|
+
raise ValueError("'limit' must be > 0")
|
|
550
|
+
params: dict[str, Any] = {}
|
|
551
|
+
if field_ids:
|
|
552
|
+
params["fieldIds"] = [str(field_id) for field_id in field_ids]
|
|
553
|
+
if field_types:
|
|
554
|
+
params["fieldTypes"] = [field_type.value for field_type in field_types]
|
|
555
|
+
# NOTE: filter is NOT sent to API - it doesn't support filtering
|
|
556
|
+
if limit is not None:
|
|
557
|
+
params["limit"] = limit
|
|
558
|
+
|
|
559
|
+
data = self._client.get(
|
|
560
|
+
f"/lists/{self._list_id}/list-entries",
|
|
561
|
+
params=params or None,
|
|
562
|
+
)
|
|
563
|
+
|
|
564
|
+
# Parse entries
|
|
565
|
+
entries = [_safe_model_validate(ListEntryWithEntity, e) for e in data.get("data", [])]
|
|
566
|
+
|
|
567
|
+
# Apply client-side filtering if filter was provided
|
|
568
|
+
if filter_expr is not None:
|
|
569
|
+
entries = [e for e in entries if filter_expr.matches(_entry_to_filter_dict(e))]
|
|
570
|
+
|
|
571
|
+
return PaginatedResponse[ListEntryWithEntity](
|
|
572
|
+
data=entries,
|
|
573
|
+
pagination=_safe_model_validate(PaginationInfo, data.get("pagination", {})),
|
|
574
|
+
)
|
|
575
|
+
|
|
576
|
+
    def pages(
        self,
        *,
        field_ids: Sequence[AnyFieldId] | None = None,
        field_types: Sequence[FieldType] | None = None,
        filter: str | FilterExpression | None = None,
        limit: int | None = None,
        cursor: str | None = None,
        progress_callback: Callable[[FilterStats], None] | None = None,
    ) -> Iterator[PaginatedResponse[ListEntryWithEntity]]:
        """Iterate list-entry pages, yielding `PaginatedResponse[ListEntryWithEntity]`.

        Use ``pages()`` when you need page-level control for batch processing,
        cursor-based resumption, or progress tracking on unfiltered queries.
        Use ``iter()`` for most cases, especially with filters.

        Args:
            progress_callback: Optional callback called after each physical page
                fetch during filtered queries. Receives FilterStats with current
                scanned/matched counts for real-time progress updates.

        Note:
            Filtering is applied client-side (Affinity V2 API does not support
            server-side filtering on list entries). When a filter is provided,
            pages are "virtualized" - the method fetches physical pages internally
            and accumulates filtered results until a full virtual page is ready.
            This ensures consistent page sizes and fast time-to-first-results.
            Virtual pages carry no resumable cursor.
        """
        # A cursor is only valid for resuming an identical query, so it cannot
        # be combined with any other argument.
        if cursor is not None and (
            field_ids
            or field_types
            or filter is not None
            or limit is not None
            or progress_callback is not None
        ):
            raise ValueError(
                "Cannot combine 'cursor' with other parameters; cursor encodes all query context. "
                "Start a new pagination sequence without a cursor to change parameters."
            )

        # Parse filter once for all pages (since list() with cursor can't accept filter)
        filter_expr: FilterExpression | None = None
        if filter is not None:
            if isinstance(filter, str):
                # A whitespace-only string is treated as "no filter".
                stripped = filter.strip()
                if stripped:
                    filter_expr = _parse_filter_with_hint(stripped)
            else:
                filter_expr = filter

        # No filter: use simple pagination (original behavior)
        if filter_expr is None:
            requested_cursor = cursor
            page = (
                self.list(field_ids=field_ids, field_types=field_types, limit=limit)
                if cursor is None
                else self.list(cursor=cursor)
            )
            while True:
                yield page
                if not page.has_next:
                    return
                next_cursor = page.next_cursor
                # A missing or repeated cursor would otherwise loop forever.
                if next_cursor is None or next_cursor == requested_cursor:
                    return
                requested_cursor = next_cursor
                page = self.list(cursor=next_cursor)
            return

        # With filter: use virtualized pagination for consistent page sizes
        # and fast time-to-first-results
        virtual_page_size = limit if limit is not None else 100
        buffer: list[ListEntryWithEntity] = []
        physical_cursor: str | None = None
        has_more_physical = True

        # Track filter stats for progress reporting
        total_scanned = 0
        total_matched = 0

        # Fetch first physical page WITHOUT filter so we can track accurate counts
        first_page = self.list(field_ids=field_ids, field_types=field_types, limit=limit)
        # Track scanned count (before filtering)
        total_scanned += len(first_page.data)
        # Apply filter manually to first page (same as subsequent pages)
        filtered_first = [
            e for e in first_page.data if filter_expr.matches(_entry_to_filter_dict(e))
        ]
        total_matched += len(filtered_first)
        buffer.extend(filtered_first)
        physical_cursor = first_page.next_cursor
        has_more_physical = first_page.has_next and physical_cursor is not None

        # Report initial progress after first page fetch
        if progress_callback is not None:
            progress_callback(FilterStats(scanned=total_scanned, matched=total_matched))

        while True:
            # Yield a virtual page when the buffer is full or no more data remains
            if len(buffer) >= virtual_page_size or not has_more_physical:
                if not buffer and not has_more_physical:
                    return  # No more data
                # Slice off one virtual page
                page_data = buffer[:virtual_page_size]
                buffer = buffer[virtual_page_size:]
                # has_next is true if we have more buffered or more physical pages
                has_next = len(buffer) > 0 or has_more_physical
                virtual_page = PaginatedResponse[ListEntryWithEntity](
                    data=page_data,
                    pagination=PaginationInfo(
                        next_cursor=None,  # Virtual pages don't support cursor resumption
                        prev_cursor=None,
                    ),
                )
                # Override has_next since we know better than the pagination info
                virtual_page._has_next_override = has_next
                # Add filter stats for progress tracking
                virtual_page._filter_stats = FilterStats(
                    scanned=total_scanned, matched=total_matched
                )
                yield virtual_page
                if not has_next:
                    return
                continue

            # Need more data - fetch next physical page
            if not has_more_physical:
                continue  # Will yield remaining buffer above

            physical_page = self.list(cursor=physical_cursor)
            # Track scanned count (before filtering)
            total_scanned += len(physical_page.data)
            # Apply filter manually to subsequent pages
            filtered_data = [
                e for e in physical_page.data if filter_expr.matches(_entry_to_filter_dict(e))
            ]
            total_matched += len(filtered_data)
            buffer.extend(filtered_data)
            physical_cursor = physical_page.next_cursor
            has_more_physical = physical_page.has_next and physical_cursor is not None

            # Report progress after each physical page fetch
            if progress_callback is not None:
                progress_callback(FilterStats(scanned=total_scanned, matched=total_matched))
|
|
720
|
+
|
|
721
|
+
    def all(
        self,
        *,
        field_ids: Sequence[AnyFieldId] | None = None,
        field_types: Sequence[FieldType] | None = None,
        filter: str | FilterExpression | None = None,
    ) -> Iterator[ListEntryWithEntity]:
        """
        Iterate through all list entries with automatic pagination.

        Args:
            field_ids: Specific field IDs to include in each entry's data.
            field_types: Field types to include in each entry's data.
            filter: Filter expression (string or pre-parsed FilterExpression),
                applied client-side to each entry as it is yielded.

        Yields:
            Each `ListEntryWithEntity` that passes the filter (or every entry
            when no filter is given).

        Note:
            The Affinity V2 API does not support server-side filtering on list entries.
            When a filter is provided, it is applied client-side after fetching all data.
            For large lists, consider using saved views for server-side filtering.
        """
        # Parse filter once for all pages
        filter_expr: FilterExpression | None = None
        if filter is not None:
            if isinstance(filter, str):
                # Treat whitespace-only strings as no filter
                stripped = filter.strip()
                if stripped:
                    filter_expr = _parse_filter_with_hint(stripped)
            else:
                filter_expr = filter
            # Emit warning once for the entire iteration
            # NOTE(review): the warning also fires for a whitespace-only filter
            # string, even though no filter is actually applied — confirm intended.
            warnings.warn(_CLIENT_SIDE_FILTER_WARNING, UserWarning, stacklevel=2)

        def fetch_page(next_url: str | None) -> PaginatedResponse[ListEntryWithEntity]:
            # Subsequent pages are fetched via the opaque next-page URL;
            # the first page goes through list() so field params are applied.
            if next_url:
                data = self._client.get_url(next_url)
                entries = [
                    _safe_model_validate(ListEntryWithEntity, e) for e in data.get("data", [])
                ]
                return PaginatedResponse[ListEntryWithEntity](
                    data=entries,
                    pagination=_safe_model_validate(PaginationInfo, data.get("pagination", {})),
                )
            # First page - don't pass filter to list() to avoid duplicate warnings
            return self.list(
                field_ids=field_ids,
                field_types=field_types,
            )

        # Iterate through all pages, applying filter if provided
        for entry in PageIterator(fetch_page):
            if filter_expr is None or filter_expr.matches(_entry_to_filter_dict(entry)):
                yield entry
def iter(
|
|
771
|
+
self,
|
|
772
|
+
*,
|
|
773
|
+
field_ids: Sequence[AnyFieldId] | None = None,
|
|
774
|
+
field_types: Sequence[FieldType] | None = None,
|
|
775
|
+
filter: str | FilterExpression | None = None,
|
|
776
|
+
) -> Iterator[ListEntryWithEntity]:
|
|
777
|
+
"""
|
|
778
|
+
Auto-paginate all list entries.
|
|
779
|
+
|
|
780
|
+
Alias for `all()` (FR-006 public contract).
|
|
781
|
+
"""
|
|
782
|
+
return self.all(field_ids=field_ids, field_types=field_types, filter=filter)
|
|
783
|
+
|
|
784
|
+
def get(self, entry_id: ListEntryId) -> ListEntryWithEntity:
|
|
785
|
+
"""Get a single list entry by ID."""
|
|
786
|
+
data = self._client.get(f"/lists/{self._list_id}/list-entries/{entry_id}")
|
|
787
|
+
return _safe_model_validate(ListEntryWithEntity, data)
|
|
788
|
+
|
|
789
|
+
def from_saved_view(
|
|
790
|
+
self,
|
|
791
|
+
view_id: SavedViewId,
|
|
792
|
+
*,
|
|
793
|
+
field_ids: Sequence[AnyFieldId] | None = None,
|
|
794
|
+
field_types: Sequence[FieldType] | None = None,
|
|
795
|
+
limit: int | None = None,
|
|
796
|
+
) -> PaginatedResponse[ListEntryWithEntity]:
|
|
797
|
+
"""
|
|
798
|
+
Get list entries from a saved view.
|
|
799
|
+
|
|
800
|
+
Args:
|
|
801
|
+
view_id: The saved view ID
|
|
802
|
+
field_ids: Specific field IDs to include in the response
|
|
803
|
+
field_types: Field types to include in the response
|
|
804
|
+
limit: Maximum results per page
|
|
805
|
+
|
|
806
|
+
Returns:
|
|
807
|
+
Paginated list entries with entity data and field values
|
|
808
|
+
|
|
809
|
+
Note:
|
|
810
|
+
The saved view's filters are applied server-side.
|
|
811
|
+
Field values are returned in entity.fields_raw as an array.
|
|
812
|
+
"""
|
|
813
|
+
params: dict[str, Any] = {}
|
|
814
|
+
if field_ids:
|
|
815
|
+
params["fieldIds"] = [str(field_id) for field_id in field_ids]
|
|
816
|
+
if field_types:
|
|
817
|
+
params["fieldTypes"] = [field_type.value for field_type in field_types]
|
|
818
|
+
if limit:
|
|
819
|
+
params["limit"] = limit
|
|
820
|
+
|
|
821
|
+
data = self._client.get(
|
|
822
|
+
f"/lists/{self._list_id}/saved-views/{view_id}/list-entries",
|
|
823
|
+
params=params or None,
|
|
824
|
+
)
|
|
825
|
+
|
|
826
|
+
return PaginatedResponse[ListEntryWithEntity](
|
|
827
|
+
data=[_safe_model_validate(ListEntryWithEntity, e) for e in data.get("data", [])],
|
|
828
|
+
pagination=_safe_model_validate(PaginationInfo, data.get("pagination", {})),
|
|
829
|
+
)
|
|
830
|
+
|
|
831
|
+
# =========================================================================
|
|
832
|
+
# Write Operations (V1 API for create/delete, V2 for field updates)
|
|
833
|
+
# =========================================================================
|
|
834
|
+
|
|
835
|
+
# -------------------------------------------------------------------------
|
|
836
|
+
# Membership helpers (V2 for read, V1 for insert)
|
|
837
|
+
# -------------------------------------------------------------------------
|
|
838
|
+
|
|
839
|
+
def find_person(self, person_id: PersonId) -> ListEntry | None:
|
|
840
|
+
"""
|
|
841
|
+
Return the first list entry for this person on this list (or None).
|
|
842
|
+
|
|
843
|
+
Notes:
|
|
844
|
+
- Affinity lists can contain duplicates. This returns the first match in
|
|
845
|
+
the server-provided order. Use `find_all_person()` to surface all matches.
|
|
846
|
+
"""
|
|
847
|
+
entries = self.find_all_person(person_id)
|
|
848
|
+
return entries[0] if entries else None
|
|
849
|
+
|
|
850
|
+
def find_all_person(self, person_id: PersonId) -> builtins.list[ListEntry]:
|
|
851
|
+
"""
|
|
852
|
+
Return all list entries for this person on this list (may be empty).
|
|
853
|
+
|
|
854
|
+
Affinity allows the same entity to appear multiple times on a list.
|
|
855
|
+
"""
|
|
856
|
+
all_entries = self._all_entity_list_entries_v2(f"/persons/{person_id}/list-entries")
|
|
857
|
+
return [entry for entry in all_entries if entry.list_id == self._list_id]
|
|
858
|
+
|
|
859
|
+
def ensure_person(
|
|
860
|
+
self,
|
|
861
|
+
person_id: PersonId,
|
|
862
|
+
*,
|
|
863
|
+
creator_id: int | None = None,
|
|
864
|
+
) -> ListEntry:
|
|
865
|
+
"""
|
|
866
|
+
Ensure a person is on this list (idempotent by default).
|
|
867
|
+
|
|
868
|
+
Returns:
|
|
869
|
+
The first existing list entry if present; otherwise creates a new one.
|
|
870
|
+
|
|
871
|
+
Notes:
|
|
872
|
+
- This method performs an existence check to avoid accidental duplicates.
|
|
873
|
+
To intentionally create duplicates, call `add_person()` directly.
|
|
874
|
+
"""
|
|
875
|
+
existing = self.find_person(person_id)
|
|
876
|
+
if existing is not None:
|
|
877
|
+
return existing
|
|
878
|
+
return self.add_person(person_id, creator_id=creator_id)
|
|
879
|
+
|
|
880
|
+
def find_company(self, company_id: CompanyId) -> ListEntry | None:
|
|
881
|
+
"""
|
|
882
|
+
Return the first list entry for this company on this list (or None).
|
|
883
|
+
|
|
884
|
+
Notes:
|
|
885
|
+
- Affinity lists can contain duplicates. This returns the first match in
|
|
886
|
+
the server-provided order. Use `find_all_company()` to surface all matches.
|
|
887
|
+
"""
|
|
888
|
+
entries = self.find_all_company(company_id)
|
|
889
|
+
return entries[0] if entries else None
|
|
890
|
+
|
|
891
|
+
def find_all_company(self, company_id: CompanyId) -> builtins.list[ListEntry]:
|
|
892
|
+
"""
|
|
893
|
+
Return all list entries for this company on this list (may be empty).
|
|
894
|
+
|
|
895
|
+
Affinity allows the same entity to appear multiple times on a list.
|
|
896
|
+
"""
|
|
897
|
+
all_entries = self._all_entity_list_entries_v2(f"/companies/{company_id}/list-entries")
|
|
898
|
+
return [entry for entry in all_entries if entry.list_id == self._list_id]
|
|
899
|
+
|
|
900
|
+
def ensure_company(
|
|
901
|
+
self,
|
|
902
|
+
company_id: CompanyId,
|
|
903
|
+
*,
|
|
904
|
+
creator_id: int | None = None,
|
|
905
|
+
) -> ListEntry:
|
|
906
|
+
"""
|
|
907
|
+
Ensure a company is on this list (idempotent by default).
|
|
908
|
+
|
|
909
|
+
Returns:
|
|
910
|
+
The first existing list entry if present; otherwise creates a new one.
|
|
911
|
+
|
|
912
|
+
Notes:
|
|
913
|
+
- This method performs an existence check to avoid accidental duplicates.
|
|
914
|
+
To intentionally create duplicates, call `add_company()` directly.
|
|
915
|
+
"""
|
|
916
|
+
existing = self.find_company(company_id)
|
|
917
|
+
if existing is not None:
|
|
918
|
+
return existing
|
|
919
|
+
return self.add_company(company_id, creator_id=creator_id)
|
|
920
|
+
|
|
921
|
+
def add_person(
|
|
922
|
+
self,
|
|
923
|
+
person_id: PersonId,
|
|
924
|
+
*,
|
|
925
|
+
creator_id: int | None = None,
|
|
926
|
+
) -> ListEntry:
|
|
927
|
+
"""Add a person to this list."""
|
|
928
|
+
return self._create_entry(int(person_id), creator_id)
|
|
929
|
+
|
|
930
|
+
def add_company(
|
|
931
|
+
self,
|
|
932
|
+
company_id: CompanyId,
|
|
933
|
+
*,
|
|
934
|
+
creator_id: int | None = None,
|
|
935
|
+
) -> ListEntry:
|
|
936
|
+
"""Add a company to this list."""
|
|
937
|
+
return self._create_entry(int(company_id), creator_id)
|
|
938
|
+
|
|
939
|
+
def add_opportunity(
|
|
940
|
+
self,
|
|
941
|
+
opportunity_id: OpportunityId,
|
|
942
|
+
*,
|
|
943
|
+
creator_id: int | None = None,
|
|
944
|
+
) -> ListEntry:
|
|
945
|
+
"""Add an opportunity to this list."""
|
|
946
|
+
return self._create_entry(int(opportunity_id), creator_id)
|
|
947
|
+
|
|
948
|
+
def _create_entry(
|
|
949
|
+
self,
|
|
950
|
+
entity_id: int,
|
|
951
|
+
creator_id: int | None = None,
|
|
952
|
+
) -> ListEntry:
|
|
953
|
+
"""Internal method to create a list entry."""
|
|
954
|
+
payload: dict[str, Any] = {"entity_id": entity_id}
|
|
955
|
+
if creator_id:
|
|
956
|
+
payload["creator_id"] = creator_id
|
|
957
|
+
|
|
958
|
+
result = self._client.post(
|
|
959
|
+
f"/lists/{self._list_id}/list-entries",
|
|
960
|
+
json=payload,
|
|
961
|
+
v1=True,
|
|
962
|
+
)
|
|
963
|
+
|
|
964
|
+
return _safe_model_validate(ListEntry, result)
|
|
965
|
+
|
|
966
|
+
def delete(self, entry_id: ListEntryId) -> bool:
|
|
967
|
+
"""
|
|
968
|
+
Remove a list entry (row) from the list.
|
|
969
|
+
|
|
970
|
+
Note: This only removes the entry from the list, not the entity itself.
|
|
971
|
+
"""
|
|
972
|
+
result = self._client.delete(
|
|
973
|
+
f"/lists/{self._list_id}/list-entries/{entry_id}",
|
|
974
|
+
v1=True,
|
|
975
|
+
)
|
|
976
|
+
return bool(result.get("success", False))
|
|
977
|
+
|
|
978
|
+
# =========================================================================
|
|
979
|
+
# Field Value Operations (V2 API)
|
|
980
|
+
# =========================================================================
|
|
981
|
+
|
|
982
|
+
def get_field_values(
|
|
983
|
+
self,
|
|
984
|
+
entry_id: ListEntryId,
|
|
985
|
+
) -> FieldValues:
|
|
986
|
+
"""Get all field values for a list entry."""
|
|
987
|
+
data = self._client.get(f"/lists/{self._list_id}/list-entries/{entry_id}/fields")
|
|
988
|
+
values = data.get("data", {})
|
|
989
|
+
if isinstance(values, dict):
|
|
990
|
+
return _safe_model_validate(FieldValues, values)
|
|
991
|
+
return _safe_model_validate(FieldValues, {})
|
|
992
|
+
|
|
993
|
+
def get_field_value(
|
|
994
|
+
self,
|
|
995
|
+
entry_id: ListEntryId,
|
|
996
|
+
field_id: AnyFieldId,
|
|
997
|
+
) -> Any:
|
|
998
|
+
"""Get a single field value."""
|
|
999
|
+
data = self._client.get(f"/lists/{self._list_id}/list-entries/{entry_id}/fields/{field_id}")
|
|
1000
|
+
return data.get("value")
|
|
1001
|
+
|
|
1002
|
+
def update_field_value(
|
|
1003
|
+
self,
|
|
1004
|
+
entry_id: ListEntryId,
|
|
1005
|
+
field_id: AnyFieldId,
|
|
1006
|
+
value: Any,
|
|
1007
|
+
) -> FieldValues:
|
|
1008
|
+
"""
|
|
1009
|
+
Update a single field value on a list entry.
|
|
1010
|
+
|
|
1011
|
+
Args:
|
|
1012
|
+
entry_id: The list entry
|
|
1013
|
+
field_id: The field to update
|
|
1014
|
+
value: New value (type depends on field type)
|
|
1015
|
+
|
|
1016
|
+
Returns:
|
|
1017
|
+
Updated field value data
|
|
1018
|
+
"""
|
|
1019
|
+
result = self._client.post(
|
|
1020
|
+
f"/lists/{self._list_id}/list-entries/{entry_id}/fields/{field_id}",
|
|
1021
|
+
json={"value": value},
|
|
1022
|
+
)
|
|
1023
|
+
return _safe_model_validate(FieldValues, result)
|
|
1024
|
+
|
|
1025
|
+
def batch_update_fields(
|
|
1026
|
+
self,
|
|
1027
|
+
entry_id: ListEntryId,
|
|
1028
|
+
updates: dict[AnyFieldId, Any],
|
|
1029
|
+
) -> BatchOperationResponse:
|
|
1030
|
+
"""
|
|
1031
|
+
Update multiple field values at once.
|
|
1032
|
+
|
|
1033
|
+
More efficient than individual updates for multiple fields.
|
|
1034
|
+
|
|
1035
|
+
Args:
|
|
1036
|
+
entry_id: The list entry
|
|
1037
|
+
updates: Dict mapping field IDs to new values
|
|
1038
|
+
|
|
1039
|
+
Returns:
|
|
1040
|
+
Batch operation response with success/failure per field
|
|
1041
|
+
"""
|
|
1042
|
+
operations = [
|
|
1043
|
+
{"fieldId": str(field_id), "value": value} for field_id, value in updates.items()
|
|
1044
|
+
]
|
|
1045
|
+
|
|
1046
|
+
result = self._client.patch(
|
|
1047
|
+
f"/lists/{self._list_id}/list-entries/{entry_id}/fields",
|
|
1048
|
+
json={"operations": operations},
|
|
1049
|
+
)
|
|
1050
|
+
|
|
1051
|
+
return _safe_model_validate(BatchOperationResponse, result)
|
|
1052
|
+
|
|
1053
|
+
|
|
1054
|
+
class AsyncListService:
    """Async list operations (TR-009).

    Mirrors the sync list service: V2 endpoints for reads, V1 for list
    creation, with an in-memory cache for name-based list resolution.
    """

    def __init__(self, client: AsyncHTTPClient):
        self._client = client
        # Cache of resolve() lookups, keyed by (lowercased name, list_type).
        # Misses are cached as None so repeated lookups stay cheap.
        self._resolve_cache: dict[tuple[str, ListType | None], AffinityList | None] = {}

    def entries(self, list_id: ListId) -> AsyncListEntryService:
        """
        Get an AsyncListEntryService for a specific list.

        This is the explicit path for retrieving "full row" data via list entries.
        """
        return AsyncListEntryService(self._client, list_id)

    async def list(
        self,
        *,
        limit: int | None = None,
        cursor: str | None = None,
    ) -> PaginatedResponse[AffinityList]:
        """
        Get all lists accessible to you.

        Args:
            limit: Maximum results per page.
            cursor: Cursor to resume pagination (opaque; obtained from prior responses).

        Returns:
            Paginated list of lists (without field metadata)

        Raises:
            ValueError: If `cursor` is combined with `limit`, or `limit` <= 0.
        """
        if cursor is not None:
            # Cursor is an opaque URL encoding the whole query; other params conflict.
            if limit is not None:
                raise ValueError(
                    "Cannot combine 'cursor' with other parameters; cursor encodes all query "
                    "context. Start a new pagination sequence without a cursor to change "
                    "parameters."
                )
            data = await self._client.get_url(cursor)
        else:
            if limit is not None and limit <= 0:
                raise ValueError("'limit' must be > 0")
            params: dict[str, Any] = {}
            if limit is not None:
                params["limit"] = limit
            data = await self._client.get("/lists", params=params or None)
        return PaginatedResponse[AffinityList](
            data=[
                _safe_model_validate(AffinityList, list_item) for list_item in data.get("data", [])
            ],
            pagination=_safe_model_validate(PaginationInfo, data.get("pagination", {})),
        )

    async def pages(
        self,
        *,
        limit: int | None = None,
        cursor: str | None = None,
    ) -> AsyncIterator[PaginatedResponse[AffinityList]]:
        """
        Iterate list pages (not items), yielding `PaginatedResponse[AffinityList]`.

        This is useful for ETL scripts that want checkpoint/resume via `page.next_cursor`.

        Raises:
            ValueError: If `cursor` is combined with `limit`.
        """
        if cursor is not None and limit is not None:
            raise ValueError(
                "Cannot combine 'cursor' with other parameters; cursor encodes all query context. "
                "Start a new pagination sequence without a cursor to change parameters."
            )
        requested_cursor = cursor
        page = await self.list(limit=limit) if cursor is None else await self.list(cursor=cursor)
        while True:
            yield page
            if not page.has_next:
                return
            next_cursor = page.next_cursor
            # Stop if the server returns no cursor or echoes the same one
            # (the latter would otherwise loop forever).
            if next_cursor is None or next_cursor == requested_cursor:
                return
            requested_cursor = next_cursor
            page = await self.list(cursor=next_cursor)

    def all(self) -> AsyncIterator[AffinityList]:
        """Iterate through all accessible lists."""

        async def fetch_page(next_url: str | None) -> PaginatedResponse[AffinityList]:
            # First call (next_url=None) goes through list(); follow-ups
            # fetch the opaque next-page URL directly.
            if next_url:
                data = await self._client.get_url(next_url)
                return PaginatedResponse[AffinityList](
                    data=[
                        _safe_model_validate(AffinityList, list_item)
                        for list_item in data.get("data", [])
                    ],
                    pagination=_safe_model_validate(PaginationInfo, data.get("pagination", {})),
                )
            return await self.list()

        return AsyncPageIterator(fetch_page)

    def iter(self) -> AsyncIterator[AffinityList]:
        """
        Auto-paginate all lists.

        Alias for `all()` (FR-006 public contract).
        """
        return self.all()

    # =========================================================================
    # Saved View Operations
    # =========================================================================

    async def get_saved_views(
        self,
        list_id: ListId,
        *,
        limit: int | None = None,
        cursor: str | None = None,
    ) -> PaginatedResponse[SavedView]:
        """
        Get saved views for a list.

        Args:
            list_id: List id for the initial request.
            limit: Maximum results per page.
            cursor: Cursor to resume pagination (opaque; obtained from prior responses).

        Raises:
            ValueError: If `cursor` is combined with `limit`, if the cursor
                belongs to a different list, or if `limit` <= 0.
        """
        if cursor is not None:
            if limit is not None:
                raise ValueError(
                    "Cannot combine 'cursor' with other parameters; cursor encodes all query "
                    "context. Start a new pagination sequence without a cursor to change "
                    "parameters."
                )
            # Guard against resuming a cursor that was issued for another list.
            cursor_list_id = _saved_views_list_id_from_cursor(cursor)
            if cursor_list_id is not None and int(list_id) != cursor_list_id:
                raise ValueError(
                    f"Cursor does not match list_id: cursor is for list {cursor_list_id}, "
                    f"requested list_id is {int(list_id)}"
                )
            data = await self._client.get_url(cursor)
        else:
            if limit is not None and limit <= 0:
                raise ValueError("'limit' must be > 0")
            params: dict[str, Any] = {}
            if limit is not None:
                params["limit"] = limit
            data = await self._client.get(f"/lists/{list_id}/saved-views", params=params or None)

        return PaginatedResponse[SavedView](
            data=[_safe_model_validate(SavedView, v) for v in data.get("data", [])],
            pagination=_safe_model_validate(PaginationInfo, data.get("pagination", {})),
        )

    async def saved_views_pages(
        self,
        list_id: ListId,
        *,
        limit: int | None = None,
        cursor: str | None = None,
    ) -> AsyncIterator[PaginatedResponse[SavedView]]:
        """Iterate saved view pages, yielding `PaginatedResponse[SavedView]`."""
        if cursor is not None and limit is not None:
            raise ValueError(
                "Cannot combine 'cursor' with other parameters; cursor encodes all query context. "
                "Start a new pagination sequence without a cursor to change parameters."
            )
        requested_cursor = cursor
        page = (
            await self.get_saved_views(list_id, limit=limit)
            if cursor is None
            else await self.get_saved_views(list_id, cursor=cursor)
        )
        while True:
            yield page
            if not page.has_next:
                return
            next_cursor = page.next_cursor
            # Stop on a missing or repeated cursor to avoid an infinite loop.
            if next_cursor is None or next_cursor == requested_cursor:
                return
            requested_cursor = next_cursor
            page = await self.get_saved_views(list_id, cursor=next_cursor)

    async def saved_views_all(self, list_id: ListId) -> AsyncIterator[SavedView]:
        """Iterate all saved views for a list."""
        async for page in self.saved_views_pages(list_id):
            for item in page.data:
                yield item

    async def get_saved_view(self, list_id: ListId, view_id: SavedViewId) -> SavedView:
        """Get a single saved view."""
        data = await self._client.get(f"/lists/{list_id}/saved-views/{view_id}")
        return _safe_model_validate(SavedView, data)

    async def get(self, list_id: ListId) -> AffinityList:
        """
        Get a single list by ID.

        Includes field metadata for the list.

        Note: Uses V1 API because V2's listSize field is undocumented and
        returns incorrect values (often 0 for non-empty lists).
        """
        data = await self._client.get(f"/lists/{list_id}", v1=True)
        return _safe_model_validate(AffinityList, data)

    async def resolve(
        self,
        *,
        name: str,
        list_type: ListType | None = None,
    ) -> AffinityList | None:
        """
        Find a single list by name (optionally filtered by type).

        Notes:
            - This iterates list pages client-side (the API does not expose a list-search endpoint).
            - Results are cached in-memory on this service instance. If you call this frequently,
              reuse the client, or persist the resolved `ListId` in your own configuration.

        If multiple matches exist, returns the first match in server-provided order.
        """
        key = (name.lower(), list_type)
        if key in self._resolve_cache:
            return self._resolve_cache[key]

        async for item in self.all():
            # Case-insensitive name match, optionally constrained by type.
            if item.name.lower() == name.lower() and (list_type is None or item.type == list_type):
                self._resolve_cache[key] = item
                return item

        # Cache the miss too, so absent names don't re-scan every call.
        self._resolve_cache[key] = None
        return None

    async def resolve_all(
        self,
        *,
        name: str,
        list_type: ListType | None = None,
    ) -> builtins.list[AffinityList]:
        """
        Find all lists matching a name (optionally filtered by type).

        Notes:
            - This iterates list pages client-side (the API does not expose a list-search endpoint).
            - Unlike `resolve()`, this does not cache results.
        """
        matches: builtins.list[AffinityList] = []
        name_lower = name.lower()
        async for item in self.all():
            if item.name.lower() != name_lower:
                continue
            if list_type is not None and item.type != list_type:
                continue
            matches.append(item)
        return matches

    # =========================================================================
    # Write Operations (V1 API)
    # =========================================================================

    async def create(self, data: ListCreate) -> AffinityList:
        """
        Create a new list.

        Uses V1 API.
        """
        payload = data.model_dump(mode="json", exclude_none=True, exclude_unset=True)
        # NOTE(review): an empty additional_permissions is stripped from the
        # payload — presumably the V1 endpoint mishandles an empty array; confirm.
        if not data.additional_permissions:
            payload.pop("additional_permissions", None)

        result = await self._client.post("/lists", json=payload, v1=True)

        # Creating a list invalidates both the HTTP-level list cache and the
        # in-memory name-resolution cache on this service.
        if self._client.cache:
            self._client.cache.invalidate_prefix("list")
        self._resolve_cache.clear()

        return _safe_model_validate(AffinityList, result)

    # =========================================================================
    # Field Operations
    # =========================================================================

    async def get_fields(
        self,
        list_id: ListId,
        *,
        field_types: Sequence[FieldType] | None = None,
    ) -> builtins.list[FieldMetadata]:
        """
        Get fields (columns) for a list.

        Includes list-specific, global, enriched, and relationship intelligence fields.
        Cached for performance.
        """
        params: dict[str, Any] = {}
        if field_types:
            params["fieldTypes"] = [field_type.value for field_type in field_types]

        # NOTE(review): the cache key joins FieldType members directly, which
        # assumes FieldType is str-based (e.g. StrEnum) — confirm; the query
        # params use .value explicitly.
        data = await self._client.get(
            f"/lists/{list_id}/fields",
            params=params or None,
            cache_key=f"list_{list_id}_fields:{','.join(field_types or [])}",
            cache_ttl=300,
        )

        return [_safe_model_validate(FieldMetadata, f) for f in data.get("data", [])]
|
|
1361
|
+
class AsyncListEntryService:
|
|
1362
|
+
"""Async list entry operations (TR-009)."""
|
|
1363
|
+
|
|
1364
|
+
def __init__(self, client: AsyncHTTPClient, list_id: ListId):
|
|
1365
|
+
self._client = client
|
|
1366
|
+
self._list_id = list_id
|
|
1367
|
+
|
|
1368
|
+
async def _all_entity_list_entries_v2(self, path: str) -> builtins.list[ListEntry]:
|
|
1369
|
+
"""
|
|
1370
|
+
Fetch all list entries for a single entity across all lists (V2 API).
|
|
1371
|
+
|
|
1372
|
+
Used for list membership helpers to avoid enumerating an entire list.
|
|
1373
|
+
"""
|
|
1374
|
+
entries: builtins.list[ListEntry] = []
|
|
1375
|
+
data = await self._client.get(path)
|
|
1376
|
+
|
|
1377
|
+
while True:
|
|
1378
|
+
entries.extend(_safe_model_validate(ListEntry, item) for item in data.get("data", []))
|
|
1379
|
+
pagination = _safe_model_validate(PaginationInfo, data.get("pagination", {}))
|
|
1380
|
+
if not pagination.next_cursor:
|
|
1381
|
+
break
|
|
1382
|
+
data = await self._client.get_url(pagination.next_cursor)
|
|
1383
|
+
|
|
1384
|
+
return entries
|
|
1385
|
+
|
|
1386
|
+
    async def list(
        self,
        *,
        field_ids: Sequence[AnyFieldId] | None = None,
        field_types: Sequence[FieldType] | None = None,
        filter: str | FilterExpression | None = None,
        limit: int | None = None,
        cursor: str | None = None,
    ) -> PaginatedResponse[ListEntryWithEntity]:
        """
        Get list entries with entity data and field values.

        Args:
            field_ids: Specific field IDs to include
            field_types: Field types to include
            filter: Filter expression (applied client-side; API doesn't support it)
            limit: Maximum results per page
            cursor: Cursor to resume pagination (opaque; obtained from prior responses).

        Returns:
            Paginated list entries with entity data

        Raises:
            ValueError: If `cursor` is combined with any other parameter, or
                if `limit` <= 0.

        Note:
            The Affinity V2 API does not support server-side filtering on list entries.
            When a filter is provided, it is applied client-side after fetching data.
            For large lists, consider using saved views for server-side filtering.
        """
        # Parse filter expression if provided
        filter_expr: FilterExpression | None = None
        if filter is not None:
            if isinstance(filter, str):
                # Treat whitespace-only strings as no filter
                stripped = filter.strip()
                if stripped:
                    filter_expr = _parse_filter_with_hint(stripped)
            else:
                filter_expr = filter
            # Emit warning about client-side filtering
            warnings.warn(_CLIENT_SIDE_FILTER_WARNING, UserWarning, stacklevel=2)

        if cursor is not None:
            # A cursor already encodes field/limit context; reject conflicts.
            if field_ids or field_types or filter_expr is not None or limit is not None:
                raise ValueError(
                    "Cannot combine 'cursor' with other parameters; cursor encodes all query "
                    "context. Start a new pagination sequence without a cursor to change "
                    "parameters."
                )
            data = await self._client.get_url(cursor)
        else:
            if limit is not None and limit <= 0:
                raise ValueError("'limit' must be > 0")
            params: dict[str, Any] = {}
            if field_ids:
                params["fieldIds"] = [str(field_id) for field_id in field_ids]
            if field_types:
                params["fieldTypes"] = [field_type.value for field_type in field_types]
            # NOTE: filter is NOT sent to API - it doesn't support filtering
            if limit is not None:
                params["limit"] = limit

            data = await self._client.get(
                f"/lists/{self._list_id}/list-entries",
                params=params or None,
            )

        # Parse entries
        entries = [_safe_model_validate(ListEntryWithEntity, e) for e in data.get("data", [])]

        # Apply client-side filtering if filter was provided
        # NOTE(review): pagination info reflects the physical page, so a
        # filtered page may contain fewer entries than the requested limit.
        if filter_expr is not None:
            entries = [e for e in entries if filter_expr.matches(_entry_to_filter_dict(e))]

        return PaginatedResponse[ListEntryWithEntity](
            data=entries,
            pagination=_safe_model_validate(PaginationInfo, data.get("pagination", {})),
        )
async def pages(
    self,
    *,
    field_ids: Sequence[AnyFieldId] | None = None,
    field_types: Sequence[FieldType] | None = None,
    filter: str | FilterExpression | None = None,
    limit: int | None = None,
    cursor: str | None = None,
    progress_callback: Callable[[FilterStats], None] | None = None,
) -> AsyncIterator[PaginatedResponse[ListEntryWithEntity]]:
    """Iterate list-entry pages, yielding `PaginatedResponse[ListEntryWithEntity]`.

    Use ``pages()`` when you need page-level control for batch processing,
    cursor-based resumption, or progress tracking on unfiltered queries.
    Use ``iter()`` for most cases, especially with filters.

    Args:
        field_ids: Optional field IDs to include on each entry.
        field_types: Optional field types to include on each entry.
        filter: Filter expression (string or pre-parsed) applied client-side.
        limit: Page size; with a filter this becomes the *virtual* page size.
        cursor: Resume token from a previous unfiltered sequence; mutually
            exclusive with every other parameter.
        progress_callback: Optional callback called after each physical page
            fetch during filtered queries. Receives FilterStats with current
            scanned/matched counts for real-time progress updates.

    Raises:
        ValueError: If ``cursor`` is combined with any other parameter.

    Note:
        Filtering is applied client-side (Affinity V2 API does not support
        server-side filtering on list entries). When a filter is provided,
        pages are "virtualized" - the method fetches physical pages internally
        and accumulates filtered results until a full virtual page is ready.
        This ensures consistent page sizes and fast time-to-first-results.
    """
    # A cursor encodes the whole query context, so no other knob may be set.
    if cursor is not None and (
        field_ids
        or field_types
        or filter is not None
        or limit is not None
        or progress_callback is not None
    ):
        raise ValueError(
            "Cannot combine 'cursor' with other parameters; cursor encodes all query context. "
            "Start a new pagination sequence without a cursor to change parameters."
        )

    # Parse filter once for all pages (since list() with cursor can't accept filter)
    filter_expr: FilterExpression | None = None
    if filter is not None:
        if isinstance(filter, str):
            # Whitespace-only strings are treated as "no filter".
            stripped = filter.strip()
            if stripped:
                filter_expr = _parse_filter_with_hint(stripped)
        else:
            filter_expr = filter

    # No filter: use simple pagination (original behavior)
    if filter_expr is None:
        requested_cursor = cursor
        page = (
            await self.list(field_ids=field_ids, field_types=field_types, limit=limit)
            if cursor is None
            else await self.list(cursor=cursor)
        )
        while True:
            yield page
            if not page.has_next:
                return
            next_cursor = page.next_cursor
            # Also stop if the server echoes the same cursor back, which
            # would otherwise loop forever.
            if next_cursor is None or next_cursor == requested_cursor:
                return
            requested_cursor = next_cursor
            page = await self.list(cursor=next_cursor)
        return

    # With filter: use virtualized pagination for consistent page sizes
    # and fast time-to-first-results
    virtual_page_size = limit if limit is not None else 100
    buffer: list[ListEntryWithEntity] = []
    physical_cursor: str | None = None
    has_more_physical = True

    # Track filter stats for progress reporting
    total_scanned = 0
    total_matched = 0

    # Fetch first physical page WITHOUT filter so we can track accurate counts
    first_page = await self.list(field_ids=field_ids, field_types=field_types, limit=limit)
    # Track scanned count (before filtering)
    total_scanned += len(first_page.data)
    # Apply filter manually to first page (same as subsequent pages)
    filtered_first = [
        e for e in first_page.data if filter_expr.matches(_entry_to_filter_dict(e))
    ]
    total_matched += len(filtered_first)
    buffer.extend(filtered_first)
    physical_cursor = first_page.next_cursor
    has_more_physical = first_page.has_next and physical_cursor is not None

    # Report initial progress after first page fetch
    if progress_callback is not None:
        progress_callback(FilterStats(scanned=total_scanned, matched=total_matched))

    while True:
        # Yield virtual page when buffer is full or no more data
        if len(buffer) >= virtual_page_size or not has_more_physical:
            if not buffer and not has_more_physical:
                return  # No more data
            # Slice off one virtual page
            page_data = buffer[:virtual_page_size]
            buffer = buffer[virtual_page_size:]
            # has_next is true if we have more buffered or more physical pages
            has_next = len(buffer) > 0 or has_more_physical
            virtual_page = PaginatedResponse[ListEntryWithEntity](
                data=page_data,
                pagination=PaginationInfo(
                    next_cursor=None,  # Virtual pages don't support cursor resumption
                    prev_cursor=None,
                ),
            )
            # Override has_next since we know better than the pagination info
            virtual_page._has_next_override = has_next
            # Add filter stats for progress tracking
            virtual_page._filter_stats = FilterStats(
                scanned=total_scanned, matched=total_matched
            )
            yield virtual_page
            if not has_next:
                return
            continue

        # Need more data - fetch next physical page
        if not has_more_physical:
            continue  # Will yield remaining buffer above

        physical_page = await self.list(cursor=physical_cursor)
        # Track scanned count (before filtering)
        total_scanned += len(physical_page.data)
        # Apply filter manually to subsequent pages
        filtered_data = [
            e for e in physical_page.data if filter_expr.matches(_entry_to_filter_dict(e))
        ]
        total_matched += len(filtered_data)
        buffer.extend(filtered_data)
        physical_cursor = physical_page.next_cursor
        has_more_physical = physical_page.has_next and physical_cursor is not None

        # Report progress after each physical page fetch
        if progress_callback is not None:
            progress_callback(FilterStats(scanned=total_scanned, matched=total_matched))
|
|
1607
|
+
|
|
1608
|
+
async def all(
    self,
    *,
    field_ids: Sequence[AnyFieldId] | None = None,
    field_types: Sequence[FieldType] | None = None,
    filter: str | FilterExpression | None = None,
) -> AsyncIterator[ListEntryWithEntity]:
    """
    Iterate through all list entries with automatic pagination.

    Args:
        field_ids: Optional field IDs to include on each entry.
        field_types: Optional field types to include on each entry.
        filter: Filter expression (string or pre-parsed), applied client-side.

    Yields:
        Each `ListEntryWithEntity` that passes the filter (or every entry
        when no filter is given).

    Note:
        The Affinity V2 API does not support server-side filtering on list entries.
        When a filter is provided, it is applied client-side after fetching all data.
        For large lists, consider using saved views for server-side filtering.
    """
    # Parse filter once for all pages
    filter_expr: FilterExpression | None = None
    if filter is not None:
        if isinstance(filter, str):
            # Treat whitespace-only strings as no filter
            stripped = filter.strip()
            if stripped:
                filter_expr = _parse_filter_with_hint(stripped)
        else:
            filter_expr = filter
        # Emit warning once for the entire iteration
        warnings.warn(_CLIENT_SIDE_FILTER_WARNING, UserWarning, stacklevel=2)

    async def fetch_page(next_url: str | None) -> PaginatedResponse[ListEntryWithEntity]:
        # Pages 2..N arrive as fully-qualified "next" URLs from the server.
        if next_url:
            data = await self._client.get_url(next_url)
            entries = [
                _safe_model_validate(ListEntryWithEntity, e) for e in data.get("data", [])
            ]
            return PaginatedResponse[ListEntryWithEntity](
                data=entries,
                pagination=_safe_model_validate(PaginationInfo, data.get("pagination", {})),
            )
        # First page - don't pass filter to list() to avoid duplicate warnings
        return await self.list(
            field_ids=field_ids,
            field_types=field_types,
        )

    # Iterate through all pages, applying filter if provided
    async for entry in AsyncPageIterator(fetch_page):
        if filter_expr is None or filter_expr.matches(_entry_to_filter_dict(entry)):
            yield entry
|
|
1656
|
+
|
|
1657
|
+
async def iter(
    self,
    *,
    field_ids: Sequence[AnyFieldId] | None = None,
    field_types: Sequence[FieldType] | None = None,
    filter: str | FilterExpression | None = None,
) -> AsyncIterator[ListEntryWithEntity]:
    """Auto-paginate every entry on this list.

    Thin alias for `all()` (FR-006 public contract): accepts the same
    keyword arguments and yields the same entries in the same order.
    """
    stream = self.all(field_ids=field_ids, field_types=field_types, filter=filter)
    async for item in stream:
        yield item
|
|
1671
|
+
|
|
1672
|
+
# -------------------------------------------------------------------------
|
|
1673
|
+
# Membership helpers (V2 for read only)
|
|
1674
|
+
# -------------------------------------------------------------------------
|
|
1675
|
+
|
|
1676
|
+
async def find_person(self, person_id: PersonId) -> ListEntry | None:
    """Return the first list entry for *person_id* on this list, or None.

    Notes:
        - Affinity lists may contain the same entity more than once; only
          the first match in the server-provided order is returned. Use
          `find_all_person()` to surface every match.
    """
    matches = await self.find_all_person(person_id)
    if not matches:
        return None
    return matches[0]
|
|
1686
|
+
|
|
1687
|
+
async def find_all_person(self, person_id: PersonId) -> builtins.list[ListEntry]:
    """Return every list entry for *person_id* on this list (possibly empty).

    The same entity may appear multiple times on an Affinity list, so more
    than one entry can come back.
    """
    candidates = await self._all_entity_list_entries_v2(f"/persons/{person_id}/list-entries")
    matches: builtins.list[ListEntry] = []
    for candidate in candidates:
        # Keep only entries belonging to this list.
        if candidate.list_id == self._list_id:
            matches.append(candidate)
    return matches
|
|
1695
|
+
|
|
1696
|
+
async def find_company(self, company_id: CompanyId) -> ListEntry | None:
    """Return the first list entry for *company_id* on this list, or None.

    Notes:
        - Affinity lists may contain the same entity more than once; only
          the first match in the server-provided order is returned. Use
          `find_all_company()` to surface every match.
    """
    matches = await self.find_all_company(company_id)
    if not matches:
        return None
    return matches[0]
|
|
1706
|
+
|
|
1707
|
+
async def find_all_company(self, company_id: CompanyId) -> builtins.list[ListEntry]:
    """Return every list entry for *company_id* on this list (possibly empty).

    The same entity may appear multiple times on an Affinity list, so more
    than one entry can come back.
    """
    candidates = await self._all_entity_list_entries_v2(
        f"/companies/{company_id}/list-entries"
    )
    matches: builtins.list[ListEntry] = []
    for candidate in candidates:
        # Keep only entries belonging to this list.
        if candidate.list_id == self._list_id:
            matches.append(candidate)
    return matches
|
|
1717
|
+
|
|
1718
|
+
async def ensure_person(
    self,
    person_id: PersonId,
    *,
    creator_id: int | None = None,
) -> ListEntry:
    """Ensure a person is on this list (idempotent by default).

    Looks for an existing entry first and returns it when found; otherwise
    the person is added and the newly created entry is returned.

    Returns:
        The first existing list entry if present, else a new one.

    Notes:
        - The existence check prevents accidental duplicates. To create a
          duplicate row on purpose, call `add_person()` directly.
    """
    current = await self.find_person(person_id)
    if current is None:
        return await self.add_person(person_id, creator_id=creator_id)
    return current
|
|
1738
|
+
|
|
1739
|
+
async def ensure_company(
    self,
    company_id: CompanyId,
    *,
    creator_id: int | None = None,
) -> ListEntry:
    """Ensure a company is on this list (idempotent by default).

    Looks for an existing entry first and returns it when found; otherwise
    the company is added and the newly created entry is returned.

    Returns:
        The first existing list entry if present, else a new one.

    Notes:
        - The existence check prevents accidental duplicates. To create a
          duplicate row on purpose, call `add_company()` directly.
    """
    current = await self.find_company(company_id)
    if current is None:
        return await self.add_company(company_id, creator_id=creator_id)
    return current
|
|
1759
|
+
|
|
1760
|
+
async def add_person(
    self,
    person_id: PersonId,
    *,
    creator_id: int | None = None,
) -> ListEntry:
    """Add a person to this list and return the created entry."""
    entity_id = int(person_id)
    return await self._create_entry(entity_id, creator_id)
|
|
1768
|
+
|
|
1769
|
+
async def add_company(
    self,
    company_id: CompanyId,
    *,
    creator_id: int | None = None,
) -> ListEntry:
    """Add a company to this list and return the created entry."""
    entity_id = int(company_id)
    return await self._create_entry(entity_id, creator_id)
|
|
1777
|
+
|
|
1778
|
+
async def add_opportunity(
    self,
    opportunity_id: OpportunityId,
    *,
    creator_id: int | None = None,
) -> ListEntry:
    """Add an opportunity to this list and return the created entry."""
    entity_id = int(opportunity_id)
    return await self._create_entry(entity_id, creator_id)
|
|
1786
|
+
|
|
1787
|
+
async def _create_entry(
    self,
    entity_id: int,
    creator_id: int | None = None,
) -> ListEntry:
    """Internal method to create a list entry via the V1 API.

    Args:
        entity_id: ID of the person/company/opportunity to add to the list.
        creator_id: Optional ID of the user recorded as the entry creator;
            omitted from the payload when None.

    Returns:
        The newly created `ListEntry`.
    """
    payload: dict[str, Any] = {"entity_id": entity_id}
    # Explicit None check: a falsy-but-present creator_id (e.g. 0) must
    # still be sent; the previous truthiness test silently dropped it.
    if creator_id is not None:
        payload["creator_id"] = creator_id

    result = await self._client.post(
        f"/lists/{self._list_id}/list-entries",
        json=payload,
        v1=True,
    )

    return _safe_model_validate(ListEntry, result)
|
|
1804
|
+
|
|
1805
|
+
async def delete(self, entry_id: ListEntryId) -> bool:
    """Remove a list entry (row) from the list.

    Only the row is removed; the underlying entity itself is untouched.

    Returns:
        True when the V1 API reports success, False otherwise.
    """
    response = await self._client.delete(
        f"/lists/{self._list_id}/list-entries/{entry_id}",
        v1=True,
    )
    succeeded = response.get("success", False)
    return bool(succeeded)
|
|
1816
|
+
|
|
1817
|
+
# =========================================================================
|
|
1818
|
+
# Field Value Operations (V2 API)
|
|
1819
|
+
# =========================================================================
|
|
1820
|
+
|
|
1821
|
+
async def get_field_values(
    self,
    entry_id: ListEntryId,
) -> FieldValues:
    """Fetch every field value attached to a list entry."""
    payload = await self._client.get(f"/lists/{self._list_id}/list-entries/{entry_id}/fields")
    raw = payload.get("data", {})
    # A non-dict "data" payload is treated as empty rather than raised.
    if not isinstance(raw, dict):
        raw = {}
    return _safe_model_validate(FieldValues, raw)
|
|
1831
|
+
|
|
1832
|
+
async def get_field_value(
    self,
    entry_id: ListEntryId,
    field_id: AnyFieldId,
) -> Any:
    """Fetch a single field's value for a list entry.

    Returns None when the response carries no "value" key.
    """
    response = await self._client.get(
        f"/lists/{self._list_id}/list-entries/{entry_id}/fields/{field_id}"
    )
    return response.get("value")
|
|
1842
|
+
|
|
1843
|
+
async def update_field_value(
    self,
    entry_id: ListEntryId,
    field_id: AnyFieldId,
    value: Any,
) -> FieldValues:
    """Set a single field value on a list entry.

    Args:
        entry_id: Target list entry.
        field_id: Field to write.
        value: New value; the accepted shape depends on the field type.

    Returns:
        The updated field-value data.
    """
    body = {"value": value}
    response = await self._client.post(
        f"/lists/{self._list_id}/list-entries/{entry_id}/fields/{field_id}",
        json=body,
    )
    return _safe_model_validate(FieldValues, response)
|
|
1865
|
+
|
|
1866
|
+
async def batch_update_fields(
    self,
    entry_id: ListEntryId,
    updates: dict[AnyFieldId, Any],
) -> BatchOperationResponse:
    """Apply several field updates to one list entry in a single request.

    More efficient than calling `update_field_value()` once per field.

    Args:
        entry_id: Target list entry.
        updates: Mapping of field ID to its new value.

    Returns:
        Batch operation response reporting success/failure per field.
    """
    operations = []
    for field_id, new_value in updates.items():
        operations.append({"fieldId": str(field_id), "value": new_value})

    response = await self._client.patch(
        f"/lists/{self._list_id}/list-entries/{entry_id}/fields",
        json={"operations": operations},
    )

    return _safe_model_validate(BatchOperationResponse, response)
|