affinity-sdk 0.9.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- affinity/__init__.py +139 -0
- affinity/cli/__init__.py +7 -0
- affinity/cli/click_compat.py +27 -0
- affinity/cli/commands/__init__.py +1 -0
- affinity/cli/commands/_entity_files_dump.py +219 -0
- affinity/cli/commands/_list_entry_fields.py +41 -0
- affinity/cli/commands/_v1_parsing.py +77 -0
- affinity/cli/commands/company_cmds.py +2139 -0
- affinity/cli/commands/completion_cmd.py +33 -0
- affinity/cli/commands/config_cmds.py +540 -0
- affinity/cli/commands/entry_cmds.py +33 -0
- affinity/cli/commands/field_cmds.py +413 -0
- affinity/cli/commands/interaction_cmds.py +875 -0
- affinity/cli/commands/list_cmds.py +3152 -0
- affinity/cli/commands/note_cmds.py +433 -0
- affinity/cli/commands/opportunity_cmds.py +1174 -0
- affinity/cli/commands/person_cmds.py +1980 -0
- affinity/cli/commands/query_cmd.py +444 -0
- affinity/cli/commands/relationship_strength_cmds.py +62 -0
- affinity/cli/commands/reminder_cmds.py +595 -0
- affinity/cli/commands/resolve_url_cmd.py +127 -0
- affinity/cli/commands/session_cmds.py +84 -0
- affinity/cli/commands/task_cmds.py +110 -0
- affinity/cli/commands/version_cmd.py +29 -0
- affinity/cli/commands/whoami_cmd.py +36 -0
- affinity/cli/config.py +108 -0
- affinity/cli/context.py +749 -0
- affinity/cli/csv_utils.py +195 -0
- affinity/cli/date_utils.py +42 -0
- affinity/cli/decorators.py +77 -0
- affinity/cli/errors.py +28 -0
- affinity/cli/field_utils.py +355 -0
- affinity/cli/formatters.py +551 -0
- affinity/cli/help_json.py +283 -0
- affinity/cli/logging.py +100 -0
- affinity/cli/main.py +261 -0
- affinity/cli/options.py +53 -0
- affinity/cli/paths.py +32 -0
- affinity/cli/progress.py +183 -0
- affinity/cli/query/__init__.py +163 -0
- affinity/cli/query/aggregates.py +357 -0
- affinity/cli/query/dates.py +194 -0
- affinity/cli/query/exceptions.py +147 -0
- affinity/cli/query/executor.py +1236 -0
- affinity/cli/query/filters.py +248 -0
- affinity/cli/query/models.py +333 -0
- affinity/cli/query/output.py +331 -0
- affinity/cli/query/parser.py +619 -0
- affinity/cli/query/planner.py +430 -0
- affinity/cli/query/progress.py +270 -0
- affinity/cli/query/schema.py +439 -0
- affinity/cli/render.py +1589 -0
- affinity/cli/resolve.py +222 -0
- affinity/cli/resolvers.py +249 -0
- affinity/cli/results.py +308 -0
- affinity/cli/runner.py +218 -0
- affinity/cli/serialization.py +65 -0
- affinity/cli/session_cache.py +276 -0
- affinity/cli/types.py +70 -0
- affinity/client.py +771 -0
- affinity/clients/__init__.py +19 -0
- affinity/clients/http.py +3664 -0
- affinity/clients/pipeline.py +165 -0
- affinity/compare.py +501 -0
- affinity/downloads.py +114 -0
- affinity/exceptions.py +615 -0
- affinity/filters.py +1128 -0
- affinity/hooks.py +198 -0
- affinity/inbound_webhooks.py +302 -0
- affinity/models/__init__.py +163 -0
- affinity/models/entities.py +798 -0
- affinity/models/pagination.py +513 -0
- affinity/models/rate_limit_snapshot.py +48 -0
- affinity/models/secondary.py +413 -0
- affinity/models/types.py +663 -0
- affinity/policies.py +40 -0
- affinity/progress.py +22 -0
- affinity/py.typed +0 -0
- affinity/services/__init__.py +42 -0
- affinity/services/companies.py +1286 -0
- affinity/services/lists.py +1892 -0
- affinity/services/opportunities.py +1330 -0
- affinity/services/persons.py +1348 -0
- affinity/services/rate_limits.py +173 -0
- affinity/services/tasks.py +193 -0
- affinity/services/v1_only.py +2445 -0
- affinity/types.py +83 -0
- affinity_sdk-0.9.5.dist-info/METADATA +622 -0
- affinity_sdk-0.9.5.dist-info/RECORD +92 -0
- affinity_sdk-0.9.5.dist-info/WHEEL +4 -0
- affinity_sdk-0.9.5.dist-info/entry_points.txt +2 -0
- affinity_sdk-0.9.5.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,1174 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import sys
|
|
5
|
+
from collections.abc import Iterator
|
|
6
|
+
from contextlib import ExitStack
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Any
|
|
9
|
+
|
|
10
|
+
from rich.console import Console
|
|
11
|
+
from rich.progress import (
|
|
12
|
+
BarColumn,
|
|
13
|
+
Progress,
|
|
14
|
+
SpinnerColumn,
|
|
15
|
+
TaskID,
|
|
16
|
+
TextColumn,
|
|
17
|
+
TimeElapsedColumn,
|
|
18
|
+
)
|
|
19
|
+
|
|
20
|
+
from affinity.models.entities import Opportunity, OpportunityCreate, OpportunityUpdate
|
|
21
|
+
from affinity.models.pagination import PaginatedResponse, V1PaginatedResponse
|
|
22
|
+
from affinity.models.types import ListType
|
|
23
|
+
from affinity.types import CompanyId, ListId, OpportunityId, PersonId
|
|
24
|
+
|
|
25
|
+
from ..click_compat import RichCommand, RichGroup, click
|
|
26
|
+
from ..context import CLIContext
|
|
27
|
+
from ..csv_utils import write_csv_to_stdout
|
|
28
|
+
from ..decorators import category, destructive, progress_capable
|
|
29
|
+
from ..errors import CLIError
|
|
30
|
+
from ..options import output_options
|
|
31
|
+
from ..progress import ProgressManager, ProgressSettings
|
|
32
|
+
from ..resolve import resolve_list_selector
|
|
33
|
+
from ..resolvers import ResolvedEntity
|
|
34
|
+
from ..results import CommandContext
|
|
35
|
+
from ..runner import CommandOutput, run_command
|
|
36
|
+
from ..serialization import serialize_model_for_cli
|
|
37
|
+
from ._entity_files_dump import dump_entity_files_bundle
|
|
38
|
+
from .resolve_url_cmd import _parse_affinity_url
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
# Root click group: every `xaffinity opportunity ...` subcommand attaches here.
@click.group(name="opportunity", cls=RichGroup)
def opportunity_group() -> None:
    """Opportunity commands."""
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def _resolve_opportunity_selector(
    *,
    selector: str,
) -> tuple[OpportunityId, dict[str, Any]]:
    """Resolve a user-supplied selector to an opportunity id.

    Accepts either a bare numeric id or an Affinity opportunity URL.
    Returns the resolved ``OpportunityId`` together with a ``resolved``
    payload describing how the selector was interpreted.

    Raises:
        CLIError: usage error (exit code 2) when the selector is neither a
            numeric id nor a recognizable opportunity URL.
    """
    trimmed = selector.strip()

    # Case 1: a plain numeric id.
    if trimmed.isdigit():
        opp_id = OpportunityId(int(trimmed))
        entity = ResolvedEntity(
            input=selector,
            entity_id=int(opp_id),
            entity_type="opportunity",
            source="id",
        )
        return opp_id, {"opportunity": entity.to_dict()}

    # Case 2: an Affinity URL pointing at an opportunity.
    if trimmed.startswith(("http://", "https://")):
        parsed = _parse_affinity_url(trimmed)
        if parsed.type != "opportunity" or parsed.opportunity_id is None:
            raise CLIError(
                "Expected an opportunity URL like https://<tenant>.affinity.(co|com)/opportunities/<id>",
                exit_code=2,
                error_type="usage_error",
                details={"input": selector, "resolvedType": parsed.type},
            )
        opp_id = OpportunityId(int(parsed.opportunity_id))
        entity = ResolvedEntity(
            input=selector,
            entity_id=int(opp_id),
            entity_type="opportunity",
            source="url",
            canonical_url=f"https://app.affinity.co/opportunities/{int(opp_id)}",
        )
        return opp_id, {"opportunity": entity.to_dict()}

    # Neither form matched: reject with a usage hint.
    raise CLIError(
        "Unrecognized opportunity selector.",
        exit_code=2,
        error_type="usage_error",
        hint='Use a numeric id or an Affinity URL like "https://<tenant>.affinity.co/opportunities/<id>".',
        details={"input": selector},
    )
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
@category("read")
@opportunity_group.command(name="ls", cls=RichCommand)
@click.option("--page-size", "-s", type=int, default=None, help="Page size (limit).")
@click.option(
    "--cursor", type=str, default=None, help="Resume from cursor (incompatible with --page-size)."
)
@click.option(
    "--max-results", "--limit", "-n", type=int, default=None, help="Stop after N items total."
)
@click.option("--all", "-A", "all_pages", is_flag=True, help="Fetch all pages.")
@click.option(
    "--query",
    "-q",
    type=str,
    default=None,
    help="Fuzzy text search (simple matching).",
)
@click.option("--csv", "csv_flag", is_flag=True, help="Output as CSV (to stdout).")
@click.option(
    "--csv-bom",
    is_flag=True,
    help="Add UTF-8 BOM for Excel (use with redirection: --csv --csv-bom > file.csv).",
)
@output_options
@click.pass_obj
def opportunity_ls(
    ctx: CLIContext,
    *,
    page_size: int | None,
    cursor: str | None,
    max_results: int | None,
    all_pages: bool,
    query: str | None,
    csv_flag: bool,
    csv_bom: bool,
) -> None:
    """
    List opportunities.

    Use --query for free-text search.

    Examples:
    - `xaffinity opportunity ls`
    - `xaffinity opportunity ls --page-size 200`
    - `xaffinity opportunity ls --query "Series A" --all`
    - `xaffinity opportunity ls --cursor <cursor>`
    - `xaffinity opportunity ls --all --csv > opportunities.csv`
    - `xaffinity opportunity ls --all --csv --csv-bom > opportunities.csv`
    """

    # The real work is deferred to `fn` so run_command can own error handling,
    # warning collection, and output rendering uniformly across commands.
    def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
        # Check mutual exclusivity: --csv and --json
        if csv_flag and ctx.output == "json":
            raise CLIError(
                "--csv and --json are mutually exclusive.",
                exit_code=2,
                error_type="usage_error",
            )

        client = ctx.get_client(warnings=warnings)

        # A cursor already encodes the page size it was issued with, so the two
        # options cannot be combined.
        if cursor is not None and page_size is not None:
            raise CLIError(
                "--cursor cannot be combined with --page-size.",
                exit_code=2,
                error_type="usage_error",
            )

        # Build CommandContext upfront for all return paths
        ctx_modifiers: dict[str, object] = {}
        if page_size is not None:
            ctx_modifiers["pageSize"] = page_size
        if cursor is not None:
            ctx_modifiers["cursor"] = cursor
        if max_results is not None:
            ctx_modifiers["maxResults"] = max_results
        if all_pages:
            ctx_modifiers["allPages"] = True
        if query:
            ctx_modifiers["query"] = query
        if csv_flag:
            ctx_modifiers["csv"] = True
        if csv_bom:
            ctx_modifiers["csvBom"] = True

        cmd_context = CommandContext(
            name="opportunity ls",
            inputs={},
            modifiers=ctx_modifiers,
        )

        rows: list[dict[str, object]] = []
        first_page = True
        use_v1_search = query is not None

        # Progress is shown on stderr only when allowed by --progress / --quiet
        # and (unless forced with "always") stderr is a TTY.
        show_progress = (
            ctx.progress != "never"
            and not ctx.quiet
            and (ctx.progress == "always" or sys.stderr.isatty())
        )

        # Use V1 search when --query is provided, otherwise V2 list
        pages_iter: (
            Iterator[V1PaginatedResponse[Opportunity]] | Iterator[PaginatedResponse[Opportunity]]
        )
        if use_v1_search:
            assert query is not None
            pages_iter = client.opportunities.search_pages(
                query,
                page_size=page_size,
                page_token=cursor,
            )
        else:
            pages_iter = client.opportunities.pages(limit=page_size, cursor=cursor)

        with ExitStack() as stack:
            progress: Progress | None = None
            task_id: TaskID | None = None
            if show_progress:
                progress = stack.enter_context(
                    Progress(
                        TextColumn("{task.description}"),
                        BarColumn(),
                        TextColumn("{task.completed} rows"),
                        TimeElapsedColumn(),
                        console=Console(file=sys.stderr),
                        # transient=True removes the bar once finished so it
                        # never pollutes piped/redirected output.
                        transient=True,
                    )
                )
                task_id = progress.add_task("Fetching", total=max_results)

            for page in pages_iter:
                # Get next cursor/token based on API type
                # (V1 pages expose `next_page_token`; V2 pages a `pagination` object).
                if hasattr(page, "next_page_token"):
                    next_cursor = page.next_page_token
                    prev_cursor = None  # V1 doesn't have prev cursor
                else:
                    next_cursor = page.pagination.next_cursor
                    prev_cursor = page.pagination.prev_cursor

                for idx, opportunity in enumerate(page.data):
                    rows.append(_opportunity_ls_row(opportunity))
                    if progress and task_id is not None:
                        progress.update(task_id, completed=len(rows))
                    if max_results is not None and len(rows) >= max_results:
                        # Stopped inside a page: there were more items we did not emit.
                        stopped_mid_page = idx < (len(page.data) - 1)
                        if stopped_mid_page:
                            warnings.append(
                                "Results limited by --max-results. Use --all to fetch all results."
                            )
                        pagination = None
                        # Only surface a resume cursor when we ended exactly on a
                        # page boundary and the cursor actually advanced.
                        if next_cursor and not stopped_mid_page and next_cursor != cursor:
                            pagination = {
                                "opportunities": {
                                    "nextCursor": next_cursor,
                                    "prevCursor": prev_cursor,
                                }
                            }
                        # NOTE(review): this early return bypasses the --csv branch
                        # below, so --csv combined with --max-results emits no CSV —
                        # confirm whether that is intended.
                        return CommandOutput(
                            data={"opportunities": rows[:max_results]},
                            context=cmd_context,
                            pagination=pagination,
                            api_called=True,
                        )

                # Default mode (no --all / --max-results): return after the first
                # page with a resume cursor.
                # NOTE(review): this path also bypasses the --csv branch below, so
                # plain `opportunity ls --csv` without --all never writes CSV —
                # verify against intended behavior.
                if first_page and not all_pages and max_results is None:
                    return CommandOutput(
                        data={"opportunities": rows},
                        context=cmd_context,
                        pagination=(
                            {
                                "opportunities": {
                                    "nextCursor": next_cursor,
                                    "prevCursor": prev_cursor,
                                }
                            }
                            if next_cursor
                            else None
                        ),
                        api_called=True,
                    )
                first_page = False

        # CSV output to stdout
        if csv_flag:
            # Column order follows the first row's key order (insertion order).
            fieldnames = list(rows[0].keys()) if rows else []
            write_csv_to_stdout(
                rows=rows,
                fieldnames=fieldnames,
                bom=csv_bom,
            )
            # Exit directly: CSV has already been streamed, skip run_command's
            # normal result rendering.
            sys.exit(0)

        return CommandOutput(
            data={"opportunities": rows},
            context=cmd_context,
            pagination=None,
            api_called=True,
        )

    run_command(ctx, command="opportunity ls", fn=fn)
|
|
290
|
+
|
|
291
|
+
|
|
292
|
+
def _opportunity_ls_row(opportunity: Opportunity) -> dict[str, object]:
    """Project an Opportunity onto the flat row emitted by `opportunity ls`."""
    list_id = opportunity.list_id
    row: dict[str, object] = {
        "id": int(opportunity.id),
        "name": opportunity.name,
        # A falsy list id (None/0) is normalized to None.
        "listId": int(list_id) if list_id else None,
    }
    return row
|
|
299
|
+
|
|
300
|
+
|
|
301
|
+
@category("read")
@opportunity_group.command(name="get", cls=RichCommand)
@click.argument("opportunity_selector", type=str)
@click.option(
    "--details",
    "details",
    is_flag=True,
    help="Fetch a fuller payload with associations and list entries.",
)
@click.option(
    "--expand",
    "expand",
    multiple=True,
    type=click.Choice(["people", "companies"]),
    help="Include related data (repeatable).",
)
@click.option(
    "--max-results",
    "--limit",
    "-n",
    type=int,
    default=None,
    help="Maximum items per expansion (default: 100).",
)
@click.option(
    "--all",
    "all_pages",
    is_flag=True,
    help="Fetch all expanded items (no limit).",
)
@output_options
@click.pass_obj
def opportunity_get(
    ctx: CLIContext,
    opportunity_selector: str,
    *,
    details: bool,
    expand: tuple[str, ...],
    max_results: int | None,
    all_pages: bool,
) -> None:
    """
    Get an opportunity by id or URL.

    Examples:
    - `xaffinity opportunity get 123`
    - `xaffinity opportunity get https://mydomain.affinity.com/opportunities/123`
    - `xaffinity opportunity get 123 --details`
    - `xaffinity opportunity get 123 --expand people`
    - `xaffinity opportunity get 123 --expand people --expand companies`
    """

    # Deferred to `fn` so run_command owns error handling and rendering.
    def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
        client = ctx.get_client(warnings=warnings)
        opportunity_id, resolved = _resolve_opportunity_selector(selector=opportunity_selector)

        # Build CommandContext for opportunity get
        ctx_modifiers: dict[str, object] = {}
        if details:
            ctx_modifiers["details"] = True
        if expand:
            ctx_modifiers["expand"] = list(expand)
        if max_results is not None:
            ctx_modifiers["maxResults"] = max_results
        if all_pages:
            ctx_modifiers["allPages"] = True

        cmd_context = CommandContext(
            name="opportunity get",
            inputs={"selector": opportunity_selector},
            modifiers=ctx_modifiers,
        )

        expand_set = {e.strip() for e in expand if e and e.strip()}

        # Use service methods instead of raw HTTP
        if details:
            opp = client.opportunities.get_details(opportunity_id)
        else:
            opp = client.opportunities.get(opportunity_id)

        data: dict[str, Any] = {"opportunity": serialize_model_for_cli(opp)}
        # The basic endpoint returns no field data; drop the empty key so the
        # payload shape reflects what was actually fetched.
        if not details and not opp.fields:
            data["opportunity"].pop("fields", None)

        # Fetch associations once if both people and companies are requested (saves 1 V1 call)
        want_people = "people" in expand_set
        want_companies = "companies" in expand_set
        cached_person_ids: list[int] | None = None
        cached_company_ids: list[int] | None = None

        # Show spinner for expansion operations
        show_expand_progress = (
            expand_set
            and ctx.progress != "never"
            and not ctx.quiet
            and (ctx.progress == "always" or sys.stderr.isatty())
        )

        with ExitStack() as stack:
            if show_expand_progress:
                progress = stack.enter_context(
                    Progress(
                        SpinnerColumn(),
                        TextColumn("Fetching expanded data..."),
                        console=Console(file=sys.stderr),
                        transient=True,
                    )
                )
                progress.add_task("expand", total=None)

            if want_people and want_companies:
                assoc = client.opportunities.get_associations(opportunity_id)
                cached_person_ids = [int(pid) for pid in assoc.person_ids]
                cached_company_ids = [int(cid) for cid in assoc.company_ids]

            # Handle people expansion
            if want_people:
                # Cap defaults to 100 unless --all was given (then unlimited).
                people_cap = max_results
                if people_cap is None and not all_pages:
                    people_cap = 100
                if people_cap is not None and people_cap <= 0:
                    data["people"] = []
                else:
                    # Use cached IDs if available, otherwise fetch
                    if cached_person_ids is not None:
                        person_ids = cached_person_ids
                    else:
                        person_ids = [
                            int(pid)
                            for pid in client.opportunities.get_associated_person_ids(
                                opportunity_id
                            )
                        ]
                    total_people = len(person_ids)
                    if people_cap is not None and total_people > people_cap:
                        warnings.append(
                            f"People truncated at {people_cap:,} items; re-run with --all "
                            "or a higher --max-results to fetch more."
                        )
                    if total_people > 50:
                        # BUGFIX: people_cap is None when --all is given, and
                        # min(None, int) raises TypeError. Fall back to the full
                        # count when there is no cap.
                        fetch_count = (
                            total_people
                            if people_cap is None
                            else min(people_cap, total_people)
                        )
                        warnings.append(
                            f"Fetching {fetch_count} people requires "
                            f"{fetch_count + 1} API calls."
                        )

                    people = client.opportunities.get_associated_people(
                        opportunity_id,
                        max_results=people_cap,
                    )
                    data["people"] = [
                        {
                            "id": int(person.id),
                            "name": person.full_name,
                            "primaryEmail": person.primary_email,
                            # Normalize enum -> raw value; plain/falsy types pass
                            # through (falsy becomes None).
                            "type": (
                                person.type.value
                                if hasattr(person.type, "value")
                                else person.type
                                if person.type
                                else None
                            ),
                        }
                        for person in people
                    ]

            # Handle companies expansion
            if want_companies:
                companies_cap = max_results
                if companies_cap is None and not all_pages:
                    companies_cap = 100
                if companies_cap is not None and companies_cap <= 0:
                    data["companies"] = []
                else:
                    # Use cached IDs if available, otherwise fetch
                    if cached_company_ids is not None:
                        company_ids = cached_company_ids
                    else:
                        company_ids = [
                            int(cid)
                            for cid in client.opportunities.get_associated_company_ids(
                                opportunity_id
                            )
                        ]
                    total_companies = len(company_ids)
                    if companies_cap is not None and total_companies > companies_cap:
                        warnings.append(
                            f"Companies truncated at {companies_cap:,} items; re-run with --all "
                            "or a higher --max-results to fetch more."
                        )

                    companies = client.opportunities.get_associated_companies(
                        opportunity_id,
                        max_results=companies_cap,
                    )
                    data["companies"] = [
                        {
                            "id": int(company.id),
                            "name": company.name,
                            "domain": company.domain,
                        }
                        for company in companies
                    ]

        if expand_set:
            resolved["expand"] = sorted(expand_set)

        # Fetch field metadata if fields are present in response
        opp_payload = data.get("opportunity", {})
        opp_fields = opp_payload.get("fields") if isinstance(opp_payload, dict) else None
        opp_list_id = opp_payload.get("listId") if isinstance(opp_payload, dict) else None
        if isinstance(opp_fields, list) and opp_fields and opp_list_id is not None:
            try:
                from ..field_utils import build_field_id_to_name_map

                field_metadata = client.lists.get_fields(ListId(int(opp_list_id)))
                resolved["fieldMetadata"] = build_field_id_to_name_map(field_metadata)
            except Exception:
                # Field metadata is optional - continue without names if fetch fails
                pass

        return CommandOutput(
            data=data,
            context=cmd_context,
            resolved=resolved,
            api_called=True,
        )

    run_command(ctx, command="opportunity get", fn=fn)
|
|
530
|
+
|
|
531
|
+
|
|
532
|
+
@category("write")
@opportunity_group.command(name="create", cls=RichCommand)
@click.option("--name", required=True, help="Opportunity name.")
@click.option("--list", "list_selector", required=True, help="List id or exact list name.")
@click.option(
    "--person-id",
    "person_ids",
    multiple=True,
    type=int,
    help="Associate a person id (repeatable).",
)
@click.option(
    "--company-id",
    "company_ids",
    multiple=True,
    type=int,
    help="Associate a company id (repeatable).",
)
@output_options
@click.pass_obj
def opportunity_create(
    ctx: CLIContext,
    *,
    name: str,
    list_selector: str,
    person_ids: tuple[int, ...],
    company_ids: tuple[int, ...],
) -> None:
    """
    Create a new opportunity.

    Examples:
    - `xaffinity opportunity create --name "Series A" --list "Dealflow"`
    - `xaffinity opportunity create --name "Series A" --list 123 --person-id 1 --company-id 2`
    """

    # Deferred to `fn` so run_command owns error handling and rendering.
    def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
        client = ctx.get_client(warnings=warnings)
        cache = ctx.session_cache
        # The --list selector may be a numeric id or an exact list name.
        resolved_list = resolve_list_selector(client=client, selector=list_selector, cache=cache)
        # Opportunities can only be created on opportunity-type lists.
        if resolved_list.list.type != ListType.OPPORTUNITY:
            raise CLIError(
                "List is not an opportunity list.",
                exit_code=2,
                error_type="usage_error",
                details={
                    "listId": int(resolved_list.list.id),
                    # NOTE(review): `type` may be an enum; confirm it serializes
                    # cleanly into the error details payload.
                    "listType": resolved_list.list.type,
                },
            )

        data = OpportunityCreate(
            name=name,
            list_id=ListId(int(resolved_list.list.id)),
            person_ids=[PersonId(pid) for pid in person_ids],
            company_ids=[CompanyId(cid) for cid in company_ids],
        )
        created = client.opportunities.create(data)
        payload = serialize_model_for_cli(created)

        # Build CommandContext for opportunity create
        ctx_modifiers: dict[str, object] = {"name": name}
        if person_ids:
            ctx_modifiers["personIds"] = list(person_ids)
        if company_ids:
            ctx_modifiers["companyIds"] = list(company_ids)

        # Extract resolved list name for context
        ctx_resolved: dict[str, str] | None = None
        list_resolved = resolved_list.resolved.get("list", {})
        if isinstance(list_resolved, dict):
            list_name = list_resolved.get("entityName")
            if list_name:
                # NOTE(review): the list *name* is stored under the "listId" key —
                # looks intentional (display label for the input id) but verify.
                ctx_resolved = {"listId": str(list_name)}

        cmd_context = CommandContext(
            name="opportunity create",
            inputs={"listId": int(resolved_list.list.id)},
            modifiers=ctx_modifiers,
            resolved=ctx_resolved,
        )

        return CommandOutput(
            data={"opportunity": payload},
            context=cmd_context,
            resolved=resolved_list.resolved,
            api_called=True,
        )

    run_command(ctx, command="opportunity create", fn=fn)
|
|
622
|
+
|
|
623
|
+
|
|
624
|
+
@category("write")
@opportunity_group.command(name="update", cls=RichCommand)
@click.argument("opportunity_id", type=int)
@click.option("--name", default=None, help="Updated opportunity name.")
@click.option(
    "--person-id",
    "person_ids",
    multiple=True,
    type=int,
    help="Replace associated person ids (repeatable).",
)
@click.option(
    "--company-id",
    "company_ids",
    multiple=True,
    type=int,
    help="Replace associated company ids (repeatable).",
)
@output_options
@click.pass_obj
def opportunity_update(
    ctx: CLIContext,
    opportunity_id: int,
    *,
    name: str | None,
    person_ids: tuple[int, ...],
    company_ids: tuple[int, ...],
) -> None:
    """
    Update an opportunity (replaces association arrays when provided).

    Examples:
    - `xaffinity opportunity update 123 --name "Series A (Closed)"`
    - `xaffinity opportunity update 123 --person-id 1 --person-id 2`
    """

    # Deferred to `fn` so run_command owns error handling and rendering.
    def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
        client = ctx.get_client(warnings=warnings)

        # Require at least one update; otherwise the call would be a no-op.
        # NOTE(review): `--name ""` passes this check (it is not None) but is
        # skipped by the truthiness test in the modifiers below — confirm
        # whether an empty name should be rejected outright.
        if name is None and not person_ids and not company_ids:
            raise CLIError(
                "No updates specified.",
                exit_code=2,
                error_type="usage_error",
                hint="Provide at least one of --name, --person-id, or --company-id.",
            )

        # None for an association field means "leave unchanged"; a provided
        # tuple replaces the whole array server-side.
        data = OpportunityUpdate(
            name=name,
            person_ids=[PersonId(pid) for pid in person_ids] if person_ids else None,
            company_ids=[CompanyId(cid) for cid in company_ids] if company_ids else None,
        )
        updated = client.opportunities.update(OpportunityId(opportunity_id), data)
        payload = serialize_model_for_cli(updated)

        resolved = ResolvedEntity(
            input=str(opportunity_id),
            entity_id=int(opportunity_id),
            entity_type="opportunity",
            source="id",
        )

        # Build CommandContext for opportunity update
        ctx_modifiers: dict[str, object] = {}
        if name:
            ctx_modifiers["name"] = name
        if person_ids:
            ctx_modifiers["personIds"] = list(person_ids)
        if company_ids:
            ctx_modifiers["companyIds"] = list(company_ids)

        cmd_context = CommandContext(
            name="opportunity update",
            inputs={"opportunityId": opportunity_id},
            modifiers=ctx_modifiers,
        )

        return CommandOutput(
            data={"opportunity": payload},
            context=cmd_context,
            resolved={"opportunity": resolved.to_dict()},
            api_called=True,
        )

    run_command(ctx, command="opportunity update", fn=fn)
|
|
709
|
+
|
|
710
|
+
|
|
711
|
+
@category("write")
@destructive
@opportunity_group.command(name="delete", cls=RichCommand)
@click.argument("opportunity_id", type=int)
@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt.")
@output_options
@click.pass_obj
def opportunity_delete(
    ctx: CLIContext,
    opportunity_id: int,
    yes: bool,
) -> None:
    """
    Delete an opportunity.

    Example:
    - `xaffinity opportunity delete 123 --yes`
    """
    # Confirm before dispatching into run_command; click aborts the whole
    # command on "no". NOTE(review): this prompt fires even in scripted/JSON
    # output modes unless --yes is given — confirm that is intended.
    if not yes:
        click.confirm(f"Delete opportunity {opportunity_id}?", abort=True)

    # Deferred to `fn` so run_command owns error handling and rendering.
    def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
        client = ctx.get_client(warnings=warnings)
        success = client.opportunities.delete(OpportunityId(opportunity_id))

        resolved = ResolvedEntity(
            input=str(opportunity_id),
            entity_id=int(opportunity_id),
            entity_type="opportunity",
            source="id",
        )

        cmd_context = CommandContext(
            name="opportunity delete",
            inputs={"opportunityId": opportunity_id},
            modifiers={},
        )

        return CommandOutput(
            data={"opportunityId": opportunity_id, "success": success},
            context=cmd_context,
            resolved={"opportunity": resolved.to_dict()},
            api_called=True,
        )

    run_command(ctx, command="opportunity delete", fn=fn)
|
|
757
|
+
|
|
758
|
+
|
|
759
|
+
@opportunity_group.group(name="files", cls=RichGroup)
|
|
760
|
+
def opportunity_files_group() -> None:
|
|
761
|
+
"""Opportunity files."""
|
|
762
|
+
|
|
763
|
+
|
|
764
|
+
@category("read")
|
|
765
|
+
@opportunity_files_group.command(name="dump", cls=RichCommand)
|
|
766
|
+
@click.argument("opportunity_id", type=int)
|
|
767
|
+
@click.option(
|
|
768
|
+
"--out",
|
|
769
|
+
"out_dir",
|
|
770
|
+
type=click.Path(),
|
|
771
|
+
default=None,
|
|
772
|
+
help="Output directory for downloaded files.",
|
|
773
|
+
)
|
|
774
|
+
@click.option("--overwrite", is_flag=True, help="Overwrite existing files.")
|
|
775
|
+
@click.option(
|
|
776
|
+
"--concurrency", type=int, default=3, show_default=True, help="Number of concurrent downloads."
|
|
777
|
+
)
|
|
778
|
+
@click.option(
|
|
779
|
+
"--page-size",
|
|
780
|
+
type=int,
|
|
781
|
+
default=100,
|
|
782
|
+
show_default=True,
|
|
783
|
+
help="Page size for file listing (max 100).",
|
|
784
|
+
)
|
|
785
|
+
@click.option("--max-files", type=int, default=None, help="Stop after N files.")
|
|
786
|
+
@output_options
|
|
787
|
+
@click.pass_obj
|
|
788
|
+
def opportunity_files_dump(
|
|
789
|
+
ctx: CLIContext,
|
|
790
|
+
opportunity_id: int,
|
|
791
|
+
*,
|
|
792
|
+
out_dir: str | None,
|
|
793
|
+
overwrite: bool,
|
|
794
|
+
concurrency: int,
|
|
795
|
+
page_size: int,
|
|
796
|
+
max_files: int | None,
|
|
797
|
+
) -> None:
|
|
798
|
+
"""Download all files attached to an opportunity.
|
|
799
|
+
|
|
800
|
+
Creates a bundle directory with:
|
|
801
|
+
- files/ subdirectory containing all downloaded files
|
|
802
|
+
- manifest.json with file metadata
|
|
803
|
+
|
|
804
|
+
Example:
|
|
805
|
+
xaffinity opportunity files dump 12345 --out ./my-opp-files
|
|
806
|
+
"""
|
|
807
|
+
|
|
808
|
+
def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
|
|
809
|
+
# Build CommandContext
|
|
810
|
+
ctx_modifiers: dict[str, object] = {}
|
|
811
|
+
if out_dir:
|
|
812
|
+
ctx_modifiers["outDir"] = out_dir
|
|
813
|
+
if overwrite:
|
|
814
|
+
ctx_modifiers["overwrite"] = True
|
|
815
|
+
if concurrency != 4:
|
|
816
|
+
ctx_modifiers["concurrency"] = concurrency
|
|
817
|
+
if page_size != 100:
|
|
818
|
+
ctx_modifiers["pageSize"] = page_size
|
|
819
|
+
if max_files is not None:
|
|
820
|
+
ctx_modifiers["maxFiles"] = max_files
|
|
821
|
+
|
|
822
|
+
cmd_context = CommandContext(
|
|
823
|
+
name="opportunity files dump",
|
|
824
|
+
inputs={"opportunityId": opportunity_id},
|
|
825
|
+
modifiers=ctx_modifiers,
|
|
826
|
+
)
|
|
827
|
+
|
|
828
|
+
return asyncio.run(
|
|
829
|
+
dump_entity_files_bundle(
|
|
830
|
+
ctx=ctx,
|
|
831
|
+
warnings=warnings,
|
|
832
|
+
out_dir=out_dir,
|
|
833
|
+
overwrite=overwrite,
|
|
834
|
+
concurrency=concurrency,
|
|
835
|
+
page_size=page_size,
|
|
836
|
+
max_files=max_files,
|
|
837
|
+
default_dirname=f"affinity-opportunity-{opportunity_id}-files",
|
|
838
|
+
manifest_entity={"type": "opportunity", "opportunityId": opportunity_id},
|
|
839
|
+
files_list_kwargs={"opportunity_id": OpportunityId(opportunity_id)},
|
|
840
|
+
context=cmd_context,
|
|
841
|
+
)
|
|
842
|
+
)
|
|
843
|
+
|
|
844
|
+
run_command(ctx, command="opportunity files dump", fn=fn)
|
|
845
|
+
|
|
846
|
+
|
|
847
|
+
@category("write")
|
|
848
|
+
@progress_capable
|
|
849
|
+
@opportunity_files_group.command(name="upload", cls=RichCommand)
|
|
850
|
+
@click.argument("opportunity_id", type=int)
|
|
851
|
+
@click.option(
|
|
852
|
+
"--file",
|
|
853
|
+
"file_paths",
|
|
854
|
+
type=click.Path(exists=False),
|
|
855
|
+
multiple=True,
|
|
856
|
+
required=True,
|
|
857
|
+
help="File path to upload (repeatable).",
|
|
858
|
+
)
|
|
859
|
+
@output_options
|
|
860
|
+
@click.pass_obj
|
|
861
|
+
def opportunity_files_upload(
|
|
862
|
+
ctx: CLIContext,
|
|
863
|
+
opportunity_id: int,
|
|
864
|
+
*,
|
|
865
|
+
file_paths: tuple[str, ...],
|
|
866
|
+
) -> None:
|
|
867
|
+
"""
|
|
868
|
+
Upload files to an opportunity.
|
|
869
|
+
|
|
870
|
+
Examples:
|
|
871
|
+
|
|
872
|
+
- `xaffinity opportunity files upload 123 --file doc.pdf`
|
|
873
|
+
- `xaffinity opportunity files upload 123 --file a.pdf --file b.pdf`
|
|
874
|
+
"""
|
|
875
|
+
|
|
876
|
+
def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
|
|
877
|
+
client = ctx.get_client(warnings=warnings)
|
|
878
|
+
|
|
879
|
+
# Validate all file paths first
|
|
880
|
+
paths: list[Path] = []
|
|
881
|
+
for fp in file_paths:
|
|
882
|
+
p = Path(fp)
|
|
883
|
+
if not p.exists():
|
|
884
|
+
raise CLIError(
|
|
885
|
+
f"File not found: {fp}",
|
|
886
|
+
exit_code=2,
|
|
887
|
+
error_type="usage_error",
|
|
888
|
+
hint="Check the file path and try again.",
|
|
889
|
+
)
|
|
890
|
+
if not p.is_file():
|
|
891
|
+
raise CLIError(
|
|
892
|
+
f"Not a regular file: {fp}",
|
|
893
|
+
exit_code=2,
|
|
894
|
+
error_type="usage_error",
|
|
895
|
+
hint="Only regular files can be uploaded, not directories.",
|
|
896
|
+
)
|
|
897
|
+
paths.append(p)
|
|
898
|
+
|
|
899
|
+
results: list[dict[str, object]] = []
|
|
900
|
+
settings = ProgressSettings(mode=ctx.progress, quiet=ctx.quiet)
|
|
901
|
+
|
|
902
|
+
with ProgressManager(settings=settings) as pm:
|
|
903
|
+
for p in paths:
|
|
904
|
+
file_size = p.stat().st_size
|
|
905
|
+
_task_id, cb = pm.task(
|
|
906
|
+
description=f"upload {p.name}",
|
|
907
|
+
total_bytes=file_size,
|
|
908
|
+
)
|
|
909
|
+
success = client.files.upload_path(
|
|
910
|
+
p,
|
|
911
|
+
opportunity_id=OpportunityId(opportunity_id),
|
|
912
|
+
on_progress=cb,
|
|
913
|
+
)
|
|
914
|
+
results.append(
|
|
915
|
+
{
|
|
916
|
+
"file": str(p),
|
|
917
|
+
"filename": p.name,
|
|
918
|
+
"size": file_size,
|
|
919
|
+
"success": success,
|
|
920
|
+
}
|
|
921
|
+
)
|
|
922
|
+
|
|
923
|
+
cmd_context = CommandContext(
|
|
924
|
+
name="opportunity files upload",
|
|
925
|
+
inputs={"opportunityId": opportunity_id},
|
|
926
|
+
modifiers={"files": list(file_paths)},
|
|
927
|
+
)
|
|
928
|
+
|
|
929
|
+
return CommandOutput(
|
|
930
|
+
data={"uploads": results, "opportunityId": opportunity_id},
|
|
931
|
+
context=cmd_context,
|
|
932
|
+
api_called=True,
|
|
933
|
+
)
|
|
934
|
+
|
|
935
|
+
run_command(ctx, command="opportunity files upload", fn=fn)
|
|
936
|
+
|
|
937
|
+
|
|
938
|
+
def _get_opportunity_list_id(*, client: Any, opportunity_id: int) -> int:
    """Fetch opportunity and return its list_id."""
    opportunity = client.opportunities.get(OpportunityId(opportunity_id))
    list_id = opportunity.list_id
    # An opportunity without a list_id cannot be mapped to list-entry
    # operations; surface that as an internal error.
    if list_id is None:
        raise CLIError(
            "Opportunity has no list_id.",
            exit_code=2,
            error_type="internal_error",
        )
    return int(list_id)
|
|
948
|
+
|
|
949
|
+
|
|
950
|
+
@category("write")
|
|
951
|
+
@opportunity_group.command(name="field", cls=RichCommand)
|
|
952
|
+
@click.argument("opportunity_id", type=int)
|
|
953
|
+
@click.option(
|
|
954
|
+
"--set",
|
|
955
|
+
"set_values",
|
|
956
|
+
nargs=2,
|
|
957
|
+
multiple=True,
|
|
958
|
+
metavar="FIELD VALUE",
|
|
959
|
+
help="Set field value (repeatable). Use two args: FIELD VALUE.",
|
|
960
|
+
)
|
|
961
|
+
@click.option(
|
|
962
|
+
"--unset",
|
|
963
|
+
"unset_fields",
|
|
964
|
+
multiple=True,
|
|
965
|
+
metavar="FIELD",
|
|
966
|
+
help="Unset field (repeatable). Removes all values for the field.",
|
|
967
|
+
)
|
|
968
|
+
@click.option(
|
|
969
|
+
"--set-json",
|
|
970
|
+
"json_input",
|
|
971
|
+
type=str,
|
|
972
|
+
help="JSON object of field:value pairs to set.",
|
|
973
|
+
)
|
|
974
|
+
@click.option(
|
|
975
|
+
"--get",
|
|
976
|
+
"get_fields",
|
|
977
|
+
multiple=True,
|
|
978
|
+
metavar="FIELD",
|
|
979
|
+
help="Get specific field values (repeatable).",
|
|
980
|
+
)
|
|
981
|
+
@output_options
|
|
982
|
+
@click.pass_obj
|
|
983
|
+
def opportunity_field(
|
|
984
|
+
ctx: CLIContext,
|
|
985
|
+
opportunity_id: int,
|
|
986
|
+
*,
|
|
987
|
+
set_values: tuple[tuple[str, str], ...],
|
|
988
|
+
unset_fields: tuple[str, ...],
|
|
989
|
+
json_input: str | None,
|
|
990
|
+
get_fields: tuple[str, ...],
|
|
991
|
+
) -> None:
|
|
992
|
+
"""
|
|
993
|
+
Manage opportunity field values.
|
|
994
|
+
|
|
995
|
+
Unified command for getting, setting, and unsetting field values.
|
|
996
|
+
For field names with spaces, use quotes.
|
|
997
|
+
|
|
998
|
+
Examples:
|
|
999
|
+
|
|
1000
|
+
- `xaffinity opportunity field 123 --set Status "Active"`
|
|
1001
|
+
- `xaffinity opportunity field 123 --set Status "Active" --set Stage "Negotiation"`
|
|
1002
|
+
- `xaffinity opportunity field 123 --unset Status`
|
|
1003
|
+
- `xaffinity opportunity field 123 --set-json '{"Status": "Active", "Stage": "Negotiation"}'`
|
|
1004
|
+
- `xaffinity opportunity field 123 --get Status --get Stage`
|
|
1005
|
+
"""
|
|
1006
|
+
import json as json_module
|
|
1007
|
+
|
|
1008
|
+
def fn(ctx: CLIContext, warnings: list[str]) -> CommandOutput:
|
|
1009
|
+
from affinity.models.entities import FieldValueCreate
|
|
1010
|
+
from affinity.types import FieldId as FieldIdType
|
|
1011
|
+
|
|
1012
|
+
from ..field_utils import (
|
|
1013
|
+
FieldResolver,
|
|
1014
|
+
fetch_field_metadata,
|
|
1015
|
+
find_field_values_for_field,
|
|
1016
|
+
)
|
|
1017
|
+
|
|
1018
|
+
# Validate: at least one operation must be specified
|
|
1019
|
+
has_set = bool(set_values) or bool(json_input)
|
|
1020
|
+
has_unset = bool(unset_fields)
|
|
1021
|
+
has_get = bool(get_fields)
|
|
1022
|
+
|
|
1023
|
+
if not has_set and not has_unset and not has_get:
|
|
1024
|
+
raise CLIError(
|
|
1025
|
+
"Provide at least one of --set, --unset, --set-json, or --get.",
|
|
1026
|
+
exit_code=2,
|
|
1027
|
+
error_type="usage_error",
|
|
1028
|
+
)
|
|
1029
|
+
|
|
1030
|
+
# Validate: --get is exclusive (can't mix read with write)
|
|
1031
|
+
if has_get and (has_set or has_unset):
|
|
1032
|
+
raise CLIError(
|
|
1033
|
+
"--get cannot be combined with --set, --unset, or --set-json.",
|
|
1034
|
+
exit_code=2,
|
|
1035
|
+
error_type="usage_error",
|
|
1036
|
+
)
|
|
1037
|
+
|
|
1038
|
+
client = ctx.get_client(warnings=warnings)
|
|
1039
|
+
field_metadata = fetch_field_metadata(client=client, entity_type="opportunity")
|
|
1040
|
+
resolver = FieldResolver(field_metadata)
|
|
1041
|
+
|
|
1042
|
+
results: dict[str, Any] = {}
|
|
1043
|
+
|
|
1044
|
+
# Build modifiers for CommandContext
|
|
1045
|
+
ctx_modifiers: dict[str, object] = {}
|
|
1046
|
+
if set_values:
|
|
1047
|
+
ctx_modifiers["set"] = [list(sv) for sv in set_values]
|
|
1048
|
+
if unset_fields:
|
|
1049
|
+
ctx_modifiers["unset"] = list(unset_fields)
|
|
1050
|
+
if json_input:
|
|
1051
|
+
ctx_modifiers["json"] = json_input
|
|
1052
|
+
if get_fields:
|
|
1053
|
+
ctx_modifiers["get"] = list(get_fields)
|
|
1054
|
+
|
|
1055
|
+
# Handle --get: read field values
|
|
1056
|
+
if has_get:
|
|
1057
|
+
existing_values = client.field_values.list(opportunity_id=OpportunityId(opportunity_id))
|
|
1058
|
+
field_results: dict[str, Any] = {}
|
|
1059
|
+
|
|
1060
|
+
for field_name in get_fields:
|
|
1061
|
+
target_field_id = resolver.resolve_field_name_or_id(field_name, context="field")
|
|
1062
|
+
field_values = find_field_values_for_field(
|
|
1063
|
+
field_values=[serialize_model_for_cli(v) for v in existing_values],
|
|
1064
|
+
field_id=target_field_id,
|
|
1065
|
+
)
|
|
1066
|
+
resolved_name = resolver.get_field_name(target_field_id) or field_name
|
|
1067
|
+
if field_values:
|
|
1068
|
+
if len(field_values) == 1:
|
|
1069
|
+
field_results[resolved_name] = field_values[0].get("value")
|
|
1070
|
+
else:
|
|
1071
|
+
field_results[resolved_name] = [fv.get("value") for fv in field_values]
|
|
1072
|
+
else:
|
|
1073
|
+
field_results[resolved_name] = None
|
|
1074
|
+
|
|
1075
|
+
results["fields"] = field_results
|
|
1076
|
+
|
|
1077
|
+
cmd_context = CommandContext(
|
|
1078
|
+
name="opportunity field",
|
|
1079
|
+
inputs={"opportunityId": opportunity_id},
|
|
1080
|
+
modifiers=ctx_modifiers,
|
|
1081
|
+
)
|
|
1082
|
+
|
|
1083
|
+
return CommandOutput(
|
|
1084
|
+
data=results,
|
|
1085
|
+
context=cmd_context,
|
|
1086
|
+
api_called=True,
|
|
1087
|
+
)
|
|
1088
|
+
|
|
1089
|
+
# Handle --set and --json: set field values
|
|
1090
|
+
set_operations: list[tuple[str, Any]] = []
|
|
1091
|
+
|
|
1092
|
+
# Collect from --set options
|
|
1093
|
+
for field_name, value in set_values:
|
|
1094
|
+
set_operations.append((field_name, value))
|
|
1095
|
+
|
|
1096
|
+
# Collect from --json
|
|
1097
|
+
if json_input:
|
|
1098
|
+
try:
|
|
1099
|
+
json_data = json_module.loads(json_input)
|
|
1100
|
+
if not isinstance(json_data, dict):
|
|
1101
|
+
raise CLIError(
|
|
1102
|
+
"--json must be a JSON object.",
|
|
1103
|
+
exit_code=2,
|
|
1104
|
+
error_type="usage_error",
|
|
1105
|
+
)
|
|
1106
|
+
for field_name, value in json_data.items():
|
|
1107
|
+
set_operations.append((field_name, value))
|
|
1108
|
+
except json_module.JSONDecodeError as e:
|
|
1109
|
+
raise CLIError(
|
|
1110
|
+
f"Invalid JSON: {e}",
|
|
1111
|
+
exit_code=2,
|
|
1112
|
+
error_type="usage_error",
|
|
1113
|
+
) from e
|
|
1114
|
+
|
|
1115
|
+
# Execute set operations
|
|
1116
|
+
created_values: list[dict[str, Any]] = []
|
|
1117
|
+
for field_name, value in set_operations:
|
|
1118
|
+
target_field_id = resolver.resolve_field_name_or_id(field_name, context="field")
|
|
1119
|
+
|
|
1120
|
+
# Check for existing values and delete them first (replace behavior)
|
|
1121
|
+
existing_values = client.field_values.list(opportunity_id=OpportunityId(opportunity_id))
|
|
1122
|
+
existing_for_field = find_field_values_for_field(
|
|
1123
|
+
field_values=[serialize_model_for_cli(v) for v in existing_values],
|
|
1124
|
+
field_id=target_field_id,
|
|
1125
|
+
)
|
|
1126
|
+
for fv in existing_for_field:
|
|
1127
|
+
fv_id = fv.get("id")
|
|
1128
|
+
if fv_id:
|
|
1129
|
+
client.field_values.delete(fv_id)
|
|
1130
|
+
|
|
1131
|
+
# Create new value
|
|
1132
|
+
created = client.field_values.create(
|
|
1133
|
+
FieldValueCreate(
|
|
1134
|
+
field_id=FieldIdType(target_field_id),
|
|
1135
|
+
entity_id=opportunity_id,
|
|
1136
|
+
value=value,
|
|
1137
|
+
)
|
|
1138
|
+
)
|
|
1139
|
+
created_values.append(serialize_model_for_cli(created))
|
|
1140
|
+
|
|
1141
|
+
# Handle --unset: remove field values
|
|
1142
|
+
deleted_count = 0
|
|
1143
|
+
for field_name in unset_fields:
|
|
1144
|
+
target_field_id = resolver.resolve_field_name_or_id(field_name, context="field")
|
|
1145
|
+
existing_values = client.field_values.list(opportunity_id=OpportunityId(opportunity_id))
|
|
1146
|
+
existing_for_field = find_field_values_for_field(
|
|
1147
|
+
field_values=[serialize_model_for_cli(v) for v in existing_values],
|
|
1148
|
+
field_id=target_field_id,
|
|
1149
|
+
)
|
|
1150
|
+
for fv in existing_for_field:
|
|
1151
|
+
fv_id = fv.get("id")
|
|
1152
|
+
if fv_id:
|
|
1153
|
+
client.field_values.delete(fv_id)
|
|
1154
|
+
deleted_count += 1
|
|
1155
|
+
|
|
1156
|
+
# Build result
|
|
1157
|
+
if created_values:
|
|
1158
|
+
results["created"] = created_values
|
|
1159
|
+
if deleted_count > 0:
|
|
1160
|
+
results["deleted"] = deleted_count
|
|
1161
|
+
|
|
1162
|
+
cmd_context = CommandContext(
|
|
1163
|
+
name="opportunity field",
|
|
1164
|
+
inputs={"opportunityId": opportunity_id},
|
|
1165
|
+
modifiers=ctx_modifiers,
|
|
1166
|
+
)
|
|
1167
|
+
|
|
1168
|
+
return CommandOutput(
|
|
1169
|
+
data=results,
|
|
1170
|
+
context=cmd_context,
|
|
1171
|
+
api_called=True,
|
|
1172
|
+
)
|
|
1173
|
+
|
|
1174
|
+
run_command(ctx, command="opportunity field", fn=fn)
|