affinity-sdk 0.9.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- affinity/__init__.py +139 -0
- affinity/cli/__init__.py +7 -0
- affinity/cli/click_compat.py +27 -0
- affinity/cli/commands/__init__.py +1 -0
- affinity/cli/commands/_entity_files_dump.py +219 -0
- affinity/cli/commands/_list_entry_fields.py +41 -0
- affinity/cli/commands/_v1_parsing.py +77 -0
- affinity/cli/commands/company_cmds.py +2139 -0
- affinity/cli/commands/completion_cmd.py +33 -0
- affinity/cli/commands/config_cmds.py +540 -0
- affinity/cli/commands/entry_cmds.py +33 -0
- affinity/cli/commands/field_cmds.py +413 -0
- affinity/cli/commands/interaction_cmds.py +875 -0
- affinity/cli/commands/list_cmds.py +3152 -0
- affinity/cli/commands/note_cmds.py +433 -0
- affinity/cli/commands/opportunity_cmds.py +1174 -0
- affinity/cli/commands/person_cmds.py +1980 -0
- affinity/cli/commands/query_cmd.py +444 -0
- affinity/cli/commands/relationship_strength_cmds.py +62 -0
- affinity/cli/commands/reminder_cmds.py +595 -0
- affinity/cli/commands/resolve_url_cmd.py +127 -0
- affinity/cli/commands/session_cmds.py +84 -0
- affinity/cli/commands/task_cmds.py +110 -0
- affinity/cli/commands/version_cmd.py +29 -0
- affinity/cli/commands/whoami_cmd.py +36 -0
- affinity/cli/config.py +108 -0
- affinity/cli/context.py +749 -0
- affinity/cli/csv_utils.py +195 -0
- affinity/cli/date_utils.py +42 -0
- affinity/cli/decorators.py +77 -0
- affinity/cli/errors.py +28 -0
- affinity/cli/field_utils.py +355 -0
- affinity/cli/formatters.py +551 -0
- affinity/cli/help_json.py +283 -0
- affinity/cli/logging.py +100 -0
- affinity/cli/main.py +261 -0
- affinity/cli/options.py +53 -0
- affinity/cli/paths.py +32 -0
- affinity/cli/progress.py +183 -0
- affinity/cli/query/__init__.py +163 -0
- affinity/cli/query/aggregates.py +357 -0
- affinity/cli/query/dates.py +194 -0
- affinity/cli/query/exceptions.py +147 -0
- affinity/cli/query/executor.py +1236 -0
- affinity/cli/query/filters.py +248 -0
- affinity/cli/query/models.py +333 -0
- affinity/cli/query/output.py +331 -0
- affinity/cli/query/parser.py +619 -0
- affinity/cli/query/planner.py +430 -0
- affinity/cli/query/progress.py +270 -0
- affinity/cli/query/schema.py +439 -0
- affinity/cli/render.py +1589 -0
- affinity/cli/resolve.py +222 -0
- affinity/cli/resolvers.py +249 -0
- affinity/cli/results.py +308 -0
- affinity/cli/runner.py +218 -0
- affinity/cli/serialization.py +65 -0
- affinity/cli/session_cache.py +276 -0
- affinity/cli/types.py +70 -0
- affinity/client.py +771 -0
- affinity/clients/__init__.py +19 -0
- affinity/clients/http.py +3664 -0
- affinity/clients/pipeline.py +165 -0
- affinity/compare.py +501 -0
- affinity/downloads.py +114 -0
- affinity/exceptions.py +615 -0
- affinity/filters.py +1128 -0
- affinity/hooks.py +198 -0
- affinity/inbound_webhooks.py +302 -0
- affinity/models/__init__.py +163 -0
- affinity/models/entities.py +798 -0
- affinity/models/pagination.py +513 -0
- affinity/models/rate_limit_snapshot.py +48 -0
- affinity/models/secondary.py +413 -0
- affinity/models/types.py +663 -0
- affinity/policies.py +40 -0
- affinity/progress.py +22 -0
- affinity/py.typed +0 -0
- affinity/services/__init__.py +42 -0
- affinity/services/companies.py +1286 -0
- affinity/services/lists.py +1892 -0
- affinity/services/opportunities.py +1330 -0
- affinity/services/persons.py +1348 -0
- affinity/services/rate_limits.py +173 -0
- affinity/services/tasks.py +193 -0
- affinity/services/v1_only.py +2445 -0
- affinity/types.py +83 -0
- affinity_sdk-0.9.5.dist-info/METADATA +622 -0
- affinity_sdk-0.9.5.dist-info/RECORD +92 -0
- affinity_sdk-0.9.5.dist-info/WHEEL +4 -0
- affinity_sdk-0.9.5.dist-info/entry_points.txt +2 -0
- affinity_sdk-0.9.5.dist-info/licenses/LICENSE +21 -0
affinity/__init__.py
ADDED
|
@@ -0,0 +1,139 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Affinity Python SDK - A modern, strongly-typed wrapper for the Affinity CRM API.
|
|
3
|
+
|
|
4
|
+
This SDK provides:
|
|
5
|
+
- V2 terminology throughout (Company, not Organization)
|
|
6
|
+
- V2 API for reads, V1 for writes where V2 isn't available
|
|
7
|
+
- Strong typing with Pydantic V2 models
|
|
8
|
+
- Typed ID classes to prevent type confusion (PersonId, CompanyId, etc.)
|
|
9
|
+
- Automatic pagination iterators
|
|
10
|
+
- Optional response caching for field metadata
|
|
11
|
+
- Rate limit handling with automatic retry
|
|
12
|
+
- Both sync and async clients
|
|
13
|
+
|
|
14
|
+
Example:
|
|
15
|
+
```python
|
|
16
|
+
from affinity import Affinity
|
|
17
|
+
from affinity.types import CompanyId, FieldId, FieldType, ListId, PersonId
|
|
18
|
+
|
|
19
|
+
# Initialize
|
|
20
|
+
with Affinity(api_key="your-key") as client:
|
|
21
|
+
# Iterate all companies with enriched data
|
|
22
|
+
for company in client.companies.all(field_types=[FieldType.ENRICHED]):
|
|
23
|
+
print(f"{company.name}: {company.domain}")
|
|
24
|
+
|
|
25
|
+
# Get a person with field values
|
|
26
|
+
person = client.persons.get(PersonId(12345))
|
|
27
|
+
|
|
28
|
+
# Add to a list and update fields
|
|
29
|
+
entries = client.lists.entries(ListId(789))
|
|
30
|
+
entry = entries.add_company(CompanyId(456))
|
|
31
|
+
entries.update_field_value(entry.id, FieldId(101), "New status")
|
|
32
|
+
```
|
|
33
|
+
"""
|
|
34
|
+
|
|
35
|
+
from __future__ import annotations
|
|
36
|
+
|
|
37
|
+
import logging
|
|
38
|
+
from importlib.metadata import version as _get_version
|
|
39
|
+
|
|
40
|
+
# Main client
|
|
41
|
+
from . import models, types
|
|
42
|
+
from .client import Affinity, AsyncAffinity
|
|
43
|
+
|
|
44
|
+
# Exceptions
|
|
45
|
+
from .exceptions import (
|
|
46
|
+
AffinityError,
|
|
47
|
+
AuthenticationError,
|
|
48
|
+
AuthorizationError,
|
|
49
|
+
CompanyNotFoundError,
|
|
50
|
+
ConfigurationError,
|
|
51
|
+
ConflictError,
|
|
52
|
+
EntityNotFoundError,
|
|
53
|
+
FilterParseError,
|
|
54
|
+
NetworkError,
|
|
55
|
+
NotFoundError,
|
|
56
|
+
OpportunityNotFoundError,
|
|
57
|
+
PersonNotFoundError,
|
|
58
|
+
PolicyError,
|
|
59
|
+
RateLimitError,
|
|
60
|
+
ServerError,
|
|
61
|
+
TimeoutError,
|
|
62
|
+
TooManyResultsError,
|
|
63
|
+
ValidationError,
|
|
64
|
+
VersionCompatibilityError,
|
|
65
|
+
WebhookInvalidJsonError,
|
|
66
|
+
WebhookInvalidPayloadError,
|
|
67
|
+
WebhookInvalidSentAtError,
|
|
68
|
+
WebhookMissingKeyError,
|
|
69
|
+
WebhookParseError,
|
|
70
|
+
WriteNotAllowedError,
|
|
71
|
+
)
|
|
72
|
+
|
|
73
|
+
# Filter builder (FR-007)
|
|
74
|
+
from .filters import F, Filter, FilterExpression
|
|
75
|
+
|
|
76
|
+
# Inbound webhook parsing helpers (optional)
|
|
77
|
+
from .inbound_webhooks import BodyRegistry, WebhookEnvelope, dispatch_webhook, parse_webhook
|
|
78
|
+
|
|
79
|
+
# Pagination helpers
|
|
80
|
+
from .models.pagination import PaginationProgress
|
|
81
|
+
|
|
82
|
+
# Policies
|
|
83
|
+
from .policies import ExternalHookPolicy, Policies, WritePolicy
|
|
84
|
+
|
|
85
|
+
__version__ = _get_version("affinity-sdk")
|
|
86
|
+
|
|
87
|
+
_logger = logging.getLogger("affinity_sdk")
|
|
88
|
+
if not any(isinstance(h, logging.NullHandler) for h in _logger.handlers):
|
|
89
|
+
_logger.addHandler(logging.NullHandler())
|
|
90
|
+
|
|
91
|
+
__all__ = [
|
|
92
|
+
# Main clients
|
|
93
|
+
"Affinity",
|
|
94
|
+
"AsyncAffinity",
|
|
95
|
+
# Exceptions
|
|
96
|
+
"AffinityError",
|
|
97
|
+
"AuthenticationError",
|
|
98
|
+
"AuthorizationError",
|
|
99
|
+
"NotFoundError",
|
|
100
|
+
"EntityNotFoundError",
|
|
101
|
+
"PersonNotFoundError",
|
|
102
|
+
"CompanyNotFoundError",
|
|
103
|
+
"OpportunityNotFoundError",
|
|
104
|
+
"ValidationError",
|
|
105
|
+
"RateLimitError",
|
|
106
|
+
"ConflictError",
|
|
107
|
+
"ServerError",
|
|
108
|
+
"ConfigurationError",
|
|
109
|
+
"TimeoutError",
|
|
110
|
+
"NetworkError",
|
|
111
|
+
"PolicyError",
|
|
112
|
+
"WriteNotAllowedError",
|
|
113
|
+
"TooManyResultsError",
|
|
114
|
+
"VersionCompatibilityError",
|
|
115
|
+
"WebhookParseError",
|
|
116
|
+
"WebhookInvalidJsonError",
|
|
117
|
+
"WebhookInvalidPayloadError",
|
|
118
|
+
"WebhookMissingKeyError",
|
|
119
|
+
"WebhookInvalidSentAtError",
|
|
120
|
+
"FilterParseError",
|
|
121
|
+
# Filter builder
|
|
122
|
+
"Filter",
|
|
123
|
+
"FilterExpression",
|
|
124
|
+
"F",
|
|
125
|
+
# Inbound webhooks
|
|
126
|
+
"WebhookEnvelope",
|
|
127
|
+
"parse_webhook",
|
|
128
|
+
"dispatch_webhook",
|
|
129
|
+
"BodyRegistry",
|
|
130
|
+
# Policies
|
|
131
|
+
"WritePolicy",
|
|
132
|
+
"ExternalHookPolicy",
|
|
133
|
+
"Policies",
|
|
134
|
+
# Pagination helpers
|
|
135
|
+
"PaginationProgress",
|
|
136
|
+
# Type aliases (re-exported for convenience)
|
|
137
|
+
"types",
|
|
138
|
+
"models",
|
|
139
|
+
]
|
affinity/cli/__init__.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Any, cast
|
|
4
|
+
|
|
5
|
+
try:
|
|
6
|
+
import click # pyright: ignore[reportMissingImports]
|
|
7
|
+
except ModuleNotFoundError as exc: # pragma: no cover
|
|
8
|
+
raise ModuleNotFoundError(
|
|
9
|
+
'The Affinity CLI requires `click`. Install it with `pip install "affinity-sdk[cli]"`.'
|
|
10
|
+
) from exc
|
|
11
|
+
|
|
12
|
+
rich_click: Any
|
|
13
|
+
try:
|
|
14
|
+
import rich_click as _rich_click # pyright: ignore[reportMissingImports]
|
|
15
|
+
except ModuleNotFoundError: # pragma: no cover
|
|
16
|
+
rich_click = None
|
|
17
|
+
else:
|
|
18
|
+
rich_click = _rich_click
|
|
19
|
+
|
|
20
|
+
if rich_click is not None: # pragma: no cover
|
|
21
|
+
RichGroup = cast(type[click.Group], rich_click.RichGroup)
|
|
22
|
+
RichCommand = cast(type[click.Command], rich_click.RichCommand)
|
|
23
|
+
else:
|
|
24
|
+
RichGroup = click.Group
|
|
25
|
+
RichCommand = click.Command
|
|
26
|
+
|
|
27
|
+
__all__ = ["RichCommand", "RichGroup", "click"]
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
@@ -0,0 +1,219 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import json
|
|
5
|
+
from pathlib import Path, PurePosixPath
|
|
6
|
+
from typing import Any, TypedDict
|
|
7
|
+
|
|
8
|
+
from affinity import AsyncAffinity
|
|
9
|
+
from affinity.models.rate_limit_snapshot import RateLimitSnapshot
|
|
10
|
+
from affinity.models.secondary import EntityFile
|
|
11
|
+
|
|
12
|
+
from ..context import CLIContext
|
|
13
|
+
from ..csv_utils import sanitize_filename
|
|
14
|
+
from ..errors import CLIError
|
|
15
|
+
from ..progress import ProgressManager, ProgressSettings
|
|
16
|
+
from ..results import CommandContext
|
|
17
|
+
from ..runner import CommandOutput
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class ManifestFile(TypedDict):
    """One entry of the ``files`` array in a bundle's ``manifest.json`` (camelCase keys)."""

    fileId: int  # Affinity file id
    name: str  # original file name as reported by the API (not sanitized)
    contentType: str | None  # MIME type; may be missing from the API response
    size: int  # size in bytes as reported by the API
    createdAt: str  # ISO-8601 timestamp string
    uploaderId: int  # id of the user who uploaded the file
    path: str  # downloaded location, relative to the bundle directory
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
async def dump_entity_files_bundle(
    *,
    ctx: CLIContext,
    warnings: list[str],
    out_dir: str | None,
    overwrite: bool,
    concurrency: int,
    page_size: int,
    max_files: int | None,
    default_dirname: str,
    manifest_entity: dict[str, Any],
    files_list_kwargs: dict[str, Any],
    context: CommandContext | None = None,
) -> CommandOutput:
    """
    Download all files for a single entity into a folder bundle with a manifest.

    Layout: ``<out_dir>/files/<name>`` for each file plus ``<out_dir>/manifest.json``
    describing the entity and every file (including ones skipped as existing).

    Notes:
    - Uses a bounded worker pool (avoids spawning one task per file).
    - Uses the same resolved client settings as sync commands (env/profile/flags).

    Raises:
        CLIError: when an existing file's size mismatches the API-reported size
            and ``overwrite`` is False.
    """
    settings = ctx.resolve_client_settings(warnings=warnings)

    entity_dir = Path(out_dir) if out_dir is not None else (Path.cwd() / default_dirname)
    files_dir = entity_dir / "files"
    files_dir.mkdir(parents=True, exist_ok=True)

    # Bounded queue gives backpressure: the producer pauses once workers fall
    # ~2x behind, so file metadata pages are not buffered without limit.
    workers = max(1, int(concurrency))
    queue: asyncio.Queue[EntityFile | None] = asyncio.Queue(maxsize=workers * 2)

    manifest_files: list[ManifestFile] = []
    rate_limit_snapshot: RateLimitSnapshot | None = None
    task_lock = asyncio.Lock()
    skipped_existing = 0
    downloaded = 0
    used_filenames: set[str] = set()

    async with AsyncAffinity(
        api_key=settings.api_key,
        v1_base_url=settings.v1_base_url,
        v2_base_url=settings.v2_base_url,
        timeout=settings.timeout,
        log_requests=settings.log_requests,
        max_retries=settings.max_retries,
        on_request=settings.on_request,
        on_response=settings.on_response,
        on_error=settings.on_error,
        policies=settings.policies,
    ) as async_client:

        async def producer() -> None:
            # Page through the entity's files and feed them to the workers,
            # honoring max_files as a hard cap on items enqueued.
            token: str | None = None
            produced = 0
            while True:
                resp = await async_client.files.list(
                    **files_list_kwargs,
                    page_size=page_size,
                    page_token=token,
                )
                for f in resp.data:
                    await queue.put(f)
                    produced += 1
                    if max_files is not None and produced >= max_files:
                        token = None
                        break
                if max_files is not None and produced >= max_files:
                    break
                if not resp.next_page_token:
                    break
                token = resp.next_page_token

            # One None sentinel per worker signals end-of-stream.
            for _ in range(workers):
                await queue.put(None)

        with ProgressManager(settings=ProgressSettings(mode=ctx.progress, quiet=ctx.quiet)) as pm:

            async def worker() -> None:
                nonlocal skipped_existing
                nonlocal downloaded
                while True:
                    f = await queue.get()
                    if f is None:
                        return

                    def choose_filename(name: str, file_id: int) -> str:
                        # Sanitize the API-provided name; on collision append
                        # the file id, and as a last resort a counter.
                        candidate = sanitize_filename(name) or str(file_id)
                        if candidate not in used_filenames:
                            return candidate

                        base = PurePosixPath(candidate)
                        stem = base.stem or "file"
                        suffix = base.suffix
                        disambiguated = f"{stem}__{file_id}{suffix}"
                        if disambiguated not in used_filenames:
                            return disambiguated

                        i = 2
                        while True:
                            alt = f"{stem}__{file_id}__{i}{suffix}"
                            if alt not in used_filenames:
                                return alt
                            i += 1

                    # choose_filename + add happen without an intervening await,
                    # so the reservation is race-free under asyncio's single thread.
                    filename = choose_filename(f.name, int(f.id))
                    used_filenames.add(filename)
                    dest = files_dir / filename
                    if dest.exists() and not overwrite:
                        # Skip only when the on-disk size matches; a mismatch is
                        # treated as a hard error rather than silently kept.
                        existing_size = dest.stat().st_size
                        if f.size and existing_size != int(f.size):
                            raise CLIError(
                                (
                                    "Refusing to skip existing file with size mismatch: "
                                    f"{dest} (expected {int(f.size)} bytes, got {existing_size}); "
                                    "use --overwrite to re-download."
                                ),
                                error_type="usage_error",
                            )
                        skipped_existing += 1
                        # Skipped files still appear in the manifest.
                        manifest_files.append(
                            {
                                "fileId": int(f.id),
                                "name": f.name,
                                "contentType": f.content_type,
                                "size": f.size,
                                "createdAt": f.created_at.isoformat(),
                                "uploaderId": int(f.uploader_id),
                                "path": str(dest.relative_to(entity_dir)),
                            }
                        )
                        continue
                    # Serialize progress-task registration only; the download
                    # itself runs without holding the lock.
                    async with task_lock:
                        _task_id, cb = pm.task(
                            description=f"download {f.name}",
                            total_bytes=int(f.size) if f.size else None,
                        )
                    await async_client.files.download_to(
                        f.id,
                        dest,
                        overwrite=overwrite,
                        on_progress=cb,
                        timeout=settings.timeout,
                    )
                    downloaded += 1
                    manifest_files.append(
                        {
                            "fileId": int(f.id),
                            "name": f.name,
                            "contentType": f.content_type,
                            "size": f.size,
                            "createdAt": f.created_at.isoformat(),
                            "uploaderId": int(f.uploader_id),
                            "path": str(dest.relative_to(entity_dir)),
                        }
                    )

            await asyncio.gather(
                producer(),
                *(worker() for _ in range(workers)),
            )

        if skipped_existing and not overwrite:
            warnings.append(
                f"Skipped {skipped_existing} existing file(s); use --overwrite to re-download."
            )

        manifest = {
            "entity": manifest_entity,
            "files": sorted(manifest_files, key=lambda x: x["fileId"]),
        }
        (entity_dir / "manifest.json").write_text(
            json.dumps(manifest, ensure_ascii=False, indent=2) + "\n",
            encoding="utf-8",
        )

        # Capture rate-limit state while the client is still open.
        rate_limit_snapshot = async_client.rate_limits.snapshot()

        data = {
            "out": str(entity_dir),
            "filesDownloaded": downloaded,
            "filesSkippedExisting": skipped_existing,
            "filesTotal": len(manifest_files),
            "manifest": str((entity_dir / "manifest.json").relative_to(entity_dir)),
        }
        return CommandOutput(
            data=data,
            context=context,
            warnings=warnings,
            api_called=True,
            rate_limit=rate_limit_snapshot,
        )
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Any, Literal
|
|
4
|
+
|
|
5
|
+
ListEntryFieldsScope = Literal["list-only", "all"]
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def _is_list_type(value: Any) -> bool:
|
|
9
|
+
if not isinstance(value, str):
|
|
10
|
+
return False
|
|
11
|
+
return value.strip().lower() == "list"
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def filter_list_entry_fields(
    fields: list[Any],
    *,
    scope: ListEntryFieldsScope,
) -> tuple[list[dict[str, Any]], int, int]:
    """Select field payloads by scope.

    Non-dict entries are discarded. Returns ``(selected, list_only_count,
    total_count)`` where ``selected`` contains only list-specific fields when
    ``scope == "list-only"``, otherwise every dict field.
    """
    usable = [entry for entry in fields if isinstance(entry, dict)]
    list_specific = [entry for entry in usable if _is_list_type(entry.get("type"))]

    selected = list_specific if scope == "list-only" else usable
    return selected, len(list_specific), len(usable)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def build_list_entry_field_rows(fields: list[dict[str, Any]]) -> list[dict[str, Any]]:
    """Project each field payload onto the fixed column set used for row output.

    Missing keys become None; extra keys are dropped. Column order is preserved.
    """
    columns = ("id", "type", "enrichmentSource", "name", "value")
    return [{column: field.get(column) for column in columns} for field in fields]
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from collections.abc import Mapping
|
|
5
|
+
from datetime import datetime, timezone
|
|
6
|
+
from typing import Any, TypeVar
|
|
7
|
+
|
|
8
|
+
from ..errors import CLIError
|
|
9
|
+
|
|
10
|
+
T = TypeVar("T")
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def parse_choice(value: str | None, mapping: Mapping[str, T], *, label: str) -> T | None:
    """Look up *value* in *mapping* after trimming and lowercasing it.

    Returns None when *value* is None. Raises a usage-error CLIError (exit
    code 2) listing the valid choices when the key is unknown.
    """
    if value is None:
        return None

    normalized = value.strip().lower()
    missing = object()
    found = mapping.get(normalized, missing)
    if found is not missing:
        return found

    valid = ", ".join(sorted(mapping))
    raise CLIError(
        f"Unknown {label}: {value}",
        error_type="usage_error",
        exit_code=2,
        hint=f"Choose one of: {valid}.",
    )
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def parse_iso_datetime(value: str, *, label: str) -> datetime:
    """
    Parse an ISO-8601 datetime string into a UTC-aware datetime.

    Timezone handling:
    - Explicit timezone ("Z" or an offset): respected, then converted to UTC.
    - Naive string: interpreted as the system's LOCAL time, then converted to
      UTC — the intuitive reading for CLI users who think in local time.

    Examples (assuming the user is in EST/UTC-5):
        "2024-01-01"                -> 2024-01-01T05:00:00Z (midnight EST)
        "2024-01-01T12:00:00"       -> 2024-01-01T17:00:00Z (noon EST)
        "2024-01-01T12:00:00Z"      -> 2024-01-01T12:00:00Z (explicit UTC)
        "2024-01-01T12:00:00-05:00" -> 2024-01-01T17:00:00Z (explicit EST)

    Raises:
        CLIError: usage error (exit code 2) when the string is not ISO-8601.

    Returns:
        UTC-aware datetime object.
    """
    text = value.strip()
    # Accept a trailing "Z" on Pythons whose fromisoformat does not (pre-3.11).
    if text.endswith("Z"):
        text = f"{text[:-1]}+00:00"

    try:
        parsed = datetime.fromisoformat(text)
    except ValueError as exc:
        raise CLIError(
            f"Invalid {label} datetime: {value}",
            error_type="usage_error",
            exit_code=2,
            hint="Use ISO-8601, e.g. 2024-01-01, 2024-01-01T13:00:00, or 2024-01-01T13:00:00Z.",
        ) from exc

    # astimezone() on a naive datetime attaches the system-local zone before
    # the final conversion to UTC.
    if parsed.tzinfo is None:
        parsed = parsed.astimezone()
    return parsed.astimezone(timezone.utc)
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
def parse_json_value(value: str, *, label: str) -> Any:
    """Decode *value* as a JSON literal.

    Raises a usage-error CLIError (exit code 2) with a format hint when the
    string is not valid JSON.
    """
    try:
        decoded = json.loads(value)
    except json.JSONDecodeError as exc:
        raise CLIError(
            f"Invalid JSON for {label}.",
            error_type="usage_error",
            exit_code=2,
            hint='Provide a valid JSON literal (e.g. "\\"text\\"", 123, true, {"k": 1}).',
        ) from exc
    return decoded