affinity-sdk 0.9.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- affinity/__init__.py +139 -0
- affinity/cli/__init__.py +7 -0
- affinity/cli/click_compat.py +27 -0
- affinity/cli/commands/__init__.py +1 -0
- affinity/cli/commands/_entity_files_dump.py +219 -0
- affinity/cli/commands/_list_entry_fields.py +41 -0
- affinity/cli/commands/_v1_parsing.py +77 -0
- affinity/cli/commands/company_cmds.py +2139 -0
- affinity/cli/commands/completion_cmd.py +33 -0
- affinity/cli/commands/config_cmds.py +540 -0
- affinity/cli/commands/entry_cmds.py +33 -0
- affinity/cli/commands/field_cmds.py +413 -0
- affinity/cli/commands/interaction_cmds.py +875 -0
- affinity/cli/commands/list_cmds.py +3152 -0
- affinity/cli/commands/note_cmds.py +433 -0
- affinity/cli/commands/opportunity_cmds.py +1174 -0
- affinity/cli/commands/person_cmds.py +1980 -0
- affinity/cli/commands/query_cmd.py +444 -0
- affinity/cli/commands/relationship_strength_cmds.py +62 -0
- affinity/cli/commands/reminder_cmds.py +595 -0
- affinity/cli/commands/resolve_url_cmd.py +127 -0
- affinity/cli/commands/session_cmds.py +84 -0
- affinity/cli/commands/task_cmds.py +110 -0
- affinity/cli/commands/version_cmd.py +29 -0
- affinity/cli/commands/whoami_cmd.py +36 -0
- affinity/cli/config.py +108 -0
- affinity/cli/context.py +749 -0
- affinity/cli/csv_utils.py +195 -0
- affinity/cli/date_utils.py +42 -0
- affinity/cli/decorators.py +77 -0
- affinity/cli/errors.py +28 -0
- affinity/cli/field_utils.py +355 -0
- affinity/cli/formatters.py +551 -0
- affinity/cli/help_json.py +283 -0
- affinity/cli/logging.py +100 -0
- affinity/cli/main.py +261 -0
- affinity/cli/options.py +53 -0
- affinity/cli/paths.py +32 -0
- affinity/cli/progress.py +183 -0
- affinity/cli/query/__init__.py +163 -0
- affinity/cli/query/aggregates.py +357 -0
- affinity/cli/query/dates.py +194 -0
- affinity/cli/query/exceptions.py +147 -0
- affinity/cli/query/executor.py +1236 -0
- affinity/cli/query/filters.py +248 -0
- affinity/cli/query/models.py +333 -0
- affinity/cli/query/output.py +331 -0
- affinity/cli/query/parser.py +619 -0
- affinity/cli/query/planner.py +430 -0
- affinity/cli/query/progress.py +270 -0
- affinity/cli/query/schema.py +439 -0
- affinity/cli/render.py +1589 -0
- affinity/cli/resolve.py +222 -0
- affinity/cli/resolvers.py +249 -0
- affinity/cli/results.py +308 -0
- affinity/cli/runner.py +218 -0
- affinity/cli/serialization.py +65 -0
- affinity/cli/session_cache.py +276 -0
- affinity/cli/types.py +70 -0
- affinity/client.py +771 -0
- affinity/clients/__init__.py +19 -0
- affinity/clients/http.py +3664 -0
- affinity/clients/pipeline.py +165 -0
- affinity/compare.py +501 -0
- affinity/downloads.py +114 -0
- affinity/exceptions.py +615 -0
- affinity/filters.py +1128 -0
- affinity/hooks.py +198 -0
- affinity/inbound_webhooks.py +302 -0
- affinity/models/__init__.py +163 -0
- affinity/models/entities.py +798 -0
- affinity/models/pagination.py +513 -0
- affinity/models/rate_limit_snapshot.py +48 -0
- affinity/models/secondary.py +413 -0
- affinity/models/types.py +663 -0
- affinity/policies.py +40 -0
- affinity/progress.py +22 -0
- affinity/py.typed +0 -0
- affinity/services/__init__.py +42 -0
- affinity/services/companies.py +1286 -0
- affinity/services/lists.py +1892 -0
- affinity/services/opportunities.py +1330 -0
- affinity/services/persons.py +1348 -0
- affinity/services/rate_limits.py +173 -0
- affinity/services/tasks.py +193 -0
- affinity/services/v1_only.py +2445 -0
- affinity/types.py +83 -0
- affinity_sdk-0.9.5.dist-info/METADATA +622 -0
- affinity_sdk-0.9.5.dist-info/RECORD +92 -0
- affinity_sdk-0.9.5.dist-info/WHEEL +4 -0
- affinity_sdk-0.9.5.dist-info/entry_points.txt +2 -0
- affinity_sdk-0.9.5.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,357 @@
|
|
|
1
|
+
"""Aggregate functions for query results.
|
|
2
|
+
|
|
3
|
+
This module provides aggregation functions like sum, avg, count, etc.
|
|
4
|
+
It is CLI-only and NOT part of the public SDK API.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import contextlib
|
|
10
|
+
import statistics
|
|
11
|
+
from collections import defaultdict
|
|
12
|
+
from typing import Any
|
|
13
|
+
|
|
14
|
+
from .filters import resolve_field_path
|
|
15
|
+
from .models import AggregateFunc, HavingClause
|
|
16
|
+
|
|
17
|
+
# =============================================================================
|
|
18
|
+
# Aggregate Functions
|
|
19
|
+
# =============================================================================
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def compute_sum(records: list[dict[str, Any]], field: str) -> float:
    """Sum the numeric values of *field* across records.

    Missing values and values that cannot be coerced to float are
    skipped; an empty input yields 0.0.
    """
    running = 0.0
    for row in records:
        raw = resolve_field_path(row, field)
        if raw is None:
            continue
        with contextlib.suppress(ValueError, TypeError):
            running += float(raw)
    return running
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def compute_avg(records: list[dict[str, Any]], field: str) -> float | None:
    """Arithmetic mean of *field* across records.

    Returns None when no record contributes a numeric value.
    """
    total = 0.0
    count = 0
    for row in records:
        raw = resolve_field_path(row, field)
        if raw is None:
            continue
        with contextlib.suppress(ValueError, TypeError):
            total += float(raw)
            count += 1
    return total / count if count else None
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def compute_min(records: list[dict[str, Any]], field: str) -> Any:
    """Smallest non-null value of *field*, or None if every record lacks it."""
    present = [
        v
        for record in records
        if (v := resolve_field_path(record, field)) is not None
    ]
    return min(present) if present else None
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def compute_max(records: list[dict[str, Any]], field: str) -> Any:
    """Largest non-null value of *field*, or None if every record lacks it."""
    present = [
        v
        for record in records
        if (v := resolve_field_path(record, field)) is not None
    ]
    return max(present) if present else None
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def compute_count(records: list[dict[str, Any]], field: str | bool | None = None) -> int:
    """Count records.

    Args:
        records: List of records.
        field: When a string, count only records whose value at that path
            is non-null; any non-string value (None, True, ...) counts
            every record.
    """
    if isinstance(field, str):
        return sum(
            1 for record in records if resolve_field_path(record, field) is not None
        )
    # None, True, or any other non-string marker: every record counts.
    return len(records)
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def compute_percentile(records: list[dict[str, Any]], field: str, p: int | float) -> float | None:
    """Compute the p-th percentile of *field* across records.

    Args:
        records: List of records.
        field: Field to compute the percentile for.
        p: Percentile value (0-100); out-of-range values are clamped to
            the nearest computable cut point.

    Returns:
        The percentile value, or None if no valid numeric values exist.
    """
    values: list[float] = []
    for record in records:
        value = resolve_field_path(record, field)
        if value is not None:
            with contextlib.suppress(ValueError, TypeError):
                values.append(float(value))

    if not values:
        return None
    # statistics.quantiles needs at least two data points.
    if len(values) == 1:
        return values[0]

    values.sort()
    # statistics.quantiles(n=100) returns 99 cut points (indices 0-98).
    # The unclamped int(quantile * 99) evaluates to 99 for p=100 (an
    # IndexError) and to a negative wrap-around index for p<0, so clamp
    # the index into the valid [0, 98] range.
    quantile = p / 100.0
    index = min(max(int(quantile * 99), 0), 98)
    return statistics.quantiles(values, n=100)[index]
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def compute_first(records: list[dict[str, Any]], field: str) -> Any:
    """Return the first non-null value of *field*, scanning records in order."""
    return next(
        (
            v
            for record in records
            if (v := resolve_field_path(record, field)) is not None
        ),
        None,
    )
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
def compute_last(records: list[dict[str, Any]], field: str) -> Any:
    """Return the last non-null value of *field*, scanning records in reverse."""
    return next(
        (
            v
            for record in reversed(records)
            if (v := resolve_field_path(record, field)) is not None
        ),
        None,
    )
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
# =============================================================================
|
|
143
|
+
# Expression Aggregates
|
|
144
|
+
# =============================================================================
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
def compute_expression(
    values: dict[str, Any],
    operation: str,
    operands: list[str | int | float],
) -> float | None:
    """Fold *operands* left-to-right with an arithmetic *operation*.

    Args:
        values: Dict of previously computed aggregate values.
        operation: One of "multiply", "divide", "add", "subtract".
        operands: Aggregate names (looked up in *values*) or literal numbers.

    Returns:
        The folded value, or None when an operand is missing or
        non-numeric, a divisor is zero, or fewer than two operands
        resolve.
    """
    numbers: list[float] = []
    for item in operands:
        if isinstance(item, (int, float)):
            numbers.append(float(item))
        elif isinstance(item, str):
            looked_up = values.get(item)
            if looked_up is None:
                return None
            try:
                numbers.append(float(looked_up))
            except (ValueError, TypeError):
                return None
        # Anything else is silently ignored, as in the original contract.

    if len(numbers) < 2:
        return None

    acc = numbers[0]
    for operand in numbers[1:]:
        if operation == "add":
            acc += operand
        elif operation == "subtract":
            acc -= operand
        elif operation == "multiply":
            acc *= operand
        elif operation == "divide":
            if operand == 0:
                return None
            acc /= operand

    return acc
|
|
192
|
+
|
|
193
|
+
|
|
194
|
+
# =============================================================================
|
|
195
|
+
# Main Aggregation Function
|
|
196
|
+
# =============================================================================
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
def compute_aggregates(
    records: list[dict[str, Any]],
    aggregates: dict[str, AggregateFunc],
) -> dict[str, Any]:
    """Compute every named aggregate over *records*.

    Simple aggregates (sum/avg/min/max/count/percentile/first/last) are
    computed first; arithmetic expression aggregates run afterwards so
    they can reference any simple aggregate by name.

    Args:
        records: List of records to aggregate.
        aggregates: Dict of aggregate name -> AggregateFunc.

    Returns:
        Dict of aggregate name -> computed value.
    """

    def _simple_value(spec: AggregateFunc) -> tuple[bool, Any]:
        # Evaluate the first simple aggregate set on *spec*, honoring the
        # same priority order as before; (False, None) when none is set.
        if spec.sum is not None:
            return True, compute_sum(records, spec.sum)
        if spec.avg is not None:
            return True, compute_avg(records, spec.avg)
        if spec.min is not None:
            return True, compute_min(records, spec.min)
        if spec.max is not None:
            return True, compute_max(records, spec.max)
        if spec.count is not None:
            return True, compute_count(records, spec.count)
        if spec.percentile is not None:
            return True, compute_percentile(
                records,
                spec.percentile.get("field", ""),
                spec.percentile.get("p", 50),
            )
        if spec.first is not None:
            return True, compute_first(records, spec.first)
        if spec.last is not None:
            return True, compute_last(records, spec.last)
        return False, None

    results: dict[str, Any] = {}
    pending: list[tuple[str, str, list[str | int | float]]] = []

    for name, spec in aggregates.items():
        found, value = _simple_value(spec)
        if found:
            results[name] = value
            continue
        for op in ("multiply", "divide", "add", "subtract"):
            operands = getattr(spec, op)
            if operands is not None:
                pending.append((name, op, operands))
                break

    # Expressions run last so they can see the simple results above.
    for name, op, operands in pending:
        results[name] = compute_expression(results, op, operands)

    return results
|
|
249
|
+
|
|
250
|
+
|
|
251
|
+
def group_and_aggregate(
    records: list[dict[str, Any]],
    group_by: str,
    aggregates: dict[str, AggregateFunc],
) -> list[dict[str, Any]]:
    """Group records by a field and compute aggregates per group.

    Args:
        records: List of records to group.
        group_by: Field path to group on.
        aggregates: Dict of aggregate name -> AggregateFunc.

    Returns:
        One result dict per group. The null-key group (records with no
        value for *group_by*) is reported as "(no value)" and always
        placed last.
    """

    def _hashable(value: Any) -> Any:
        # Lists (multi-select fields) are unhashable; normalize to a
        # sorted tuple so ["Team", "Market"] and ["Market", "Team"] land
        # in the same group.
        if not isinstance(value, list):
            return value
        try:
            return tuple(sorted(value))
        except TypeError:
            # Unsortable element mix: keep the original order.
            return tuple(value)

    buckets: dict[Any, list[dict[str, Any]]] = defaultdict(list)
    for record in records:
        buckets[_hashable(resolve_field_path(record, group_by))].append(record)

    ordered: list[dict[str, Any]] = []
    trailing_null: dict[str, Any] | None = None

    for key, members in buckets.items():
        # Render the key: tuples go back to lists, null gets a label.
        if key is None:
            display: Any = "(no value)"
        elif isinstance(key, tuple):
            display = list(key)
        else:
            display = key

        row = {group_by: display, **compute_aggregates(members, aggregates)}

        if key is None:
            trailing_null = row
        else:
            ordered.append(row)

    if trailing_null is not None:
        ordered.append(trailing_null)

    return ordered
|
|
314
|
+
|
|
315
|
+
|
|
316
|
+
def apply_having(
    results: list[dict[str, Any]],
    having: HavingClause,
) -> list[dict[str, Any]]:
    """Filter aggregated rows with a HAVING clause.

    The clause is translated into an equivalent WhereClause (the two
    share the same shape for simple conditions) and evaluated with the
    standard filter matcher.

    Args:
        results: List of aggregated result dicts.
        having: HAVING clause to apply.

    Returns:
        The rows that satisfy the clause; all rows when the clause is empty.
    """
    # Imported at call time rather than module top, mirroring the
    # original placement (presumably to avoid an import cycle).
    from .filters import matches
    from .models import WhereClause

    def _leaves(clauses: list[HavingClause]) -> list[WhereClause]:
        # Translate simple child conditions; children without a path are dropped.
        return [
            WhereClause(path=c.path, op=c.op, value=c.value)
            for c in clauses
            if c.path is not None
        ]

    if having.path is not None and having.op is not None:
        where = WhereClause(path=having.path, op=having.op, value=having.value)
    elif having.and_ is not None:
        where = WhereClause(and_=_leaves(having.and_))
    elif having.or_ is not None:
        where = WhereClause(or_=_leaves(having.or_))
    else:
        # Nothing to filter on.
        return results

    return [row for row in results if matches(row, where)]
|
|
@@ -0,0 +1,194 @@
|
|
|
1
|
+
"""Relative date parsing for query WHERE clauses.
|
|
2
|
+
|
|
3
|
+
This module provides parsing for relative date strings like "-30d", "today", etc.
|
|
4
|
+
It is CLI-only and NOT part of the public SDK API.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import re
|
|
10
|
+
from datetime import datetime, timedelta, timezone
|
|
11
|
+
|
|
12
|
+
# =============================================================================
|
|
13
|
+
# Patterns
|
|
14
|
+
# =============================================================================
|
|
15
|
+
|
|
16
|
+
# Relative-date shorthand: optional sign, integer amount, one unit letter
# (d=days, w=weeks, m=months, y=years), e.g. "-30d", "+7d", "-4w", "-3m", "-1y".
RELATIVE_DATE_PATTERN = re.compile(r"^([+-]?\d+)([dwmy])$")

# ISO-8601 date prefix: YYYY-MM-DD with an optional THH:MM:SS time part.
# Deliberately unanchored at the end so trailing text (e.g. a timezone
# offset) still matches; actual parsing is delegated to
# datetime.fromisoformat in parse_date_value.
ISO_DATE_PATTERN = re.compile(r"^\d{4}-\d{2}-\d{2}(T\d{2}:\d{2}:\d{2})?")
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
# =============================================================================
|
|
24
|
+
# Parsing Functions
|
|
25
|
+
# =============================================================================
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def parse_relative_date(
    value: str,
    *,
    now: datetime | None = None,
    use_utc: bool = True,
) -> datetime:
    """Parse a relative date string into a concrete datetime.

    Supports:
        - Offsets: "-30d", "+7d", "-4w", "-3m", "-1y"
          (a month is approximated as 30 days, a year as 365 days)
        - Keywords: "today", "now", "yesterday", "tomorrow"

    Args:
        value: The date string to parse.
        now: Reference time (defaults to the current time).
        use_utc: If True and *now* is None, use UTC; otherwise local time.

    Returns:
        Resolved datetime.

    Raises:
        ValueError: If the date string is invalid.
    """
    if now is None:
        now = datetime.now(timezone.utc) if use_utc else datetime.now()

    token = value.strip().lower()

    def _midnight(moment: datetime) -> datetime:
        return moment.replace(hour=0, minute=0, second=0, microsecond=0)

    # Keyword forms resolve directly against the reference time.
    if token == "now":
        return now
    if token == "today":
        return _midnight(now)
    if token == "yesterday":
        return _midnight(now - timedelta(days=1))
    if token == "tomorrow":
        return _midnight(now + timedelta(days=1))

    # Offset forms: signed amount plus a unit letter.
    matched = RELATIVE_DATE_PATTERN.match(token)
    if matched is not None:
        amount = int(matched.group(1))
        unit = matched.group(2)
        days_per_unit = {"d": 1, "w": 7, "m": 30, "y": 365}
        return now + timedelta(days=amount * days_per_unit[unit])

    raise ValueError(f"Invalid relative date: {value}")
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def parse_date_value(value: str) -> datetime | None:
    """Best-effort conversion of a value to a datetime.

    Tries, in order:
    1. Relative date syntax ("-30d", "today", etc.)
    2. ISO date strings (with or without a time component)

    Args:
        value: The candidate value.

    Returns:
        datetime if parseable, None otherwise.
    """
    # Non-strings can never be date strings.
    if not isinstance(value, str):
        return None

    candidate = value.strip()
    if not candidate:
        return None

    # First attempt: relative-date syntax.
    try:
        return parse_relative_date(candidate)
    except ValueError:
        pass

    # Second attempt: ISO-8601.
    if ISO_DATE_PATTERN.match(candidate):
        # "Z" is only normalized when a time component is present,
        # matching the original behavior.
        normalized = candidate.replace("Z", "+00:00") if "T" in candidate else candidate
        try:
            return datetime.fromisoformat(normalized)
        except ValueError:
            pass

    return None
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
def days_since(date: datetime, *, now: datetime | None = None) -> int:
    """Whole days elapsed since *date*.

    Args:
        date: The date to calculate from.
        now: Reference time (defaults to current UTC).

    Returns:
        Number of days since the date (positive if in the past).
    """
    now = datetime.now(timezone.utc) if now is None else now

    # timedelta arithmetic refuses to mix naive and aware datetimes, so
    # promote whichever side is naive to UTC before subtracting.
    if date.tzinfo is None and now.tzinfo is not None:
        date = date.replace(tzinfo=timezone.utc)
    elif now.tzinfo is None and date.tzinfo is not None:
        now = now.replace(tzinfo=timezone.utc)

    return (now - date).days
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
def days_until(date: datetime, *, now: datetime | None = None) -> int:
    """Whole days remaining until *date*.

    Args:
        date: The target date.
        now: Reference time (defaults to current UTC).

    Returns:
        Number of days until the date (positive if in the future).
    """
    now = datetime.now(timezone.utc) if now is None else now

    # timedelta arithmetic refuses to mix naive and aware datetimes, so
    # promote whichever side is naive to UTC before subtracting.
    if date.tzinfo is None and now.tzinfo is not None:
        date = date.replace(tzinfo=timezone.utc)
    elif now.tzinfo is None and date.tzinfo is not None:
        now = now.replace(tzinfo=timezone.utc)

    return (date - now).days
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
def is_relative_date(value: str) -> bool:
    """Return True when *value* looks like a relative date string.

    Accepts the keywords "now"/"today"/"yesterday"/"tomorrow" and offset
    shorthand such as "-30d"; non-strings are never relative dates.
    """
    if not isinstance(value, str):
        return False

    token = value.strip().lower()
    return token in {"now", "today", "yesterday", "tomorrow"} or bool(
        RELATIVE_DATE_PATTERN.match(token)
    )
|
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
"""Query engine exceptions.
|
|
2
|
+
|
|
3
|
+
These exceptions are specific to the query engine and are CLI-only.
|
|
4
|
+
They are NOT part of the public SDK API.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
from typing import TYPE_CHECKING, Any
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from .models import PlanStep
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class QueryError(Exception):
    """Root of the query-engine exception hierarchy; every engine error derives from it."""
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class QueryParseError(QueryError):
    """Raised when query parsing or validation fails.

    Examples:
        - Invalid JSON syntax
        - Unknown operator
        - Invalid field path
        - Type mismatch in value
        - Unsupported query version
    """

    def __init__(self, message: str, *, field: str | None = None) -> None:
        self.field = field
        # Prefix the offending field so the rendered message is self-locating.
        super().__init__(f"{field}: {message}" if field else message)
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class QueryValidationError(QueryError):
    """Raised when query passes parsing but fails semantic validation.

    Examples:
        - Aggregate with include (not allowed)
        - Unknown entity type
        - Invalid relationship path
        - groupBy with incompatible aggregate
    """

    def __init__(self, message: str, *, field: str | None = None) -> None:
        self.field = field
        # Prefix the offending field so the rendered message is self-locating.
        super().__init__(f"{field}: {message}" if field else message)
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
class QueryPlanError(QueryError):
    """Raised when an execution plan cannot be generated.

    Examples:
        - Circular dependency in plan steps
        - Unknown entity in schema registry
    """
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
class QueryExecutionError(QueryError):
    """Raised during query execution.

    Examples:
        - API call failed
        - Authentication error
        - Rate limiting exhausted
        - Timeout exceeded
    """

    def __init__(
        self,
        message: str,
        *,
        step: PlanStep | None = None,
        cause: Exception | None = None,
        partial_results: list[Any] | None = None,
    ) -> None:
        super().__init__(message)
        # Keep the failing step, the underlying exception, and whatever
        # results were collected before the failure so callers can report
        # or salvage them.
        self.step = step
        self.cause = cause
        self.partial_results = partial_results
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
class QueryInterruptedError(QueryError):
    """Raised when query execution is interrupted (e.g., Ctrl+C).

    Carries the partial results collected before the interruption.
    """

    def __init__(
        self,
        message: str,
        *,
        step_id: int | None = None,
        records_fetched: int = 0,
        partial_results: list[Any] | None = None,
    ) -> None:
        super().__init__(message)
        # Where the interruption happened and what was salvaged.
        self.step_id = step_id
        self.records_fetched = records_fetched
        self.partial_results = partial_results
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
class QueryTimeoutError(QueryExecutionError):
    """Raised when query execution exceeds the timeout."""

    def __init__(
        self,
        message: str,
        *,
        timeout_seconds: float,
        elapsed_seconds: float,
        step: PlanStep | None = None,
        partial_results: list[Any] | None = None,
    ) -> None:
        super().__init__(message, step=step, partial_results=partial_results)
        # Record both the configured budget and how long we actually ran.
        self.timeout_seconds = timeout_seconds
        self.elapsed_seconds = elapsed_seconds
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
class QuerySafetyLimitError(QueryError):
    """Raised when a query would exceed safety limits.

    Examples:
        - Estimated records > max_records
        - Estimated API calls > threshold
    """

    def __init__(
        self,
        message: str,
        *,
        limit_name: str,
        limit_value: int,
        estimated_value: int,
    ) -> None:
        super().__init__(message)
        # Which limit tripped, its configured value, and the estimate that hit it.
        self.limit_name = limit_name
        self.limit_value = limit_value
        self.estimated_value = estimated_value
|