fal 1.42.0-py3-none-any.whl → 1.44.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fal/_fal_version.py +2 -2
- fal/app.py +23 -3
- fal/cli/parser.py +32 -1
- fal/cli/runners.py +354 -3
- {fal-1.42.0.dist-info → fal-1.44.0.dist-info}/METADATA +2 -1
- {fal-1.42.0.dist-info → fal-1.44.0.dist-info}/RECORD +9 -9
- {fal-1.42.0.dist-info → fal-1.44.0.dist-info}/WHEEL +0 -0
- {fal-1.42.0.dist-info → fal-1.44.0.dist-info}/entry_points.txt +0 -0
- {fal-1.42.0.dist-info → fal-1.44.0.dist-info}/top_level.txt +0 -0
fal/_fal_version.py
CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID

-__version__ = version = '1.42.0'
-__version_tuple__ = version_tuple = (1, 42, 0)
+__version__ = version = '1.44.0'
+__version_tuple__ = version_tuple = (1, 44, 0)

 __commit_id__ = commit_id = None
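A quick way to confirm the installed build after upgrading is the standard-library metadata lookup; this is just an illustrative check, using `fal` as the distribution name from the METADATA below:

```python
# Verify the installed fal version without importing the SDK itself.
from importlib.metadata import version

print(version("fal"))  # prints "1.44.0" once the new wheel is installed
```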
fal/app.py
CHANGED
@@ -311,10 +311,15 @@ class App(BaseServable):
         "resolver": "uv",
         "keep_alive": 60,
     }
-    app_name: ClassVar[str]
+    app_name: ClassVar[Optional[str]] = None
     app_auth: ClassVar[Optional[AuthModeLiteral]] = None
-    request_timeout: ClassVar[int]
-    startup_timeout: ClassVar[int]
+    request_timeout: ClassVar[Optional[int]] = None
+    startup_timeout: ClassVar[Optional[int]] = None
+    min_concurrency: ClassVar[Optional[int]] = None
+    max_concurrency: ClassVar[Optional[int]] = None
+    concurrency_buffer: ClassVar[Optional[int]] = None
+    concurrency_buffer_perc: ClassVar[Optional[int]] = None
+    max_multiplexing: ClassVar[Optional[int]] = None

     isolate_channel: async_grpc.Channel | None = None

@@ -329,6 +334,21 @@ class App(BaseServable):
         if cls.startup_timeout is not None:
             cls.host_kwargs["startup_timeout"] = cls.startup_timeout

+        if cls.min_concurrency is not None:
+            cls.host_kwargs["min_concurrency"] = cls.min_concurrency
+
+        if cls.max_concurrency is not None:
+            cls.host_kwargs["max_concurrency"] = cls.max_concurrency
+
+        if cls.concurrency_buffer is not None:
+            cls.host_kwargs["concurrency_buffer"] = cls.concurrency_buffer
+
+        if cls.concurrency_buffer_perc is not None:
+            cls.host_kwargs["concurrency_buffer_perc"] = cls.concurrency_buffer_perc
+
+        if cls.max_multiplexing is not None:
+            cls.host_kwargs["max_multiplexing"] = cls.max_multiplexing
+
         cls.app_name = getattr(cls, "app_name", app_name)

         if cls.__init__ is not App.__init__:
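The new class attributes follow the same pattern as the existing `request_timeout` and `startup_timeout`: anything left as `None` is skipped, anything set on a subclass is copied into `cls.host_kwargs` when the subclass is defined. A minimal sketch of how an app might opt in; the subclass name and values below are illustrative, not taken from the package:

```python
from typing import ClassVar, Optional

import fal  # assumes the fal SDK from this release is installed


class ScaledApp(fal.App):
    # Knobs left as None are not written into host_kwargs, so the
    # platform's defaults continue to apply for them.
    app_name: ClassVar[Optional[str]] = "scaled-app"

    # New in this release: forwarded into host_kwargs when not None.
    min_concurrency: ClassVar[Optional[int]] = 1
    max_concurrency: ClassVar[Optional[int]] = 8
    concurrency_buffer: ClassVar[Optional[int]] = 2
    max_multiplexing: ClassVar[Optional[int]] = 4
```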
fal/cli/parser.py
CHANGED
@@ -86,6 +86,37 @@ class SinceAction(argparse.Action):

         super().__init__(*args, **kwargs)

+        # If a default is provided as a string like "1h ago", parse it into a datetime
+        # so callers can rely on receiving a datetime even when the flag isn't passed.
+        default_value = getattr(self, "default", None)
+        if default_value is not None and default_value is not argparse.SUPPRESS:
+            if isinstance(default_value, str):
+                dt = self._parse_since(default_value)
+                if not dt:
+                    raise ValueError(
+                        f"Invalid 'default' value for SinceAction: {default_value!r}"
+                    )
+                if (
+                    self._limit
+                    and self._limit_dt is not None
+                    and dt < self._limit_dt - self.LIMIT_LEEWAY
+                ):
+                    raise ValueError(
+                        "Default since value is older than the allowed limit "
+                        f"{self._limit}."
+                    )
+                self.default = dt
+            elif isinstance(default_value, datetime):
+                if (
+                    self._limit
+                    and self._limit_dt is not None
+                    and default_value < self._limit_dt - self.LIMIT_LEEWAY
+                ):
+                    raise ValueError(
+                        "Default since value is older than the allowed limit "
+                        f"{self._limit}."
+                    )
+
     def __call__(self, parser, args, values, option_string=None):  # noqa: ARG002
         if values is None:
             setattr(args, self.dest, None)

@@ -102,7 +133,7 @@ class SinceAction(argparse.Action):
                 ),
             )

-        if self._limit_dt is not None:
+        if self._limit and self._limit_dt is not None:
             if dt < self._limit_dt - self.LIMIT_LEEWAY:
                 raise argparse.ArgumentError(
                     self,
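The practical effect is that `--since`/`--until` style options can now be declared with a human-readable string default and still hand a `datetime` to downstream code, and the limit check only applies when a limit was actually configured. Below is a generic, self-contained sketch of that normalize-the-default pattern; it is not fal's `SinceAction`, whose `_parse_since` also accepts forms such as `'now'` and ISO timestamps:

```python
import argparse
from datetime import datetime, timedelta, timezone


def _ago(spec: str) -> datetime:
    # "30m" / "2h" / "1d" -> that long ago, in UTC
    unit = {"m": "minutes", "h": "hours", "d": "days"}[spec[-1]]
    return datetime.now(timezone.utc) - timedelta(**{unit: int(spec[:-1])})


class RelativeTimeAction(argparse.Action):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Normalize a string default (e.g. "1h") into a datetime up front,
        # mirroring the idea the diff adds to SinceAction.__init__.
        if isinstance(self.default, str):
            self.default = _ago(self.default)

    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, _ago(values))


parser = argparse.ArgumentParser()
parser.add_argument("--since", action=RelativeTimeAction, default="1h")
print(parser.parse_args([]).since)                   # ~1h ago, default path
print(parser.parse_args(["--since", "30m"]).since)   # ~30m ago, flag path
```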
fal/cli/runners.py
CHANGED
@@ -1,9 +1,18 @@
 from __future__ import annotations

 import json
-from
-from
-
+from collections import deque
+from dataclasses import dataclass
+from datetime import datetime, timedelta, timezone
+from http import HTTPStatus
+from typing import Iterator, List
+
+import httpx
+from httpx_sse import connect_sse
+from rich.console import Console
+from structlog.typing import EventDict
+
+from fal.rest_client import REST_CLIENT
 from fal.sdk import RunnerInfo, RunnerState

 from ._utils import get_client

@@ -198,6 +207,347 @@ def _add_list_parser(subparsers, parents):
     parser.set_defaults(func=_list)


+def _to_iso_naive(dt: datetime) -> str:
+    return dt.astimezone(timezone.utc).isoformat()
+
+
+def _parse_ts(ts: str) -> datetime:
+    # Support both 'Z' and offset formats
+    ts_norm = ts.replace("Z", "+00:00")
+    return datetime.fromisoformat(ts_norm)
+
+
+def _to_aware_utc(dt: datetime) -> datetime:
+    # Treat naive datetimes as UTC
+    if dt.tzinfo is None:
+        return dt.replace(tzinfo=timezone.utc)
+    return dt.astimezone(timezone.utc)
+
+
+def _post_history(
+    client: httpx.Client,
+    base_params: dict[str, str],
+    since: datetime | None,
+    until: datetime | None,
+    page_size: int,
+) -> tuple[list, str | None]:
+    params: dict[str, str] = dict(base_params)
+    if since is not None:
+        params["since"] = _to_iso_naive(since)
+    if until is not None:
+        params["until"] = _to_iso_naive(until)
+    params["page_size"] = str(page_size)
+    resp = client.post("/logs/history", params=params)
+    if resp.status_code != HTTPStatus.OK:
+        try:
+            detail = resp.json().get("detail", resp.text)
+        except Exception:
+            detail = resp.text
+        raise RuntimeError(f"Failed to fetch logs history: {detail}")
+    data = resp.json()
+    items = data.get("items", []) if isinstance(data, dict) else []
+    next_until = data.get("next_until") if isinstance(data, dict) else None
+    if not isinstance(items, list):
+        raise RuntimeError("Unexpected logs history response format")
+    return items, next_until
+
+
+@dataclass
+class RestRunnerInfo:
+    started_at: datetime | None
+    ended_at: datetime | None
+
+
+def _get_runner_info(runner_id: str) -> RestRunnerInfo:
+    headers = REST_CLIENT.get_headers()
+    with httpx.Client(
+        base_url=REST_CLIENT.base_url, headers=headers, timeout=30
+    ) as client:
+        resp = client.get(f"/runners/{runner_id}")
+        if resp.status_code == HTTPStatus.NOT_FOUND:
+            raise RuntimeError(f"Runner {runner_id} not found")
+        if resp.status_code != HTTPStatus.OK:
+            raise RuntimeError(
+                f"Failed to fetch runner info: {resp.status_code} {resp.text}"
+            )
+        data = resp.json()
+        if not isinstance(data, dict):
+            raise RuntimeError(f"Unexpected runner info response format: {resp.text}")
+
+        start: datetime | None = None
+        end: datetime | None = None
+
+        started_at = data.get("started_at")
+        if started_at is not None:
+            try:
+                start = _to_aware_utc(_parse_ts(started_at))
+            except Exception:
+                start = None
+
+        ended_at = data.get("ended_at")
+        if ended_at is not None:
+            try:
+                end = _to_aware_utc(_parse_ts(ended_at))
+            except Exception:
+                end = None
+
+        return RestRunnerInfo(started_at=start, ended_at=end)
+
+
+def _stream_logs(
+    base_params: dict[str, str], since: datetime | None, until: datetime | None
+) -> Iterator[dict]:
+    headers = REST_CLIENT.get_headers()
+    params: dict[str, str] = base_params.copy()
+    if since is not None:
+        params["since"] = _to_iso_naive(since)
+    if until is not None:
+        params["until"] = _to_iso_naive(until)
+    with httpx.Client(
+        base_url=REST_CLIENT.base_url,
+        headers=headers,
+        timeout=None,
+        follow_redirects=True,
+    ) as client:
+        with connect_sse(
+            client,
+            method="POST",
+            url="/logs/stream",
+            params=params,
+            headers={"Accept": "text/event-stream"},
+        ) as event_source:
+            for sse in event_source.iter_sse():
+                if not sse.data:
+                    continue
+                if sse.event == "error":
+                    raise RuntimeError(f"Error streaming logs: {sse.data}")
+                try:
+                    yield json.loads(sse.data)
+                except Exception:
+                    continue
+
+
+DEFAULT_PAGE_SIZE = 1000
+
+
+def _iter_logs(
+    base_params: dict[str, str], start: datetime | None, end: datetime | None
+) -> Iterator[dict]:
+    headers = REST_CLIENT.get_headers()
+    with httpx.Client(
+        base_url=REST_CLIENT.base_url,
+        headers=headers,
+        timeout=300,
+        follow_redirects=True,
+    ) as client:
+        cursor_until = end
+        while True:
+            items, next_until = _post_history(
+                client, base_params, start, cursor_until, DEFAULT_PAGE_SIZE
+            )
+
+            yield from items
+
+            if not next_until:
+                break
+
+            new_until_dt = _to_aware_utc(_parse_ts(next_until))
+            if start is not None and new_until_dt <= start:
+                break
+            cursor_until = new_until_dt
+
+
+def _get_logs(
+    params: dict[str, str],
+    since: datetime | None,
+    until: datetime | None,
+    lines_count: int | None,
+    *,
+    oldest: bool = False,
+) -> Iterator[dict]:
+    if lines_count is None:
+        yield from _iter_logs(params, since, until)
+        return
+
+    if oldest:
+        produced = 0
+        for log in _iter_logs(params, since, until):
+            if produced >= lines_count:
+                break
+            produced += 1
+            yield log
+        return
+
+    # newest tail: collect into a fixed-size deque, then yield
+    tail: deque[dict] = deque(maxlen=lines_count)
+    for log in _iter_logs(params, since, until):
+        tail.append(log)
+    for log in tail:
+        yield log
+
+
+class LogPrinter:
+    def __init__(self, console: Console) -> None:
+        from structlog.dev import ConsoleRenderer
+
+        from fal.logging.style import LEVEL_STYLES
+
+        self._console = console
+        self._renderer = ConsoleRenderer(level_styles=LEVEL_STYLES)
+
+    def _render_log(self, log: dict) -> str:
+        ts_str: str = log["timestamp"]
+        timestamp = _to_aware_utc(_parse_ts(ts_str))
+        local_ts = timestamp.astimezone()
+        tz_offset = local_ts.strftime("%z")
+        # Insert ':' into offset for readability, e.g. +0300 -> +03:00
+        if tz_offset and len(tz_offset) == 5:
+            tz_offset = tz_offset[:3] + ":" + tz_offset[3:]
+
+        event: EventDict = {
+            "event": log.get("message", ""),
+            "level": str(log.get("level", "")).upper(),
+            "timestamp": f"{local_ts.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]}{tz_offset}",
+        }
+        return self._renderer(logger={}, name=event["level"], event_dict=event)
+
+    def print(self, log: dict) -> None:
+        self._console.print(self._render_log(log), highlight=False)
+
+
+DEFAULT_STREAM_SINCE = timedelta(minutes=1)
+
+
+def _logs(args):
+    params: dict[str, str] = {"job_id": args.id}
+    if getattr(args, "search", None) is not None:
+        params["search"] = args.search
+
+    runner_info = _get_runner_info(args.id)
+    follow: bool = getattr(args, "follow", False)
+    since = getattr(args, "since", None)
+    if follow:
+        since = since or (datetime.now(timezone.utc) - DEFAULT_STREAM_SINCE)
+    else:
+        since = since or runner_info.started_at
+    until = getattr(args, "until", None) or runner_info.ended_at
+
+    # Normalize to aware UTC for comparisons
+    if since is not None:
+        since = _to_aware_utc(since)
+    if until is not None:
+        until = _to_aware_utc(until)
+
+    # Sanity limiters: clamp within runner lifetime when known
+    if runner_info.started_at is not None:
+        if since is not None and since < runner_info.started_at:
+            since = runner_info.started_at
+        if until is not None and until < runner_info.started_at:
+            until = runner_info.started_at
+    if runner_info.ended_at is not None:
+        if since is not None and since > runner_info.ended_at:
+            since = runner_info.ended_at
+        if until is not None and until > runner_info.ended_at:
+            until = runner_info.ended_at
+
+    # Ensure ordering if both are present
+    if since is not None and until is not None and until < since:
+        since, until = until, since
+
+    lines_arg = getattr(args, "lines", None)
+    lines_count: int | None = None
+    lines_oldest = False
+    if lines_arg is not None:
+        if lines_arg.startswith("+"):
+            lines_str = lines_arg[1:]
+            lines_oldest = True
+        else:
+            lines_str = lines_arg
+        try:
+            lines_count = int(lines_str)
+        except ValueError:
+            args.parser.error("Invalid -n|--lines value. Use an integer or +integer.")
+
+    if follow:
+        logs_gen = _stream_logs(params, since, until)
+    else:
+        logs_gen = _get_logs(params, since, until, lines_count, oldest=lines_oldest)
+
+    printer = LogPrinter(args.console)
+
+    if follow:
+        for log in logs_gen:
+            if args.output == "json":
+                args.console.print(json.dumps(log))
+            else:
+                printer.print(log)
+        return
+
+    if args.output == "json":
+        args.console.print(json.dumps({"logs": list(logs_gen)}))
+    else:
+        for log in reversed(list(logs_gen)):
+            printer.print(log)
+
+
+def _add_logs_parser(subparsers, parents):
+    logs_help = "Show logs for a runner."
+    parser = subparsers.add_parser(
+        "logs",
+        aliases=["log"],
+        description=logs_help,
+        help=logs_help,
+        parents=[*parents, get_output_parser()],
+    )
+    parser.add_argument(
+        "id",
+        help="Runner ID.",
+    )
+    parser.add_argument(
+        "--search",
+        default=None,
+        help="Search for string in logs.",
+    )
+    parser.add_argument(
+        "--since",
+        default=None,
+        action=SinceAction,
+        help=(
+            "Show logs since the given time. "
+            "Accepts 'now', relative like '30m', '1h', or an ISO timestamp. "
+            "Defaults to runner start time or to '1m ago' in --follow mode."
+        ),
+    )
+    parser.add_argument(
+        "--until",
+        default=None,
+        action=SinceAction,
+        help=(
+            "Show logs until the given time. "
+            "Accepts 'now', relative like '30m', '1h', or an ISO timestamp. "
+            "Defaults to runner finish time or 'now' if it is still running."
+        ),
+    )
+    parser.add_argument(
+        "--follow",
+        "-f",
+        action="store_true",
+        help="Follow logs live. If --since is not specified, implies '--since 1m ago'.",
+    )
+    parser.add_argument(
+        "--lines",
+        "-n",
+        default=None,
+        type=str,
+        help=(
+            "Only show latest N log lines. "
+            "If '+' prefix is used, show oldest N log lines. "
+            "Ignored if --follow is used."
+        ),
+    )
+    parser.set_defaults(func=_logs)
+
+
 def add_parser(main_subparsers, parents):
     runners_help = "Manage fal runners."
     parser = main_subparsers.add_parser(

@@ -217,3 +567,4 @@ def add_parser(main_subparsers, parents):

     _add_kill_parser(subparsers, parents)
     _add_list_parser(subparsers, parents)
+    _add_logs_parser(subparsers, parents)
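Together these additions implement a new `logs` subcommand under the runners group (most likely invoked as `fal runners logs <RUNNER_ID>`, with `--search`, `--since`, `--until`, `--follow`/`-f`, `--lines`/`-n`, plus the shared JSON output option), which either pages backwards through `/logs/history` using the `next_until` cursor or streams `/logs/stream` over SSE. One behavior worth noting is the `--lines` argument: a plain `N` keeps the newest N entries, while `+N` keeps the oldest N. A standalone sketch of that selection logic, mirroring what `_get_logs` does over the paginated stream:

```python
# Illustrative only: the same tail/head selection that _get_logs applies,
# shown on a plain iterable instead of the /logs/history pages.
from collections import deque
from itertools import islice
from typing import Iterable, Iterator


def limit_lines(logs: Iterable[dict], spec: str) -> Iterator[dict]:
    if spec.startswith("+"):              # "+5" -> oldest 5 entries
        yield from islice(logs, int(spec[1:]))
    else:                                 # "5" -> newest 5 entries
        yield from deque(logs, maxlen=int(spec))


entries = [{"n": i} for i in range(10)]
print(list(limit_lines(entries, "+3")))   # [{'n': 0}, {'n': 1}, {'n': 2}]
print(list(limit_lines(entries, "3")))    # [{'n': 7}, {'n': 8}, {'n': 9}]
```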
{fal-1.42.0.dist-info → fal-1.44.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fal
-Version: 1.42.0
+Version: 1.44.0
 Summary: fal is an easy-to-use Serverless Python Framework
 Author: Features & Labels <support@fal.ai>
 Requires-Python: >=3.8

@@ -25,6 +25,7 @@ Requires-Dist: pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,<2.11
 Requires-Dist: fastapi<1,>=0.99.1
 Requires-Dist: starlette-exporter>=0.21.0
 Requires-Dist: httpx>=0.15.4
+Requires-Dist: httpx-sse
 Requires-Dist: attrs>=21.3.0
 Requires-Dist: python-dateutil<3,>=2.8.0
 Requires-Dist: types-python-dateutil<3,>=2.8.0
{fal-1.42.0.dist-info → fal-1.44.0.dist-info}/RECORD
CHANGED

@@ -1,10 +1,10 @@
 fal/__init__.py,sha256=wXs1G0gSc7ZK60-bHe-B2m0l_sA6TrFk4BxY0tMoLe8,784
 fal/__main__.py,sha256=4JMK66Wj4uLZTKbF-sT3LAxOsr6buig77PmOkJCRRxw,83
-fal/_fal_version.py,sha256=
+fal/_fal_version.py,sha256=ukbfyss0nlFEpMfoBksf6TuiHSAA5lYg-Uh3oNJKvXI,706
 fal/_serialization.py,sha256=npXNsFJ5G7jzBeBIyVMH01Ww34mGY4XWhHpRbSrTtnQ,7598
 fal/_version.py,sha256=1BbTFnucNC_6ldKJ_ZoC722_UkW4S9aDBSW9L0fkKAw,2315
 fal/api.py,sha256=6LkGbbqGUC4tcMBlTL-l7DBkl7t9FpZFSZY1doIdI5o,50284
-fal/app.py,sha256=
+fal/app.py,sha256=dZm2PW4zLH7JBeueN-s3vvpWty66M-ujCSgN33ipXQY,26909
 fal/apps.py,sha256=pzCd2mrKl5J_4oVc40_pggvPtFahXBCdrZXWpnaEJVs,12130
 fal/config.py,sha256=1HRaOJFOAjB7fbQoEPCSH85gMvEEMIMPeupVWgrHVgU,3572
 fal/container.py,sha256=FTsa5hOW4ars-yV1lUoc0BNeIIvAZcpw7Ftyt3A4m_w,2000

@@ -33,11 +33,11 @@ fal/cli/doctor.py,sha256=8SZrYG9Ku0F6LLUHtFdKopdIgZfFkw5E3Mwrxa9KOSk,1613
 fal/cli/files.py,sha256=-j0q4g53A7CWSczGLdfeUCTSd4zXoV3pfZFdman7JOw,3450
 fal/cli/keys.py,sha256=iQVMr3WT8CUqSQT3qeCCiy6rRwoux9F-UEaC4bCwMWo,3754
 fal/cli/main.py,sha256=LDy3gze9TRsvGa4uSNc8NMFmWMLpsyoC-msteICNiso,3371
-fal/cli/parser.py,sha256=
+fal/cli/parser.py,sha256=siSY1kxqczZIs3l_jLwug_BpVzY_ZqHpewON3am83Ow,6658
 fal/cli/profile.py,sha256=PAY_ffifCT71VJ8VxfDVaXPT0U1oN8drvWZDFRXwvek,6678
 fal/cli/queue.py,sha256=9Kid3zR6VOFfAdDgnqi2TNN4ocIv5Vs61ASEZnwMa9o,2713
 fal/cli/run.py,sha256=nAC12Qss4Fg1XmV0qOS9RdGNLYcdoHeRgQMvbTN4P9I,1202
-fal/cli/runners.py,sha256=
+fal/cli/runners.py,sha256=OWSsvk01IkwQhibewZQgC-iWMOXl43tWJSi9F81x8n4,17481
 fal/cli/secrets.py,sha256=HfIeO2IZpCEiBC6Cs5Kpi3zckfDnc7GsLwLdgj3NnPU,3085
 fal/cli/teams.py,sha256=_JcNcf659ZoLBFOxKnVP5A6Pyk1jY1vh4_xzMweYIDo,1285
 fal/console/__init__.py,sha256=lGPUuTqIM9IKTa1cyyA-MA2iZJKVHp2YydsITZVlb6g,148

@@ -143,8 +143,8 @@ openapi_fal_rest/models/workflow_node_type.py,sha256=-FzyeY2bxcNmizKbJI8joG7byRi
 openapi_fal_rest/models/workflow_schema.py,sha256=4K5gsv9u9pxx2ItkffoyHeNjBBYf6ur5bN4m_zePZNY,2019
 openapi_fal_rest/models/workflow_schema_input.py,sha256=2OkOXWHTNsCXHWS6EGDFzcJKkW5FIap-2gfO233EvZQ,1191
 openapi_fal_rest/models/workflow_schema_output.py,sha256=EblwSPAGfWfYVWw_WSSaBzQVju296is9o28rMBAd0mc,1196
-fal-1.42.0.dist-info/METADATA,sha256=
-fal-1.42.0.dist-info/WHEEL,sha256=
-fal-1.42.0.dist-info/entry_points.txt,sha256=
-fal-1.42.0.dist-info/top_level.txt,sha256=
-fal-1.42.0.dist-info/RECORD,,
+fal-1.44.0.dist-info/METADATA,sha256=M6TovVUSGD1FCSOEd5VoU96qC4l09iOuHiMW-ZwONJA,4157
+fal-1.44.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+fal-1.44.0.dist-info/entry_points.txt,sha256=32zwTUC1U1E7nSTIGCoANQOQ3I7-qHG5wI6gsVz5pNU,37
+fal-1.44.0.dist-info/top_level.txt,sha256=r257X1L57oJL8_lM0tRrfGuXFwm66i1huwQygbpLmHw,21
+fal-1.44.0.dist-info/RECORD,,
{fal-1.42.0.dist-info → fal-1.44.0.dist-info}/WHEEL
File without changes

{fal-1.42.0.dist-info → fal-1.44.0.dist-info}/entry_points.txt
File without changes

{fal-1.42.0.dist-info → fal-1.44.0.dist-info}/top_level.txt
File without changes