fal 1.42.0__tar.gz → 1.43.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of fal might be problematic. Click here for more details.
- {fal-1.42.0/fal.egg-info → fal-1.43.0}/PKG-INFO +2 -1
- {fal-1.42.0 → fal-1.43.0/fal.egg-info}/PKG-INFO +2 -1
- {fal-1.42.0 → fal-1.43.0}/fal.egg-info/requires.txt +1 -0
- {fal-1.42.0 → fal-1.43.0}/pyproject.toml +1 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/_fal_version.py +2 -2
- {fal-1.42.0 → fal-1.43.0}/src/fal/cli/parser.py +32 -1
- fal-1.43.0/src/fal/cli/runners.py +570 -0
- fal-1.42.0/src/fal/cli/runners.py +0 -219
- {fal-1.42.0 → fal-1.43.0}/.gitignore +0 -0
- {fal-1.42.0 → fal-1.43.0}/Makefile +0 -0
- {fal-1.42.0 → fal-1.43.0}/README.md +0 -0
- {fal-1.42.0 → fal-1.43.0}/docs/conf.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/docs/index.rst +0 -0
- {fal-1.42.0 → fal-1.43.0}/fal.egg-info/SOURCES.txt +0 -0
- {fal-1.42.0 → fal-1.43.0}/fal.egg-info/dependency_links.txt +0 -0
- {fal-1.42.0 → fal-1.43.0}/fal.egg-info/entry_points.txt +0 -0
- {fal-1.42.0 → fal-1.43.0}/fal.egg-info/top_level.txt +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/README.md +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/applications/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/applications/app_metadata.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/billing/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/billing/get_user_details.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/comfy/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/comfy/create_workflow.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/comfy/delete_workflow.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/comfy/get_workflow.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/comfy/list_user_workflows.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/comfy/update_workflow.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/files/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/files/check_dir_hash.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/files/upload_local_file.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/users/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/users/get_current_user.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/workflows/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/workflows/create_workflow.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/workflows/delete_workflow.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/workflows/get_workflow.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/workflows/list_user_workflows.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/api/workflows/update_workflow.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/client.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/errors.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/app_metadata_response_app_metadata.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/body_upload_local_file.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/comfy_workflow_detail.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/comfy_workflow_item.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/comfy_workflow_schema.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/comfy_workflow_schema_extra_data.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/comfy_workflow_schema_fal_inputs.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/comfy_workflow_schema_fal_inputs_dev_info.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/comfy_workflow_schema_prompt.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/current_user.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/customer_details.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/hash_check.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/http_validation_error.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/lock_reason.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/page_comfy_workflow_item.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/page_workflow_item.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/team_role.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/typed_comfy_workflow.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/typed_comfy_workflow_update.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/typed_workflow.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/typed_workflow_update.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/user_member.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/validation_error.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/workflow_contents.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/workflow_contents_metadata.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/workflow_contents_nodes.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/workflow_contents_output.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/workflow_detail.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/workflow_detail_contents.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/workflow_item.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/workflow_node.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/workflow_node_type.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/workflow_schema.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/workflow_schema_input.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/models/workflow_schema_output.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/py.typed +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/openapi_fal_rest/types.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi-fal-rest/pyproject.toml +0 -0
- {fal-1.42.0 → fal-1.43.0}/openapi_rest.config.yaml +0 -0
- {fal-1.42.0 → fal-1.43.0}/setup.cfg +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/__main__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/_serialization.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/_version.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/api.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/app.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/apps.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/auth/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/auth/auth0.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/auth/local.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/cli/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/cli/_utils.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/cli/api.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/cli/apps.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/cli/auth.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/cli/cli_nested_json.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/cli/create.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/cli/debug.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/cli/deploy.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/cli/doctor.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/cli/files.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/cli/keys.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/cli/main.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/cli/profile.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/cli/queue.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/cli/run.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/cli/secrets.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/cli/teams.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/config.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/console/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/console/icons.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/console/ux.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/container.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/exceptions/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/exceptions/_base.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/exceptions/_cuda.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/exceptions/auth.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/files.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/flags.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/logging/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/logging/isolate.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/logging/style.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/logging/trace.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/project.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/py.typed +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/rest_client.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/sdk.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/sync.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/audio/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/audio/audio.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/exceptions.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/file/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/file/file.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/file/providers/fal.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/file/providers/gcp.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/file/providers/r2.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/file/providers/s3.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/file/types.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/image/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/image/image.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/image/nsfw_filter/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/image/nsfw_filter/env.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/image/nsfw_filter/inference.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/image/nsfw_filter/model.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/image/nsfw_filter/requirements.txt +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/image/safety_checker.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/kv.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/optimize.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/types.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/utils/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/utils/download_utils.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/utils/endpoint.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/utils/retry.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/utils/setup_utils.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/video/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/toolkit/video/video.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/utils.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/src/fal/workflows.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/assets/cat.png +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/cli/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/cli/test_apps.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/cli/test_auth.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/cli/test_deploy.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/cli/test_keys.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/cli/test_run.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/cli/test_secrets.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/conftest.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/integration_test.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/mainify_package/__init__.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/mainify_package/impl.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/mainify_package/utils.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/mainify_target.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/test_apps.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/test_files.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/test_kv.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/test_stability.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/toolkit/file/providers/test_fal_retry.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/toolkit/file_test.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/toolkit/image_test.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/toolkit/test_types.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tests/toolkit/utils/retry.py +0 -0
- {fal-1.42.0 → fal-1.43.0}/tools/demo_script.py +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: fal
|
|
3
|
-
Version: 1.
|
|
3
|
+
Version: 1.43.0
|
|
4
4
|
Summary: fal is an easy-to-use Serverless Python Framework
|
|
5
5
|
Author: Features & Labels <support@fal.ai>
|
|
6
6
|
Requires-Python: >=3.8
|
|
@@ -25,6 +25,7 @@ Requires-Dist: pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,<2.11
|
|
|
25
25
|
Requires-Dist: fastapi<1,>=0.99.1
|
|
26
26
|
Requires-Dist: starlette-exporter>=0.21.0
|
|
27
27
|
Requires-Dist: httpx>=0.15.4
|
|
28
|
+
Requires-Dist: httpx-sse
|
|
28
29
|
Requires-Dist: attrs>=21.3.0
|
|
29
30
|
Requires-Dist: python-dateutil<3,>=2.8.0
|
|
30
31
|
Requires-Dist: types-python-dateutil<3,>=2.8.0
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: fal
|
|
3
|
-
Version: 1.
|
|
3
|
+
Version: 1.43.0
|
|
4
4
|
Summary: fal is an easy-to-use Serverless Python Framework
|
|
5
5
|
Author: Features & Labels <support@fal.ai>
|
|
6
6
|
Requires-Python: >=3.8
|
|
@@ -25,6 +25,7 @@ Requires-Dist: pydantic!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,<2.11
|
|
|
25
25
|
Requires-Dist: fastapi<1,>=0.99.1
|
|
26
26
|
Requires-Dist: starlette-exporter>=0.21.0
|
|
27
27
|
Requires-Dist: httpx>=0.15.4
|
|
28
|
+
Requires-Dist: httpx-sse
|
|
28
29
|
Requires-Dist: attrs>=21.3.0
|
|
29
30
|
Requires-Dist: python-dateutil<3,>=2.8.0
|
|
30
31
|
Requires-Dist: types-python-dateutil<3,>=2.8.0
|
|
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
|
|
|
28
28
|
commit_id: COMMIT_ID
|
|
29
29
|
__commit_id__: COMMIT_ID
|
|
30
30
|
|
|
31
|
-
__version__ = version = '1.
|
|
32
|
-
__version_tuple__ = version_tuple = (1,
|
|
31
|
+
__version__ = version = '1.43.0'
|
|
32
|
+
__version_tuple__ = version_tuple = (1, 43, 0)
|
|
33
33
|
|
|
34
34
|
__commit_id__ = commit_id = None
|
|
@@ -86,6 +86,37 @@ class SinceAction(argparse.Action):
|
|
|
86
86
|
|
|
87
87
|
super().__init__(*args, **kwargs)
|
|
88
88
|
|
|
89
|
+
# If a default is provided as a string like "1h ago", parse it into a datetime
|
|
90
|
+
# so callers can rely on receiving a datetime even when the flag isn't passed.
|
|
91
|
+
default_value = getattr(self, "default", None)
|
|
92
|
+
if default_value is not None and default_value is not argparse.SUPPRESS:
|
|
93
|
+
if isinstance(default_value, str):
|
|
94
|
+
dt = self._parse_since(default_value)
|
|
95
|
+
if not dt:
|
|
96
|
+
raise ValueError(
|
|
97
|
+
f"Invalid 'default' value for SinceAction: {default_value!r}"
|
|
98
|
+
)
|
|
99
|
+
if (
|
|
100
|
+
self._limit
|
|
101
|
+
and self._limit_dt is not None
|
|
102
|
+
and dt < self._limit_dt - self.LIMIT_LEEWAY
|
|
103
|
+
):
|
|
104
|
+
raise ValueError(
|
|
105
|
+
"Default since value is older than the allowed limit "
|
|
106
|
+
f"{self._limit}."
|
|
107
|
+
)
|
|
108
|
+
self.default = dt
|
|
109
|
+
elif isinstance(default_value, datetime):
|
|
110
|
+
if (
|
|
111
|
+
self._limit
|
|
112
|
+
and self._limit_dt is not None
|
|
113
|
+
and default_value < self._limit_dt - self.LIMIT_LEEWAY
|
|
114
|
+
):
|
|
115
|
+
raise ValueError(
|
|
116
|
+
"Default since value is older than the allowed limit "
|
|
117
|
+
f"{self._limit}."
|
|
118
|
+
)
|
|
119
|
+
|
|
89
120
|
def __call__(self, parser, args, values, option_string=None): # noqa: ARG002
|
|
90
121
|
if values is None:
|
|
91
122
|
setattr(args, self.dest, None)
|
|
@@ -102,7 +133,7 @@ class SinceAction(argparse.Action):
|
|
|
102
133
|
),
|
|
103
134
|
)
|
|
104
135
|
|
|
105
|
-
if self._limit_dt is not None:
|
|
136
|
+
if self._limit and self._limit_dt is not None:
|
|
106
137
|
if dt < self._limit_dt - self.LIMIT_LEEWAY:
|
|
107
138
|
raise argparse.ArgumentError(
|
|
108
139
|
self,
|
|
@@ -0,0 +1,570 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from collections import deque
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from datetime import datetime, timedelta, timezone
|
|
7
|
+
from http import HTTPStatus
|
|
8
|
+
from typing import Iterator, List
|
|
9
|
+
|
|
10
|
+
import httpx
|
|
11
|
+
from httpx_sse import connect_sse
|
|
12
|
+
from rich.console import Console
|
|
13
|
+
from structlog.typing import EventDict
|
|
14
|
+
|
|
15
|
+
from fal.rest_client import REST_CLIENT
|
|
16
|
+
from fal.sdk import RunnerInfo, RunnerState
|
|
17
|
+
|
|
18
|
+
from ._utils import get_client
|
|
19
|
+
from .parser import FalClientParser, SinceAction, get_output_parser
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def runners_table(runners: List[RunnerInfo]):
|
|
23
|
+
from rich.table import Table
|
|
24
|
+
|
|
25
|
+
table = Table()
|
|
26
|
+
table.add_column("Alias")
|
|
27
|
+
table.add_column("Runner ID")
|
|
28
|
+
table.add_column("In Flight\nRequests")
|
|
29
|
+
table.add_column("Expires In")
|
|
30
|
+
table.add_column("Uptime")
|
|
31
|
+
table.add_column("Revision")
|
|
32
|
+
table.add_column("State")
|
|
33
|
+
|
|
34
|
+
for runner in runners:
|
|
35
|
+
external_metadata = runner.external_metadata
|
|
36
|
+
present = external_metadata.get("present_in_group", True)
|
|
37
|
+
|
|
38
|
+
num_leases_with_request = len(
|
|
39
|
+
[
|
|
40
|
+
lease
|
|
41
|
+
for lease in external_metadata.get("leases", [])
|
|
42
|
+
if lease.get("request_id") is not None
|
|
43
|
+
]
|
|
44
|
+
)
|
|
45
|
+
|
|
46
|
+
in_flight = str(runner.in_flight_requests)
|
|
47
|
+
missing_leases = runner.in_flight_requests - num_leases_with_request
|
|
48
|
+
if missing_leases > 0:
|
|
49
|
+
# Show a small indicator of in flight requests that are not visible in the
|
|
50
|
+
# leases lists
|
|
51
|
+
# This can be due to race conditions, so only important to report if it's
|
|
52
|
+
# consistent
|
|
53
|
+
in_flight = f"{in_flight} [dim]({missing_leases})[/]"
|
|
54
|
+
|
|
55
|
+
uptime = timedelta(
|
|
56
|
+
seconds=int(runner.uptime.total_seconds()),
|
|
57
|
+
)
|
|
58
|
+
table.add_row(
|
|
59
|
+
runner.alias,
|
|
60
|
+
# Mark lost runners in red
|
|
61
|
+
runner.runner_id if present else f"[red]{runner.runner_id}[/]",
|
|
62
|
+
in_flight,
|
|
63
|
+
(
|
|
64
|
+
"N/A"
|
|
65
|
+
if runner.expiration_countdown is None
|
|
66
|
+
else f"{runner.expiration_countdown}s"
|
|
67
|
+
),
|
|
68
|
+
f"{uptime} ({uptime.total_seconds():.0f}s)",
|
|
69
|
+
runner.revision,
|
|
70
|
+
runner.state.value,
|
|
71
|
+
)
|
|
72
|
+
|
|
73
|
+
return table
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def runners_requests_table(runners: list[RunnerInfo]):
|
|
77
|
+
from rich.table import Table
|
|
78
|
+
|
|
79
|
+
table = Table()
|
|
80
|
+
table.add_column("Runner ID")
|
|
81
|
+
table.add_column("Request ID")
|
|
82
|
+
table.add_column("Caller ID")
|
|
83
|
+
|
|
84
|
+
for runner in runners:
|
|
85
|
+
for lease in runner.external_metadata.get("leases", []):
|
|
86
|
+
if not (req_id := lease.get("request_id")):
|
|
87
|
+
continue
|
|
88
|
+
|
|
89
|
+
table.add_row(
|
|
90
|
+
runner.runner_id,
|
|
91
|
+
req_id,
|
|
92
|
+
lease.get("caller_user_id") or "",
|
|
93
|
+
)
|
|
94
|
+
|
|
95
|
+
return table
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def _kill(args):
|
|
99
|
+
client = get_client(args.host, args.team)
|
|
100
|
+
with client.connect() as connection:
|
|
101
|
+
connection.kill_runner(args.id)
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def _list_json(args, runners: list[RunnerInfo]):
|
|
105
|
+
json_runners = [
|
|
106
|
+
{
|
|
107
|
+
"alias": r.alias,
|
|
108
|
+
"runner_id": r.runner_id,
|
|
109
|
+
"in_flight_requests": r.in_flight_requests,
|
|
110
|
+
"expiration_countdown": r.expiration_countdown,
|
|
111
|
+
"uptime_seconds": int(r.uptime.total_seconds()),
|
|
112
|
+
"revision": r.revision,
|
|
113
|
+
"state": r.state.value,
|
|
114
|
+
}
|
|
115
|
+
for r in runners
|
|
116
|
+
]
|
|
117
|
+
|
|
118
|
+
res = {
|
|
119
|
+
"runners": json_runners,
|
|
120
|
+
}
|
|
121
|
+
args.console.print(json.dumps(res))
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def _list(args):
|
|
125
|
+
client = get_client(args.host, args.team)
|
|
126
|
+
with client.connect() as connection:
|
|
127
|
+
start_time = getattr(args, "since", None)
|
|
128
|
+
runners = connection.list_runners(start_time=start_time)
|
|
129
|
+
|
|
130
|
+
if getattr(args, "state", None):
|
|
131
|
+
states = set(args.state)
|
|
132
|
+
if "all" not in states:
|
|
133
|
+
runners = [r for r in runners if r.state.value in states]
|
|
134
|
+
pending_runners = [
|
|
135
|
+
runner for runner in runners if runner.state == RunnerState.PENDING
|
|
136
|
+
]
|
|
137
|
+
setup_runners = [
|
|
138
|
+
runner for runner in runners if runner.state == RunnerState.SETUP
|
|
139
|
+
]
|
|
140
|
+
dead_runners = [
|
|
141
|
+
runner for runner in runners if runner.state == RunnerState.DEAD
|
|
142
|
+
]
|
|
143
|
+
if args.output == "pretty":
|
|
144
|
+
args.console.print(
|
|
145
|
+
"Runners: "
|
|
146
|
+
+ str(
|
|
147
|
+
len(runners)
|
|
148
|
+
- len(pending_runners)
|
|
149
|
+
- len(setup_runners)
|
|
150
|
+
- len(dead_runners)
|
|
151
|
+
)
|
|
152
|
+
)
|
|
153
|
+
args.console.print(f"Runners Pending: {len(pending_runners)}")
|
|
154
|
+
args.console.print(f"Runners Setting Up: {len(setup_runners)}")
|
|
155
|
+
args.console.print(runners_table(runners))
|
|
156
|
+
|
|
157
|
+
requests_table = runners_requests_table(runners)
|
|
158
|
+
args.console.print(f"Requests: {len(requests_table.rows)}")
|
|
159
|
+
args.console.print(requests_table)
|
|
160
|
+
elif args.output == "json":
|
|
161
|
+
_list_json(args, runners)
|
|
162
|
+
else:
|
|
163
|
+
raise AssertionError(f"Invalid output format: {args.output}")
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
def _add_kill_parser(subparsers, parents):
|
|
167
|
+
kill_help = "Kill a runner."
|
|
168
|
+
parser = subparsers.add_parser(
|
|
169
|
+
"kill",
|
|
170
|
+
description=kill_help,
|
|
171
|
+
help=kill_help,
|
|
172
|
+
parents=parents,
|
|
173
|
+
)
|
|
174
|
+
parser.add_argument(
|
|
175
|
+
"id",
|
|
176
|
+
help="Runner ID.",
|
|
177
|
+
)
|
|
178
|
+
parser.set_defaults(func=_kill)
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
def _add_list_parser(subparsers, parents):
|
|
182
|
+
list_help = "List runners."
|
|
183
|
+
parser = subparsers.add_parser(
|
|
184
|
+
"list",
|
|
185
|
+
description=list_help,
|
|
186
|
+
help=list_help,
|
|
187
|
+
parents=[*parents, get_output_parser()],
|
|
188
|
+
)
|
|
189
|
+
parser.add_argument(
|
|
190
|
+
"--since",
|
|
191
|
+
default=None,
|
|
192
|
+
action=SinceAction,
|
|
193
|
+
limit="1 day",
|
|
194
|
+
help=(
|
|
195
|
+
"Show dead runners since the given time. "
|
|
196
|
+
"Accepts 'now', relative like '30m', '1h', '1d', "
|
|
197
|
+
"or an ISO timestamp. Max 24 hours."
|
|
198
|
+
),
|
|
199
|
+
)
|
|
200
|
+
parser.add_argument(
|
|
201
|
+
"--state",
|
|
202
|
+
choices=["all", "running", "pending", "setup", "dead"],
|
|
203
|
+
nargs="+",
|
|
204
|
+
default=None,
|
|
205
|
+
help=("Filter by runner state(s). Choose one or more, or 'all'(default)."),
|
|
206
|
+
)
|
|
207
|
+
parser.set_defaults(func=_list)
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
def _to_iso_naive(dt: datetime) -> str:
|
|
211
|
+
return dt.astimezone(timezone.utc).isoformat()
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
def _parse_ts(ts: str) -> datetime:
|
|
215
|
+
# Support both 'Z' and offset formats
|
|
216
|
+
ts_norm = ts.replace("Z", "+00:00")
|
|
217
|
+
return datetime.fromisoformat(ts_norm)
|
|
218
|
+
|
|
219
|
+
|
|
220
|
+
def _to_aware_utc(dt: datetime) -> datetime:
|
|
221
|
+
# Treat naive datetimes as UTC
|
|
222
|
+
if dt.tzinfo is None:
|
|
223
|
+
return dt.replace(tzinfo=timezone.utc)
|
|
224
|
+
return dt.astimezone(timezone.utc)
|
|
225
|
+
|
|
226
|
+
|
|
227
|
+
def _post_history(
|
|
228
|
+
client: httpx.Client,
|
|
229
|
+
base_params: dict[str, str],
|
|
230
|
+
since: datetime | None,
|
|
231
|
+
until: datetime | None,
|
|
232
|
+
page_size: int,
|
|
233
|
+
) -> tuple[list, str | None]:
|
|
234
|
+
params: dict[str, str] = dict(base_params)
|
|
235
|
+
if since is not None:
|
|
236
|
+
params["since"] = _to_iso_naive(since)
|
|
237
|
+
if until is not None:
|
|
238
|
+
params["until"] = _to_iso_naive(until)
|
|
239
|
+
params["page_size"] = str(page_size)
|
|
240
|
+
resp = client.post("/logs/history", params=params)
|
|
241
|
+
if resp.status_code != HTTPStatus.OK:
|
|
242
|
+
try:
|
|
243
|
+
detail = resp.json().get("detail", resp.text)
|
|
244
|
+
except Exception:
|
|
245
|
+
detail = resp.text
|
|
246
|
+
raise RuntimeError(f"Failed to fetch logs history: {detail}")
|
|
247
|
+
data = resp.json()
|
|
248
|
+
items = data.get("items", []) if isinstance(data, dict) else []
|
|
249
|
+
next_until = data.get("next_until") if isinstance(data, dict) else None
|
|
250
|
+
if not isinstance(items, list):
|
|
251
|
+
raise RuntimeError("Unexpected logs history response format")
|
|
252
|
+
return items, next_until
|
|
253
|
+
|
|
254
|
+
|
|
255
|
+
@dataclass
|
|
256
|
+
class RestRunnerInfo:
|
|
257
|
+
started_at: datetime | None
|
|
258
|
+
ended_at: datetime | None
|
|
259
|
+
|
|
260
|
+
|
|
261
|
+
def _get_runner_info(runner_id: str) -> RestRunnerInfo:
|
|
262
|
+
headers = REST_CLIENT.get_headers()
|
|
263
|
+
with httpx.Client(
|
|
264
|
+
base_url=REST_CLIENT.base_url, headers=headers, timeout=30
|
|
265
|
+
) as client:
|
|
266
|
+
resp = client.get(f"/runners/{runner_id}")
|
|
267
|
+
if resp.status_code == HTTPStatus.NOT_FOUND:
|
|
268
|
+
raise RuntimeError(f"Runner {runner_id} not found")
|
|
269
|
+
if resp.status_code != HTTPStatus.OK:
|
|
270
|
+
raise RuntimeError(
|
|
271
|
+
f"Failed to fetch runner info: {resp.status_code} {resp.text}"
|
|
272
|
+
)
|
|
273
|
+
data = resp.json()
|
|
274
|
+
if not isinstance(data, dict):
|
|
275
|
+
raise RuntimeError(f"Unexpected runner info response format: {resp.text}")
|
|
276
|
+
|
|
277
|
+
start: datetime | None = None
|
|
278
|
+
end: datetime | None = None
|
|
279
|
+
|
|
280
|
+
started_at = data.get("started_at")
|
|
281
|
+
if started_at is not None:
|
|
282
|
+
try:
|
|
283
|
+
start = _to_aware_utc(_parse_ts(started_at))
|
|
284
|
+
except Exception:
|
|
285
|
+
start = None
|
|
286
|
+
|
|
287
|
+
ended_at = data.get("ended_at")
|
|
288
|
+
if ended_at is not None:
|
|
289
|
+
try:
|
|
290
|
+
end = _to_aware_utc(_parse_ts(ended_at))
|
|
291
|
+
except Exception:
|
|
292
|
+
end = None
|
|
293
|
+
|
|
294
|
+
return RestRunnerInfo(started_at=start, ended_at=end)
|
|
295
|
+
|
|
296
|
+
|
|
297
|
+
def _stream_logs(
|
|
298
|
+
base_params: dict[str, str], since: datetime | None, until: datetime | None
|
|
299
|
+
) -> Iterator[dict]:
|
|
300
|
+
headers = REST_CLIENT.get_headers()
|
|
301
|
+
params: dict[str, str] = base_params.copy()
|
|
302
|
+
if since is not None:
|
|
303
|
+
params["since"] = _to_iso_naive(since)
|
|
304
|
+
if until is not None:
|
|
305
|
+
params["until"] = _to_iso_naive(until)
|
|
306
|
+
with httpx.Client(
|
|
307
|
+
base_url=REST_CLIENT.base_url,
|
|
308
|
+
headers=headers,
|
|
309
|
+
timeout=None,
|
|
310
|
+
follow_redirects=True,
|
|
311
|
+
) as client:
|
|
312
|
+
with connect_sse(
|
|
313
|
+
client,
|
|
314
|
+
method="POST",
|
|
315
|
+
url="/logs/stream",
|
|
316
|
+
params=params,
|
|
317
|
+
headers={"Accept": "text/event-stream"},
|
|
318
|
+
) as event_source:
|
|
319
|
+
for sse in event_source.iter_sse():
|
|
320
|
+
if not sse.data:
|
|
321
|
+
continue
|
|
322
|
+
if sse.event == "error":
|
|
323
|
+
raise RuntimeError(f"Error streaming logs: {sse.data}")
|
|
324
|
+
try:
|
|
325
|
+
yield json.loads(sse.data)
|
|
326
|
+
except Exception:
|
|
327
|
+
continue
|
|
328
|
+
|
|
329
|
+
|
|
330
|
+
DEFAULT_PAGE_SIZE = 1000
|
|
331
|
+
|
|
332
|
+
|
|
333
|
+
def _iter_logs(
|
|
334
|
+
base_params: dict[str, str], start: datetime | None, end: datetime | None
|
|
335
|
+
) -> Iterator[dict]:
|
|
336
|
+
headers = REST_CLIENT.get_headers()
|
|
337
|
+
with httpx.Client(
|
|
338
|
+
base_url=REST_CLIENT.base_url,
|
|
339
|
+
headers=headers,
|
|
340
|
+
timeout=300,
|
|
341
|
+
follow_redirects=True,
|
|
342
|
+
) as client:
|
|
343
|
+
cursor_until = end
|
|
344
|
+
while True:
|
|
345
|
+
items, next_until = _post_history(
|
|
346
|
+
client, base_params, start, cursor_until, DEFAULT_PAGE_SIZE
|
|
347
|
+
)
|
|
348
|
+
|
|
349
|
+
yield from items
|
|
350
|
+
|
|
351
|
+
if not next_until:
|
|
352
|
+
break
|
|
353
|
+
|
|
354
|
+
new_until_dt = _to_aware_utc(_parse_ts(next_until))
|
|
355
|
+
if start is not None and new_until_dt <= start:
|
|
356
|
+
break
|
|
357
|
+
cursor_until = new_until_dt
|
|
358
|
+
|
|
359
|
+
|
|
360
|
+
def _get_logs(
|
|
361
|
+
params: dict[str, str],
|
|
362
|
+
since: datetime | None,
|
|
363
|
+
until: datetime | None,
|
|
364
|
+
lines_count: int | None,
|
|
365
|
+
*,
|
|
366
|
+
oldest: bool = False,
|
|
367
|
+
) -> Iterator[dict]:
|
|
368
|
+
if lines_count is None:
|
|
369
|
+
yield from _iter_logs(params, since, until)
|
|
370
|
+
return
|
|
371
|
+
|
|
372
|
+
if oldest:
|
|
373
|
+
produced = 0
|
|
374
|
+
for log in _iter_logs(params, since, until):
|
|
375
|
+
if produced >= lines_count:
|
|
376
|
+
break
|
|
377
|
+
produced += 1
|
|
378
|
+
yield log
|
|
379
|
+
return
|
|
380
|
+
|
|
381
|
+
# newest tail: collect into a fixed-size deque, then yield
|
|
382
|
+
tail: deque[dict] = deque(maxlen=lines_count)
|
|
383
|
+
for log in _iter_logs(params, since, until):
|
|
384
|
+
tail.append(log)
|
|
385
|
+
for log in tail:
|
|
386
|
+
yield log
|
|
387
|
+
|
|
388
|
+
|
|
389
|
+
class LogPrinter:
    """Pretty-prints structured log dicts to a rich console.

    Uses structlog's ConsoleRenderer with fal's level styles so CLI log
    output matches the styling used elsewhere in the toolchain.
    """

    def __init__(self, console: Console) -> None:
        from structlog.dev import ConsoleRenderer

        from fal.logging.style import LEVEL_STYLES

        self._console = console
        self._renderer = ConsoleRenderer(level_styles=LEVEL_STYLES)

    def _render_log(self, log: dict) -> str:
        # Parse the server timestamp and convert to the local timezone.
        raw_ts: str = log["timestamp"]
        localized = _to_aware_utc(_parse_ts(raw_ts)).astimezone()

        offset = localized.strftime("%z")
        # Insert ':' into offset for readability, e.g. +0300 -> +03:00
        if offset and len(offset) == 5:
            offset = offset[:3] + ":" + offset[3:]

        # Millisecond precision: strftime('%f') is microseconds, trim 3 digits.
        rendered_ts = f"{localized.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]}{offset}"
        event: EventDict = {
            "event": log.get("message", ""),
            "level": str(log.get("level", "")).upper(),
            "timestamp": rendered_ts,
        }
        return self._renderer(logger={}, name=event["level"], event_dict=event)

    def print(self, log: dict) -> None:
        self._console.print(self._render_log(log), highlight=False)
|
|
416
|
+
|
|
417
|
+
|
|
418
|
+
# Default lookback window applied in --follow mode when no --since is given.
DEFAULT_STREAM_SINCE = timedelta(minutes=1)
|
|
419
|
+
|
|
420
|
+
|
|
421
|
+
def _parse_lines_arg(parser, lines_arg: str) -> tuple[int, bool]:
    """Parse a -n|--lines value into ``(count, oldest)``.

    A leading '+' means "oldest N lines" instead of the default "newest N".
    Calls ``parser.error`` (which exits) on non-integer or negative values —
    previously a negative count crashed later with an opaque ValueError
    from deque(maxlen=...).
    """
    oldest = lines_arg.startswith("+")
    lines_str = lines_arg[1:] if oldest else lines_arg
    try:
        count = int(lines_str)
    except ValueError:
        parser.error("Invalid -n|--lines value. Use an integer or +integer.")
    if count < 0:
        parser.error("Invalid -n|--lines value. Count must not be negative.")
    return count, oldest


def _resolve_log_window(since, until, runner_info):
    """Normalize, clamp and order the requested log time window.

    Converts both bounds to aware UTC, clamps them within the runner's
    lifetime when known, and swaps them if they are out of order.
    """
    if since is not None:
        since = _to_aware_utc(since)
    if until is not None:
        until = _to_aware_utc(until)

    # Sanity limiters: clamp within runner lifetime when known.
    started = runner_info.started_at
    ended = runner_info.ended_at
    if started is not None:
        if since is not None and since < started:
            since = started
        if until is not None and until < started:
            until = started
    if ended is not None:
        if since is not None and since > ended:
            since = ended
        if until is not None and until > ended:
            until = ended

    # Ensure ordering if both are present.
    if since is not None and until is not None and until < since:
        since, until = until, since
    return since, until


def _logs(args):
    """Handle `fal runners logs`: fetch or stream logs for a runner.

    Supports time-window filtering (--since/--until), text search,
    head/tail line limits (-n / -n +N) and live streaming (--follow).
    """
    params: dict[str, str] = {"job_id": args.id}
    if getattr(args, "search", None) is not None:
        params["search"] = args.search

    runner_info = _get_runner_info(args.id)
    follow: bool = getattr(args, "follow", False)
    since = getattr(args, "since", None)
    if follow:
        # Live tail: default to a short recent window instead of full history.
        since = since or (datetime.now(timezone.utc) - DEFAULT_STREAM_SINCE)
    else:
        since = since or runner_info.started_at
    until = getattr(args, "until", None) or runner_info.ended_at

    since, until = _resolve_log_window(since, until, runner_info)

    lines_arg = getattr(args, "lines", None)
    lines_count: int | None = None
    lines_oldest = False
    if lines_arg is not None:
        lines_count, lines_oldest = _parse_lines_arg(args.parser, lines_arg)

    if follow:
        # --lines is intentionally ignored when following (documented in help).
        logs_gen = _stream_logs(params, since, until)
    else:
        logs_gen = _get_logs(params, since, until, lines_count, oldest=lines_oldest)

    printer = LogPrinter(args.console)

    if follow:
        for log in logs_gen:
            if args.output == "json":
                args.console.print(json.dumps(log))
            else:
                printer.print(log)
        return

    if args.output == "json":
        args.console.print(json.dumps({"logs": list(logs_gen)}))
    else:
        # History is paginated backwards from `until`, so reverse to print
        # in chronological order.
        for log in reversed(list(logs_gen)):
            printer.print(log)
|
|
491
|
+
|
|
492
|
+
|
|
493
|
+
def _add_logs_parser(subparsers, parents):
    """Register the `logs` subcommand (alias `log`) for runners."""
    logs_help = "Show logs for a runner."
    logs_parser = subparsers.add_parser(
        "logs",
        aliases=["log"],
        description=logs_help,
        help=logs_help,
        parents=[*parents, get_output_parser()],
    )
    logs_parser.add_argument("id", help="Runner ID.")
    logs_parser.add_argument(
        "--search",
        default=None,
        help="Search for string in logs.",
    )
    # --since / --until both go through SinceAction so they accept the same
    # formats: 'now', relative offsets, or ISO timestamps.
    logs_parser.add_argument(
        "--since",
        default=None,
        action=SinceAction,
        help=(
            "Show logs since the given time. "
            "Accepts 'now', relative like '30m', '1h', or an ISO timestamp. "
            "Defaults to runner start time or to '1m ago' in --follow mode."
        ),
    )
    logs_parser.add_argument(
        "--until",
        default=None,
        action=SinceAction,
        help=(
            "Show logs until the given time. "
            "Accepts 'now', relative like '30m', '1h', or an ISO timestamp. "
            "Defaults to runner finish time or 'now' if it is still running."
        ),
    )
    logs_parser.add_argument(
        "--follow",
        "-f",
        action="store_true",
        help="Follow logs live. If --since is not specified, implies '--since 1m ago'.",
    )
    logs_parser.add_argument(
        "--lines",
        "-n",
        default=None,
        type=str,
        help=(
            "Only show latest N log lines. "
            "If '+' prefix is used, show oldest N log lines. "
            "Ignored if --follow is used."
        ),
    )
    logs_parser.set_defaults(func=_logs)
|
|
549
|
+
|
|
550
|
+
|
|
551
|
+
def add_parser(main_subparsers, parents):
    """Register the `runners` command group and all of its subcommands."""
    runners_help = "Manage fal runners."
    runners_parser = main_subparsers.add_parser(
        "runners",
        aliases=["machine"],  # backwards compatibility
        description=runners_help,
        help=runners_help,
        parents=parents,
    )

    command_subparsers = runners_parser.add_subparsers(
        title="Commands",
        metavar="command",
        required=True,
        parser_class=FalClientParser,
    )

    _add_kill_parser(command_subparsers, parents)
    _add_list_parser(command_subparsers, parents)
    _add_logs_parser(command_subparsers, parents)
|