paasta-tools 1.27.0__py3-none-any.whl → 1.35.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of paasta-tools might be problematic.
- paasta_tools/__init__.py +1 -1
- paasta_tools/api/api_docs/swagger.json +9 -1
- paasta_tools/api/tweens/auth.py +2 -1
- paasta_tools/api/views/instance.py +9 -2
- paasta_tools/api/views/remote_run.py +2 -0
- paasta_tools/async_utils.py +4 -1
- paasta_tools/bounce_lib.py +8 -5
- paasta_tools/check_services_replication_tools.py +10 -4
- paasta_tools/check_spark_jobs.py +1 -1
- paasta_tools/cli/cli.py +4 -4
- paasta_tools/cli/cmds/autoscale.py +2 -0
- paasta_tools/cli/cmds/check.py +2 -0
- paasta_tools/cli/cmds/cook_image.py +2 -0
- paasta_tools/cli/cmds/get_docker_image.py +2 -0
- paasta_tools/cli/cmds/get_image_version.py +2 -0
- paasta_tools/cli/cmds/get_latest_deployment.py +2 -0
- paasta_tools/cli/cmds/info.py +10 -3
- paasta_tools/cli/cmds/itest.py +2 -0
- paasta_tools/cli/cmds/list_namespaces.py +2 -0
- paasta_tools/cli/cmds/local_run.py +122 -27
- paasta_tools/cli/cmds/logs.py +31 -7
- paasta_tools/cli/cmds/mark_for_deployment.py +14 -4
- paasta_tools/cli/cmds/mesh_status.py +3 -2
- paasta_tools/cli/cmds/push_to_registry.py +2 -0
- paasta_tools/cli/cmds/remote_run.py +156 -12
- paasta_tools/cli/cmds/rollback.py +6 -2
- paasta_tools/cli/cmds/secret.py +4 -2
- paasta_tools/cli/cmds/security_check.py +2 -0
- paasta_tools/cli/cmds/spark_run.py +7 -3
- paasta_tools/cli/cmds/status.py +59 -29
- paasta_tools/cli/cmds/validate.py +325 -40
- paasta_tools/cli/cmds/wait_for_deployment.py +2 -0
- paasta_tools/cli/schemas/adhoc_schema.json +3 -0
- paasta_tools/cli/schemas/autoscaling_schema.json +3 -2
- paasta_tools/cli/schemas/eks_schema.json +24 -1
- paasta_tools/cli/schemas/kubernetes_schema.json +1 -0
- paasta_tools/cli/schemas/smartstack_schema.json +14 -0
- paasta_tools/cli/utils.py +34 -20
- paasta_tools/contrib/bounce_log_latency_parser.py +1 -1
- paasta_tools/contrib/check_orphans.py +1 -1
- paasta_tools/contrib/get_running_task_allocation.py +1 -1
- paasta_tools/contrib/ide_helper.py +14 -14
- paasta_tools/contrib/mock_patch_checker.py +1 -1
- paasta_tools/contrib/paasta_update_soa_memcpu.py +10 -14
- paasta_tools/contrib/render_template.py +1 -1
- paasta_tools/contrib/shared_ip_check.py +1 -1
- paasta_tools/generate_deployments_for_service.py +2 -0
- paasta_tools/instance/hpa_metrics_parser.py +3 -5
- paasta_tools/instance/kubernetes.py +70 -36
- paasta_tools/kubernetes/application/controller_wrappers.py +23 -2
- paasta_tools/kubernetes/remote_run.py +52 -25
- paasta_tools/kubernetes_tools.py +60 -69
- paasta_tools/long_running_service_tools.py +15 -5
- paasta_tools/mesos/master.py +1 -1
- paasta_tools/metrics/metastatus_lib.py +1 -25
- paasta_tools/metrics/metrics_lib.py +12 -3
- paasta_tools/paastaapi/__init__.py +1 -1
- paasta_tools/paastaapi/api/autoscaler_api.py +1 -1
- paasta_tools/paastaapi/api/default_api.py +1 -1
- paasta_tools/paastaapi/api/remote_run_api.py +1 -1
- paasta_tools/paastaapi/api/resources_api.py +1 -1
- paasta_tools/paastaapi/api/service_api.py +1 -1
- paasta_tools/paastaapi/api_client.py +1 -1
- paasta_tools/paastaapi/configuration.py +2 -2
- paasta_tools/paastaapi/exceptions.py +1 -1
- paasta_tools/paastaapi/model/adhoc_launch_history.py +1 -1
- paasta_tools/paastaapi/model/autoscaler_count_msg.py +1 -1
- paasta_tools/paastaapi/model/autoscaling_override.py +1 -1
- paasta_tools/paastaapi/model/deploy_queue.py +1 -1
- paasta_tools/paastaapi/model/deploy_queue_service_instance.py +1 -1
- paasta_tools/paastaapi/model/envoy_backend.py +1 -1
- paasta_tools/paastaapi/model/envoy_location.py +1 -1
- paasta_tools/paastaapi/model/envoy_status.py +1 -1
- paasta_tools/paastaapi/model/flink_cluster_overview.py +1 -1
- paasta_tools/paastaapi/model/flink_config.py +1 -1
- paasta_tools/paastaapi/model/flink_job.py +1 -1
- paasta_tools/paastaapi/model/flink_job_details.py +1 -1
- paasta_tools/paastaapi/model/flink_jobs.py +1 -1
- paasta_tools/paastaapi/model/float_and_error.py +1 -1
- paasta_tools/paastaapi/model/hpa_metric.py +1 -1
- paasta_tools/paastaapi/model/inline_object.py +1 -1
- paasta_tools/paastaapi/model/inline_response200.py +1 -1
- paasta_tools/paastaapi/model/inline_response2001.py +1 -1
- paasta_tools/paastaapi/model/inline_response202.py +1 -1
- paasta_tools/paastaapi/model/inline_response403.py +1 -1
- paasta_tools/paastaapi/model/instance_bounce_status.py +1 -1
- paasta_tools/paastaapi/model/instance_mesh_status.py +1 -1
- paasta_tools/paastaapi/model/instance_status.py +1 -1
- paasta_tools/paastaapi/model/instance_status_adhoc.py +1 -1
- paasta_tools/paastaapi/model/instance_status_cassandracluster.py +1 -1
- paasta_tools/paastaapi/model/instance_status_flink.py +1 -1
- paasta_tools/paastaapi/model/instance_status_kafkacluster.py +1 -1
- paasta_tools/paastaapi/model/instance_status_kubernetes.py +1 -1
- paasta_tools/paastaapi/model/instance_status_kubernetes_autoscaling_status.py +1 -1
- paasta_tools/paastaapi/model/instance_status_kubernetes_v2.py +1 -1
- paasta_tools/paastaapi/model/instance_status_tron.py +1 -1
- paasta_tools/paastaapi/model/instance_tasks.py +1 -1
- paasta_tools/paastaapi/model/integer_and_error.py +1 -1
- paasta_tools/paastaapi/model/kubernetes_container.py +1 -1
- paasta_tools/paastaapi/model/kubernetes_container_v2.py +1 -1
- paasta_tools/paastaapi/model/kubernetes_healthcheck.py +1 -1
- paasta_tools/paastaapi/model/kubernetes_pod.py +1 -1
- paasta_tools/paastaapi/model/kubernetes_pod_event.py +1 -1
- paasta_tools/paastaapi/model/kubernetes_pod_v2.py +1 -1
- paasta_tools/paastaapi/model/kubernetes_replica_set.py +1 -1
- paasta_tools/paastaapi/model/kubernetes_version.py +4 -1
- paasta_tools/paastaapi/model/remote_run_outcome.py +1 -1
- paasta_tools/paastaapi/model/remote_run_start.py +4 -1
- paasta_tools/paastaapi/model/remote_run_stop.py +1 -1
- paasta_tools/paastaapi/model/remote_run_token.py +1 -1
- paasta_tools/paastaapi/model/resource.py +1 -1
- paasta_tools/paastaapi/model/resource_item.py +1 -1
- paasta_tools/paastaapi/model/resource_value.py +1 -1
- paasta_tools/paastaapi/model/smartstack_backend.py +1 -1
- paasta_tools/paastaapi/model/smartstack_location.py +1 -1
- paasta_tools/paastaapi/model/smartstack_status.py +1 -1
- paasta_tools/paastaapi/model/task_tail_lines.py +1 -1
- paasta_tools/paastaapi/model_utils.py +1 -1
- paasta_tools/paastaapi/rest.py +1 -1
- paasta_tools/remote_git.py +2 -2
- paasta_tools/run-paasta-api-in-dev-mode.py +2 -2
- paasta_tools/run-paasta-api-playground.py +2 -2
- paasta_tools/setup_kubernetes_job.py +43 -1
- paasta_tools/setup_prometheus_adapter_config.py +82 -0
- paasta_tools/setup_tron_namespace.py +2 -2
- paasta_tools/tron_tools.py +4 -1
- paasta_tools/utils.py +29 -11
- paasta_tools/yaml_tools.py +1 -1
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/check_orphans.py +1 -1
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/check_spark_jobs.py +1 -1
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/generate_deployments_for_service.py +2 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/get_running_task_allocation.py +1 -1
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/ide_helper.py +14 -14
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/paasta_update_soa_memcpu.py +10 -14
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/setup_kubernetes_job.py +43 -1
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/setup_prometheus_adapter_config.py +82 -0
- paasta_tools-1.35.8.dist-info/METADATA +79 -0
- {paasta_tools-1.27.0.dist-info → paasta_tools-1.35.8.dist-info}/RECORD +186 -191
- {paasta_tools-1.27.0.dist-info → paasta_tools-1.35.8.dist-info}/WHEEL +1 -1
- paasta_tools/frameworks/adhoc_scheduler.py +0 -71
- paasta_tools/frameworks/native_scheduler.py +0 -652
- paasta_tools/frameworks/task_store.py +0 -245
- paasta_tools/mesos_maintenance.py +0 -848
- paasta_tools/paasta_native_serviceinit.py +0 -21
- paasta_tools-1.27.0.dist-info/METADATA +0 -75
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/apply_external_resources.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/bounce_log_latency_parser.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/check_autoscaler_max_instances.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/check_cassandracluster_services_replication.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/check_flink_services_health.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/check_kubernetes_api.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/check_kubernetes_services_replication.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/check_manual_oapi_changes.sh +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/check_oom_events.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/cleanup_kubernetes_cr.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/cleanup_kubernetes_crd.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/cleanup_kubernetes_jobs.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/create_dynamodb_table.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/create_paasta_playground.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/delete_kubernetes_deployments.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/emit_allocated_cpu_metrics.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/generate_all_deployments +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/generate_authenticating_services.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/generate_services_file.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/generate_services_yaml.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/habitat_fixer.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/is_pod_healthy_in_proxy.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/is_pod_healthy_in_smartstack.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/kill_bad_containers.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/kubernetes_remove_evicted_pods.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/mass-deploy-tag.sh +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/mock_patch_checker.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/paasta_cleanup_remote_run_resources.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/paasta_cleanup_stale_nodes.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/paasta_deploy_tron_jobs +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/paasta_execute_docker_command.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/paasta_secrets_sync.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/paasta_tabcomplete.sh +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/render_template.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/rightsizer_soaconfigs_update.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/service_shard_remove.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/service_shard_update.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/setup_istio_mesh.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/setup_kubernetes_cr.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/setup_kubernetes_crd.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/setup_kubernetes_internal_crd.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/shared_ip_check.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/synapse_srv_namespaces_fact.py +0 -0
- {paasta_tools-1.27.0.data → paasta_tools-1.35.8.data}/scripts/timeouts_metrics_prom.py +0 -0
- {paasta_tools-1.27.0.dist-info → paasta_tools-1.35.8.dist-info}/entry_points.txt +0 -0
- {paasta_tools-1.27.0.dist-info → paasta_tools-1.35.8.dist-info/licenses}/LICENSE +0 -0
- {paasta_tools-1.27.0.dist-info → paasta_tools-1.35.8.dist-info}/top_level.txt +0 -0
paasta_tools/__init__.py
CHANGED
paasta_tools/api/api_docs/swagger.json
CHANGED

@@ -2,7 +2,7 @@
   "swagger": "2.0",
   "info": {
     "title": "Paasta API",
-    "version": "1.
+    "version": "1.3.0"
   },
   "basePath": "/v1",
   "schemes": [

@@ -1788,6 +1788,11 @@
     "KubernetesVersion": {
       "type": "object",
       "properties": {
+        "container_port": {
+          "description": "Port the container is expecting to receive traffic on",
+          "type": "integer",
+          "format": "int32"
+        },
         "type": {
           "description": "Type of version (ReplicaSet or ControllerRevision)",
           "type": "string"

@@ -2324,6 +2329,9 @@
         },
         "toolbox": {
           "type": "boolean"
+        },
+        "command": {
+          "type": "string"
         }
       },
       "required": [

paasta_tools/api/tweens/auth.py
CHANGED
paasta_tools/api/views/instance.py
CHANGED

@@ -26,6 +26,7 @@ from typing import Mapping
 from typing import Optional
 
 import a_sync
+from pyramid.request import Request
 from pyramid.response import Response
 from pyramid.view import view_config
 

@@ -139,7 +140,10 @@ def no_configuration_for_service_message(cluster, service, instance):
 @view_config(
     route_name="service.instance.status", request_method="GET", renderer="json"
 )
-def instance_status(
+def instance_status(
+    request: Request,
+) -> dict[str, Any]:  # godspeed to anyone typing the retval here
+    # NOTE: swagger_data is populated by pyramid_swagger
     service = request.swagger_data.get("service")
     instance = request.swagger_data.get("instance")
     verbose = request.swagger_data.get("verbose") or 0

@@ -353,7 +357,10 @@ def get_deployment_version(
     request_method="GET",
     renderer="json",
 )
-def instance_mesh_status(
+def instance_mesh_status(
+    request: Request,
+) -> dict[str, Any]:  # godspeed to anyone typing the retval here
+    # NOTE: swagger_data is populated by pyramid_swagger
     service = request.swagger_data.get("service")
     instance = request.swagger_data.get("instance")
     include_envoy = request.swagger_data.get("include_envoy")

paasta_tools/api/views/remote_run.py
CHANGED

@@ -35,6 +35,7 @@ def view_remote_run_start(request):
     interactive = request.swagger_data["json_body"].get("interactive", True)
     recreate = request.swagger_data["json_body"].get("recreate", False)
     is_toolbox = request.swagger_data["json_body"].get("toolbox", False)
+    command = request.swagger_data["json_body"].get("command", None)
     max_duration = min(
         request.swagger_data["json_body"].get("max_duration", DEFAULT_MAX_DURATION),
         get_max_job_duration_limit(),

@@ -49,6 +50,7 @@ def view_remote_run_start(request):
             recreate=recreate,
             max_duration=max_duration,
             is_toolbox=is_toolbox,
+            command=command,
         )
     except Exception:
         error_message = traceback.format_exc()

paasta_tools/async_utils.py
CHANGED
@@ -3,9 +3,11 @@ import functools
 import time
 import weakref
 from collections import defaultdict
+from typing import Any
 from typing import AsyncIterable
 from typing import Awaitable
 from typing import Callable
+from typing import Coroutine
 from typing import Dict
 from typing import List
 from typing import Optional

@@ -97,7 +99,8 @@ async def aiter_to_list(
 def async_timeout(
     seconds: int = 10,
 ) -> Callable[
-    [Callable[...,
+    [Callable[..., Coroutine[Any, Any, T]]],
+    Callable[..., Coroutine[Any, Any, T]],  # wrapped  # inner
 ]:
     def outer(wrapped):
         @functools.wraps(wrapped)

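For reference, the fixed annotation above is the usual shape for a decorator factory over coroutine functions: it takes a `Callable[..., Coroutine[Any, Any, T]]` and returns another callable with the same coroutine return type. A minimal sketch of a decorator with that signature (illustrative only, built on `asyncio.wait_for`; the actual paasta_tools implementation is not shown in this diff):

```python
import asyncio
import functools
from typing import Any, Callable, Coroutine, TypeVar

T = TypeVar("T")


def async_timeout(
    seconds: int = 10,
) -> Callable[
    [Callable[..., Coroutine[Any, Any, T]]],
    Callable[..., Coroutine[Any, Any, T]],
]:
    # Decorator factory: wraps a coroutine function and enforces a timeout,
    # preserving the wrapped function's return type T.
    def outer(
        wrapped: Callable[..., Coroutine[Any, Any, T]]
    ) -> Callable[..., Coroutine[Any, Any, T]]:
        @functools.wraps(wrapped)
        async def inner(*args: Any, **kwargs: Any) -> T:
            return await asyncio.wait_for(wrapped(*args, **kwargs), timeout=seconds)

        return inner

    return outer
```
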
paasta_tools/bounce_lib.py
CHANGED
@@ -42,11 +42,14 @@ BounceMethodResult = TypedDict(
 
 BounceMethod = Callable[
     [
-        Arg(
-
-
-
-
+        Arg(
+            BounceMethodConfigDict,
+            "new_config",  # noqa: F821 # flake8 false-positive, these are not var references
+        ),
+        Arg(bool, "new_app_running"),  # noqa: F821 # flake8 false-positive
+        Arg(Collection, "happy_new_tasks"),  # noqa: F821 # flake8 false-positive
+        Arg(Sequence, "old_non_draining_tasks"),  # noqa: F821 # flake8 false-positive
+        DefaultArg(float, "margin_factor"),  # noqa: F821 # flake8 false-positive
     ],
     BounceMethodResult,
 ]

paasta_tools/check_services_replication_tools.py
CHANGED

@@ -56,10 +56,16 @@ log = logging.getLogger(__name__)
 
 CheckServiceReplication = Callable[
     [
-        Arg(
-
-
-
+        Arg(
+            InstanceConfig_T,
+            "instance_config",  # noqa: F821 # flake8 false-positive, these are not var references
+        ),
+        Arg(
+            Dict[str, Dict[str, List[V1Pod]]],
+            "pods_by_service_instance",  # noqa: F821 # flake8 false-positive
+        ),
+        Arg(Any, "replication_checker"),  # noqa: F821 # flake8 false-positive
+        NamedArg(bool, "dry_run"),  # noqa: F821 # flake8 false-positive
     ],
     Optional[bool],
 ]

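The `Arg(...)`, `DefaultArg(...)`, and `NamedArg(...)` forms in the two callback types above come from `mypy_extensions`; they let a `Callable` alias spell out parameter names and kinds so mypy can check implementations against it. A small illustrative example (the `Handler` alias and `check` function below are hypothetical, not paasta_tools code):

```python
from typing import Callable, Optional

from mypy_extensions import Arg, DefaultArg, NamedArg

# Positional parameters are described with Arg(type, name), optional positionals
# with DefaultArg(...), and keyword-only parameters with NamedArg(...).
Handler = Callable[
    [
        Arg(str, "service"),
        DefaultArg(int, "retries"),
        NamedArg(bool, "dry_run"),
    ],
    Optional[bool],
]


def check(service: str, retries: int = 3, *, dry_run: bool) -> Optional[bool]:
    return None


handler: Handler = check  # mypy accepts this; mismatched names or kinds would be flagged
```
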
paasta_tools/check_spark_jobs.py
CHANGED
@@ -124,7 +124,7 @@ def format_framework(info):
 def format_message_for_service(service, frameworks):
     output = f"Found the following long-running Spark frameworks associated with service {service}.\n"
     output += (
-
+        "Please check why they are still running and terminate if appropriate.\n\n"
     )
     output += "\n".join(format_framework(f) for f in frameworks)
     return output

paasta_tools/cli/cli.py
CHANGED
@@ -68,7 +68,7 @@ class PrintsHelpOnErrorArgumentParser(argparse.ArgumentParser):
 def list_external_commands():
     p = subprocess.check_output(["/bin/bash", "-p", "-c", "compgen -A command paasta-"])
     lines = p.decode("utf-8").strip().split("\n")
-    return {
+    return {line.replace("paasta-", "", 1) for line in lines}
 
 
 def calling_external_command():

@@ -132,10 +132,10 @@ PAASTA_SUBCOMMANDS = {
 }
 
 
-def get_argparser(commands=None):
+def get_argparser(commands: list[str] | None = None) -> argparse.ArgumentParser:
     """Create and return argument parser for a set of subcommands.
 
-    :param commands:
+    :param commands: list[str] | None: If `commands` argument is `None`,
         add full parsers for all subcommands, if `commands` is empty list -
         add thin parsers for all subcommands, otherwise - add full parsers for
         subcommands in the argument.

@@ -170,7 +170,7 @@ def get_argparser(commands=None):
 
     # Adding a separate help subparser allows us to respond to "help" without --help
     help_parser = subparsers.add_parser(
-        "help", help=
+        "help", help="run `paasta <subcommand> -h` for help"
     )
     help_parser.set_defaults(command=None)
 

paasta_tools/cli/cmds/autoscale.py
CHANGED

@@ -48,6 +48,8 @@ def add_subparser(subparsers):
         "-s",
         "--service",
         help="Service that you want to autoscale. Like 'example_service'.",
+        # strip any potential trailing / for folks tab-completing directories
+        type=lambda x: x.rstrip("/"),
     ).completer = lazy_choices_completer(list_services)
     autoscale_parser.add_argument(
         "-i",

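This `type=lambda x: x.rstrip("/")` addition recurs across many subcommands in this release; argparse runs the raw argument string through the `type` callable, so a tab-completed directory name such as `my_service/` is normalized before any further validation. A standalone illustration (not paasta code):

```python
import argparse

parser = argparse.ArgumentParser()
# argparse applies the `type` callable to the raw string before storing it,
# so a trailing slash from shell tab-completion is stripped.
parser.add_argument("-s", "--service", type=lambda x: x.rstrip("/"))

args = parser.parse_args(["--service", "my_service/"])
assert args.service == "my_service"
```
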
paasta_tools/cli/cmds/check.py
CHANGED
@@ -53,6 +53,8 @@ def add_subparser(subparsers):
         "-s",
         "--service",
         help="The name of the service you wish to inspect. Defaults to autodetect.",
+        # strip any potential trailing / for folks tab-completing directories
+        type=lambda x: x.rstrip("/"),
     ).completer = lazy_choices_completer(list_services)
     check_parser.add_argument(
         "-y",

paasta_tools/cli/cmds/cook_image.py
CHANGED

@@ -48,6 +48,8 @@ def add_subparser(subparsers: argparse._SubParsersAction) -> None:
             '"services-", as included in a Jenkins job name, '
             "will be stripped."
         ),
+        # strip any potential trailing / for folks tab-completing directories
+        type=lambda x: x.rstrip("/"),
         required=True,
     )
     list_parser.add_argument(

paasta_tools/cli/cmds/get_docker_image.py
CHANGED

@@ -34,6 +34,8 @@ def add_subparser(subparsers):
         "--service",
         help="Name of the service which you want to get the docker image for.",
         required=True,
+        # strip any potential trailing / for folks tab-completing directories
+        type=lambda x: x.rstrip("/"),
     ).completer = lazy_choices_completer(list_services)
     list_parser.add_argument(
         "-i",

paasta_tools/cli/cmds/get_image_version.py
CHANGED

@@ -54,6 +54,8 @@ def add_subparser(subparsers: argparse._SubParsersAction) -> None:
         "--service",
         help="Name of the service which you want to get the image version for.",
         required=True,
+        # strip any potential trailing / for folks tab-completing directories
+        type=lambda x: x.rstrip("/"),
     )
     arg_service.completer = lazy_choices_completer(list_services)  # type: ignore
     parser.add_argument(

paasta_tools/cli/cmds/get_latest_deployment.py
CHANGED

@@ -33,6 +33,8 @@ def add_subparser(subparsers):
         "--service",
         help="Name of the service which you want to get the latest deployment for.",
         required=True,
+        # strip any potential trailing / for folks tab-completing directories
+        type=lambda x: x.rstrip("/"),
     ).completer = lazy_choices_completer(list_services)
     list_parser.add_argument(
         "-i",

paasta_tools/cli/cmds/info.py
CHANGED
@@ -51,7 +51,11 @@ def add_subparser(subparsers):
         ),
     )
     list_parser.add_argument(
-        "-s",
+        "-s",
+        "--service",
+        help="The name of the service you wish to inspect",
+        # strip any potential trailing / for folks tab-completing directories
+        type=lambda x: x.rstrip("/"),
     ).completer = lazy_choices_completer(list_services)
     list_parser.add_argument(
         "-d",

@@ -78,8 +82,9 @@ def get_smartstack_endpoints(service, soa_dir):
         service, full_name=False, soa_dir=soa_dir
     ):
         mode = config.get("mode", "http")
+        url_scheme = "http" if mode == "http2" else mode
         port = config.get("proxy_port")
-        endpoints.append(f"{
+        endpoints.append(f"{url_scheme}://169.254.255.254:{port} ({name})")
     return endpoints
 
 

@@ -96,10 +101,12 @@ def get_deployments_strings(service: str, soa_dir: str) -> List[str]:
         service=service, namespace="main", soa_dir=soa_dir
     )
     service_mode = service_config.get_mode()
+    url_scheme = "http" if service_mode == "http2" else service_mode
+
     for cluster in deployments_to_clusters(deployments):
         if service_mode in TESTABLE_SERVICE_MODES:
             link = PaastaColors.cyan(
-                f"{
+                f"{url_scheme}://{service}.proxy.{cluster}.paasta/"
             )
         else:
             link = "N/A"

paasta_tools/cli/cmds/itest.py
CHANGED
@@ -41,6 +41,8 @@ def add_subparser(subparsers):
         help="Test and build docker image for this service. Leading "
         '"services-", as included in a Jenkins job name, '
         "will be stripped.",
+        # strip any potential trailing / for folks tab-completing directories
+        type=lambda x: x.rstrip("/"),
         required=True,
     )
     list_parser.add_argument(

paasta_tools/cli/cmds/list_namespaces.py
CHANGED

@@ -31,6 +31,8 @@ def add_subparser(subparsers) -> None:
         "--service",
         help="Name of the service which you want to list the namespaces for.",
         required=True,
+        # strip any potential trailing / for folks tab-completing directories
+        type=lambda x: x.rstrip("/"),
     ).completer = lazy_choices_completer(list_services)
     # Most services likely don't need to filter by cluster/instance, and can add namespaces from all instances
     list_parser.add_argument(

paasta_tools/cli/cmds/local_run.py
CHANGED

@@ -12,6 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import base64
 import datetime
 import json
 import os

@@ -31,7 +32,9 @@ from urllib.parse import urlparse
 import boto3
 import requests
 from docker import errors
+from docker.api.client import APIClient
 from mypy_extensions import TypedDict
+from service_configuration_lib import read_service_configuration
 
 from paasta_tools.adhoc_tools import get_default_interactive_config
 from paasta_tools.cli.authentication import get_service_auth_token

@@ -314,7 +317,10 @@ def add_subparser(subparsers):
         "test, ensuring that a service will work inside the docker container as expected. "
         "Additionally, 'local-run' can healthcheck a service per the configured healthcheck.\n\n"
         "Alternatively, 'local-run' can be used with --pull, which will pull the currently "
-        "deployed docker image and use it, instead of building one
+        "deployed docker image and use it, instead of building one.\n\n"
+        "While we've tried to make 'local-run' match the real PaaSTA environment, "
+        "there are some differences/limitations: e.g., certain features like `boto_keys` "
+        "are not supported."
     ),
     epilog=(
         "Note: 'paasta local-run' uses docker commands, which may require elevated privileges "

@@ -322,7 +328,11 @@ def add_subparser(subparsers):
         ),
     )
     list_parser.add_argument(
-        "-s",
+        "-s",
+        "--service",
+        help="The name of the service you wish to inspect",
+        # strip any potential trailing / for folks tab-completing directories
+        type=lambda x: x.rstrip("/"),
     ).completer = lazy_choices_completer(list_services)
     list_parser.add_argument(
         "-c",

@@ -609,26 +619,83 @@ class LostContainerException(Exception):
     pass
 
 
-
-
-
-
-
+class DockerAuthConfig(TypedDict):
+    username: str
+    password: str
+
+
+def get_readonly_docker_registry_auth_config(
+    docker_url: str,
+) -> DockerAuthConfig | None:
+    system_paasta_config = load_system_paasta_config()
+    config_path = system_paasta_config.get_readonly_docker_registry_auth_file()
+
+    try:
+        with open(config_path) as f:
+            docker_config = json.load(f)
+    except Exception:
+        print(
+            PaastaColors.yellow(
+                "Warning: unable to load read-only docker registry credentials."
+            ),
+            file=sys.stderr,
+        )
+        # the best we can do is try to pull with whatever auth the user has configured locally
+        # i.e., root-owned docker config in /root/.docker/config.json
+        return None
+    registry = docker_url.split("/")[0]
+
+    # find matching auth config - our usual ro config will have at least two entries
+    # at the time this comment was written
+    auths = docker_config
+    for auth_url, auth_data in auths.items():
+        if registry in auth_url:
+            # Decode the base64 auth string if present
+            if "auth" in auth_data:
+                auth_string = base64.b64decode(auth_data["auth"]).decode("utf-8")
+                username, password = auth_string.split(":", 1)
+                return {"username": username, "password": password}
+
+    # we'll hit this for registries like docker-dev or extra-private internal registries
+    return None
+
+
+def docker_pull_image(docker_client: APIClient, docker_url: str) -> None:
+    """Pull an image using the docker-py library with read-only registry credentials"""
     print(
-        "Please wait while the image (
-        % docker_url,
+        f"Please wait while the image ({docker_url}) is pulled (times out after 30m)...",
         file=sys.stderr,
     )
-
-
-
-
-
-
-
-
-
-
+
+    auth_config = get_readonly_docker_registry_auth_config(docker_url)
+    if not auth_config:
+        print(
+            PaastaColors.yellow(
+                "Warning: No read-only docker registry credentials found, attempting to pull without them."
+            ),
+            file=sys.stderr,
+        )
+
+    try:
+        with Timeout(
+            seconds=1800,
+            error_message=f"Timed out pulling docker image from {docker_url}",
+        ):
+            # this is slightly funky since pull() returns the output line-by-line, but as a dict
+            # ...that we then need to format back to the usual `docker pull` output
+            # :p
+            for line in docker_client.pull(
+                docker_url, auth_config=auth_config, stream=True, decode=True
+            ):
+                # not all lines have an 'id' key :(
+                id_prefix = f"{line['id']}: " if "id" in line else ""
+                print(f"{id_prefix}{line['status']}", file=sys.stderr)
+    except Exception as e:
+        print(
+            f"\nPull failed. Error: {e}",
+            file=sys.stderr,
+        )
+        sys.exit(1)
 
 
 def get_container_id(docker_client, container_name):

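The new `docker_pull_image` pulls through docker-py's low-level client instead of shelling out to `docker pull`. As a rough usage sketch (assumes a local Docker socket and a publicly pullable image; not taken from this diff), `APIClient.pull(..., stream=True, decode=True)` yields one status dict per line of pull output:

```python
from docker import APIClient

# Low-level docker-py client talking to the local Docker daemon (assumed socket path).
client = APIClient(base_url="unix://var/run/docker.sock")

for line in client.pull("busybox:latest", stream=True, decode=True):
    # Each decoded line is a dict such as {"status": "Downloading", "id": "<layer>", ...};
    # not every line carries an "id" key.
    prefix = f"{line['id']}: " if "id" in line else ""
    print(f"{prefix}{line.get('status', '')}")
```
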
@@ -977,12 +1044,12 @@ def run_docker_container(
             # First try to write the file as a string
             # This is for text like config files
             with open(temp_secret_filename, "w") as f:
-                f.write(secret_content)
+                f.write(secret_content)  # type: ignore # TODO: make this type-safe rather than rely on exceptions
         except TypeError:
             # If that fails, try to write it as bytes
             # This is for binary files like TLS keys
             with open(temp_secret_filename, "wb") as fb:
-                fb.write(secret_content)
+                fb.write(secret_content)  # type: ignore # TODO: make this type-safe rather than rely on exceptions
 
         # Append this to the list of volumes passed to docker run
         volumes.append(f"{temp_secret_filename}:{container_mount_path}:ro")

@@ -1223,7 +1290,7 @@ def configure_and_run_docker_container(
         return 1
 
     if pull_image:
-        docker_pull_image(docker_url)
+        docker_pull_image(docker_client, docker_url)
 
     for volume in instance_config.get_volumes(
         system_paasta_config.get_volumes(),

@@ -1299,10 +1366,40 @@ def docker_config_available():
     )
 
 
+def should_reexec_as_root(
+    service: str, skip_secrets: bool, action: str, soa_dir: str = DEFAULT_SOA_DIR
+) -> bool:
+    # local-run can't pull secrets from Vault in prod without a root-owned token
+    need_vault_token = not skip_secrets and action == "pull"
+
+    # there are some special teams with their own private docker registries and no ro creds
+    # however, we don't know what registry is to be used without loading the service config
+    service_info = read_service_configuration(service, soa_dir)
+    # technically folks can set the standard registry as a value here, but atm no one is doing that :p
+    registry_override = service_info.get("docker_registry")
+    # note: we could also have a list of registries that have ro creds, but this seems fine for now
+    uses_private_registry = (
+        registry_override
+        and registry_override
+        in load_system_paasta_config().get_private_docker_registries()
+    )
+    need_docker_config = uses_private_registry and action == "pull"
+
+    return (need_vault_token or need_docker_config) and os.geteuid() != 0
+
+
 def paasta_local_run(args):
-
-
-
+    service = figure_out_service_name(args, soa_dir=args.yelpsoa_config_root)
+    if should_reexec_as_root(
+        service, args.skip_secrets, args.action, args.yelpsoa_config_root
+    ):
+        # XXX: we should re-architect this to not need sudo, but for now,
+        # re-exec ourselves with sudo to get access to the paasta vault token
+        # NOTE: once we do that, we can also remove the venv check above :)
+        print(
+            "Re-executing paasta local-run --pull with sudo for Vault/Docker registry access..."
+        )
+        os.execvp("sudo", ["sudo", "-H", "/usr/bin/paasta"] + sys.argv[1:])
     if args.action == "build" and not makefile_responds_to("cook-image"):
         print(
             "A local Makefile with a 'cook-image' target is required for --build",

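For context on the re-exec above: `os.execvp` replaces the current process image in place, so when it succeeds nothing after the call runs and the sudo'd copy of the CLI continues with the same arguments. A minimal standalone illustration (not paasta code):

```python
import os
import sys

# Re-run this same script under sudo if we aren't already root.
# os.execvp only returns on failure (it raises OSError), so no explicit exit is needed after it.
if os.geteuid() != 0:
    os.execvp("sudo", ["sudo", "-H", sys.executable] + sys.argv)

print("running as root")
```
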
@@ -1329,8 +1426,6 @@ def paasta_local_run(args):
 
     local_run_config = system_paasta_config.get_local_run_config()
 
-    service = figure_out_service_name(args, soa_dir=args.yelpsoa_config_root)
-
     if args.cluster:
         cluster = args.cluster
     else:

paasta_tools/cli/cmds/logs.py
CHANGED
@@ -30,6 +30,7 @@ from typing import Callable
 from typing import ContextManager
 from typing import Dict
 from typing import Iterable
+from typing import Iterator
 from typing import List
 from typing import Mapping
 from typing import MutableSequence

@@ -52,10 +53,31 @@ try:
 except ImportError:
     scribereader = None
 
+# NOTE: this is an internal-only package, so we won't be able to typecheck against it with mypy
+# without these hacky inlined stubs
 try:
     from logreader.readers import S3LogsReader
+
+    s3reader_available = True
 except ImportError:
-
+    s3reader_available = False
+
+    class S3LogsReader:  # type: ignore[no-redef] # stub class for internal-only package
+        def __init__(self, superregion: str) -> None:
+            raise ImportError(
+                "logreader (internal Yelp package) is not available - unable to display logs."
+            )
+
+        def get_log_reader(
+            self,
+            log_name: str,
+            start_datetime: datetime.datetime,
+            end_datetime: datetime.datetime,
+        ) -> Iterator[str]:
+            raise NotImplementedError(
+                "logreader (internal Yelp package) is not available - unable to display logs."
+            )
+
 
 from pytimeparse.timeparse import timeparse
 

@@ -95,6 +117,8 @@ def add_subparser(subparsers) -> None:
         "-s",
         "--service",
         help="The name of the service you wish to inspect. Defaults to autodetect.",
+        # strip any potential trailing / for folks tab-completing directories
+        type=lambda x: x.rstrip("/"),
     ).completer = lazy_choices_completer(list_services)
     status_parser.add_argument(
         "-c",

@@ -156,7 +180,7 @@ def add_subparser(subparsers) -> None:
         dest="soa_dir",
         metavar="SOA_DIR",
         default=DEFAULT_SOA_DIR,
-        help=
+        help="Define a different soa config directory. Defaults to %(default)s.",
     )
 
     status_parser.add_argument(

@@ -1174,7 +1198,7 @@ class VectorLogsReader(LogReader):
     ) -> None:
         super().__init__()
 
-        if
+        if not s3reader_available:
             raise Exception("yelp_clog package must be available to use S3LogsReader")
 
         self.cluster_map = cluster_map

@@ -1226,16 +1250,16 @@ class VectorLogsReader(LogReader):
             except ValueError:
                 timestamp = pytz.utc.localize(datetime.datetime.min)
 
-
-            aggregated_logs.append(
+            formatted_line = {"raw_line": line, "sort_key": timestamp}
+            aggregated_logs.append(formatted_line)
 
         aggregated_logs = list(
             {line["raw_line"]: line for line in aggregated_logs}.values()
         )
         aggregated_logs.sort(key=lambda log_line: log_line["sort_key"])
 
-        for
-            print_log(
+        for formatted_line in aggregated_logs:
+            print_log(formatted_line["raw_line"], levels, raw_mode, strip_headers)
 
     def tail_logs(
         self,