paasta-tools 1.21.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- k8s_itests/__init__.py +0 -0
- k8s_itests/test_autoscaling.py +23 -0
- k8s_itests/utils.py +38 -0
- paasta_tools/__init__.py +20 -0
- paasta_tools/adhoc_tools.py +142 -0
- paasta_tools/api/__init__.py +13 -0
- paasta_tools/api/api.py +330 -0
- paasta_tools/api/api_docs/swagger.json +2323 -0
- paasta_tools/api/client.py +106 -0
- paasta_tools/api/settings.py +33 -0
- paasta_tools/api/tweens/__init__.py +6 -0
- paasta_tools/api/tweens/auth.py +125 -0
- paasta_tools/api/tweens/profiling.py +108 -0
- paasta_tools/api/tweens/request_logger.py +124 -0
- paasta_tools/api/views/__init__.py +13 -0
- paasta_tools/api/views/autoscaler.py +100 -0
- paasta_tools/api/views/exception.py +45 -0
- paasta_tools/api/views/flink.py +73 -0
- paasta_tools/api/views/instance.py +395 -0
- paasta_tools/api/views/pause_autoscaler.py +71 -0
- paasta_tools/api/views/remote_run.py +113 -0
- paasta_tools/api/views/resources.py +76 -0
- paasta_tools/api/views/service.py +35 -0
- paasta_tools/api/views/version.py +25 -0
- paasta_tools/apply_external_resources.py +79 -0
- paasta_tools/async_utils.py +109 -0
- paasta_tools/autoscaling/__init__.py +0 -0
- paasta_tools/autoscaling/autoscaling_service_lib.py +57 -0
- paasta_tools/autoscaling/forecasting.py +106 -0
- paasta_tools/autoscaling/max_all_k8s_services.py +41 -0
- paasta_tools/autoscaling/pause_service_autoscaler.py +77 -0
- paasta_tools/autoscaling/utils.py +52 -0
- paasta_tools/bounce_lib.py +184 -0
- paasta_tools/broadcast_log_to_services.py +62 -0
- paasta_tools/cassandracluster_tools.py +210 -0
- paasta_tools/check_autoscaler_max_instances.py +212 -0
- paasta_tools/check_cassandracluster_services_replication.py +35 -0
- paasta_tools/check_flink_services_health.py +203 -0
- paasta_tools/check_kubernetes_api.py +57 -0
- paasta_tools/check_kubernetes_services_replication.py +141 -0
- paasta_tools/check_oom_events.py +244 -0
- paasta_tools/check_services_replication_tools.py +324 -0
- paasta_tools/check_spark_jobs.py +234 -0
- paasta_tools/cleanup_kubernetes_cr.py +138 -0
- paasta_tools/cleanup_kubernetes_crd.py +145 -0
- paasta_tools/cleanup_kubernetes_jobs.py +344 -0
- paasta_tools/cleanup_tron_namespaces.py +96 -0
- paasta_tools/cli/__init__.py +13 -0
- paasta_tools/cli/authentication.py +85 -0
- paasta_tools/cli/cli.py +260 -0
- paasta_tools/cli/cmds/__init__.py +13 -0
- paasta_tools/cli/cmds/autoscale.py +143 -0
- paasta_tools/cli/cmds/check.py +334 -0
- paasta_tools/cli/cmds/cook_image.py +147 -0
- paasta_tools/cli/cmds/get_docker_image.py +76 -0
- paasta_tools/cli/cmds/get_image_version.py +172 -0
- paasta_tools/cli/cmds/get_latest_deployment.py +93 -0
- paasta_tools/cli/cmds/info.py +155 -0
- paasta_tools/cli/cmds/itest.py +117 -0
- paasta_tools/cli/cmds/list.py +66 -0
- paasta_tools/cli/cmds/list_clusters.py +42 -0
- paasta_tools/cli/cmds/list_deploy_queue.py +171 -0
- paasta_tools/cli/cmds/list_namespaces.py +84 -0
- paasta_tools/cli/cmds/local_run.py +1396 -0
- paasta_tools/cli/cmds/logs.py +1601 -0
- paasta_tools/cli/cmds/mark_for_deployment.py +1988 -0
- paasta_tools/cli/cmds/mesh_status.py +174 -0
- paasta_tools/cli/cmds/pause_service_autoscaler.py +107 -0
- paasta_tools/cli/cmds/push_to_registry.py +275 -0
- paasta_tools/cli/cmds/remote_run.py +252 -0
- paasta_tools/cli/cmds/rollback.py +347 -0
- paasta_tools/cli/cmds/secret.py +549 -0
- paasta_tools/cli/cmds/security_check.py +59 -0
- paasta_tools/cli/cmds/spark_run.py +1400 -0
- paasta_tools/cli/cmds/start_stop_restart.py +401 -0
- paasta_tools/cli/cmds/status.py +2302 -0
- paasta_tools/cli/cmds/validate.py +1012 -0
- paasta_tools/cli/cmds/wait_for_deployment.py +275 -0
- paasta_tools/cli/fsm/__init__.py +13 -0
- paasta_tools/cli/fsm/autosuggest.py +82 -0
- paasta_tools/cli/fsm/template/README.md +8 -0
- paasta_tools/cli/fsm/template/cookiecutter.json +7 -0
- paasta_tools/cli/fsm/template/{{cookiecutter.service}}/kubernetes-PROD.yaml +91 -0
- paasta_tools/cli/fsm/template/{{cookiecutter.service}}/monitoring.yaml +20 -0
- paasta_tools/cli/fsm/template/{{cookiecutter.service}}/service.yaml +8 -0
- paasta_tools/cli/fsm/template/{{cookiecutter.service}}/smartstack.yaml +6 -0
- paasta_tools/cli/fsm_cmd.py +121 -0
- paasta_tools/cli/paasta_tabcomplete.sh +23 -0
- paasta_tools/cli/schemas/adhoc_schema.json +199 -0
- paasta_tools/cli/schemas/autoscaling_schema.json +91 -0
- paasta_tools/cli/schemas/autotuned_defaults/cassandracluster_schema.json +37 -0
- paasta_tools/cli/schemas/autotuned_defaults/kubernetes_schema.json +89 -0
- paasta_tools/cli/schemas/deploy_schema.json +173 -0
- paasta_tools/cli/schemas/eks_schema.json +970 -0
- paasta_tools/cli/schemas/kubernetes_schema.json +970 -0
- paasta_tools/cli/schemas/rollback_schema.json +160 -0
- paasta_tools/cli/schemas/service_schema.json +25 -0
- paasta_tools/cli/schemas/smartstack_schema.json +322 -0
- paasta_tools/cli/schemas/tron_schema.json +699 -0
- paasta_tools/cli/utils.py +1118 -0
- paasta_tools/clusterman.py +21 -0
- paasta_tools/config_utils.py +385 -0
- paasta_tools/contrib/__init__.py +0 -0
- paasta_tools/contrib/bounce_log_latency_parser.py +68 -0
- paasta_tools/contrib/check_manual_oapi_changes.sh +24 -0
- paasta_tools/contrib/check_orphans.py +306 -0
- paasta_tools/contrib/create_dynamodb_table.py +35 -0
- paasta_tools/contrib/create_paasta_playground.py +105 -0
- paasta_tools/contrib/emit_allocated_cpu_metrics.py +50 -0
- paasta_tools/contrib/get_running_task_allocation.py +346 -0
- paasta_tools/contrib/habitat_fixer.py +86 -0
- paasta_tools/contrib/ide_helper.py +316 -0
- paasta_tools/contrib/is_pod_healthy_in_proxy.py +139 -0
- paasta_tools/contrib/is_pod_healthy_in_smartstack.py +50 -0
- paasta_tools/contrib/kill_bad_containers.py +109 -0
- paasta_tools/contrib/mass-deploy-tag.sh +44 -0
- paasta_tools/contrib/mock_patch_checker.py +86 -0
- paasta_tools/contrib/paasta_update_soa_memcpu.py +520 -0
- paasta_tools/contrib/render_template.py +129 -0
- paasta_tools/contrib/rightsizer_soaconfigs_update.py +348 -0
- paasta_tools/contrib/service_shard_remove.py +157 -0
- paasta_tools/contrib/service_shard_update.py +373 -0
- paasta_tools/contrib/shared_ip_check.py +77 -0
- paasta_tools/contrib/timeouts_metrics_prom.py +64 -0
- paasta_tools/delete_kubernetes_deployments.py +89 -0
- paasta_tools/deployment_utils.py +44 -0
- paasta_tools/docker_wrapper.py +234 -0
- paasta_tools/docker_wrapper_imports.py +13 -0
- paasta_tools/drain_lib.py +351 -0
- paasta_tools/dump_locally_running_services.py +71 -0
- paasta_tools/eks_tools.py +119 -0
- paasta_tools/envoy_tools.py +373 -0
- paasta_tools/firewall.py +504 -0
- paasta_tools/firewall_logging.py +154 -0
- paasta_tools/firewall_update.py +172 -0
- paasta_tools/flink_tools.py +345 -0
- paasta_tools/flinkeks_tools.py +90 -0
- paasta_tools/frameworks/__init__.py +0 -0
- paasta_tools/frameworks/adhoc_scheduler.py +71 -0
- paasta_tools/frameworks/constraints.py +87 -0
- paasta_tools/frameworks/native_scheduler.py +652 -0
- paasta_tools/frameworks/native_service_config.py +301 -0
- paasta_tools/frameworks/task_store.py +245 -0
- paasta_tools/generate_all_deployments +9 -0
- paasta_tools/generate_authenticating_services.py +94 -0
- paasta_tools/generate_deployments_for_service.py +255 -0
- paasta_tools/generate_services_file.py +114 -0
- paasta_tools/generate_services_yaml.py +30 -0
- paasta_tools/hacheck.py +76 -0
- paasta_tools/instance/__init__.py +0 -0
- paasta_tools/instance/hpa_metrics_parser.py +122 -0
- paasta_tools/instance/kubernetes.py +1362 -0
- paasta_tools/iptables.py +240 -0
- paasta_tools/kafkacluster_tools.py +143 -0
- paasta_tools/kubernetes/__init__.py +0 -0
- paasta_tools/kubernetes/application/__init__.py +0 -0
- paasta_tools/kubernetes/application/controller_wrappers.py +476 -0
- paasta_tools/kubernetes/application/tools.py +90 -0
- paasta_tools/kubernetes/bin/__init__.py +0 -0
- paasta_tools/kubernetes/bin/kubernetes_remove_evicted_pods.py +164 -0
- paasta_tools/kubernetes/bin/paasta_cleanup_remote_run_resources.py +135 -0
- paasta_tools/kubernetes/bin/paasta_cleanup_stale_nodes.py +181 -0
- paasta_tools/kubernetes/bin/paasta_secrets_sync.py +758 -0
- paasta_tools/kubernetes/remote_run.py +558 -0
- paasta_tools/kubernetes_tools.py +4679 -0
- paasta_tools/list_kubernetes_service_instances.py +128 -0
- paasta_tools/list_tron_namespaces.py +60 -0
- paasta_tools/long_running_service_tools.py +678 -0
- paasta_tools/mac_address.py +44 -0
- paasta_tools/marathon_dashboard.py +0 -0
- paasta_tools/mesos/__init__.py +0 -0
- paasta_tools/mesos/cfg.py +46 -0
- paasta_tools/mesos/cluster.py +60 -0
- paasta_tools/mesos/exceptions.py +59 -0
- paasta_tools/mesos/framework.py +77 -0
- paasta_tools/mesos/log.py +48 -0
- paasta_tools/mesos/master.py +306 -0
- paasta_tools/mesos/mesos_file.py +169 -0
- paasta_tools/mesos/parallel.py +52 -0
- paasta_tools/mesos/slave.py +115 -0
- paasta_tools/mesos/task.py +94 -0
- paasta_tools/mesos/util.py +69 -0
- paasta_tools/mesos/zookeeper.py +37 -0
- paasta_tools/mesos_maintenance.py +848 -0
- paasta_tools/mesos_tools.py +1051 -0
- paasta_tools/metrics/__init__.py +0 -0
- paasta_tools/metrics/metastatus_lib.py +1110 -0
- paasta_tools/metrics/metrics_lib.py +217 -0
- paasta_tools/monitoring/__init__.py +13 -0
- paasta_tools/monitoring/check_k8s_api_performance.py +110 -0
- paasta_tools/monitoring_tools.py +652 -0
- paasta_tools/monkrelaycluster_tools.py +146 -0
- paasta_tools/nrtsearchservice_tools.py +143 -0
- paasta_tools/nrtsearchserviceeks_tools.py +68 -0
- paasta_tools/oom_logger.py +321 -0
- paasta_tools/paasta_deploy_tron_jobs +3 -0
- paasta_tools/paasta_execute_docker_command.py +123 -0
- paasta_tools/paasta_native_serviceinit.py +21 -0
- paasta_tools/paasta_service_config_loader.py +201 -0
- paasta_tools/paastaapi/__init__.py +29 -0
- paasta_tools/paastaapi/api/__init__.py +3 -0
- paasta_tools/paastaapi/api/autoscaler_api.py +302 -0
- paasta_tools/paastaapi/api/default_api.py +569 -0
- paasta_tools/paastaapi/api/remote_run_api.py +604 -0
- paasta_tools/paastaapi/api/resources_api.py +157 -0
- paasta_tools/paastaapi/api/service_api.py +1736 -0
- paasta_tools/paastaapi/api_client.py +818 -0
- paasta_tools/paastaapi/apis/__init__.py +22 -0
- paasta_tools/paastaapi/configuration.py +455 -0
- paasta_tools/paastaapi/exceptions.py +137 -0
- paasta_tools/paastaapi/model/__init__.py +5 -0
- paasta_tools/paastaapi/model/adhoc_launch_history.py +176 -0
- paasta_tools/paastaapi/model/autoscaler_count_msg.py +176 -0
- paasta_tools/paastaapi/model/deploy_queue.py +178 -0
- paasta_tools/paastaapi/model/deploy_queue_service_instance.py +194 -0
- paasta_tools/paastaapi/model/envoy_backend.py +185 -0
- paasta_tools/paastaapi/model/envoy_location.py +184 -0
- paasta_tools/paastaapi/model/envoy_status.py +181 -0
- paasta_tools/paastaapi/model/flink_cluster_overview.py +188 -0
- paasta_tools/paastaapi/model/flink_config.py +173 -0
- paasta_tools/paastaapi/model/flink_job.py +186 -0
- paasta_tools/paastaapi/model/flink_job_details.py +192 -0
- paasta_tools/paastaapi/model/flink_jobs.py +175 -0
- paasta_tools/paastaapi/model/float_and_error.py +173 -0
- paasta_tools/paastaapi/model/hpa_metric.py +176 -0
- paasta_tools/paastaapi/model/inline_object.py +170 -0
- paasta_tools/paastaapi/model/inline_response200.py +170 -0
- paasta_tools/paastaapi/model/inline_response2001.py +170 -0
- paasta_tools/paastaapi/model/instance_bounce_status.py +200 -0
- paasta_tools/paastaapi/model/instance_mesh_status.py +186 -0
- paasta_tools/paastaapi/model/instance_status.py +220 -0
- paasta_tools/paastaapi/model/instance_status_adhoc.py +187 -0
- paasta_tools/paastaapi/model/instance_status_cassandracluster.py +173 -0
- paasta_tools/paastaapi/model/instance_status_flink.py +173 -0
- paasta_tools/paastaapi/model/instance_status_kafkacluster.py +173 -0
- paasta_tools/paastaapi/model/instance_status_kubernetes.py +263 -0
- paasta_tools/paastaapi/model/instance_status_kubernetes_autoscaling_status.py +187 -0
- paasta_tools/paastaapi/model/instance_status_kubernetes_v2.py +197 -0
- paasta_tools/paastaapi/model/instance_status_tron.py +204 -0
- paasta_tools/paastaapi/model/instance_tasks.py +182 -0
- paasta_tools/paastaapi/model/integer_and_error.py +173 -0
- paasta_tools/paastaapi/model/kubernetes_container.py +178 -0
- paasta_tools/paastaapi/model/kubernetes_container_v2.py +219 -0
- paasta_tools/paastaapi/model/kubernetes_healthcheck.py +176 -0
- paasta_tools/paastaapi/model/kubernetes_pod.py +201 -0
- paasta_tools/paastaapi/model/kubernetes_pod_event.py +176 -0
- paasta_tools/paastaapi/model/kubernetes_pod_v2.py +213 -0
- paasta_tools/paastaapi/model/kubernetes_replica_set.py +185 -0
- paasta_tools/paastaapi/model/kubernetes_version.py +202 -0
- paasta_tools/paastaapi/model/remote_run_outcome.py +189 -0
- paasta_tools/paastaapi/model/remote_run_start.py +185 -0
- paasta_tools/paastaapi/model/remote_run_stop.py +176 -0
- paasta_tools/paastaapi/model/remote_run_token.py +173 -0
- paasta_tools/paastaapi/model/resource.py +187 -0
- paasta_tools/paastaapi/model/resource_item.py +187 -0
- paasta_tools/paastaapi/model/resource_value.py +176 -0
- paasta_tools/paastaapi/model/smartstack_backend.py +191 -0
- paasta_tools/paastaapi/model/smartstack_location.py +181 -0
- paasta_tools/paastaapi/model/smartstack_status.py +181 -0
- paasta_tools/paastaapi/model/task_tail_lines.py +176 -0
- paasta_tools/paastaapi/model_utils.py +1879 -0
- paasta_tools/paastaapi/models/__init__.py +62 -0
- paasta_tools/paastaapi/rest.py +287 -0
- paasta_tools/prune_completed_pods.py +220 -0
- paasta_tools/puppet_service_tools.py +59 -0
- paasta_tools/py.typed +1 -0
- paasta_tools/remote_git.py +127 -0
- paasta_tools/run-paasta-api-in-dev-mode.py +57 -0
- paasta_tools/run-paasta-api-playground.py +51 -0
- paasta_tools/secret_providers/__init__.py +66 -0
- paasta_tools/secret_providers/vault.py +214 -0
- paasta_tools/secret_tools.py +277 -0
- paasta_tools/setup_istio_mesh.py +353 -0
- paasta_tools/setup_kubernetes_cr.py +412 -0
- paasta_tools/setup_kubernetes_crd.py +138 -0
- paasta_tools/setup_kubernetes_internal_crd.py +154 -0
- paasta_tools/setup_kubernetes_job.py +353 -0
- paasta_tools/setup_prometheus_adapter_config.py +1028 -0
- paasta_tools/setup_tron_namespace.py +248 -0
- paasta_tools/slack.py +75 -0
- paasta_tools/smartstack_tools.py +676 -0
- paasta_tools/spark_tools.py +283 -0
- paasta_tools/synapse_srv_namespaces_fact.py +42 -0
- paasta_tools/tron/__init__.py +0 -0
- paasta_tools/tron/client.py +158 -0
- paasta_tools/tron/tron_command_context.py +194 -0
- paasta_tools/tron/tron_timeutils.py +101 -0
- paasta_tools/tron_tools.py +1448 -0
- paasta_tools/utils.py +4307 -0
- paasta_tools/yaml_tools.py +44 -0
- paasta_tools-1.21.3.data/scripts/apply_external_resources.py +79 -0
- paasta_tools-1.21.3.data/scripts/bounce_log_latency_parser.py +68 -0
- paasta_tools-1.21.3.data/scripts/check_autoscaler_max_instances.py +212 -0
- paasta_tools-1.21.3.data/scripts/check_cassandracluster_services_replication.py +35 -0
- paasta_tools-1.21.3.data/scripts/check_flink_services_health.py +203 -0
- paasta_tools-1.21.3.data/scripts/check_kubernetes_api.py +57 -0
- paasta_tools-1.21.3.data/scripts/check_kubernetes_services_replication.py +141 -0
- paasta_tools-1.21.3.data/scripts/check_manual_oapi_changes.sh +24 -0
- paasta_tools-1.21.3.data/scripts/check_oom_events.py +244 -0
- paasta_tools-1.21.3.data/scripts/check_orphans.py +306 -0
- paasta_tools-1.21.3.data/scripts/check_spark_jobs.py +234 -0
- paasta_tools-1.21.3.data/scripts/cleanup_kubernetes_cr.py +138 -0
- paasta_tools-1.21.3.data/scripts/cleanup_kubernetes_crd.py +145 -0
- paasta_tools-1.21.3.data/scripts/cleanup_kubernetes_jobs.py +344 -0
- paasta_tools-1.21.3.data/scripts/create_dynamodb_table.py +35 -0
- paasta_tools-1.21.3.data/scripts/create_paasta_playground.py +105 -0
- paasta_tools-1.21.3.data/scripts/delete_kubernetes_deployments.py +89 -0
- paasta_tools-1.21.3.data/scripts/emit_allocated_cpu_metrics.py +50 -0
- paasta_tools-1.21.3.data/scripts/generate_all_deployments +9 -0
- paasta_tools-1.21.3.data/scripts/generate_authenticating_services.py +94 -0
- paasta_tools-1.21.3.data/scripts/generate_deployments_for_service.py +255 -0
- paasta_tools-1.21.3.data/scripts/generate_services_file.py +114 -0
- paasta_tools-1.21.3.data/scripts/generate_services_yaml.py +30 -0
- paasta_tools-1.21.3.data/scripts/get_running_task_allocation.py +346 -0
- paasta_tools-1.21.3.data/scripts/habitat_fixer.py +86 -0
- paasta_tools-1.21.3.data/scripts/ide_helper.py +316 -0
- paasta_tools-1.21.3.data/scripts/is_pod_healthy_in_proxy.py +139 -0
- paasta_tools-1.21.3.data/scripts/is_pod_healthy_in_smartstack.py +50 -0
- paasta_tools-1.21.3.data/scripts/kill_bad_containers.py +109 -0
- paasta_tools-1.21.3.data/scripts/kubernetes_remove_evicted_pods.py +164 -0
- paasta_tools-1.21.3.data/scripts/mass-deploy-tag.sh +44 -0
- paasta_tools-1.21.3.data/scripts/mock_patch_checker.py +86 -0
- paasta_tools-1.21.3.data/scripts/paasta_cleanup_remote_run_resources.py +135 -0
- paasta_tools-1.21.3.data/scripts/paasta_cleanup_stale_nodes.py +181 -0
- paasta_tools-1.21.3.data/scripts/paasta_deploy_tron_jobs +3 -0
- paasta_tools-1.21.3.data/scripts/paasta_execute_docker_command.py +123 -0
- paasta_tools-1.21.3.data/scripts/paasta_secrets_sync.py +758 -0
- paasta_tools-1.21.3.data/scripts/paasta_tabcomplete.sh +23 -0
- paasta_tools-1.21.3.data/scripts/paasta_update_soa_memcpu.py +520 -0
- paasta_tools-1.21.3.data/scripts/render_template.py +129 -0
- paasta_tools-1.21.3.data/scripts/rightsizer_soaconfigs_update.py +348 -0
- paasta_tools-1.21.3.data/scripts/service_shard_remove.py +157 -0
- paasta_tools-1.21.3.data/scripts/service_shard_update.py +373 -0
- paasta_tools-1.21.3.data/scripts/setup_istio_mesh.py +353 -0
- paasta_tools-1.21.3.data/scripts/setup_kubernetes_cr.py +412 -0
- paasta_tools-1.21.3.data/scripts/setup_kubernetes_crd.py +138 -0
- paasta_tools-1.21.3.data/scripts/setup_kubernetes_internal_crd.py +154 -0
- paasta_tools-1.21.3.data/scripts/setup_kubernetes_job.py +353 -0
- paasta_tools-1.21.3.data/scripts/setup_prometheus_adapter_config.py +1028 -0
- paasta_tools-1.21.3.data/scripts/shared_ip_check.py +77 -0
- paasta_tools-1.21.3.data/scripts/synapse_srv_namespaces_fact.py +42 -0
- paasta_tools-1.21.3.data/scripts/timeouts_metrics_prom.py +64 -0
- paasta_tools-1.21.3.dist-info/LICENSE +201 -0
- paasta_tools-1.21.3.dist-info/METADATA +74 -0
- paasta_tools-1.21.3.dist-info/RECORD +348 -0
- paasta_tools-1.21.3.dist-info/WHEEL +5 -0
- paasta_tools-1.21.3.dist-info/entry_points.txt +20 -0
- paasta_tools-1.21.3.dist-info/top_level.txt +2 -0
|
@@ -0,0 +1,283 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import re
|
|
3
|
+
import socket
|
|
4
|
+
import sys
|
|
5
|
+
from typing import Any
|
|
6
|
+
from typing import cast
|
|
7
|
+
from typing import Dict
|
|
8
|
+
from typing import List
|
|
9
|
+
from typing import Mapping
|
|
10
|
+
from typing import Set
|
|
11
|
+
|
|
12
|
+
from mypy_extensions import TypedDict
|
|
13
|
+
|
|
14
|
+
from paasta_tools.utils import DockerVolume
|
|
15
|
+
from paasta_tools.utils import PaastaColors
|
|
16
|
+
|
|
17
|
+
KUBERNETES_NAMESPACE = "paasta-spark"
|
|
18
|
+
DEFAULT_SPARK_SERVICE = "spark"
|
|
19
|
+
DEFAULT_SPARK_RUNTIME_TIMEOUT = "12h"
|
|
20
|
+
SPARK_AWS_CREDS_PROVIDER = "com.amazonaws.auth.WebIdentityTokenCredentialsProvider"
|
|
21
|
+
SPARK_EXECUTOR_NAMESPACE = "paasta-spark"
|
|
22
|
+
SPARK_DRIVER_POOL = "stable"
|
|
23
|
+
SPARK_TRON_JOB_USER = "TRON"
|
|
24
|
+
SPARK_PROMETHEUS_SHARD = "ml-compute"
|
|
25
|
+
SPARK_DNS_POD_TEMPLATE = "/nail/srv/configs/spark_dns_pod_template.yaml"
|
|
26
|
+
MEM_MULTIPLIER = {"k": 1024, "m": 1024**2, "g": 1024**3, "t": 1024**4}
|
|
27
|
+
SPARK_DRIVER_DEFAULT_DISK_MB = 5120 # 5GB
|
|
28
|
+
|
|
29
|
+
log = logging.getLogger(__name__)
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class SparkConfig(TypedDict):
    """Per-environment Spark settings parsed from the Spark run config file.

    See ``SparkEnvironmentConfig`` for the file layout this is read from.
    """

    # AWS account id for this environment (e.g. 'SOME_ACCOUNT_ID' in the example)
    account_id: str
    # default event log location, an s3a:// bucket URL
    default_event_log_dir: str
    # URL of the Spark history server for this environment
    history_server: str
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class SparkEnvironmentConfig(TypedDict):
    """
    There's no set schema for the spark configuration from DEFAULT_SPARK_RUN_CONFIG,
    but at the time of writing, this file looks like (with the addition of a `prod` key):
    environments:
        dev:
            account_id: 'SOME_ACCOUNT_ID'
            default_event_log_dir: s3a://SOME_BUCKET # currently the only thing paasta reads
            history_server: http://SOME_URL/
    """

    # maps environment name (e.g. "dev", "prod") to that environment's settings
    environments: Dict[str, SparkConfig]
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
# Functional TypedDict syntax is required here because the keys contain dots
# ("spark.eventLog.enabled"), which are not valid Python identifiers.
# total=False: both keys are optional.
SparkEventLogConfiguration = TypedDict(
    "SparkEventLogConfiguration",
    {"spark.eventLog.enabled": str, "spark.eventLog.dir": str},
    total=False,
)
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def get_webui_url(port: str) -> str:
    """Build the URL of the Spark web UI served from this host on *port*."""
    hostname = socket.getfqdn()
    return "http://{}:{}".format(hostname, port)
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def get_volumes_from_spark_mesos_configs(spark_conf: Mapping[str, str]) -> List[str]:
    """Return the docker volume bindings configured for a Mesos Spark job.

    The Mesos config packs all bindings into one comma-separated string; an
    absent or empty setting yields an empty list.
    """
    raw_volumes = spark_conf.get("spark.mesos.executor.docker.volumes", "")
    if raw_volumes == "":
        return []
    return raw_volumes.split(",")
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def get_volumes_from_spark_k8s_configs(spark_conf: Mapping[str, str]) -> List[str]:
    """Reconstruct docker-style ``host:container:mode`` volume bindings from
    the ``spark.kubernetes.executor.volumes.hostPath.*`` options in *spark_conf*.

    Exits the process if a volume name is not valid lowercase RFC 1123
    (the k8s pod would fail to start anyway).
    """
    # Volume names must be lowercase alphanumeric with optional interior dashes.
    name_pattern = (
        r"spark.kubernetes.executor.volumes.hostPath."
        r"([a-z0-9]([-a-z0-9]*[a-z0-9])?).mount.path"
    )

    volume_names = []
    for key in list(spark_conf.keys()):
        if (
            "spark.kubernetes.executor.volumes.hostPath." not in key
            or ".mount.path" not in key
        ):
            continue
        matched = re.match(name_pattern, key)
        if matched:
            volume_names.append(matched.group(1))
        else:
            log.error(
                f"Volume names must consist of lower case alphanumeric characters or '-', "
                f"and must start and end with an alphanumeric character. Config -> '{key}' must be fixed."
            )
            # Failing here because the k8s pod fails to start if the volume names
            # don't follow the lowercase RFC 1123 standard.
            sys.exit(1)

    volumes = []
    for volume_name in volume_names:
        option_prefix = f"spark.kubernetes.executor.volumes.hostPath.{volume_name}"
        is_read_only = spark_conf.get(f"{option_prefix}.mount.readOnly") == "true"
        mode = "ro" if is_read_only else "rw"
        container_path = spark_conf.get(f"{option_prefix}.mount.path")
        host_path = spark_conf.get(f"{option_prefix}.options.path")
        volumes.append(f"{host_path}:{container_path}:{mode}")
    return volumes
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def setup_volume_mounts(volumes: List[DockerVolume]) -> Dict[str, str]:
    """
    Returns Docker volume mount configurations in the format expected by Spark.

    Each volume becomes a trio of
    ``spark.kubernetes.executor.volumes.hostPath.{i}.*`` options, where ``i``
    is the position in the (deduplicated) volume list — the name is arbitrary.
    Volumes with a host path already seen are skipped (first binding wins).
    """
    conf = {}

    # XXX: why are these necessary?
    extra_volumes: List[DockerVolume] = cast(
        "List[DockerVolume]",
        [
            {"containerPath": "/etc/passwd", "hostPath": "/etc/passwd", "mode": "RO"},
            {"containerPath": "/etc/group", "hostPath": "/etc/group", "mode": "RO"},
        ],
    )
    seen_paths: Set[str] = set()  # dedupe volumes, just in case
    for index, volume in enumerate(volumes + extra_volumes):
        host_path, container_path, mode = (
            volume["hostPath"],
            volume["containerPath"],
            volume["mode"],
        )

        if host_path in seen_paths:
            # Logger.warn is deprecated (removed in Python 3.13); use warning().
            log.warning(f"Skipping {host_path} - already added a binding for it.")
            continue
        seen_paths.add(host_path)

        # the names here don't matter too much, so we just use the index in the volume
        # list as an arbitrary name
        conf[
            f"spark.kubernetes.executor.volumes.hostPath.{index}.mount.path"
        ] = container_path
        conf[
            f"spark.kubernetes.executor.volumes.hostPath.{index}.options.path"
        ] = host_path
        conf[
            f"spark.kubernetes.executor.volumes.hostPath.{index}.mount.readOnly"
        ] = str(mode.lower() == "ro").lower()

    return conf
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
def create_spark_config_str(spark_config_dict: Dict[str, Any], is_mrjob: bool) -> str:
    """Render a Spark config dict as a command-line option string.

    :param spark_config_dict: Spark option name -> value. Not modified (the
        previous implementation popped ``spark.master`` out of the caller's
        dict as a side effect; we now skip it without mutating the argument).
    :param is_mrjob: use mrjob's ``--jobconf``/``--spark-master`` flags instead
        of ``spark-submit``'s ``--conf``.
    :returns: space-joined option string.
    """
    conf_option = "--jobconf" if is_mrjob else "--conf"
    spark_config_entries = []
    skipped_keys = set()

    if is_mrjob:
        spark_master = spark_config_dict["spark.master"]
        spark_config_entries.append(f"--spark-master={spark_master}")
        # spark.master is already passed via --spark-master; don't repeat it.
        skipped_keys.add("spark.master")

    for opt, val in spark_config_dict.items():
        if opt in skipped_keys:
            continue
        # Process Spark configs with multiple space separated values to be in single quotes
        if isinstance(val, str) and " " in val:
            val = f"'{val}'"
        spark_config_entries.append(f"{conf_option} {opt}={val}")
    return " ".join(spark_config_entries)
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def inject_spark_conf_str(original_cmd: str, spark_conf_str: str) -> str:
    """Insert *spark_conf_str* right after the first recognized Spark launcher.

    Returns the command unchanged when no launcher is present.
    """
    for launcher in ("pyspark", "spark-shell", "spark-submit"):
        if launcher not in original_cmd:
            continue
        injected = f"{launcher} {spark_conf_str}"
        return original_cmd.replace(launcher, injected, 1)
    return original_cmd
|
|
177
|
+
|
|
178
|
+
|
|
179
|
+
def auto_add_timeout_for_spark_job(
    cmd: str, timeout_job_runtime: str, silent: bool = False
) -> str:
    """Wrap the ``spark-submit`` in *cmd* with ``timeout <duration>`` if absent.

    :param cmd: full shell command line; returned unchanged unless it contains
        ``spark-submit`` without an existing ``timeout`` prefix.
    :param timeout_job_runtime: duration string for ``timeout`` (e.g. "12h").
    :param silent: suppress the informational log line about the rewrite.
    """
    # Timeout only to be added for spark-submit commands
    # TODO: Add timeout for jobs using mrjob with spark-runner
    if "spark-submit" not in cmd:
        return cmd
    try:
        # Not an exhaustive regex: it also matches some invalid timeout
        # invocations, which will simply fail at execution time instead.
        options_regex = r"(--?[a-z][a-z-]*((\s+|=)[\w\d-]+)?\s+)*"
        duration_regex = r"\d+\.?\d*[smhd]?"

        timeout_present = re.match(
            rf"^.*timeout\s+{options_regex}{duration_regex}\s+spark-submit .*$", cmd
        )
        if not timeout_present:
            # maxsplit=1 so any later occurrences of "spark-submit" in the
            # command are preserved (an unbounded split silently dropped them).
            split_cmd = cmd.split("spark-submit", 1)
            # split_cmd[0] will always be an empty string or end with a space
            cmd = f"{split_cmd[0]}timeout {timeout_job_runtime} spark-submit{split_cmd[1]}"
            if not silent:
                log.info(
                    PaastaColors.blue(
                        f"NOTE: Job will exit in given time {timeout_job_runtime}. "
                        f"Adjust timeout value using --timeout-job-runtime. "
                        f"New Updated Command with timeout: {cmd}"
                    ),
                )
    except Exception as e:
        err_msg = (
            f"'timeout' could not be added to spark command: '{cmd}' due to error '{e}'. "
            "Please report to #spark."
        )
        # Logger.warn is deprecated; warning() is the supported spelling.
        log.warning(err_msg)
        print(PaastaColors.red(err_msg))
    return cmd
|
|
215
|
+
|
|
216
|
+
|
|
217
|
+
def build_spark_command(
    original_cmd: str,
    spark_config_dict: Dict[str, Any],
    is_mrjob: bool,
    timeout_job_runtime: str,
    silent: bool = False,
) -> str:
    """Compose the final Spark command line.

    First injects the rendered config options after the launcher, then wraps
    the result with a runtime timeout.
    """
    conf_str = create_spark_config_str(spark_config_dict, is_mrjob=is_mrjob)
    cmd_with_conf = inject_spark_conf_str(original_cmd, conf_str)
    return auto_add_timeout_for_spark_job(
        cmd_with_conf, timeout_job_runtime, silent=silent
    )
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
def get_spark_ports_from_config(spark_conf: Dict[str, str]) -> List[int]:
    """Collect every configured ``*.port`` option value as an integer."""
    return [
        int(value)
        for key, value in spark_conf.items()
        if key.endswith(".port")
    ]
|
|
233
|
+
|
|
234
|
+
|
|
235
|
+
# TODO: Reuse by ad-hoc Spark-driver-on-k8s
def get_spark_driver_monitoring_annotations(
    spark_config: Dict[str, str],
) -> Dict[str, str]:
    """
    Returns Spark driver pod annotations - currently used for Prometheus metadata.

    Prometheus scrape hints are emitted only when a UI port is configured; the
    paasta service/instance annotations only when both are present.
    """
    annotations: Dict[str, str] = {}

    ui_port_str = spark_config.get("spark.ui.port")
    if ui_port_str:
        annotations["prometheus.io/port"] = ui_port_str
        annotations["prometheus.io/path"] = "/metrics/prometheus"

    service = spark_config.get(
        "spark.kubernetes.executor.annotation.paasta.yelp.com/service"
    )
    instance = spark_config.get(
        "spark.kubernetes.executor.annotation.paasta.yelp.com/instance"
    )
    if service and instance:
        annotations["paasta.yelp.com/service"] = service
        annotations["paasta.yelp.com/instance"] = instance

    return annotations
|
|
268
|
+
|
|
269
|
+
|
|
270
|
+
def get_spark_driver_monitoring_labels(
    spark_config: Dict[str, str],
    user: str,
) -> Dict[str, str]:
    """
    Returns Spark driver pod labels - generally for Prometheus metric relabeling.

    The UI port label is empty when no ``spark.ui.port`` is configured.
    """
    driver_ui_port = str(spark_config.get("spark.ui.port", ""))
    return {
        "paasta.yelp.com/prometheus_shard": SPARK_PROMETHEUS_SHARD,
        "spark.yelp.com/user": user,
        "spark.yelp.com/driver_ui_port": driver_ui_port,
    }
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
# Copyright 2015-2016 Yelp Inc.
|
|
3
|
+
#
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
#
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
#
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
"""Usage: ./synapse_srv_namespaces_fact.py
|
|
16
|
+
|
|
17
|
+
A simple script to enumerate all namespaces as a sorted comma separated
|
|
18
|
+
string to stdout, with each entry in the form of full_name:proxy_port.
|
|
19
|
+
|
|
20
|
+
If a proxy_port isn't defined for a namespace, that namespace is skipped.
|
|
21
|
+
|
|
22
|
+
Example output: mumble.canary:5019,mumble.main:111,zookeeper.hab:4921
|
|
23
|
+
|
|
24
|
+
This is nice to use as a facter fact for Synapse stuff!
|
|
25
|
+
"""
|
|
26
|
+
import sys
|
|
27
|
+
|
|
28
|
+
from paasta_tools import long_running_service_tools
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def main():
    """Emit a facter fact listing every namespace that has a proxy_port.

    Output format: ``synapse_srv_namespaces=name:port,name:port,...`` sorted
    by entry; namespaces without a proxy_port are skipped on purpose.
    """
    entries = []
    for full_name, config in long_running_service_tools.get_all_namespaces():
        if "proxy_port" not in config:
            continue
        entries.append("{}:{}".format(full_name, config["proxy_port"]))
    print("synapse_srv_namespaces=" + ",".join(sorted(entries)))
    sys.exit(0)


if __name__ == "__main__":
    main()
|
|
File without changes
|
|
@@ -0,0 +1,158 @@
|
|
|
1
|
+
# Copyright 2015-2018 Yelp Inc.
|
|
2
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
3
|
+
# you may not use this file except in compliance with the License.
|
|
4
|
+
# You may obtain a copy of the License at
|
|
5
|
+
#
|
|
6
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
7
|
+
#
|
|
8
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
9
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
10
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
11
|
+
# See the License for the specific language governing permissions and
|
|
12
|
+
# limitations under the License.
|
|
13
|
+
import logging
import os
from typing import Dict
from typing import Optional
from urllib.parse import urljoin

import requests

from paasta_tools import yaml_tools as yaml
from paasta_tools.cli.authentication import get_service_auth_token
from paasta_tools.utils import get_user_agent
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
log = logging.getLogger(__name__)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class TronRequestError(Exception):
    """Raised when a request to the Tron master fails or reports an error."""
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class TronClient:
    """
    Client for interacting with a Tron master.
    """

    def __init__(self, url):
        """
        :param url: base URL of the Tron master's HTTP API.
        """
        self.master_url = url

    def _request(self, method, url, data):
        """Send an HTTP request to the master and decode the response.

        :param method: "GET" or "POST"; anything else raises ValueError.
        :param url: path, resolved relative to the master URL.
        :param data: dict sent as query params (GET) or form data (POST).
        """
        headers = {"User-Agent": get_user_agent()}
        kwargs = {"url": urljoin(self.master_url, url), "headers": headers}
        if method == "GET":
            kwargs["params"] = data
            response = requests.get(**kwargs)
        elif method == "POST":
            kwargs["data"] = data
            # Only mutating (POST) requests carry a service auth token, and
            # only when explicitly enabled through the environment.
            if os.getenv("TRONCTL_API_AUTH"):
                token = get_service_auth_token()
                kwargs["headers"]["Authorization"] = f"Bearer {token}"
            response = requests.post(**kwargs)
        else:
            raise ValueError(f"Unrecognized method: {method}")

        return self._get_response_or_error(response)

    def _get_response_or_error(self, response):
        """Return the decoded JSON body, or the raw text for non-JSON bodies.

        :raises TronRequestError: if the JSON body contains an "error" key,
            or if a non-JSON response has a non-2xx status code.
        """
        try:
            result = response.json()
            if "error" in result:
                raise TronRequestError(result["error"])
            return result
        except ValueError:  # Not JSON
            if not response.ok:
                raise TronRequestError(
                    "Status code {status_code} for {url}: {reason}".format(
                        status_code=response.status_code,
                        url=response.url,
                        reason=response.reason,
                    )
                )
            return response.text

    def _get(self, url, data=None):
        return self._request("GET", url, data)

    def _post(self, url, data=None):
        return self._request("POST", url, data)

    def update_namespace(self, namespace, new_config, skip_if_unchanged=True):
        """Updates the configuration for a namespace.

        :param namespace: str
        :param new_config: str, should be valid YAML.
        :param skip_if_unchanged: boolean. If False, will send the update
            even if the current config matches the new config.
        """
        current_config = self._get("/api/config", {"name": namespace, "no_header": 1})

        if skip_if_unchanged:
            if yaml.safe_load(new_config) == yaml.safe_load(current_config["config"]):
                log.debug("No change in config, skipping update.")
                return

        return self._post(
            "/api/config",
            data={
                "name": namespace,
                "config": new_config,
                "hash": current_config["hash"],
                "check": 0,
            },
        )

    def update_namespaces(
        self, new_configs: Dict[str, str], skip_if_unchanged: bool = True
    ):
        """Updates the configuration for multiple namespaces in one pass.

        :param new_configs: mapping of namespace name to new config; each
            config should be valid YAML.
        :param skip_if_unchanged: boolean. If False, will send the update
            even if the current config matches the new config.
        :returns: mapping of namespace name to the API response for every
            update that was actually sent.
        """
        current_configs: Dict[str, Dict[str, str]] = self._get("/api/config")  # type: ignore # we don't have a good way to share types between tron/paasta
        responses: Dict[str, str] = {}
        for namespace, new_config in new_configs.items():
            current_config = current_configs.get(namespace, {})
            # A namespace the master doesn't know about yet has no current
            # config/hash; previously this raised KeyError here instead of
            # sending the new config at all.
            if skip_if_unchanged and "config" in current_config:
                if yaml.safe_load(new_config) == yaml.safe_load(
                    current_config["config"]
                ):
                    log.debug("No change in config, skipping update.")
                    continue

            responses[namespace] = self._post(
                "/api/config",
                data={
                    "name": namespace,
                    "config": new_config,
                    # NOTE(review): empty hash for a brand-new namespace --
                    # confirm this matches the Tron API's creation contract.
                    "hash": current_config.get("hash", ""),
                    "check": 0,
                },
            )
        return responses

    def list_namespaces(self):
        """Gets the namespaces that are currently configured."""
        response = self._get("/api")
        return response.get("namespaces", [])

    def get_job_content(self, job: str) -> dict:
        """Fetch the full content of a job, including its runs."""
        return self._get(f"/api/jobs/{job}/")

    def get_latest_job_run_id(self, job_content: dict) -> Optional[str]:
        """Return the most relevant run id from a job's content.

        Scheduled runs sort after all others, so the newest non-scheduled run
        wins when one exists. Returns None when the job has no runs at all
        (hence Optional; the original annotation claimed a bare str).
        """
        job_runs = sorted(
            job_content.get("runs", []),
            key=lambda k: (k["state"] != "scheduled", k["run_num"]),
            reverse=True,
        )
        if not job_runs:
            return None
        return job_runs[0]["run_num"]

    def get_action_run(self, job: str, action: str, run_id: str) -> dict:
        """Fetch an action run's status plus its last 10 lines of output."""
        return self._get(
            f"/api/jobs/{job}/{run_id}/{action}?include_stderr=1&include_stdout=1&num_lines=10"
        )
|
|
@@ -0,0 +1,194 @@
|
|
|
1
|
+
"""Command Context is how we construct the command line for a command which may
|
|
2
|
+
have variables that need to be rendered.
|
|
3
|
+
This is a COPY of https://github.com/Yelp/Tron/blob/master/tron/command_context.py.
|
|
4
|
+
"""
|
|
5
|
+
import functools
|
|
6
|
+
import operator
|
|
7
|
+
|
|
8
|
+
from paasta_tools.tron import tron_timeutils
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def build_context(object, parent):
    """Construct a CommandContext for ``object``.

    ``object`` must expose a ``context_class`` attribute, which is called
    with the object itself to produce the base context.
    """
    base = object.context_class(object)
    return CommandContext(base, parent)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def build_filled_context(*context_objects):
    """Create a CommandContext chain from context_objects, using a Filler
    object in place of the real state objects. This can be used to validate
    a format string against the chain it will eventually render with.
    """
    if not context_objects:
        return CommandContext()

    filler = Filler()
    chain = None
    # Equivalent to reduce(): each context class wraps the previous chain.
    for context_class in context_objects:
        chain = CommandContext(context_class(filler), chain)
    return chain
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class CommandContext:
    """A CommandContext object is a wrapper around any object which has values
    to be used to render a command for execution. It looks up values by name.

    It's lookup order is:
        base[name],
        base.__getattr__(name),
        next[name],
        next.__getattr__(name)
    """

    def __init__(self, base=None, next=None):
        """
        base - Object to look for attributes in
        next - Next place to look for more pieces of context
               Generally this will be another instance of CommandContext
        """
        self.base = base or {}
        self.next = next or {}

    def get(self, name, default=None):
        """Return the value for ``name``, or ``default`` if it is missing."""
        try:
            return self.__getitem__(name)
        except KeyError:
            return default

    def __getitem__(self, name):
        # Try item access then attribute access, first on base, then on next.
        getters = [operator.itemgetter(name), operator.attrgetter(name)]
        for target in [self.base, self.next]:
            for getter in getters:
                try:
                    return getter(target)
                except (KeyError, TypeError, AttributeError):
                    pass

        raise KeyError(name)

    def __eq__(self, other):
        # Previously comparing against a non-CommandContext raised
        # AttributeError; NotImplemented lets Python fall back to the other
        # operand's comparison (and ultimately to False / True for !=).
        if not isinstance(other, CommandContext):
            return NotImplemented
        return self.base == other.base and self.next == other.next

    def __ne__(self, other):
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
class JobContext:
    """A class which exposes properties for rendering commands."""

    def __init__(self, job):
        self.job = job

    @property
    def name(self):
        """Name of the wrapped job."""
        return self.job.name

    def __getitem__(self, item):
        """Resolve date-arithmetic lookups of the form "last_success:<spec>".

        Raises KeyError for anything this context cannot resolve, so that
        chained contexts (CommandContext) can continue the lookup elsewhere.
        """
        date_name, date_spec = self._get_date_spec_parts(item)
        if not date_spec:
            raise KeyError(item)

        # Only "last_success" is supported. Previously any other date_name
        # fell through with `last_success` unbound, producing an
        # UnboundLocalError that chained lookups could not catch; raise
        # KeyError instead.
        if date_name != "last_success":
            raise KeyError(item)

        last_success = self.job.runs.last_success
        last_success = last_success.run_time if last_success else None

        time_value = tron_timeutils.DateArithmetic.parse(date_spec, last_success)
        if time_value:
            return time_value

        raise KeyError(item)

    def _get_date_spec_parts(self, name):
        # Split "name:spec"; a name without ":" has no date spec.
        parts = name.rsplit(":", 1)
        if len(parts) != 2:
            return name, None
        return parts
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
class JobRunContext:
    """Context exposing values derived from a single job run."""

    def __init__(self, job_run):
        self.job_run = job_run

    @property
    def runid(self):
        """Identifier of the wrapped job run."""
        return self.job_run.id

    @property
    def cleanup_job_status(self):
        """Provide 'SUCCESS' or 'FAILURE' to a cleanup action context based on
        the status of the other steps
        """
        action_runs = self.job_run.action_runs
        if action_runs.is_failed:
            return "FAILURE"
        if action_runs.is_complete_without_cleanup:
            return "SUCCESS"
        return "UNKNOWN"

    def __getitem__(self, name):
        """Attempt to parse date arithmetic syntax and apply to run_time."""
        parsed = tron_timeutils.DateArithmetic.parse(name, self.job_run.run_time)
        if parsed:
            return parsed
        raise KeyError(name)
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
class ActionRunContext:
    """Context object that gives us access to data about the action run."""

    def __init__(self, action_run):
        self.action_run = action_run

    @property
    def actionname(self):
        """Name of the action being run."""
        return self.action_run.action_name

    @property
    def node(self):
        """Hostname of the node the action is running on."""
        return self.action_run.node.hostname
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
class ServiceInstancePidContext:
    """Context exposing identifying values of a single service instance."""

    def __init__(self, service_instance):
        self.service_instance = service_instance

    @property
    def instance_number(self):
        """Ordinal of this instance within its service."""
        return self.service_instance.instance_number

    @property
    def node(self):
        """Hostname of the node this instance runs on."""
        return self.service_instance.node.hostname

    @property
    def name(self):
        """Configured name of the service."""
        return self.service_instance.config.name
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
class ServiceInstanceContext(ServiceInstancePidContext):
    """Extends the pid context with the fully-rendered pid_file path."""

    @property
    def pid_file(self):
        """Render the configured pid_file template against this context."""
        render_ctx = CommandContext(self, self.service_instance.parent_context)
        return self.service_instance.config.pid_file % render_ctx
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
class Filler:
    """Filler object for using CommandContext during config parsing. This class
    is used as a substitute for objects that would be passed to Context objects.
    This allows the Context objects to be used directly for config validation.
    """

    def __getattr__(self, _):
        # Any attribute lookup keeps returning a Filler so chained access
        # (e.g. "a.b.c") validates without real objects.
        return self

    def __str__(self):
        return "%(...)s"

    def __mod__(self, _):
        return self

    def __bool__(self):
        # Python 3 truth hook. The original only defined Python 2's
        # __nonzero__, so bool(Filler()) was unintentionally True.
        return False

    # Kept for backwards compatibility with any Python 2-era callers.
    __nonzero__ = __bool__
|