honeycomb_api-0.1.0-py3-none-any.whl
This diff shows the contents of a publicly available package version as released to one of the supported registries. It is provided for informational purposes only and reflects the package exactly as it appears in its public registry.
- honeycomb/__init__.py +217 -0
- honeycomb/_generated/__init__.py +8 -0
- honeycomb/_generated/api/__init__.py +1 -0
- honeycomb/_generated/api/auth/__init__.py +0 -0
- honeycomb/_generated/api/auth/get_auth.py +172 -0
- honeycomb/_generated/api/auth/get_v2_auth.py +168 -0
- honeycomb/_generated/api/boards/__init__.py +0 -0
- honeycomb/_generated/api/boards/create_board.py +217 -0
- honeycomb/_generated/api/boards/create_board_view.py +237 -0
- honeycomb/_generated/api/boards/delete_board.py +191 -0
- honeycomb/_generated/api/boards/delete_board_view.py +210 -0
- honeycomb/_generated/api/boards/get_board.py +208 -0
- honeycomb/_generated/api/boards/get_board_view.py +214 -0
- honeycomb/_generated/api/boards/list_board_views.py +206 -0
- honeycomb/_generated/api/boards/list_boards.py +177 -0
- honeycomb/_generated/api/boards/update_board.py +238 -0
- honeycomb/_generated/api/boards/update_board_view.py +250 -0
- honeycomb/_generated/api/burn_alerts/__init__.py +0 -0
- honeycomb/_generated/api/burn_alerts/create_burn_alert.py +224 -0
- honeycomb/_generated/api/burn_alerts/delete_burn_alert.py +204 -0
- honeycomb/_generated/api/burn_alerts/get_burn_alert.py +208 -0
- honeycomb/_generated/api/burn_alerts/list_burn_alerts_by_slo.py +220 -0
- honeycomb/_generated/api/calculated_fields/__init__.py +0 -0
- honeycomb/_generated/api/calculated_fields/create_calculated_field.py +239 -0
- honeycomb/_generated/api/calculated_fields/delete_calculated_field.py +225 -0
- honeycomb/_generated/api/calculated_fields/get_calculated_field.py +207 -0
- honeycomb/_generated/api/calculated_fields/list_calculated_fields.py +252 -0
- honeycomb/_generated/api/calculated_fields/update_calculated_field.py +242 -0
- honeycomb/_generated/api/columns/__init__.py +0 -0
- honeycomb/_generated/api/columns/create_column.py +241 -0
- honeycomb/_generated/api/columns/delete_column.py +221 -0
- honeycomb/_generated/api/columns/get_column.py +206 -0
- honeycomb/_generated/api/columns/list_columns.py +256 -0
- honeycomb/_generated/api/columns/update_column.py +242 -0
- honeycomb/_generated/api/dataset_definitions/__init__.py +0 -0
- honeycomb/_generated/api/dataset_definitions/list_dataset_definitions.py +193 -0
- honeycomb/_generated/api/dataset_definitions/patch_dataset_definitions.py +235 -0
- honeycomb/_generated/api/datasets/__init__.py +0 -0
- honeycomb/_generated/api/datasets/create_dataset.py +227 -0
- honeycomb/_generated/api/datasets/delete_dataset.py +251 -0
- honeycomb/_generated/api/datasets/get_dataset.py +195 -0
- honeycomb/_generated/api/datasets/list_datasets.py +177 -0
- honeycomb/_generated/api/datasets/update_dataset.py +217 -0
- honeycomb/_generated/api/enhance/__init__.py +0 -0
- honeycomb/_generated/api/enhance/record_enhance_indexer_usage.py +231 -0
- honeycomb/_generated/api/environments/__init__.py +0 -0
- honeycomb/_generated/api/environments/create_environment.py +247 -0
- honeycomb/_generated/api/environments/delete_environment.py +245 -0
- honeycomb/_generated/api/environments/get_environment.py +213 -0
- honeycomb/_generated/api/environments/list_environments.py +245 -0
- honeycomb/_generated/api/environments/update_environment.py +254 -0
- honeycomb/_generated/api/events/__init__.py +0 -0
- honeycomb/_generated/api/events/create_event.py +269 -0
- honeycomb/_generated/api/events/create_events.py +314 -0
- honeycomb/_generated/api/key_management/__init__.py +0 -0
- honeycomb/_generated/api/key_management/create_api_key.py +286 -0
- honeycomb/_generated/api/key_management/delete_api_key.py +223 -0
- honeycomb/_generated/api/key_management/get_api_key.py +225 -0
- honeycomb/_generated/api/key_management/list_api_keys.py +281 -0
- honeycomb/_generated/api/key_management/update_api_key.py +270 -0
- honeycomb/_generated/api/kinesis_events/__init__.py +0 -0
- honeycomb/_generated/api/kinesis_events/create_kinesis_events.py +220 -0
- honeycomb/_generated/api/marker_settings/__init__.py +0 -0
- honeycomb/_generated/api/marker_settings/create_marker_setting.py +209 -0
- honeycomb/_generated/api/marker_settings/delete_marker_settings.py +194 -0
- honeycomb/_generated/api/marker_settings/list_marker_settings.py +186 -0
- honeycomb/_generated/api/marker_settings/update_marker_settings.py +230 -0
- honeycomb/_generated/api/markers/__init__.py +0 -0
- honeycomb/_generated/api/markers/create_marker.py +220 -0
- honeycomb/_generated/api/markers/create_marker_v2.py +253 -0
- honeycomb/_generated/api/markers/delete_marker.py +200 -0
- honeycomb/_generated/api/markers/get_marker.py +194 -0
- honeycomb/_generated/api/markers/update_marker.py +233 -0
- honeycomb/_generated/api/markers/update_marker_v2.py +262 -0
- honeycomb/_generated/api/pipelines/__init__.py +0 -0
- honeycomb/_generated/api/pipelines/get_pipeline_configuration.py +306 -0
- honeycomb/_generated/api/pipelines/record_pipeline_usage.py +244 -0
- honeycomb/_generated/api/pipelines/update_pipeline_configuration_rollout.py +261 -0
- honeycomb/_generated/api/queries/__init__.py +0 -0
- honeycomb/_generated/api/queries/create_query.py +224 -0
- honeycomb/_generated/api/queries/get_query.py +208 -0
- honeycomb/_generated/api/query_annotations/__init__.py +0 -0
- honeycomb/_generated/api/query_annotations/create_query_annotation.py +226 -0
- honeycomb/_generated/api/query_annotations/delete_query_annotation.py +198 -0
- honeycomb/_generated/api/query_annotations/get_query_annotation.py +202 -0
- honeycomb/_generated/api/query_annotations/list_query_annotations.py +217 -0
- honeycomb/_generated/api/query_annotations/update_query_annotation.py +237 -0
- honeycomb/_generated/api/query_data/__init__.py +0 -0
- honeycomb/_generated/api/query_data/create_query_result.py +247 -0
- honeycomb/_generated/api/query_data/get_query_result.py +208 -0
- honeycomb/_generated/api/recipients/__init__.py +0 -0
- honeycomb/_generated/api/recipients/create_recipient.py +317 -0
- honeycomb/_generated/api/recipients/delete_recipient.py +199 -0
- honeycomb/_generated/api/recipients/get_recipient.py +252 -0
- honeycomb/_generated/api/recipients/list_recipients.py +230 -0
- honeycomb/_generated/api/recipients/update_recipient.py +323 -0
- honeycomb/_generated/api/reporting/__init__.py +0 -0
- honeycomb/_generated/api/reporting/get_slo_history.py +218 -0
- honeycomb/_generated/api/service_maps/__init__.py +0 -0
- honeycomb/_generated/api/service_maps/create_map_dependency_request.py +252 -0
- honeycomb/_generated/api/service_maps/get_map_dependencies.py +265 -0
- honeycomb/_generated/api/sl_os/__init__.py +0 -0
- honeycomb/_generated/api/sl_os/create_slo.py +229 -0
- honeycomb/_generated/api/sl_os/delete_slo.py +210 -0
- honeycomb/_generated/api/sl_os/get_slo.py +256 -0
- honeycomb/_generated/api/sl_os/list_slos.py +210 -0
- honeycomb/_generated/api/sl_os/update_slo.py +242 -0
- honeycomb/_generated/api/triggers/__init__.py +0 -0
- honeycomb/_generated/api/triggers/create_trigger.py +250 -0
- honeycomb/_generated/api/triggers/delete_trigger.py +204 -0
- honeycomb/_generated/api/triggers/get_trigger.py +214 -0
- honeycomb/_generated/api/triggers/list_triggers.py +206 -0
- honeycomb/_generated/api/triggers/list_triggers_with_recipient.py +208 -0
- honeycomb/_generated/api/triggers/update_trigger.py +248 -0
- honeycomb/_generated/client.py +271 -0
- honeycomb/_generated/errors.py +14 -0
- honeycomb/_generated/models/__init__.py +561 -0
- honeycomb/_generated/models/api_key_create_request.py +78 -0
- honeycomb/_generated/models/api_key_create_request_data.py +130 -0
- honeycomb/_generated/models/api_key_create_request_data_relationships.py +78 -0
- honeycomb/_generated/models/api_key_create_request_data_type.py +7 -0
- honeycomb/_generated/models/api_key_list_response.py +112 -0
- honeycomb/_generated/models/api_key_object.py +189 -0
- honeycomb/_generated/models/api_key_object_links.py +74 -0
- honeycomb/_generated/models/api_key_object_relationships.py +150 -0
- honeycomb/_generated/models/api_key_object_type.py +7 -0
- honeycomb/_generated/models/api_key_response.py +78 -0
- honeycomb/_generated/models/api_key_update_request.py +104 -0
- honeycomb/_generated/models/auth.py +140 -0
- honeycomb/_generated/models/auth_api_key_access.py +154 -0
- honeycomb/_generated/models/auth_environment.py +82 -0
- honeycomb/_generated/models/auth_team.py +82 -0
- honeycomb/_generated/models/auth_type.py +8 -0
- honeycomb/_generated/models/auth_v2_response.py +108 -0
- honeycomb/_generated/models/auth_v2_response_data.py +122 -0
- honeycomb/_generated/models/auth_v2_response_data_attributes.py +141 -0
- honeycomb/_generated/models/auth_v2_response_data_attributes_key_type.py +7 -0
- honeycomb/_generated/models/auth_v2_response_data_attributes_timestamps.py +107 -0
- honeycomb/_generated/models/auth_v2_response_data_relationships.py +59 -0
- honeycomb/_generated/models/auth_v2_response_data_type.py +7 -0
- honeycomb/_generated/models/base_trigger.py +367 -0
- honeycomb/_generated/models/base_trigger_alert_type.py +8 -0
- honeycomb/_generated/models/base_trigger_baseline_details_type_0.py +92 -0
- honeycomb/_generated/models/base_trigger_baseline_details_type_0_offset_minutes.py +10 -0
- honeycomb/_generated/models/base_trigger_baseline_details_type_0_type.py +8 -0
- honeycomb/_generated/models/base_trigger_evaluation_schedule.py +83 -0
- honeycomb/_generated/models/base_trigger_evaluation_schedule_type.py +8 -0
- honeycomb/_generated/models/base_trigger_evaluation_schedule_window.py +103 -0
- honeycomb/_generated/models/base_trigger_evaluation_schedule_window_days_of_week_item.py +13 -0
- honeycomb/_generated/models/base_trigger_threshold.py +95 -0
- honeycomb/_generated/models/base_trigger_threshold_op.py +10 -0
- honeycomb/_generated/models/batch_event.py +110 -0
- honeycomb/_generated/models/board.py +276 -0
- honeycomb/_generated/models/board_layout_generation.py +8 -0
- honeycomb/_generated/models/board_links.py +74 -0
- honeycomb/_generated/models/board_panel_position.py +104 -0
- honeycomb/_generated/models/board_query_visualization_settings.py +141 -0
- honeycomb/_generated/models/board_query_visualization_settings_charts_item.py +113 -0
- honeycomb/_generated/models/board_query_visualization_settings_charts_item_chart_type.py +13 -0
- honeycomb/_generated/models/board_type.py +7 -0
- honeycomb/_generated/models/board_view_filter.py +93 -0
- honeycomb/_generated/models/board_view_filter_operation.py +22 -0
- honeycomb/_generated/models/board_view_response.py +111 -0
- honeycomb/_generated/models/budget_rate.py +189 -0
- honeycomb/_generated/models/budget_rate_alert_type.py +8 -0
- honeycomb/_generated/models/burn_alert_shared_params.py +136 -0
- honeycomb/_generated/models/calculated_field.py +117 -0
- honeycomb/_generated/models/configuration_key_attributes.py +140 -0
- honeycomb/_generated/models/configuration_key_attributes_key_type.py +7 -0
- honeycomb/_generated/models/configuration_key_attributes_timestamps.py +107 -0
- honeycomb/_generated/models/create_board_view_request.py +96 -0
- honeycomb/_generated/models/create_budget_rate_burn_alert_request.py +232 -0
- honeycomb/_generated/models/create_budget_rate_burn_alert_request_slo.py +74 -0
- honeycomb/_generated/models/create_column.py +149 -0
- honeycomb/_generated/models/create_column_type.py +11 -0
- honeycomb/_generated/models/create_enhance_indexer_usage_record_request.py +78 -0
- honeycomb/_generated/models/create_enhance_indexer_usage_record_request_data.py +90 -0
- honeycomb/_generated/models/create_enhance_indexer_usage_record_request_data_attributes.py +97 -0
- honeycomb/_generated/models/create_enhance_indexer_usage_record_request_data_attributes_usage_data.py +88 -0
- honeycomb/_generated/models/create_enhance_indexer_usage_record_request_data_attributes_usage_data_resource_metrics_item.py +89 -0
- honeycomb/_generated/models/create_enhance_indexer_usage_record_request_data_attributes_usage_data_resource_metrics_item_scope_metrics_item.py +89 -0
- honeycomb/_generated/models/create_enhance_indexer_usage_record_request_data_attributes_usage_data_resource_metrics_item_scope_metrics_item_metrics_item.py +97 -0
- honeycomb/_generated/models/create_enhance_indexer_usage_record_request_data_attributes_usage_data_resource_metrics_item_scope_metrics_item_metrics_item_sum.py +102 -0
- honeycomb/_generated/models/create_enhance_indexer_usage_record_request_data_attributes_usage_data_resource_metrics_item_scope_metrics_item_metrics_item_sum_aggregation_temporality.py +7 -0
- honeycomb/_generated/models/create_enhance_indexer_usage_record_request_data_attributes_usage_data_resource_metrics_item_scope_metrics_item_metrics_item_sum_datapoints_item.py +105 -0
- honeycomb/_generated/models/create_enhance_indexer_usage_record_request_data_attributes_usage_data_resource_metrics_item_scope_metrics_item_metrics_item_sum_datapoints_item_attributes_item.py +87 -0
- honeycomb/_generated/models/create_enhance_indexer_usage_record_request_data_attributes_usage_data_resource_metrics_item_scope_metrics_item_metrics_item_sum_datapoints_item_attributes_item_value.py +73 -0
- honeycomb/_generated/models/create_enhance_indexer_usage_record_request_data_type.py +7 -0
- honeycomb/_generated/models/create_environment_request.py +78 -0
- honeycomb/_generated/models/create_environment_request_data.py +90 -0
- honeycomb/_generated/models/create_environment_request_data_attributes.py +102 -0
- honeycomb/_generated/models/create_environment_request_data_type.py +7 -0
- honeycomb/_generated/models/create_events_content_encoding.py +8 -0
- honeycomb/_generated/models/create_events_response_200_item.py +82 -0
- honeycomb/_generated/models/create_exhaustion_time_burn_alert_request.py +217 -0
- honeycomb/_generated/models/create_exhaustion_time_burn_alert_request_slo.py +74 -0
- honeycomb/_generated/models/create_map_dependencies_request.py +128 -0
- honeycomb/_generated/models/create_map_dependencies_response.py +97 -0
- honeycomb/_generated/models/create_map_dependencies_response_status.py +9 -0
- honeycomb/_generated/models/create_pipeline_health_record_request.py +78 -0
- honeycomb/_generated/models/create_pipeline_health_record_request_data.py +98 -0
- honeycomb/_generated/models/create_pipeline_health_record_request_data_attributes.py +88 -0
- honeycomb/_generated/models/create_pipeline_health_record_request_data_attributes_usage_data.py +88 -0
- honeycomb/_generated/models/create_pipeline_health_record_request_data_attributes_usage_data_resource_metrics_item.py +89 -0
- honeycomb/_generated/models/create_pipeline_health_record_request_data_attributes_usage_data_resource_metrics_item_scope_metrics_item.py +89 -0
- honeycomb/_generated/models/create_pipeline_health_record_request_data_attributes_usage_data_resource_metrics_item_scope_metrics_item_metrics_item.py +97 -0
- honeycomb/_generated/models/create_pipeline_health_record_request_data_attributes_usage_data_resource_metrics_item_scope_metrics_item_metrics_item_sum.py +102 -0
- honeycomb/_generated/models/create_pipeline_health_record_request_data_attributes_usage_data_resource_metrics_item_scope_metrics_item_metrics_item_sum_aggregation_temporality.py +7 -0
- honeycomb/_generated/models/create_pipeline_health_record_request_data_attributes_usage_data_resource_metrics_item_scope_metrics_item_metrics_item_sum_datapoints_item.py +105 -0
- honeycomb/_generated/models/create_pipeline_health_record_request_data_attributes_usage_data_resource_metrics_item_scope_metrics_item_metrics_item_sum_datapoints_item_attributes_item.py +87 -0
- honeycomb/_generated/models/create_pipeline_health_record_request_data_attributes_usage_data_resource_metrics_item_scope_metrics_item_metrics_item_sum_datapoints_item_attributes_item_value.py +73 -0
- honeycomb/_generated/models/create_pipeline_health_record_request_data_type.py +7 -0
- honeycomb/_generated/models/create_query_result_request.py +118 -0
- honeycomb/_generated/models/dataset.py +183 -0
- honeycomb/_generated/models/dataset_creation_payload.py +93 -0
- honeycomb/_generated/models/dataset_definition_type_1.py +96 -0
- honeycomb/_generated/models/dataset_definition_type_1_column_type.py +8 -0
- honeycomb/_generated/models/dataset_definitions.py +599 -0
- honeycomb/_generated/models/dataset_relationship.py +79 -0
- honeycomb/_generated/models/dataset_relationship_data.py +82 -0
- honeycomb/_generated/models/dataset_relationship_data_type.py +7 -0
- honeycomb/_generated/models/dataset_settings.py +73 -0
- honeycomb/_generated/models/dataset_update_payload.py +105 -0
- honeycomb/_generated/models/dataset_update_payload_settings.py +73 -0
- honeycomb/_generated/models/detailed_error.py +115 -0
- honeycomb/_generated/models/email_recipient.py +160 -0
- honeycomb/_generated/models/email_recipient_details.py +71 -0
- honeycomb/_generated/models/email_recipient_type.py +7 -0
- honeycomb/_generated/models/environment.py +112 -0
- honeycomb/_generated/models/environment_attributes.py +140 -0
- honeycomb/_generated/models/environment_attributes_color_type_1.py +7 -0
- honeycomb/_generated/models/environment_attributes_settings.py +70 -0
- honeycomb/_generated/models/environment_color.py +16 -0
- honeycomb/_generated/models/environment_links.py +70 -0
- honeycomb/_generated/models/environment_list_response.py +112 -0
- honeycomb/_generated/models/environment_relationship.py +79 -0
- honeycomb/_generated/models/environment_relationship_data.py +82 -0
- honeycomb/_generated/models/environment_relationship_data_type.py +7 -0
- honeycomb/_generated/models/environment_response.py +78 -0
- honeycomb/_generated/models/environment_type.py +7 -0
- honeycomb/_generated/models/error.py +74 -0
- honeycomb/_generated/models/event.py +71 -0
- honeycomb/_generated/models/exhaustion_time.py +174 -0
- honeycomb/_generated/models/exhaustion_time_alert_type.py +8 -0
- honeycomb/_generated/models/exhaustion_time_burn_alert_list_response.py +198 -0
- honeycomb/_generated/models/exhaustion_time_burn_alert_list_response_slo.py +77 -0
- honeycomb/_generated/models/filter_op.py +22 -0
- honeycomb/_generated/models/get_map_dependencies_response.py +169 -0
- honeycomb/_generated/models/get_map_dependencies_response_status.py +9 -0
- honeycomb/_generated/models/having_calculate_op.py +29 -0
- honeycomb/_generated/models/having_op.py +12 -0
- honeycomb/_generated/models/included_resource.py +106 -0
- honeycomb/_generated/models/included_resource_attributes.py +62 -0
- honeycomb/_generated/models/ingest_key_attributes.py +153 -0
- honeycomb/_generated/models/ingest_key_attributes_key_type.py +7 -0
- honeycomb/_generated/models/ingest_key_attributes_permissions.py +74 -0
- honeycomb/_generated/models/ingest_key_attributes_timestamps.py +107 -0
- honeycomb/_generated/models/ingest_key_type.py +90 -0
- honeycomb/_generated/models/ingest_key_type_key_type.py +7 -0
- honeycomb/_generated/models/jsonapi_error_source.py +92 -0
- honeycomb/_generated/models/kinesis_event.py +111 -0
- honeycomb/_generated/models/kinesis_event_record.py +73 -0
- honeycomb/_generated/models/kinesis_response.py +91 -0
- honeycomb/_generated/models/list_api_keys_filtertype.py +8 -0
- honeycomb/_generated/models/map_dependency.py +118 -0
- honeycomb/_generated/models/map_node.py +97 -0
- honeycomb/_generated/models/map_node_type.py +7 -0
- honeycomb/_generated/models/marker.py +151 -0
- honeycomb/_generated/models/marker_create_request.py +78 -0
- honeycomb/_generated/models/marker_create_request_data.py +104 -0
- honeycomb/_generated/models/marker_create_request_data_attributes.py +110 -0
- honeycomb/_generated/models/marker_create_request_data_relationships.py +92 -0
- honeycomb/_generated/models/marker_create_request_data_type.py +7 -0
- honeycomb/_generated/models/marker_object.py +136 -0
- honeycomb/_generated/models/marker_object_attributes.py +146 -0
- honeycomb/_generated/models/marker_object_attributes_timestamps.py +107 -0
- honeycomb/_generated/models/marker_object_links.py +74 -0
- honeycomb/_generated/models/marker_object_relationships.py +111 -0
- honeycomb/_generated/models/marker_object_relationships_dataset.py +104 -0
- honeycomb/_generated/models/marker_object_relationships_dataset_data_type_0.py +94 -0
- honeycomb/_generated/models/marker_object_relationships_dataset_data_type_0_type.py +7 -0
- honeycomb/_generated/models/marker_object_type.py +7 -0
- honeycomb/_generated/models/marker_response.py +78 -0
- honeycomb/_generated/models/marker_setting.py +125 -0
- honeycomb/_generated/models/marker_update_request.py +78 -0
- honeycomb/_generated/models/marker_update_request_data.py +104 -0
- honeycomb/_generated/models/marker_update_request_data_attributes.py +111 -0
- honeycomb/_generated/models/marker_update_request_data_relationships.py +92 -0
- honeycomb/_generated/models/marker_update_request_data_type.py +7 -0
- honeycomb/_generated/models/ms_teams_recipient.py +161 -0
- honeycomb/_generated/models/ms_teams_recipient_details.py +80 -0
- honeycomb/_generated/models/ms_teams_recipient_type.py +7 -0
- honeycomb/_generated/models/ms_teams_workflow_recipient.py +161 -0
- honeycomb/_generated/models/ms_teams_workflow_recipient_details.py +80 -0
- honeycomb/_generated/models/ms_teams_workflow_recipient_type.py +7 -0
- honeycomb/_generated/models/notification_recipient.py +131 -0
- honeycomb/_generated/models/notification_recipient_details.py +117 -0
- honeycomb/_generated/models/notification_recipient_details_pagerduty_severity.py +10 -0
- honeycomb/_generated/models/notification_recipient_details_variables_item.py +81 -0
- honeycomb/_generated/models/pager_duty_recipient.py +160 -0
- honeycomb/_generated/models/pager_duty_recipient_details.py +79 -0
- honeycomb/_generated/models/pager_duty_recipient_type.py +7 -0
- honeycomb/_generated/models/pagination_links.py +80 -0
- honeycomb/_generated/models/payload_template.py +73 -0
- honeycomb/_generated/models/pipeline_configuration_response.py +112 -0
- honeycomb/_generated/models/pipeline_configuration_response_attributes.py +101 -0
- honeycomb/_generated/models/pipeline_configuration_response_attributes_configs_item.py +85 -0
- honeycomb/_generated/models/pipeline_configuration_response_links.py +73 -0
- honeycomb/_generated/models/pipeline_configuration_response_type.py +7 -0
- honeycomb/_generated/models/pipeline_configuration_rollout.py +112 -0
- honeycomb/_generated/models/pipeline_configuration_rollout_attributes.py +75 -0
- honeycomb/_generated/models/pipeline_configuration_rollout_attributes_status.py +12 -0
- honeycomb/_generated/models/pipeline_configuration_rollout_links.py +73 -0
- honeycomb/_generated/models/pipeline_configuration_rollout_type.py +7 -0
- honeycomb/_generated/models/preset_filter.py +78 -0
- honeycomb/_generated/models/query.py +346 -0
- honeycomb/_generated/models/query_annotation.py +166 -0
- honeycomb/_generated/models/query_annotation_source.py +8 -0
- honeycomb/_generated/models/query_calculated_fields_item.py +80 -0
- honeycomb/_generated/models/query_calculations_item.py +98 -0
- honeycomb/_generated/models/query_compare_time_offset_seconds.py +14 -0
- honeycomb/_generated/models/query_filter_combination.py +8 -0
- honeycomb/_generated/models/query_filters_item.py +126 -0
- honeycomb/_generated/models/query_havings_item.py +128 -0
- honeycomb/_generated/models/query_op.py +30 -0
- honeycomb/_generated/models/query_orders_item.py +115 -0
- honeycomb/_generated/models/query_orders_item_order.py +8 -0
- honeycomb/_generated/models/query_panel.py +114 -0
- honeycomb/_generated/models/query_panel_query_panel.py +143 -0
- honeycomb/_generated/models/query_panel_query_panel_query_style.py +9 -0
- honeycomb/_generated/models/query_result.py +130 -0
- honeycomb/_generated/models/query_result_details.py +156 -0
- honeycomb/_generated/models/query_result_details_data.py +188 -0
- honeycomb/_generated/models/query_result_details_links.py +85 -0
- honeycomb/_generated/models/query_result_links.py +85 -0
- honeycomb/_generated/models/query_results_data.py +89 -0
- honeycomb/_generated/models/query_results_data_data.py +71 -0
- honeycomb/_generated/models/query_results_series.py +97 -0
- honeycomb/_generated/models/recipient_properties.py +137 -0
- honeycomb/_generated/models/recipient_type.py +12 -0
- honeycomb/_generated/models/slack_recipient.py +160 -0
- honeycomb/_generated/models/slack_recipient_details.py +71 -0
- honeycomb/_generated/models/slack_recipient_type.py +7 -0
- honeycomb/_generated/models/slo.py +245 -0
- honeycomb/_generated/models/slo_create.py +245 -0
- honeycomb/_generated/models/slo_create_sli.py +75 -0
- honeycomb/_generated/models/slo_detailed_response.py +312 -0
- honeycomb/_generated/models/slo_detailed_response_status.py +10 -0
- honeycomb/_generated/models/slo_history.py +94 -0
- honeycomb/_generated/models/slo_history_request.py +93 -0
- honeycomb/_generated/models/slo_history_response.py +91 -0
- honeycomb/_generated/models/slo_panel.py +114 -0
- honeycomb/_generated/models/slo_panel_slo_panel.py +75 -0
- honeycomb/_generated/models/slo_sli.py +74 -0
- honeycomb/_generated/models/tag.py +79 -0
- honeycomb/_generated/models/team_relationship.py +78 -0
- honeycomb/_generated/models/team_relationship_team.py +78 -0
- honeycomb/_generated/models/team_relationship_team_data.py +82 -0
- honeycomb/_generated/models/team_relationship_team_data_type.py +7 -0
- honeycomb/_generated/models/template_variable_definition.py +81 -0
- honeycomb/_generated/models/text_panel.py +114 -0
- honeycomb/_generated/models/text_panel_text_panel.py +71 -0
- honeycomb/_generated/models/trigger_response.py +399 -0
- honeycomb/_generated/models/trigger_with_inline_query.py +390 -0
- honeycomb/_generated/models/trigger_with_inline_query_query.py +60 -0
- honeycomb/_generated/models/trigger_with_query_reference.py +376 -0
- honeycomb/_generated/models/update_board_view_request.py +107 -0
- honeycomb/_generated/models/update_environment_request.py +78 -0
- honeycomb/_generated/models/update_environment_request_data.py +98 -0
- honeycomb/_generated/models/update_environment_request_data_attributes.py +118 -0
- honeycomb/_generated/models/update_environment_request_data_attributes_settings.py +73 -0
- honeycomb/_generated/models/update_environment_request_data_type.py +7 -0
- honeycomb/_generated/models/update_exhaustion_time_burn_alert_request.py +202 -0
- honeycomb/_generated/models/update_pipeline_configuration_rollout.py +98 -0
- honeycomb/_generated/models/update_pipeline_configuration_rollout_attributes.py +75 -0
- honeycomb/_generated/models/update_pipeline_configuration_rollout_attributes_status.py +12 -0
- honeycomb/_generated/models/update_pipeline_configuration_rollout_request.py +78 -0
- honeycomb/_generated/models/update_pipeline_configuration_rollout_request_data.py +98 -0
- honeycomb/_generated/models/update_pipeline_configuration_rollout_request_data_attributes.py +75 -0
- honeycomb/_generated/models/update_pipeline_configuration_rollout_request_data_attributes_status.py +12 -0
- honeycomb/_generated/models/update_pipeline_configuration_rollout_request_data_type.py +7 -0
- honeycomb/_generated/models/update_pipeline_configuration_rollout_response.py +78 -0
- honeycomb/_generated/models/update_pipeline_configuration_rollout_type.py +7 -0
- honeycomb/_generated/models/user_relationship.py +78 -0
- honeycomb/_generated/models/user_relationship_data.py +82 -0
- honeycomb/_generated/models/user_relationship_data_type.py +7 -0
- honeycomb/_generated/models/validation_error.py +145 -0
- honeycomb/_generated/models/validation_error_type_detail_item.py +103 -0
- honeycomb/_generated/models/validation_error_type_detail_item_code.py +10 -0
- honeycomb/_generated/models/webhook_header.py +81 -0
- honeycomb/_generated/models/webhook_recipient.py +160 -0
- honeycomb/_generated/models/webhook_recipient_details.py +142 -0
- honeycomb/_generated/models/webhook_recipient_details_webhook_payloads.py +117 -0
- honeycomb/_generated/models/webhook_recipient_details_webhook_payloads_payload_templates.py +126 -0
- honeycomb/_generated/models/webhook_recipient_type.py +7 -0
- honeycomb/_generated/py.typed +1 -0
- honeycomb/_generated/types.py +46 -0
- honeycomb/auth.py +152 -0
- honeycomb/client.py +736 -0
- honeycomb/exceptions.py +236 -0
- honeycomb/models/__init__.py +157 -0
- honeycomb/models/api_keys.py +81 -0
- honeycomb/models/board_builder.py +387 -0
- honeycomb/models/boards.py +86 -0
- honeycomb/models/burn_alerts.py +124 -0
- honeycomb/models/columns.py +59 -0
- honeycomb/models/datasets.py +51 -0
- honeycomb/models/derived_columns.py +107 -0
- honeycomb/models/environments.py +106 -0
- honeycomb/models/events.py +35 -0
- honeycomb/models/marker_builder.py +136 -0
- honeycomb/models/markers.py +73 -0
- honeycomb/models/queries.py +210 -0
- honeycomb/models/query_annotations.py +63 -0
- honeycomb/models/query_builder.py +840 -0
- honeycomb/models/recipient_builder.py +248 -0
- honeycomb/models/recipients.py +43 -0
- honeycomb/models/service_map_dependencies.py +126 -0
- honeycomb/models/slo_builder.py +463 -0
- honeycomb/models/slos.py +66 -0
- honeycomb/models/tags_mixin.py +79 -0
- honeycomb/models/trigger_builder.py +561 -0
- honeycomb/models/triggers.py +208 -0
- honeycomb/resources/__init__.py +35 -0
- honeycomb/resources/_recipient_utils.py +156 -0
- honeycomb/resources/api_keys.py +303 -0
- honeycomb/resources/base.py +142 -0
- honeycomb/resources/boards.py +350 -0
- honeycomb/resources/burn_alerts.py +205 -0
- honeycomb/resources/columns.py +185 -0
- honeycomb/resources/datasets.py +163 -0
- honeycomb/resources/derived_columns.py +233 -0
- honeycomb/resources/environments.py +295 -0
- honeycomb/resources/events.py +156 -0
- honeycomb/resources/markers.py +334 -0
- honeycomb/resources/queries.py +256 -0
- honeycomb/resources/query_annotations.py +207 -0
- honeycomb/resources/query_results.py +886 -0
- honeycomb/resources/recipients.py +202 -0
- honeycomb/resources/service_map_dependencies.py +389 -0
- honeycomb/resources/slos.py +353 -0
- honeycomb/resources/triggers.py +284 -0
- honeycomb/tools/__init__.py +85 -0
- honeycomb/tools/__main__.py +204 -0
- honeycomb/tools/builders.py +512 -0
- honeycomb/tools/descriptions.py +523 -0
- honeycomb/tools/executor.py +860 -0
- honeycomb/tools/generator.py +2386 -0
- honeycomb/tools/schemas.py +184 -0
- honeycomb_api-0.1.0.dist-info/METADATA +451 -0
- honeycomb_api-0.1.0.dist-info/RECORD +453 -0
- honeycomb_api-0.1.0.dist-info/WHEEL +4 -0
- honeycomb_api-0.1.0.dist-info/licenses/LICENSE +21 -0
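
The largest addition in this release is honeycomb/resources/query_results.py (886 lines), shown below. As orientation before reading the diff, here is a minimal usage sketch based only on the docstrings that follow; it is illustrative, not taken from the package README. The top-level import of HoneycombClient and the exact QuerySpec field names are assumptions inferred from the file listing and the diffed module, not verified against the published wheel.

# Illustrative sketch only - names are taken from the docstrings in the diff below.
# Assumption: HoneycombClient is re-exported from the top-level "honeycomb" package.
import asyncio

from honeycomb import HoneycombClient
from honeycomb.models.queries import QuerySpec  # module exists in the file listing above


async def main() -> None:
    async with HoneycombClient(api_key="YOUR_API_KEY") as client:
        # run_all_async paginates past the 10K-row-per-query limit using
        # sort-based cursor pagination (see its docstring in the diff).
        rows = await client.query_results.run_all_async(
            dataset="my-dataset",
            spec=QuerySpec(
                time_range=86400,
                calculations=[{"op": "AVG", "column": "duration_ms"}],
                breakdowns=["trace.trace_id", "name"],
            ),
            on_page=lambda page, total: print(f"Page {page}: {total} rows so far"),
        )
        print(f"Total: {len(rows)} rows")


asyncio.run(main())
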
@@ -0,0 +1,886 @@
|
|
|
1
|
+
"""Query Results resource for Honeycomb API."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import asyncio
|
|
6
|
+
import time as time_module
|
|
7
|
+
from collections.abc import Callable
|
|
8
|
+
from typing import TYPE_CHECKING, Any, overload
|
|
9
|
+
|
|
10
|
+
from ..models.queries import Query, QueryResult, QuerySpec
|
|
11
|
+
from ..models.query_builder import Calculation
|
|
12
|
+
from .base import BaseResource
|
|
13
|
+
|
|
14
|
+
if TYPE_CHECKING:
|
|
15
|
+
from ..client import HoneycombClient
|
|
16
|
+
from ..models.query_builder import QueryBuilder
|
|
17
|
+
|
|
18
|
+
# Default max results for run_all_async
|
|
19
|
+
DEFAULT_MAX_RESULTS = 100_000
|
|
20
|
+
|
|
21
|
+
# Duplication threshold for smart stopping
|
|
22
|
+
DUPLICATION_THRESHOLD = 0.5 # 50%
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def _get_calc_attr(calc: Calculation | dict[str, Any], attr: str, default: Any = None) -> Any:
|
|
26
|
+
"""Get an attribute from a Calculation or dict.
|
|
27
|
+
|
|
28
|
+
Args:
|
|
29
|
+
calc: Either a Calculation object or a dict
|
|
30
|
+
attr: The attribute name to get
|
|
31
|
+
default: Default value if attribute is not present
|
|
32
|
+
|
|
33
|
+
Returns:
|
|
34
|
+
The attribute value or default
|
|
35
|
+
"""
|
|
36
|
+
if isinstance(calc, Calculation):
|
|
37
|
+
value = getattr(calc, attr, default)
|
|
38
|
+
# Handle enum values
|
|
39
|
+
if hasattr(value, "value"):
|
|
40
|
+
return value.value
|
|
41
|
+
return value
|
|
42
|
+
return calc.get(attr, default)
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class QueryResultsResource(BaseResource):
|
|
46
|
+
"""Resource for running queries and getting results.
|
|
47
|
+
|
|
48
|
+
Query results represent the execution of a query against a dataset.
|
|
49
|
+
You must first create a saved query, then run it to get results.
|
|
50
|
+
|
|
51
|
+
Note:
|
|
52
|
+
Query Results API requires Enterprise plan.
|
|
53
|
+
|
|
54
|
+
Example (async - run saved query):
|
|
55
|
+
>>> async with HoneycombClient(api_key="...") as client:
|
|
56
|
+
... # First create a saved query
|
|
57
|
+
... query = await client.queries.create_async(
|
|
58
|
+
... dataset="my-dataset",
|
|
59
|
+
... spec=QuerySpec(time_range=3600, calculations=[...])
|
|
60
|
+
... )
|
|
61
|
+
... # Then run it and poll for results
|
|
62
|
+
... result = await client.query_results.run_async(
|
|
63
|
+
... dataset="my-dataset",
|
|
64
|
+
... query_id=query.id,
|
|
65
|
+
... poll_interval=1.0,
|
|
66
|
+
... timeout=60.0,
|
|
67
|
+
... )
|
|
68
|
+
... print(f"Found {len(result.data.rows)} rows")
|
|
69
|
+
|
|
70
|
+
Example (manual polling):
|
|
71
|
+
>>> async with HoneycombClient(api_key="...") as client:
|
|
72
|
+
... # Create query result
|
|
73
|
+
... query_result_id = await client.query_results.create_async(
|
|
74
|
+
... dataset="my-dataset",
|
|
75
|
+
... spec=QuerySpec(...)
|
|
76
|
+
... )
|
|
77
|
+
... # Poll for completion
|
|
78
|
+
... result = await client.query_results.get_async(
|
|
79
|
+
... dataset="my-dataset",
|
|
80
|
+
... query_result_id=query_result_id
|
|
81
|
+
... )
|
|
82
|
+
"""
|
|
83
|
+
|
|
84
|
+
def __init__(self, client: HoneycombClient) -> None:
|
|
85
|
+
super().__init__(client)
|
|
86
|
+
|
|
87
|
+
def _build_path(self, dataset: str, query_result_id: str | None = None) -> str:
|
|
88
|
+
"""Build API path for query results."""
|
|
89
|
+
base = f"/1/query_results/{dataset}"
|
|
90
|
+
if query_result_id:
|
|
91
|
+
return f"{base}/{query_result_id}"
|
|
92
|
+
return base
|
|
93
|
+
|
|
94
|
+
# -------------------------------------------------------------------------
|
|
95
|
+
# Async methods
|
|
96
|
+
# -------------------------------------------------------------------------
|
|
97
|
+
|
|
98
|
+
async def create_async(
|
|
99
|
+
self,
|
|
100
|
+
dataset: str,
|
|
101
|
+
query_id: str,
|
|
102
|
+
disable_series: bool = True,
|
|
103
|
+
limit: int | None = None,
|
|
104
|
+
) -> str:
|
|
105
|
+
"""Create a query result (start query execution) (async).
|
|
106
|
+
|
|
107
|
+
Args:
|
|
108
|
+
dataset: The dataset slug.
|
|
109
|
+
query_id: Saved query ID (from queries.create_async).
|
|
110
|
+
disable_series: If True, disable timeseries data and allow up to 10K results
|
|
111
|
+
(default: True for better performance).
|
|
112
|
+
limit: Override result limit (max 10,000 when disable_series=True, 1,000 otherwise).
|
|
113
|
+
Defaults to 10,000 when disable_series=True, 1,000 when False.
|
|
114
|
+
|
|
115
|
+
Returns:
|
|
116
|
+
Query result ID for polling.
|
|
117
|
+
|
|
118
|
+
Raises:
|
|
119
|
+
HoneycombNotFoundError: If the query doesn't exist.
|
|
120
|
+
HoneycombValidationError: If the query spec is invalid.
|
|
121
|
+
|
|
122
|
+
Note:
|
|
123
|
+
Query Results API requires Enterprise plan.
|
|
124
|
+
"""
|
|
125
|
+
json_data = {
|
|
126
|
+
"query_id": query_id,
|
|
127
|
+
"disable_series": disable_series,
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
# Set limit - default to 10K when disable_series=True, 1K otherwise
|
|
131
|
+
if limit is not None:
|
|
132
|
+
json_data["limit"] = limit
|
|
133
|
+
elif disable_series:
|
|
134
|
+
json_data["limit"] = 10000
|
|
135
|
+
else:
|
|
136
|
+
json_data["limit"] = 1000
|
|
137
|
+
|
|
138
|
+
data = await self._post_async(self._build_path(dataset), json=json_data)
|
|
139
|
+
# API returns {"id": "query-result-id"}
|
|
140
|
+
return data["id"]
|
|
141
|
+
|
|
142
|
+
async def get_async(self, dataset: str, query_result_id: str) -> QueryResult:
|
|
143
|
+
"""Get query result status/data (async).
|
|
144
|
+
|
|
145
|
+
Args:
|
|
146
|
+
dataset: The dataset slug.
|
|
147
|
+
query_result_id: Query result ID.
|
|
148
|
+
|
|
149
|
+
Returns:
|
|
150
|
+
QueryResult with data if query is complete.
|
|
151
|
+
|
|
152
|
+
Raises:
|
|
153
|
+
HoneycombNotFoundError: If the query result doesn't exist.
|
|
154
|
+
"""
|
|
155
|
+
data = await self._get_async(self._build_path(dataset, query_result_id))
|
|
156
|
+
return self._parse_model(QueryResult, data)
|
|
157
|
+
|
|
158
|
+
async def run_async(
|
|
159
|
+
self,
|
|
160
|
+
dataset: str,
|
|
161
|
+
query_id: str,
|
|
162
|
+
disable_series: bool = True,
|
|
163
|
+
limit: int | None = None,
|
|
164
|
+
poll_interval: float = 1.0,
|
|
165
|
+
timeout: float = 60.0,
|
|
166
|
+
) -> QueryResult:
|
|
167
|
+
"""Run a saved query and poll for results (async).
|
|
168
|
+
|
|
169
|
+
Convenience method that creates a query result and polls until complete.
|
|
170
|
+
|
|
171
|
+
Args:
|
|
172
|
+
dataset: The dataset slug.
|
|
173
|
+
query_id: Saved query ID (from queries.create_async).
|
|
174
|
+
disable_series: If True, disable timeseries data and allow up to 10K results
|
|
175
|
+
(default: True for better performance).
|
|
176
|
+
limit: Override result limit (max 10,000 when disable_series=True, 1,000 otherwise).
|
|
177
|
+
Defaults to 10,000 when disable_series=True, 1,000 when False.
|
|
178
|
+
poll_interval: Seconds between poll attempts (default: 1.0).
|
|
179
|
+
timeout: Maximum seconds to wait for results (default: 60.0).
|
|
180
|
+
|
|
181
|
+
Returns:
|
|
182
|
+
QueryResult with completed data (up to 10K rows if disable_series=True).
|
|
183
|
+
|
|
184
|
+
Raises:
|
|
185
|
+
HoneycombTimeoutError: If query doesn't complete within timeout.
|
|
186
|
+
HoneycombNotFoundError: If the query doesn't exist.
|
|
187
|
+
|
|
188
|
+
Note:
|
|
189
|
+
For > 10K results, use run_all_async() with sort-based pagination.
|
|
190
|
+
Query Results API requires Enterprise plan.
|
|
191
|
+
"""
|
|
192
|
+
from ..exceptions import HoneycombTimeoutError
|
|
193
|
+
|
|
194
|
+
# Create the query result
|
|
195
|
+
result_id = await self.create_async(
|
|
196
|
+
dataset, query_id=query_id, disable_series=disable_series, limit=limit
|
|
197
|
+
)
|
|
198
|
+
|
|
199
|
+
# Poll for completion
|
|
200
|
+
start_time = asyncio.get_event_loop().time()
|
|
201
|
+
while True:
|
|
202
|
+
result = await self.get_async(dataset, result_id)
|
|
203
|
+
|
|
204
|
+
# Check if query is complete (has results)
|
|
205
|
+
if result.data is not None and result.data.results is not None:
|
|
206
|
+
return result
|
|
207
|
+
|
|
208
|
+
# Check timeout
|
|
209
|
+
elapsed = asyncio.get_event_loop().time() - start_time
|
|
210
|
+
if elapsed >= timeout:
|
|
211
|
+
raise HoneycombTimeoutError(
|
|
212
|
+
f"Query did not complete within {timeout}s", timeout=timeout
|
|
213
|
+
)
|
|
214
|
+
|
|
215
|
+
# Wait before next poll
|
|
216
|
+
await asyncio.sleep(poll_interval)
|
|
217
|
+
|
|
218
|
+
@overload
|
|
219
|
+
async def create_and_run_async(
|
|
220
|
+
self,
|
|
221
|
+
spec: QueryBuilder,
|
|
222
|
+
*,
|
|
223
|
+
disable_series: bool = True,
|
|
224
|
+
limit: int | None = None,
|
|
225
|
+
poll_interval: float = 1.0,
|
|
226
|
+
timeout: float = 60.0,
|
|
227
|
+
) -> tuple[Query, QueryResult]: ...
|
|
228
|
+
|
|
229
|
+
@overload
|
|
230
|
+
async def create_and_run_async(
|
|
231
|
+
self,
|
|
232
|
+
spec: QuerySpec,
|
|
233
|
+
*,
|
|
234
|
+
dataset: str,
|
|
235
|
+
disable_series: bool = True,
|
|
236
|
+
limit: int | None = None,
|
|
237
|
+
poll_interval: float = 1.0,
|
|
238
|
+
timeout: float = 60.0,
|
|
239
|
+
) -> tuple[Query, QueryResult]: ...
|
|
240
|
+
|
|
241
|
+
async def create_and_run_async(
|
|
242
|
+
self,
|
|
243
|
+
spec: QuerySpec | QueryBuilder,
|
|
244
|
+
*,
|
|
245
|
+
dataset: str | None = None,
|
|
246
|
+
disable_series: bool = True,
|
|
247
|
+
limit: int | None = None,
|
|
248
|
+
poll_interval: float = 1.0,
|
|
249
|
+
timeout: float = 60.0,
|
|
250
|
+
) -> tuple[Query, QueryResult]:
|
|
251
|
+
"""Create a saved query and run it in one call (async).
|
|
252
|
+
|
|
253
|
+
Convenience method that:
|
|
254
|
+
1. Creates a permanent saved query
|
|
255
|
+
2. Executes it and polls for results
|
|
256
|
+
3. Returns both the saved query and results
|
|
257
|
+
|
|
258
|
+
This is useful when you want to save a query for future use
|
|
259
|
+
AND get immediate results.
|
|
260
|
+
|
|
261
|
+
Args:
|
|
262
|
+
spec: Query specification (QueryBuilder or QuerySpec).
|
|
263
|
+
dataset: Dataset slug. Required for QuerySpec, extracted from QueryBuilder.
|
|
264
|
+
disable_series: If True, disable timeseries data and allow up to 10K results
|
|
265
|
+
(default: True for better performance).
|
|
266
|
+
limit: Override result limit (max 10,000 when disable_series=True, 1,000 otherwise).
|
|
267
|
+
Defaults to 10,000 when disable_series=True, 1,000 when False.
|
|
268
|
+
poll_interval: Seconds between poll attempts (default: 1.0).
|
|
269
|
+
timeout: Maximum seconds to wait for results (default: 60.0).
|
|
270
|
+
|
|
271
|
+
Returns:
|
|
272
|
+
Tuple of (Query, QueryResult) - the saved query and execution results.
|
|
273
|
+
|
|
274
|
+
Raises:
|
|
275
|
+
HoneycombTimeoutError: If query doesn't complete within timeout.
|
|
276
|
+
HoneycombValidationError: If the query spec is invalid.
|
|
277
|
+
ValueError: If dataset parameter is misused.
|
|
278
|
+
|
|
279
|
+
Example (QueryBuilder - recommended):
|
|
280
|
+
>>> query, result = await client.query_results.create_and_run_async(
|
|
281
|
+
... QueryBuilder()
|
|
282
|
+
... .dataset("my-dataset")
|
|
283
|
+
... .last_1_hour()
|
|
284
|
+
... .count(),
|
|
285
|
+
... )
|
|
286
|
+
|
|
287
|
+
Example (QuerySpec - advanced):
|
|
288
|
+
>>> query, result = await client.query_results.create_and_run_async(
|
|
289
|
+
... QuerySpec(time_range=3600, calculations=[{"op": "COUNT"}]),
|
|
290
|
+
... dataset="my-dataset"
|
|
291
|
+
... )
|
|
292
|
+
"""
|
|
293
|
+
from ..models.query_builder import QueryBuilder
|
|
294
|
+
|
|
295
|
+
# Extract dataset based on spec type
|
|
296
|
+
if isinstance(spec, QueryBuilder):
|
|
297
|
+
if dataset is not None:
|
|
298
|
+
raise ValueError(
|
|
299
|
+
"dataset parameter not allowed with QueryBuilder. "
|
|
300
|
+
"Use .dataset() on the builder instead."
|
|
301
|
+
)
|
|
302
|
+
dataset = spec.get_dataset()
|
|
303
|
+
else:
|
|
304
|
+
if dataset is None:
|
|
305
|
+
raise ValueError(
|
|
306
|
+
"dataset parameter required when using QuerySpec. "
|
|
307
|
+
"Pass dataset='your-dataset' or use QueryBuilder instead."
|
|
308
|
+
)
|
|
309
|
+
|
|
310
|
+
# Create the saved query
|
|
311
|
+
query = (
|
|
312
|
+
await self._client.queries.create_async(spec, dataset=dataset)
|
|
313
|
+
if not isinstance(spec, QueryBuilder)
|
|
314
|
+
else await self._client.queries.create_async(spec)
|
|
315
|
+
)
|
|
316
|
+
|
|
317
|
+
# Run it and poll for results
|
|
318
|
+
result = await self.run_async(
|
|
319
|
+
dataset,
|
|
320
|
+
query_id=query.id,
|
|
321
|
+
disable_series=disable_series,
|
|
322
|
+
limit=limit,
|
|
323
|
+
poll_interval=poll_interval,
|
|
324
|
+
timeout=timeout,
|
|
325
|
+
)
|
|
326
|
+
|
|
327
|
+
return query, result
|
|
328
|
+
|
|
329
|
+
async def run_all_async(
|
|
330
|
+
self,
|
|
331
|
+
dataset: str,
|
|
332
|
+
spec: QuerySpec,
|
|
333
|
+
sort_field: str | None = None,
|
|
334
|
+
sort_order: str = "descending",
|
|
335
|
+
max_results: int = DEFAULT_MAX_RESULTS,
|
|
336
|
+
poll_interval: float = 1.0,
|
|
337
|
+
timeout: float = 60.0,
|
|
338
|
+
on_page: Callable[[int, int], None] | None = None,
|
|
339
|
+
) -> list[dict]:
|
|
340
|
+
"""Paginate through > 10K results using sort-based cursor pagination.
|
|
341
|
+
|
|
342
|
+
WARNING: This makes multiple API calls (rate limit: 10/min).
|
|
343
|
+
Each page returns up to 10,000 rows. For 100K rows, expect ~10 queries taking ~60 seconds.
|
|
344
|
+
|
|
345
|
+
How it works:
|
|
346
|
+
1. Converts relative time_range to absolute start/end timestamps
|
|
347
|
+
2. Creates saved queries with sort order (no limit in spec)
|
|
348
|
+
3. Executes each with disable_series=True and limit=10000
|
|
349
|
+
4. Captures last value in sort field
|
|
350
|
+
5. Re-runs with HAVING (for calculations) or filter (for breakdowns): sort_field <= last_value
|
|
351
|
+
6. Deduplicates results by composite key (breakdowns + calculation values)
|
|
352
|
+
7. Repeats until: no more results, max_results reached, or >50% duplicates detected
|
|
353
|
+
8. Returns deduplicated list of all rows
|
|
354
|
+
|
|
355
|
+
Args:
|
|
356
|
+
dataset: Dataset slug.
|
|
357
|
+
spec: Query specification (time range, calculations, filters, breakdowns).
|
|
358
|
+
sort_field: Field to sort/paginate by. Defaults to first calculation's alias.
|
|
359
|
+
Must be a calculation alias or breakdown field.
|
|
360
|
+
sort_order: "ascending" or "descending" (default: "descending" for most important first).
|
|
361
|
+
max_results: Maximum total results to return (default: 100,000).
|
|
362
|
+
poll_interval: Seconds between polls for each query.
|
|
363
|
+
timeout: Timeout for each individual query execution.
|
|
364
|
+
on_page: Optional callback(page_num, total_rows) called after each page.
|
|
365
|
+
|
|
366
|
+
Note:
|
|
367
|
+
Each page returns up to 10,000 rows (limit=10000 passed at execution time).
|
|
368
|
+
Saved queries have max spec.limit of 1,000, so we don't set it in QuerySpec.
|
|
369
|
+
|
|
370
|
+
Returns:
|
|
371
|
+
List of all result rows (deduplicated).
|
|
372
|
+
|
|
373
|
+
Raises:
|
|
374
|
+
ValueError: If spec has conflicting orders or invalid configuration.
|
|
375
|
+
HoneycombTimeoutError: If any query times out.
|
|
376
|
+
|
|
377
|
+
Example:
|
|
378
|
+
>>> # Get all high-latency requests in last 24h
|
|
379
|
+
>>> rows = await client.query_results.run_all_async(
|
|
380
|
+
... dataset="my-dataset",
|
|
381
|
+
... spec=QuerySpec(
|
|
382
|
+
... time_range=86400,
|
|
383
|
+
... calculations=[
|
|
384
|
+
... {"op": "AVG", "column": "duration_ms"}
|
|
385
|
+
... ],
|
|
386
|
+
... filters=[{"column": "duration_ms", "op": ">", "value": 1000}],
|
|
387
|
+
... breakdowns=["trace.trace_id", "name"],
|
|
388
|
+
... ),
|
|
389
|
+
... on_page=lambda page, total: print(f"Page {page}: {total} rows so far"),
|
|
390
|
+
... )
|
|
391
|
+
>>> print(f"Total: {len(rows)} slow requests")
|
|
392
|
+
|
|
393
|
+
Note:
|
|
394
|
+
Rate limit is 10 requests/minute. Large result sets will take time.
|
|
395
|
+
The method uses smart stopping: if >50% duplicates detected between
|
|
396
|
+
pages, pagination stops (indicates long tail of identical values).
|
|
397
|
+
"""
|
|
398
|
+
# Validate spec
|
|
399
|
+
if not spec.calculations:
|
|
400
|
+
raise ValueError("spec.calculations is required for run_all_async")
|
|
401
|
+
|
|
402
|
+
# Determine sort field (default to first calculation)
|
|
403
|
+
if sort_field is None:
|
|
404
|
+
# Auto-default from first calculation
|
|
405
|
+
first_calc = spec.calculations[0]
|
|
406
|
+
alias = _get_calc_attr(first_calc, "alias")
|
|
407
|
+
if alias:
|
|
408
|
+
# Alias provided - use it for both orders and access
|
|
409
|
+
sort_field_for_access = alias
|
|
410
|
+
sort_field_for_orders = alias
|
|
411
|
+
else:
|
|
412
|
+
# No alias - use uppercase op for both (results use uppercase like "COUNT")
|
|
413
|
+
op = _get_calc_attr(first_calc, "op", "COUNT")
|
|
414
|
+
sort_field_for_orders = op
|
|
415
|
+
sort_field_for_access = op
|
|
416
|
+
else:
|
|
417
|
+
# User provided sort_field - check if it matches a calculation op
|
|
418
|
+
matched_calc = None
|
|
419
|
+
for calc in spec.calculations:
|
|
420
|
+
# Check if sort_field matches this calculation's op (case-insensitive)
|
|
421
|
+
calc_op = _get_calc_attr(calc, "op", "")
|
|
422
|
+
if calc_op.lower() == sort_field.lower():
|
|
423
|
+
matched_calc = calc
|
|
424
|
+
break
|
|
425
|
+
# Or matches the alias exactly
|
|
426
|
+
calc_alias = _get_calc_attr(calc, "alias")
|
|
427
|
+
if calc_alias == sort_field:
|
|
428
|
+
matched_calc = calc
|
|
429
|
+
break
|
|
430
|
+
|
|
431
|
+
if matched_calc:
|
|
432
|
+
# Matched a calculation - use uppercase op or alias
|
|
433
|
+
matched_alias = _get_calc_attr(matched_calc, "alias")
|
|
434
|
+
if matched_alias:
|
|
435
|
+
sort_field_for_access = matched_alias
|
|
436
|
+
sort_field_for_orders = matched_alias
|
|
437
|
+
else:
|
|
438
|
+
# No alias - use uppercase op for both
|
|
439
|
+
matched_op = _get_calc_attr(matched_calc, "op", "COUNT")
|
|
440
|
+
sort_field_for_orders = matched_op
|
|
441
|
+
sort_field_for_access = matched_op
|
|
442
|
+
else:
|
|
443
|
+
# Assume it's a breakdown field - use as-is
|
|
444
|
+
sort_field_for_access = sort_field
|
|
445
|
+
sort_field_for_orders = sort_field
|
|
446
|
+
|
|
447
|
+
# Check for conflicting orders
|
|
448
|
+
if spec.orders:
|
|
449
|
+
raise ValueError(
|
|
450
|
+
"spec.orders must be None for run_all_async (sorting is managed automatically). "
|
|
451
|
+
"Remove orders or use run_async() instead."
|
|
452
|
+
)
|
|
453
|
+
|
|
454
|
+
# Normalize time range to absolute timestamps
|
|
455
|
+
start_time, end_time = self._normalize_time_range(spec)
|
|
456
|
+
|
|
457
|
+
# Track all rows and seen keys for deduplication
|
|
458
|
+
all_rows: list[dict] = []
|
|
459
|
+
seen_keys: set[tuple] = set()
|
|
460
|
+
cursor_value: Any | None = None
|
|
461
|
+
page_num = 0
|
|
462
|
+
|
|
463
|
+
while len(all_rows) < max_results:
|
|
464
|
+
page_num += 1
|
|
465
|
+
|
|
466
|
+
# Build page spec
|
|
467
|
+
page_spec = spec.model_copy(deep=True)
|
|
468
|
+
page_spec.start_time = start_time
|
|
469
|
+
page_spec.end_time = end_time
|
|
470
|
+
page_spec.time_range = None # Use absolute times instead
|
|
471
|
+
|
|
472
|
+
# Don't set page_spec.limit - saved queries have max 1000
|
|
473
|
+
# Instead pass limit=10000 when creating query result
|
|
474
|
+
page_spec.limit = None
|
|
475
|
+
|
|
476
|
+
# Set sort order
|
|
477
|
+
page_spec.orders = [{"op": sort_field_for_orders, "order": sort_order}]
|
|
478
|
+
|
|
479
|
+
# Add cursor condition for pagination (skip first page)
|
|
480
|
+
if cursor_value is not None:
|
|
481
|
+
# descending: get values <= cursor (lower values)
|
|
482
|
+
# ascending: get values >= cursor (higher values)
|
|
483
|
+
cursor_op = "<=" if sort_order == "descending" else ">="
|
|
484
|
+
|
|
485
|
+
# Check if we're paginating on a calculation or breakdown
|
|
486
|
+
is_calculation = any(
|
|
487
|
+
_get_calc_attr(calc, "alias") == sort_field_for_access
|
|
488
|
+
or _get_calc_attr(calc, "op") == sort_field_for_access
|
|
489
|
+
for calc in spec.calculations
|
|
490
|
+
)
|
|
491
|
+
|
|
492
|
+
if is_calculation:
|
|
493
|
+
# Use HAVING for calculation results
|
|
494
|
+
# HAVING uses "calculate_op" field, not "column"
|
|
495
|
+
cursor_having = {
|
|
496
|
+
"calculate_op": sort_field_for_access, # e.g., "COUNT" or alias
|
|
497
|
+
"op": cursor_op,
|
|
498
|
+
"value": cursor_value,
|
|
499
|
+
}
|
|
500
|
+
page_spec.havings = (page_spec.havings or []) + [cursor_having]
|
|
501
|
+
else:
|
|
502
|
+
# Use filter for breakdown fields
|
|
503
|
+
cursor_filter = {
|
|
504
|
+
"column": sort_field_for_access,
|
|
505
|
+
"op": cursor_op,
|
|
506
|
+
"value": cursor_value,
|
|
507
|
+
}
|
|
508
|
+
page_spec.filters = (page_spec.filters or []) + [cursor_filter]
|
|
509
|
+
|
|
510
|
+
# Debug logging for troubleshooting
|
|
511
|
+
import logging
|
|
512
|
+
|
|
513
|
+
logger = logging.getLogger(__name__)
|
|
514
|
+
if page_num > 1 and cursor_value is not None:
|
|
515
|
+
filter_type = "HAVING" if is_calculation else "filter"
|
|
516
|
+
logger.debug(
|
|
517
|
+
f"Page {page_num}: Using {filter_type} on '{sort_field_for_access}' "
|
|
518
|
+
f"{cursor_op} {cursor_value}"
|
|
519
|
+
)
|
|
520
|
+
|
|
521
|
+
# Run the page query (create saved query then run it)
|
|
522
|
+
try:
|
|
523
|
+
# Create the saved query (page_spec is QuerySpec, pass dataset explicitly)
|
|
524
|
+
query = await self._client.queries.create_async(page_spec, dataset=dataset)
|
|
525
|
+
|
|
526
|
+
# Run it and poll for results
|
|
527
|
+
result = await self.run_async(
|
|
528
|
+
dataset,
|
|
529
|
+
query_id=query.id,
|
|
530
|
+
disable_series=True,
|
|
531
|
+
limit=10000, # Override to get max results per page
|
|
532
|
+
poll_interval=poll_interval,
|
|
533
|
+
timeout=timeout,
|
|
534
|
+
)
|
|
535
|
+
except Exception:
|
|
536
|
+
# Log the spec that failed for debugging
|
|
537
|
+
logger.error(f"Failed to create/run query on page {page_num}")
|
|
538
|
+
logger.error(f"Spec: {page_spec.model_dump_for_api()}")
|
|
539
|
+
raise
|
|
540
|
+
|
|
541
|
+
if not result.data or not result.data.results or len(result.data.results) == 0:
|
|
542
|
+
break # No more results
|
|
543
|
+
|
|
544
|
+
# Deduplicate and collect new rows (use unwrapped rows)
|
|
545
|
+
new_rows_count = 0
|
|
546
|
+
for row in result.data.rows:
|
|
547
|
+
# Build composite unique key from breakdowns + calculations
|
|
548
|
+
key = self._build_row_key(row, spec)
|
|
549
|
+
|
|
550
|
+
if key not in seen_keys:
|
|
551
|
+
seen_keys.add(key)
|
|
552
|
+
all_rows.append(row)
|
|
553
|
+
new_rows_count += 1
|
|
554
|
+
|
|
555
|
+
# Progress callback
|
|
556
|
+
if on_page:
|
|
557
|
+
on_page(page_num, len(all_rows))
|
|
558
|
+
|
|
559
|
+
# Smart stopping: if >50% duplicates, we've hit a long tail
|
|
560
|
+
duplication_rate = 1.0 - (new_rows_count / len(result.data.rows))
|
|
561
|
+
if duplication_rate > DUPLICATION_THRESHOLD:
|
|
562
|
+
break # Stop pagination (long tail of identical values)
|
|
563
|
+
|
|
564
|
+
# Check if this was the last page (less than 10K means no more results)
|
|
565
|
+
if len(result.data.rows) < 10000:
|
|
566
|
+
break
|
|
567
|
+
|
|
568
|
+
# Update cursor to last row's sort value
|
|
569
|
+
try:
|
|
570
|
+
cursor_value = result.data.rows[-1][sort_field_for_access]
|
|
571
|
+
except (KeyError, IndexError) as e:
|
|
572
|
+
raise ValueError(
|
|
573
|
+
f"Sort field '{sort_field_for_access}' not found in query results. "
|
|
574
|
+
"Ensure it's a calculation alias or breakdown field."
|
|
575
|
+
) from e
|
|
576
|
+
|
|
577
|
+
return all_rows
|
|
578
|
+
|
|
579
|
+
def _normalize_time_range(self, spec: QuerySpec) -> tuple[int, int]:
|
|
580
|
+
"""Convert relative time_range to absolute start/end timestamps.
|
|
581
|
+
|
|
582
|
+
Args:
|
|
583
|
+
spec: Query specification with time_range or start/end times.
|
|
584
|
+
|
|
585
|
+
Returns:
|
|
586
|
+
Tuple of (start_time, end_time) as Unix timestamps.
|
|
587
|
+
"""
|
|
588
|
+
now = int(time_module.time())
|
|
589
|
+
|
|
590
|
+
# If absolute times provided, use them
|
|
591
|
+
if spec.start_time is not None and spec.end_time is not None:
|
|
592
|
+
return spec.start_time, spec.end_time
|
|
593
|
+
|
|
594
|
+
# If only start_time, add time_range
|
|
595
|
+
if spec.start_time is not None:
|
|
596
|
+
time_range = spec.time_range or 3600 # Default 1 hour
|
|
597
|
+
return spec.start_time, spec.start_time + time_range
|
|
598
|
+
|
|
599
|
+
# If only end_time, subtract time_range
|
|
600
|
+
if spec.end_time is not None:
|
|
601
|
+
time_range = spec.time_range or 3600
|
|
602
|
+
return spec.end_time - time_range, spec.end_time
|
|
603
|
+
|
|
604
|
+
# Relative from now
|
|
605
|
+
time_range = spec.time_range or 3600
|
|
606
|
+
return now - time_range, now
|
|
607
|
+
|
|
```python
    def _build_row_key(self, row: dict, spec: QuerySpec) -> tuple:
        """Build composite unique key from breakdowns and calculation values.

        The key consists of:
        - All breakdown field values
        - All calculation result values (in order)

        This ensures uniqueness based on the group-by dimensions and aggregated values.

        Args:
            row: Query result row.
            spec: Query specification (for breakdowns and calculations).

        Returns:
            Tuple of values representing the unique key.
        """
        key_parts = []

        # Add breakdown values
        if spec.breakdowns:
            for breakdown in spec.breakdowns:
                key_parts.append(row.get(breakdown))

        # Add calculation values
        if spec.calculations:
            for calc in spec.calculations:
                # Use alias if present, otherwise uppercase op (results use "COUNT" not "count")
                field = _get_calc_attr(calc, "alias") or _get_calc_attr(calc, "op", "COUNT")
                key_parts.append(row.get(field))

        return tuple(key_parts)
```
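For illustration, a query grouped by `service.name` with a single un-aliased `COUNT` calculation keys each result row on the pair of breakdown value and aggregate value; the row below is hypothetical:

```python
# Illustrative only; mirrors _build_row_key for a hypothetical result row.
row = {"service.name": "checkout", "COUNT": 1523}
breakdowns = ["service.name"]
calc_fields = ["COUNT"]  # an alias would be used here instead, if one were set

key = tuple([row.get(b) for b in breakdowns] + [row.get(f) for f in calc_fields])
print(key)  # ('checkout', 1523)
```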
```python
    # -------------------------------------------------------------------------
    # Sync methods
    # -------------------------------------------------------------------------

    def create(
        self,
        dataset: str,
        query_id: str,
        disable_series: bool = True,
        limit: int | None = None,
    ) -> str:
        """Create a query result (start query execution).

        Args:
            dataset: The dataset slug.
            query_id: Saved query ID (from queries.create).
            disable_series: If True, disable timeseries data and allow up to 10K results
                (default: True for better performance).
            limit: Override result limit (max 10,000 when disable_series=True, 1,000 otherwise).
                Defaults to 10,000 when disable_series=True, 1,000 when False.

        Returns:
            Query result ID for polling.

        Raises:
            HoneycombNotFoundError: If the query doesn't exist.
            HoneycombValidationError: If the query spec is invalid.

        Note:
            Query Results API requires Enterprise plan.
        """
        if not self._client.is_sync:
            raise RuntimeError("Use create_async() for async mode, or pass sync=True to client")

        json_data = {
            "query_id": query_id,
            "disable_series": disable_series,
        }

        # Set limit - default to 10K when disable_series=True, 1K otherwise
        if limit is not None:
            json_data["limit"] = limit
        elif disable_series:
            json_data["limit"] = 10000
        else:
            json_data["limit"] = 1000

        data = self._post_sync(self._build_path(dataset), json=json_data)
        return data["id"]

    def get(self, dataset: str, query_result_id: str) -> QueryResult:
        """Get query result status/data.

        Args:
            dataset: The dataset slug.
            query_result_id: Query result ID.

        Returns:
            QueryResult with data if query is complete.

        Raises:
            HoneycombNotFoundError: If the query result doesn't exist.
        """
        if not self._client.is_sync:
            raise RuntimeError("Use get_async() for async mode, or pass sync=True to client")
        data = self._get_sync(self._build_path(dataset, query_result_id))
        return self._parse_model(QueryResult, data)
```
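`create()` and `get()` are the low-level start/poll pair that `run()` below wraps. If custom polling behaviour is needed, the manual loop looks roughly like this (sketch only; `client` is assumed to be a sync-mode client built elsewhere, and the dataset slug and query ID are placeholders):

```python
# Usage sketch with placeholder identifiers; assumes a sync-mode client.
import time

result_id = client.query_results.create("my-dataset", query_id=saved_query.id)

while True:
    result = client.query_results.get("my-dataset", result_id)
    # Mirrors run()'s completion check: data and results are populated once the query finishes.
    if result.data is not None and result.data.results is not None:
        break
    time.sleep(1.0)

print(len(result.data.rows), "rows")  # up to 10K rows with disable_series=True
```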
```python
    def run(
        self,
        dataset: str,
        query_id: str,
        disable_series: bool = True,
        limit: int | None = None,
        poll_interval: float = 1.0,
        timeout: float = 60.0,
    ) -> QueryResult:
        """Run a saved query and poll for results.

        Convenience method that creates a query result and polls until complete.

        Args:
            dataset: The dataset slug.
            query_id: Saved query ID (from queries.create).
            disable_series: If True, disable timeseries data and allow up to 10K results
                (default: True for better performance).
            limit: Override result limit (max 10,000 when disable_series=True, 1,000 otherwise).
                Defaults to 10,000 when disable_series=True, 1,000 when False.
            poll_interval: Seconds between poll attempts (default: 1.0).
            timeout: Maximum seconds to wait for results (default: 60.0).

        Returns:
            QueryResult with completed data (up to 10K rows if disable_series=True).

        Raises:
            HoneycombTimeoutError: If query doesn't complete within timeout.
            HoneycombNotFoundError: If the query doesn't exist.

        Note:
            Query Results API requires Enterprise plan.
        """
        if not self._client.is_sync:
            raise RuntimeError("Use run_async() for async mode, or pass sync=True to client")

        import time

        from ..exceptions import HoneycombTimeoutError

        # Create the query result
        result_id = self.create(
            dataset, query_id=query_id, disable_series=disable_series, limit=limit
        )

        # Poll for completion
        start_time = time.time()
        while True:
            result = self.get(dataset, result_id)

            # Check if query is complete (has results)
            if result.data is not None and result.data.results is not None:
                return result

            # Check timeout
            elapsed = time.time() - start_time
            if elapsed >= timeout:
                raise HoneycombTimeoutError(
                    f"Query did not complete within {timeout}s", timeout=timeout
                )

            # Wait before next poll
            time.sleep(poll_interval)
```
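A typical `run()` call with hypothetical identifiers, a faster poll interval, and a tighter timeout than the 60 s default; the handler shows the timeout failure mode documented above (the top-level import path for the exception is assumed to mirror the relative import inside the method):

```python
# Usage sketch with placeholder identifiers; the public import path is assumed.
from honeycomb.exceptions import HoneycombTimeoutError

try:
    result = client.query_results.run(
        "my-dataset",
        query_id=saved_query.id,
        poll_interval=0.5,
        timeout=30.0,
    )
    print(len(result.data.rows), "rows")
except HoneycombTimeoutError:
    print("Query did not complete within 30s; narrow the time range or lower the limit.")
```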
```python
    @overload
    def create_and_run(
        self,
        spec: QueryBuilder,
        *,
        disable_series: bool = True,
        limit: int | None = None,
        poll_interval: float = 1.0,
        timeout: float = 60.0,
    ) -> tuple[Query, QueryResult]: ...

    @overload
    def create_and_run(
        self,
        spec: QuerySpec,
        *,
        dataset: str,
        disable_series: bool = True,
        limit: int | None = None,
        poll_interval: float = 1.0,
        timeout: float = 60.0,
    ) -> tuple[Query, QueryResult]: ...

    def create_and_run(
        self,
        spec: QuerySpec | QueryBuilder,
        *,
        dataset: str | None = None,
        disable_series: bool = True,
        limit: int | None = None,
        poll_interval: float = 1.0,
        timeout: float = 60.0,
    ) -> tuple[Query, QueryResult]:
        """Create a saved query and run it in one call.

        Convenience method that:
        1. Creates a permanent saved query
        2. Executes it and polls for results
        3. Returns both the saved query and results

        This is useful when you want to save a query for future use
        AND get immediate results.

        Args:
            spec: Query specification (QueryBuilder or QuerySpec).
            dataset: Dataset slug. Required for QuerySpec, extracted from QueryBuilder.
            disable_series: If True, disable timeseries data and allow up to 10K results
                (default: True for better performance).
            limit: Override result limit (max 10,000 when disable_series=True, 1,000 otherwise).
                Defaults to 10,000 when disable_series=True, 1,000 when False.
            poll_interval: Seconds between poll attempts (default: 1.0).
            timeout: Maximum seconds to wait for results (default: 60.0).

        Returns:
            Tuple of (Query, QueryResult) - the saved query and execution results.

        Raises:
            HoneycombTimeoutError: If query doesn't complete within timeout.
            HoneycombValidationError: If the query spec is invalid.
            ValueError: If dataset is passed with a QueryBuilder, or omitted with a QuerySpec.

        Example (QueryBuilder - recommended):
            >>> query, result = client.query_results.create_and_run(
            ...     QueryBuilder()
            ...     .dataset("my-dataset")
            ...     .last_1_hour()
            ...     .count(),
            ... )

        Example (QuerySpec - advanced):
            >>> query, result = client.query_results.create_and_run(
            ...     QuerySpec(time_range=3600, calculations=[{"op": "COUNT"}]),
            ...     dataset="my-dataset"
            ... )
        """
        if not self._client.is_sync:
            raise RuntimeError(
                "Use create_and_run_async() for async mode, or pass sync=True to client"
            )

        from ..models.query_builder import QueryBuilder

        # Extract dataset based on spec type
        if isinstance(spec, QueryBuilder):
            if dataset is not None:
                raise ValueError(
                    "dataset parameter not allowed with QueryBuilder. "
                    "Use .dataset() on the builder instead."
                )
            dataset = spec.get_dataset()
        else:
            if dataset is None:
                raise ValueError(
                    "dataset parameter required when using QuerySpec. "
                    "Pass dataset='your-dataset' or use QueryBuilder instead."
                )

        # Create the saved query
        query = (
            self._client.queries.create(spec, dataset=dataset)
            if not isinstance(spec, QueryBuilder)
            else self._client.queries.create(spec)
        )

        # Run it and poll for results
        result = self.run(
            dataset,
            query_id=query.id,
            disable_series=disable_series,
            limit=limit,
            poll_interval=poll_interval,
            timeout=timeout,
        )

        return query, result
```
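The only behavioural difference between the two overloads is how the dataset is resolved, which is also where both documented `ValueError`s come from. A condensed, standalone sketch of that rule (the `QueryBuilder` below is a minimal stand-in, not the package's class):

```python
# Illustrative sketch of the dataset-dispatch rule; QueryBuilder here is a stand-in class.
class QueryBuilder:
    def __init__(self, dataset: str | None = None) -> None:
        self._dataset = dataset

    def get_dataset(self) -> str | None:
        return self._dataset


def resolve_dataset(spec, dataset: str | None) -> str | None:
    if isinstance(spec, QueryBuilder):
        if dataset is not None:
            raise ValueError("dataset parameter not allowed with QueryBuilder")
        return spec.get_dataset()  # the builder carries its own dataset
    if dataset is None:
        raise ValueError("dataset parameter required when using QuerySpec")
    return dataset


print(resolve_dataset(QueryBuilder("my-dataset"), None))    # my-dataset
print(resolve_dataset({"time_range": 3600}, "my-dataset"))  # my-dataset (QuerySpec-style spec)
```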