infrahub-server 1.1.7__py3-none-any.whl → 1.2.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/api/artifact.py +16 -4
- infrahub/api/dependencies.py +8 -0
- infrahub/api/oauth2.py +0 -1
- infrahub/api/oidc.py +0 -1
- infrahub/api/query.py +18 -7
- infrahub/api/schema.py +32 -6
- infrahub/api/transformation.py +12 -5
- infrahub/{message_bus/messages/check_artifact_create.py → artifacts/models.py} +5 -3
- infrahub/{message_bus/operations/check/artifact.py → artifacts/tasks.py} +26 -25
- infrahub/cli/__init__.py +0 -2
- infrahub/cli/db.py +6 -7
- infrahub/cli/events.py +8 -3
- infrahub/cli/git_agent.py +9 -7
- infrahub/cli/tasks.py +4 -6
- infrahub/computed_attribute/models.py +1 -1
- infrahub/computed_attribute/tasks.py +64 -17
- infrahub/computed_attribute/triggers.py +90 -0
- infrahub/config.py +1 -1
- infrahub/context.py +39 -0
- infrahub/core/account.py +5 -8
- infrahub/core/attribute.py +50 -21
- infrahub/core/branch/models.py +4 -4
- infrahub/core/branch/tasks.py +130 -125
- infrahub/core/changelog/__init__.py +0 -0
- infrahub/core/changelog/diff.py +283 -0
- infrahub/core/changelog/models.py +499 -0
- infrahub/core/constants/__init__.py +43 -2
- infrahub/core/constants/infrahubkind.py +1 -0
- infrahub/core/constants/schema.py +2 -0
- infrahub/core/diff/combiner.py +1 -1
- infrahub/core/diff/enricher/cardinality_one.py +6 -1
- infrahub/core/diff/enricher/hierarchy.py +22 -7
- infrahub/core/diff/enricher/labels.py +6 -1
- infrahub/core/diff/enricher/path_identifier.py +5 -1
- infrahub/core/diff/enricher/summary_counts.py +107 -0
- infrahub/core/diff/merger/merger.py +3 -1
- infrahub/core/diff/model/path.py +34 -11
- infrahub/core/diff/parent_node_adder.py +78 -0
- infrahub/core/diff/payload_builder.py +13 -2
- infrahub/core/diff/query/all_conflicts.py +1 -1
- infrahub/core/diff/query/artifact.py +1 -1
- infrahub/core/diff/query/delete_query.py +1 -1
- infrahub/core/diff/query/diff_get.py +1 -1
- infrahub/core/diff/query/diff_summary.py +1 -1
- infrahub/core/diff/query/field_specifiers.py +1 -1
- infrahub/core/diff/query/field_summary.py +1 -1
- infrahub/core/diff/query/filters.py +2 -2
- infrahub/core/diff/query/get_conflict_query.py +1 -1
- infrahub/core/diff/query/has_conflicts_query.py +1 -1
- infrahub/core/diff/query/merge.py +3 -3
- infrahub/core/diff/query/merge_tracking_id.py +1 -1
- infrahub/core/diff/query/roots_metadata.py +1 -1
- infrahub/core/diff/query/save.py +191 -185
- infrahub/core/diff/query/summary_counts_enricher.py +52 -5
- infrahub/core/diff/query/time_range_query.py +1 -1
- infrahub/core/diff/query/update_conflict_query.py +1 -1
- infrahub/core/diff/repository/deserializer.py +9 -4
- infrahub/core/diff/repository/repository.py +156 -38
- infrahub/core/diff/tasks.py +13 -12
- infrahub/core/enums.py +1 -1
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/graph/index.py +3 -0
- infrahub/core/integrity/object_conflict/conflict_recorder.py +1 -1
- infrahub/core/ipam/reconciler.py +1 -1
- infrahub/core/ipam/tasks.py +2 -3
- infrahub/core/manager.py +20 -15
- infrahub/core/merge.py +5 -2
- infrahub/core/migrations/graph/__init__.py +4 -0
- infrahub/core/migrations/graph/m001_add_version_to_graph.py +1 -1
- infrahub/core/migrations/graph/m002_attribute_is_default.py +2 -2
- infrahub/core/migrations/graph/m003_relationship_parent_optional.py +2 -2
- infrahub/core/migrations/graph/m004_add_attr_documentation.py +1 -1
- infrahub/core/migrations/graph/m005_add_rel_read_only.py +1 -1
- infrahub/core/migrations/graph/m006_add_rel_on_delete.py +1 -1
- infrahub/core/migrations/graph/m007_add_rel_allow_override.py +1 -1
- infrahub/core/migrations/graph/m008_add_human_friendly_id.py +1 -1
- infrahub/core/migrations/graph/m009_add_generate_profile_attr.py +1 -1
- infrahub/core/migrations/graph/m010_add_generate_profile_attr_generic.py +1 -1
- infrahub/core/migrations/graph/m011_remove_profile_relationship_schema.py +2 -2
- infrahub/core/migrations/graph/m012_convert_account_generic.py +12 -23
- infrahub/core/migrations/graph/m013_convert_git_password_credential.py +7 -11
- infrahub/core/migrations/graph/m014_remove_index_attr_value.py +2 -2
- infrahub/core/migrations/graph/m015_diff_format_update.py +1 -1
- infrahub/core/migrations/graph/m016_diff_delete_bug_fix.py +1 -1
- infrahub/core/migrations/graph/m017_add_core_profile.py +1 -1
- infrahub/core/migrations/graph/m018_uniqueness_nulls.py +2 -2
- infrahub/core/migrations/graph/m019_restore_rels_to_time.py +256 -0
- infrahub/core/migrations/graph/m020_add_generate_template_attr.py +48 -0
- infrahub/core/migrations/query/attribute_add.py +1 -1
- infrahub/core/migrations/query/attribute_rename.py +1 -1
- infrahub/core/migrations/query/delete_element_in_schema.py +1 -1
- infrahub/core/migrations/query/node_duplicate.py +39 -19
- infrahub/core/migrations/query/relationship_duplicate.py +1 -1
- infrahub/core/migrations/query/schema_attribute_update.py +1 -1
- infrahub/core/migrations/schema/node_attribute_remove.py +1 -1
- infrahub/core/migrations/schema/node_remove.py +27 -13
- infrahub/core/migrations/schema/tasks.py +5 -5
- infrahub/core/migrations/shared.py +4 -4
- infrahub/core/models.py +7 -8
- infrahub/core/node/__init__.py +164 -45
- infrahub/core/node/base.py +1 -1
- infrahub/core/node/delete_validator.py +4 -4
- infrahub/core/node/ipam.py +7 -7
- infrahub/core/node/resource_manager/ip_prefix_pool.py +8 -5
- infrahub/core/node/standard.py +3 -5
- infrahub/core/property.py +1 -1
- infrahub/core/protocols.py +6 -0
- infrahub/core/protocols_base.py +4 -2
- infrahub/core/query/__init__.py +2 -5
- infrahub/core/query/attribute.py +9 -9
- infrahub/core/query/branch.py +5 -5
- infrahub/core/query/delete.py +1 -1
- infrahub/core/query/diff.py +4 -4
- infrahub/core/query/ipam.py +4 -4
- infrahub/core/query/node.py +11 -12
- infrahub/core/query/relationship.py +211 -25
- infrahub/core/query/resource_manager.py +10 -10
- infrahub/core/query/standard_node.py +6 -6
- infrahub/core/query/task.py +3 -3
- infrahub/core/query/task_log.py +1 -1
- infrahub/core/query/utils.py +5 -5
- infrahub/core/registry.py +0 -2
- infrahub/core/relationship/constraints/count.py +1 -1
- infrahub/core/relationship/constraints/peer_kind.py +1 -1
- infrahub/core/relationship/model.py +76 -38
- infrahub/core/schema/__init__.py +6 -4
- infrahub/core/schema/attribute_schema.py +8 -0
- infrahub/core/schema/basenode_schema.py +13 -3
- infrahub/core/schema/definitions/core/__init__.py +153 -0
- infrahub/core/schema/definitions/core/account.py +168 -0
- infrahub/core/schema/definitions/core/artifact.py +127 -0
- infrahub/core/schema/definitions/core/builtin.py +21 -0
- infrahub/core/schema/definitions/core/check.py +60 -0
- infrahub/core/schema/definitions/core/generator.py +96 -0
- infrahub/core/schema/definitions/core/graphql_query.py +77 -0
- infrahub/core/schema/definitions/core/group.py +105 -0
- infrahub/core/schema/definitions/core/ipam.py +252 -0
- infrahub/core/schema/definitions/core/lineage.py +17 -0
- infrahub/core/schema/definitions/core/menu.py +46 -0
- infrahub/core/schema/definitions/core/permission.py +161 -0
- infrahub/core/schema/definitions/core/profile.py +29 -0
- infrahub/core/schema/definitions/core/propose_change.py +88 -0
- infrahub/core/schema/definitions/core/propose_change_comment.py +188 -0
- infrahub/core/schema/definitions/core/propose_change_validator.py +326 -0
- infrahub/core/schema/definitions/core/repository.py +280 -0
- infrahub/core/schema/definitions/core/resource_pool.py +180 -0
- infrahub/core/schema/definitions/core/template.py +12 -0
- infrahub/core/schema/definitions/core/transform.py +87 -0
- infrahub/core/schema/definitions/core/webhook.py +108 -0
- infrahub/core/schema/definitions/internal.py +16 -0
- infrahub/core/schema/generated/genericnode_schema.py +5 -0
- infrahub/core/schema/generated/node_schema.py +5 -0
- infrahub/core/schema/generic_schema.py +5 -1
- infrahub/core/schema/manager.py +45 -42
- infrahub/core/schema/node_schema.py +4 -0
- infrahub/core/schema/profile_schema.py +4 -0
- infrahub/core/schema/relationship_schema.py +10 -2
- infrahub/core/schema/schema_branch.py +260 -16
- infrahub/core/schema/template_schema.py +36 -0
- infrahub/core/task/user_task.py +7 -5
- infrahub/core/timestamp.py +3 -3
- infrahub/core/utils.py +2 -2
- infrahub/core/validators/attribute/choices.py +1 -1
- infrahub/core/validators/attribute/enum.py +1 -1
- infrahub/core/validators/attribute/kind.py +1 -1
- infrahub/core/validators/attribute/length.py +1 -1
- infrahub/core/validators/attribute/optional.py +1 -1
- infrahub/core/validators/attribute/regex.py +1 -1
- infrahub/core/validators/attribute/unique.py +1 -1
- infrahub/core/validators/checks_runner.py +37 -0
- infrahub/core/validators/node/generate_profile.py +1 -1
- infrahub/core/validators/node/hierarchy.py +1 -1
- infrahub/core/validators/query.py +1 -1
- infrahub/core/validators/relationship/count.py +1 -1
- infrahub/core/validators/relationship/optional.py +1 -1
- infrahub/core/validators/relationship/peer.py +1 -1
- infrahub/core/validators/tasks.py +8 -6
- infrahub/core/validators/uniqueness/query.py +1 -1
- infrahub/database/__init__.py +3 -2
- infrahub/database/memgraph.py +1 -1
- infrahub/dependencies/builder/diff/combiner.py +1 -1
- infrahub/dependencies/builder/diff/conflicts_enricher.py +1 -1
- infrahub/dependencies/builder/diff/deserializer.py +4 -2
- infrahub/dependencies/builder/diff/enricher/hierarchy.py +3 -1
- infrahub/dependencies/builder/diff/enricher/summary_counts.py +8 -0
- infrahub/dependencies/builder/diff/parent_node_adder.py +8 -0
- infrahub/events/artifact_action.py +76 -0
- infrahub/events/branch_action.py +50 -21
- infrahub/events/group_action.py +117 -0
- infrahub/events/models.py +164 -51
- infrahub/events/node_action.py +70 -8
- infrahub/events/repository_action.py +8 -8
- infrahub/events/schema_action.py +21 -8
- infrahub/exceptions.py +9 -0
- infrahub/generators/models.py +1 -0
- infrahub/generators/tasks.py +34 -15
- infrahub/git/base.py +3 -5
- infrahub/git/constants.py +0 -1
- infrahub/git/integrator.py +60 -36
- infrahub/git/models.py +80 -1
- infrahub/git/repository.py +7 -8
- infrahub/git/tasks.py +432 -112
- infrahub/git_credential/helper.py +2 -3
- infrahub/graphql/analyzer.py +572 -11
- infrahub/graphql/app.py +34 -26
- infrahub/graphql/auth/query_permission_checker/anonymous_checker.py +5 -5
- infrahub/graphql/auth/query_permission_checker/default_branch_checker.py +4 -4
- infrahub/graphql/auth/query_permission_checker/merge_operation_checker.py +4 -4
- infrahub/graphql/auth/query_permission_checker/object_permission_checker.py +28 -35
- infrahub/graphql/auth/query_permission_checker/super_admin_checker.py +5 -5
- infrahub/graphql/context.py +33 -0
- infrahub/graphql/enums.py +1 -1
- infrahub/graphql/initialization.py +5 -1
- infrahub/graphql/loaders/node.py +2 -2
- infrahub/graphql/manager.py +63 -63
- infrahub/graphql/mutations/account.py +20 -13
- infrahub/graphql/mutations/artifact_definition.py +16 -12
- infrahub/graphql/mutations/branch.py +86 -40
- infrahub/graphql/mutations/computed_attribute.py +24 -15
- infrahub/graphql/mutations/diff.py +33 -17
- infrahub/graphql/mutations/diff_conflict.py +14 -8
- infrahub/graphql/mutations/generator.py +83 -0
- infrahub/graphql/mutations/graphql_query.py +19 -11
- infrahub/graphql/mutations/ipam.py +25 -23
- infrahub/graphql/mutations/main.py +233 -45
- infrahub/graphql/mutations/menu.py +10 -10
- infrahub/graphql/mutations/proposed_change.py +36 -28
- infrahub/graphql/mutations/relationship.py +341 -130
- infrahub/graphql/mutations/repository.py +41 -35
- infrahub/graphql/mutations/resource_manager.py +26 -26
- infrahub/graphql/mutations/schema.py +66 -33
- infrahub/graphql/mutations/tasks.py +10 -7
- infrahub/graphql/parser.py +1 -1
- infrahub/graphql/permissions.py +3 -10
- infrahub/graphql/queries/account.py +22 -18
- infrahub/graphql/queries/branch.py +6 -4
- infrahub/graphql/queries/diff/tree.py +63 -52
- infrahub/graphql/queries/event.py +115 -0
- infrahub/graphql/queries/internal.py +3 -3
- infrahub/graphql/queries/ipam.py +23 -18
- infrahub/graphql/queries/relationship.py +11 -10
- infrahub/graphql/queries/resource_manager.py +37 -25
- infrahub/graphql/queries/search.py +9 -8
- infrahub/graphql/queries/status.py +12 -9
- infrahub/graphql/queries/task.py +11 -9
- infrahub/graphql/resolvers/resolver.py +69 -43
- infrahub/graphql/resolvers/single_relationship.py +16 -10
- infrahub/graphql/schema.py +4 -0
- infrahub/graphql/subscription/__init__.py +1 -1
- infrahub/graphql/subscription/events.py +1 -1
- infrahub/graphql/subscription/graphql_query.py +8 -8
- infrahub/graphql/types/branch.py +2 -2
- infrahub/graphql/types/common.py +6 -1
- infrahub/graphql/types/context.py +12 -0
- infrahub/graphql/types/enums.py +2 -0
- infrahub/graphql/types/event.py +158 -0
- infrahub/graphql/types/interface.py +2 -2
- infrahub/graphql/types/node.py +3 -3
- infrahub/graphql/types/permission.py +2 -2
- infrahub/graphql/types/relationship.py +3 -3
- infrahub/graphql/types/standard_node.py +9 -11
- infrahub/graphql/utils.py +28 -182
- infrahub/groups/tasks.py +2 -3
- infrahub/lock.py +21 -21
- infrahub/menu/generator.py +0 -1
- infrahub/menu/menu.py +116 -138
- infrahub/menu/models.py +4 -4
- infrahub/message_bus/__init__.py +11 -13
- infrahub/message_bus/messages/__init__.py +0 -14
- infrahub/message_bus/messages/check_generator_run.py +1 -3
- infrahub/message_bus/messages/event_branch_merge.py +3 -0
- infrahub/message_bus/messages/proposed_change/request_proposedchange_refreshartifacts.py +6 -0
- infrahub/message_bus/messages/request_proposedchange_pipeline.py +2 -0
- infrahub/message_bus/messages/send_echo_request.py +1 -1
- infrahub/message_bus/operations/__init__.py +4 -13
- infrahub/message_bus/operations/check/__init__.py +2 -2
- infrahub/message_bus/operations/check/generator.py +1 -3
- infrahub/message_bus/operations/event/branch.py +7 -3
- infrahub/message_bus/operations/event/schema.py +1 -1
- infrahub/message_bus/operations/event/worker.py +0 -3
- infrahub/message_bus/operations/finalize/validator.py +1 -1
- infrahub/message_bus/operations/git/file.py +2 -2
- infrahub/message_bus/operations/git/repository.py +1 -1
- infrahub/message_bus/operations/requests/__init__.py +0 -4
- infrahub/message_bus/operations/requests/generator_definition.py +2 -4
- infrahub/message_bus/operations/requests/proposed_change.py +37 -20
- infrahub/message_bus/operations/send/echo.py +1 -1
- infrahub/message_bus/types.py +1 -1
- infrahub/permissions/globals.py +15 -0
- infrahub/pools/prefix.py +29 -165
- infrahub/prefect_server/__init__.py +0 -0
- infrahub/prefect_server/app.py +18 -0
- infrahub/prefect_server/database.py +20 -0
- infrahub/prefect_server/events.py +28 -0
- infrahub/prefect_server/models.py +46 -0
- infrahub/proposed_change/models.py +18 -1
- infrahub/proposed_change/tasks.py +195 -53
- infrahub/pytest_plugin.py +4 -4
- infrahub/server.py +13 -12
- infrahub/services/__init__.py +148 -63
- infrahub/services/adapters/cache/__init__.py +11 -11
- infrahub/services/adapters/cache/nats.py +42 -25
- infrahub/services/adapters/cache/redis.py +3 -11
- infrahub/services/adapters/event/__init__.py +10 -18
- infrahub/services/adapters/http/__init__.py +0 -5
- infrahub/services/adapters/http/httpx.py +22 -15
- infrahub/services/adapters/message_bus/__init__.py +25 -8
- infrahub/services/adapters/message_bus/local.py +9 -7
- infrahub/services/adapters/message_bus/nats.py +14 -8
- infrahub/services/adapters/message_bus/rabbitmq.py +23 -10
- infrahub/services/adapters/workflow/__init__.py +11 -8
- infrahub/services/adapters/workflow/local.py +27 -6
- infrahub/services/adapters/workflow/worker.py +23 -7
- infrahub/services/component.py +43 -40
- infrahub/services/protocols.py +7 -7
- infrahub/services/scheduler.py +30 -29
- infrahub/storage.py +2 -4
- infrahub/task_manager/constants.py +1 -1
- infrahub/task_manager/event.py +261 -0
- infrahub/task_manager/models.py +147 -3
- infrahub/task_manager/task.py +1 -1
- infrahub/tasks/artifact.py +19 -18
- infrahub/tasks/registry.py +1 -1
- infrahub/tasks/telemetry.py +13 -14
- infrahub/transformations/tasks.py +3 -5
- infrahub/trigger/__init__.py +0 -0
- infrahub/trigger/catalogue.py +16 -0
- infrahub/trigger/constants.py +9 -0
- infrahub/trigger/models.py +105 -0
- infrahub/trigger/tasks.py +91 -0
- infrahub/types.py +1 -1
- infrahub/utils.py +1 -1
- infrahub/webhook/constants.py +0 -2
- infrahub/webhook/models.py +161 -40
- infrahub/webhook/tasks.py +123 -202
- infrahub/webhook/triggers.py +27 -0
- infrahub/workers/infrahub_async.py +36 -25
- infrahub/workers/utils.py +63 -0
- infrahub/workflows/catalogue.py +71 -52
- infrahub/workflows/initialization.py +14 -8
- infrahub/workflows/models.py +28 -4
- infrahub/workflows/utils.py +1 -1
- infrahub_sdk/batch.py +2 -2
- infrahub_sdk/client.py +8 -0
- infrahub_sdk/config.py +1 -1
- infrahub_sdk/ctl/branch.py +3 -2
- infrahub_sdk/ctl/check.py +4 -4
- infrahub_sdk/ctl/cli_commands.py +16 -11
- infrahub_sdk/ctl/exceptions.py +0 -6
- infrahub_sdk/ctl/exporter.py +1 -1
- infrahub_sdk/ctl/generator.py +5 -5
- infrahub_sdk/ctl/importer.py +3 -2
- infrahub_sdk/ctl/menu.py +1 -1
- infrahub_sdk/ctl/object.py +1 -1
- infrahub_sdk/ctl/repository.py +23 -15
- infrahub_sdk/ctl/schema.py +2 -2
- infrahub_sdk/ctl/utils.py +6 -21
- infrahub_sdk/ctl/validate.py +2 -1
- infrahub_sdk/data.py +1 -1
- infrahub_sdk/exceptions.py +12 -0
- infrahub_sdk/generator.py +3 -0
- infrahub_sdk/node.py +5 -8
- infrahub_sdk/protocols.py +20 -8
- infrahub_sdk/schema/__init__.py +14 -5
- infrahub_sdk/schema/main.py +7 -0
- infrahub_sdk/task/__init__.py +1 -0
- infrahub_sdk/task/constants.py +3 -0
- infrahub_sdk/task/exceptions.py +25 -0
- infrahub_sdk/task/manager.py +545 -0
- infrahub_sdk/task/models.py +74 -0
- infrahub_sdk/testing/docker.py +30 -0
- infrahub_sdk/timestamp.py +134 -33
- infrahub_sdk/transfer/exporter/json.py +1 -1
- infrahub_sdk/utils.py +39 -1
- infrahub_sdk/yaml.py +2 -3
- {infrahub_server-1.1.7.dist-info → infrahub_server-1.2.0b1.dist-info}/METADATA +7 -6
- {infrahub_server-1.1.7.dist-info → infrahub_server-1.2.0b1.dist-info}/RECORD +383 -339
- infrahub_testcontainers/container.py +2 -3
- infrahub_testcontainers/docker-compose.test.yml +2 -2
- infrahub/core/branch/constants.py +0 -2
- infrahub/core/schema/definitions/core.py +0 -2274
- infrahub/graphql/query.py +0 -52
- infrahub/message_bus/messages/check_repository_checkdefinition.py +0 -20
- infrahub/message_bus/messages/check_repository_mergeconflicts.py +0 -16
- infrahub/message_bus/messages/check_repository_usercheck.py +0 -26
- infrahub/message_bus/messages/request_artifactdefinition_check.py +0 -17
- infrahub/message_bus/messages/request_repository_checks.py +0 -12
- infrahub/message_bus/messages/request_repository_userchecks.py +0 -18
- infrahub/message_bus/operations/check/repository.py +0 -293
- infrahub/message_bus/operations/requests/artifact_definition.py +0 -148
- infrahub/message_bus/operations/requests/repository.py +0 -133
- infrahub/schema/constants.py +0 -1
- infrahub/schema/tasks.py +0 -76
- infrahub/services/adapters/database/__init__.py +0 -9
- infrahub_sdk/ctl/_file.py +0 -13
- /infrahub/{schema → artifacts}/__init__.py +0 -0
- {infrahub_server-1.1.7.dist-info → infrahub_server-1.2.0b1.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.1.7.dist-info → infrahub_server-1.2.0b1.dist-info}/WHEEL +0 -0
- {infrahub_server-1.1.7.dist-info → infrahub_server-1.2.0b1.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,545 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import time
|
|
5
|
+
from typing import TYPE_CHECKING, Any
|
|
6
|
+
|
|
7
|
+
from ..graphql import Query
|
|
8
|
+
from .constants import FINAL_STATES
|
|
9
|
+
from .exceptions import TaskNotCompletedError, TaskNotFoundError, TooManyTasksError
|
|
10
|
+
from .models import Task, TaskFilter
|
|
11
|
+
|
|
12
|
+
if TYPE_CHECKING:
|
|
13
|
+
from ..client import InfrahubClient, InfrahubClientSync
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class InfraHubTaskManagerBase:
    """Shared GraphQL query-building logic for the async and sync task managers."""

    def _generate_query(
        self,
        filters: TaskFilter | None = None,
        include_logs: bool = False,
        include_related_nodes: bool = False,
        offset: int | None = None,
        limit: int | None = None,
        count: bool = False,
    ) -> Query:
        """Build the ``InfrahubTask`` GraphQL query.

        Args:
            filters: Optional filter applied to the task query. Defaults to None.
            include_logs: Whether to request the task log entries. Defaults to False.
            include_related_nodes: Whether to request the related nodes. Defaults to False.
            offset: Pagination offset, merged into ``filters``. Defaults to None.
            limit: Pagination limit, merged into ``filters``. Defaults to None.
            count: Whether to request the total task count. Defaults to False.

        Returns:
            The Query object ready to be rendered.
        """
        query: dict[str, Any] = {
            "InfrahubTask": {
                "edges": {
                    "node": {
                        "id": None,
                        "title": None,
                        "state": None,
                        "progress": None,
                        "workflow": None,
                        "branch": None,
                        "created_at": None,
                        "updated_at": None,
                    }
                }
            }
        }

        # Merge pagination into the filter object. NOTE: the previous
        # if/elif chain dropped `limit` whenever an existing filter and an
        # `offset` were both provided; offset and limit are now applied
        # independently (and `is not None` keeps explicit 0 values).
        if filters is None:
            if offset is not None or limit is not None:
                filters = TaskFilter(offset=offset, limit=limit)
        else:
            if offset is not None:
                filters.offset = offset
            if limit is not None:
                filters.limit = limit

        if filters:
            query["InfrahubTask"]["@filters"] = filters.to_dict()

        if count:
            query["InfrahubTask"]["count"] = None

        if include_logs:
            query["InfrahubTask"]["edges"]["node"]["logs"] = {
                "edges": {
                    "node": {
                        "message": None,
                        "severity": None,
                        "timestamp": None,
                    }
                }
            }

        if include_related_nodes:
            query["InfrahubTask"]["edges"]["node"]["related_nodes"] = {"id": None, "kind": None}

        return Query(query=query)

    def _generate_count_query(self, filters: TaskFilter | None = None) -> Query:
        """Build a query that only requests the total number of tasks.

        Args:
            filters: Optional filter applied to the task count. Defaults to None.

        Returns:
            The Query object ready to be rendered.
        """
        query: dict[str, Any] = {
            "InfrahubTask": {
                "count": None,
            }
        }
        if filters:
            query["InfrahubTask"]["@filters"] = filters.to_dict()

        return Query(query=query)
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
class InfrahubTaskManager(InfraHubTaskManagerBase):
    """Asynchronous task manager bound to an :class:`InfrahubClient`."""

    # Async client used to execute the generated GraphQL queries.
    client: InfrahubClient

    def __init__(self, client: InfrahubClient):
        """Store the async client used for all task queries."""
        self.client = client

    async def count(self, filters: TaskFilter | None = None) -> int:
        """Count the number of tasks.

        Args:
            filters: The filter to apply to the tasks. Defaults to None.

        Returns:
            The number of tasks.
        """
        query = self._generate_count_query(filters=filters)
        response = await self.client.execute_graphql(query=query.render(), tracker="query-tasks-count")
        return int(response["InfrahubTask"]["count"])

    async def all(
        self,
        limit: int | None = None,
        offset: int | None = None,
        timeout: int | None = None,
        parallel: bool = False,
        include_logs: bool = False,
        include_related_nodes: bool = False,
    ) -> list[Task]:
        """Get all tasks.

        Args:
            limit: The maximum number of tasks to return. Defaults to None.
            offset: The offset to start the tasks from. Defaults to None.
            timeout: The timeout to wait for the tasks to complete. Defaults to None.
            parallel: Whether to query the tasks in parallel. Defaults to False.
            include_logs: Whether to include the logs in the tasks. Defaults to False.
            include_related_nodes: Whether to include the related nodes in the tasks. Defaults to False.

        Returns:
            A list of tasks.
        """
        return await self.filter(
            limit=limit,
            offset=offset,
            timeout=timeout,
            parallel=parallel,
            include_logs=include_logs,
            include_related_nodes=include_related_nodes,
        )

    async def filter(
        self,
        filter: TaskFilter | None = None,
        limit: int | None = None,
        offset: int | None = None,
        timeout: int | None = None,
        parallel: bool = False,
        include_logs: bool = False,
        include_related_nodes: bool = False,
    ) -> list[Task]:
        """Filter tasks.

        Args:
            filter: The filter to apply to the tasks. Defaults to None.
            limit: The maximum number of tasks to return. Defaults to None.
            offset: The offset to start the tasks from. Defaults to None.
            timeout: The timeout to wait for the tasks to complete. Defaults to None.
            parallel: Whether to query the tasks in parallel. Defaults to False.
            include_logs: Whether to include the logs in the tasks. Defaults to False.
            include_related_nodes: Whether to include the related nodes in the tasks. Defaults to False.

        Returns:
            A list of tasks.
        """
        if filter is None:
            filter = TaskFilter()

        if limit:
            # Single-page fast path. FIX: forward include_logs and
            # include_related_nodes here as well — the original dropped both
            # options whenever an explicit limit was provided.
            tasks, _ = await self.process_page(
                self.client,
                self._generate_query(
                    filters=filter,
                    offset=offset,
                    limit=limit,
                    include_logs=include_logs,
                    include_related_nodes=include_related_nodes,
                    count=False,
                ),
                1,
                timeout,
            )
            return tasks

        if parallel:
            return await self.process_batch(
                filters=filter,
                timeout=timeout,
                include_logs=include_logs,
                include_related_nodes=include_related_nodes,
            )

        return await self.process_non_batch(
            filters=filter,
            offset=offset,
            limit=limit,
            timeout=timeout,
            include_logs=include_logs,
            include_related_nodes=include_related_nodes,
        )

    async def get(self, id: str, include_logs: bool = False, include_related_nodes: bool = False) -> Task:
        """Fetch a single task by id.

        Raises:
            TaskNotFoundError: No task with this id exists.
            TooManyTasksError: More than one task matched the id.
        """
        tasks = await self.filter(
            filter=TaskFilter(ids=[id]),
            include_logs=include_logs,
            include_related_nodes=include_related_nodes,
            parallel=False,
        )
        if not tasks:
            raise TaskNotFoundError(id=id)

        if len(tasks) != 1:
            raise TooManyTasksError(expected_id=id, received_ids=[task.id for task in tasks])

        return tasks[0]

    async def wait_for_completion(self, id: str, interval: int = 1, timeout: int = 60) -> Task:
        """Wait for a task to complete.

        Args:
            id: The id of the task to wait for.
            interval: The interval to check the task state. Defaults to 1.
            timeout: The timeout to wait for the task to complete. Defaults to 60.

        Raises:
            TaskNotCompletedError: The task did not complete in the given timeout.

        Returns:
            The task object.
        """
        for _ in range(timeout // interval):
            task = await self.get(id=id)
            if task.state in FINAL_STATES:
                return task
            await asyncio.sleep(interval)
        raise TaskNotCompletedError(id=id, message=f"Task {id} did not complete in {timeout} seconds")

    @staticmethod
    async def process_page(
        client: InfrahubClient, query: Query, page_number: int, timeout: int | None = None
    ) -> tuple[list[Task], int | None]:
        """Process a single page of results.

        Args:
            client: The client to use to execute the query.
            query: The query to execute.
            page_number: The page number to process.
            timeout: The timeout to wait for the query to complete. Defaults to None.

        Returns:
            A tuple containing a list of tasks and the count of tasks
            (None when the query did not request a count).
        """
        response = await client.execute_graphql(
            query=query.render(),
            tracker=f"query-tasks-page{page_number}",
            timeout=timeout,
        )
        count = response["InfrahubTask"].get("count", None)
        return [Task.from_graphql(task["node"]) for task in response["InfrahubTask"]["edges"]], count

    async def process_batch(
        self,
        filters: TaskFilter | None = None,
        timeout: int | None = None,
        include_logs: bool = False,
        include_related_nodes: bool = False,
    ) -> list[Task]:
        """Process queries in parallel mode: one batched query per page."""
        pagination_size = self.client.pagination_size
        tasks = []
        batch_process = await self.client.create_batch()
        # A separate count query determines how many pages to schedule.
        count = await self.count(filters=filters)
        total_pages = (count + pagination_size - 1) // pagination_size

        for page_number in range(1, total_pages + 1):
            page_offset = (page_number - 1) * pagination_size
            query = self._generate_query(
                filters=filters,
                offset=page_offset,
                limit=pagination_size,
                include_logs=include_logs,
                include_related_nodes=include_related_nodes,
                count=False,
            )
            batch_process.add(
                task=self.process_page, client=self.client, query=query, page_number=page_number, timeout=timeout
            )

        async for _, (new_tasks, _) in batch_process.execute():
            tasks.extend(new_tasks)

        return tasks

    async def process_non_batch(
        self,
        filters: TaskFilter | None = None,
        offset: int | None = None,
        limit: int | None = None,
        timeout: int | None = None,
        include_logs: bool = False,
        include_related_nodes: bool = False,
    ) -> list[Task]:
        """Process queries without parallel mode, page by page."""
        tasks = []
        has_remaining_items = True
        page_number = 1

        while has_remaining_items:
            page_offset = (page_number - 1) * self.client.pagination_size
            query = self._generate_query(
                filters=filters,
                offset=page_offset,
                limit=self.client.pagination_size,
                include_logs=include_logs,
                include_related_nodes=include_related_nodes,
            )
            new_tasks, count = await self.process_page(
                client=self.client, query=query, page_number=page_number, timeout=timeout
            )
            if count is None:
                raise ValueError("Count is None, a value must be retrieved from the query")

            tasks.extend(new_tasks)
            remaining_items = count - (page_offset + self.client.pagination_size)
            # FIX: use <= so that an exact multiple of pagination_size does not
            # trigger one extra query for an empty page.
            if remaining_items <= 0 or offset is not None or limit is not None:
                has_remaining_items = False
            page_number += 1
        return tasks
|
|
314
|
+
|
|
315
|
+
|
|
316
|
+
class InfrahubTaskManagerSync(InfraHubTaskManagerBase):
    """Synchronous task manager bound to an InfrahubClientSync instance."""

    # Sync client used to execute the generated GraphQL queries.
    client: InfrahubClientSync

    def __init__(self, client: InfrahubClientSync):
        """Store the sync client used for all task queries."""
        self.client = client
|
|
321
|
+
|
|
322
|
+
def count(self, filters: TaskFilter | None = None) -> int:
|
|
323
|
+
"""Count the number of tasks.
|
|
324
|
+
|
|
325
|
+
Args:
|
|
326
|
+
filters: The filter to apply to the tasks. Defaults to None.
|
|
327
|
+
|
|
328
|
+
Returns:
|
|
329
|
+
The number of tasks.
|
|
330
|
+
"""
|
|
331
|
+
|
|
332
|
+
query = self._generate_count_query(filters=filters)
|
|
333
|
+
response = self.client.execute_graphql(query=query.render(), tracker="query-tasks-count")
|
|
334
|
+
return int(response["InfrahubTask"]["count"])
|
|
335
|
+
|
|
336
|
+
def all(
|
|
337
|
+
self,
|
|
338
|
+
limit: int | None = None,
|
|
339
|
+
offset: int | None = None,
|
|
340
|
+
timeout: int | None = None,
|
|
341
|
+
parallel: bool = False,
|
|
342
|
+
include_logs: bool = False,
|
|
343
|
+
include_related_nodes: bool = False,
|
|
344
|
+
) -> list[Task]:
|
|
345
|
+
"""Get all tasks.
|
|
346
|
+
|
|
347
|
+
Args:
|
|
348
|
+
limit: The maximum number of tasks to return. Defaults to None.
|
|
349
|
+
offset: The offset to start the tasks from. Defaults to None.
|
|
350
|
+
timeout: The timeout to wait for the tasks to complete. Defaults to None.
|
|
351
|
+
parallel: Whether to query the tasks in parallel. Defaults to False.
|
|
352
|
+
include_logs: Whether to include the logs in the tasks. Defaults to False.
|
|
353
|
+
include_related_nodes: Whether to include the related nodes in the tasks. Defaults to False.
|
|
354
|
+
|
|
355
|
+
Returns:
|
|
356
|
+
A list of tasks.
|
|
357
|
+
"""
|
|
358
|
+
|
|
359
|
+
return self.filter(
|
|
360
|
+
limit=limit,
|
|
361
|
+
offset=offset,
|
|
362
|
+
timeout=timeout,
|
|
363
|
+
parallel=parallel,
|
|
364
|
+
include_logs=include_logs,
|
|
365
|
+
include_related_nodes=include_related_nodes,
|
|
366
|
+
)
|
|
367
|
+
|
|
368
|
+
def filter(
|
|
369
|
+
self,
|
|
370
|
+
filter: TaskFilter | None = None,
|
|
371
|
+
limit: int | None = None,
|
|
372
|
+
offset: int | None = None,
|
|
373
|
+
timeout: int | None = None,
|
|
374
|
+
parallel: bool = False,
|
|
375
|
+
include_logs: bool = False,
|
|
376
|
+
include_related_nodes: bool = False,
|
|
377
|
+
) -> list[Task]:
|
|
378
|
+
"""Filter tasks.
|
|
379
|
+
|
|
380
|
+
Args:
|
|
381
|
+
filter: The filter to apply to the tasks. Defaults to None.
|
|
382
|
+
limit: The maximum number of tasks to return. Defaults to None.
|
|
383
|
+
offset: The offset to start the tasks from. Defaults to None.
|
|
384
|
+
timeout: The timeout to wait for the tasks to complete. Defaults to None.
|
|
385
|
+
parallel: Whether to query the tasks in parallel. Defaults to False.
|
|
386
|
+
include_logs: Whether to include the logs in the tasks. Defaults to False.
|
|
387
|
+
include_related_nodes: Whether to include the related nodes in the tasks. Defaults to False.
|
|
388
|
+
|
|
389
|
+
Returns:
|
|
390
|
+
A list of tasks.
|
|
391
|
+
"""
|
|
392
|
+
if filter is None:
|
|
393
|
+
filter = TaskFilter()
|
|
394
|
+
|
|
395
|
+
if limit:
|
|
396
|
+
tasks, _ = self.process_page(
|
|
397
|
+
self.client, self._generate_query(filters=filter, offset=offset, limit=limit, count=False), 1, timeout
|
|
398
|
+
)
|
|
399
|
+
return tasks
|
|
400
|
+
|
|
401
|
+
if parallel:
|
|
402
|
+
return self.process_batch(
|
|
403
|
+
filters=filter,
|
|
404
|
+
timeout=timeout,
|
|
405
|
+
include_logs=include_logs,
|
|
406
|
+
include_related_nodes=include_related_nodes,
|
|
407
|
+
)
|
|
408
|
+
|
|
409
|
+
return self.process_non_batch(
|
|
410
|
+
filters=filter,
|
|
411
|
+
offset=offset,
|
|
412
|
+
limit=limit,
|
|
413
|
+
timeout=timeout,
|
|
414
|
+
include_logs=include_logs,
|
|
415
|
+
include_related_nodes=include_related_nodes,
|
|
416
|
+
)
|
|
417
|
+
|
|
418
|
+
def get(self, id: str, include_logs: bool = False, include_related_nodes: bool = False) -> Task:
|
|
419
|
+
tasks = self.filter(
|
|
420
|
+
filter=TaskFilter(ids=[id]),
|
|
421
|
+
include_logs=include_logs,
|
|
422
|
+
include_related_nodes=include_related_nodes,
|
|
423
|
+
parallel=False,
|
|
424
|
+
)
|
|
425
|
+
if not tasks:
|
|
426
|
+
raise TaskNotFoundError(id=id)
|
|
427
|
+
|
|
428
|
+
if len(tasks) != 1:
|
|
429
|
+
raise TooManyTasksError(expected_id=id, received_ids=[task.id for task in tasks])
|
|
430
|
+
|
|
431
|
+
return tasks[0]
|
|
432
|
+
|
|
433
|
+
def wait_for_completion(self, id: str, interval: int = 1, timeout: int = 60) -> Task:
|
|
434
|
+
"""Wait for a task to complete.
|
|
435
|
+
|
|
436
|
+
Args:
|
|
437
|
+
id: The id of the task to wait for.
|
|
438
|
+
interval: The interval to check the task state. Defaults to 1.
|
|
439
|
+
timeout: The timeout to wait for the task to complete. Defaults to 60.
|
|
440
|
+
|
|
441
|
+
Raises:
|
|
442
|
+
TaskNotCompletedError: The task did not complete in the given timeout.
|
|
443
|
+
|
|
444
|
+
Returns:
|
|
445
|
+
The task object.
|
|
446
|
+
"""
|
|
447
|
+
for _ in range(timeout // interval):
|
|
448
|
+
task = self.get(id=id)
|
|
449
|
+
if task.state in FINAL_STATES:
|
|
450
|
+
return task
|
|
451
|
+
time.sleep(interval)
|
|
452
|
+
raise TaskNotCompletedError(id=id, message=f"Task {id} did not complete in {timeout} seconds")
|
|
453
|
+
|
|
454
|
+
@staticmethod
|
|
455
|
+
def process_page(
|
|
456
|
+
client: InfrahubClientSync, query: Query, page_number: int, timeout: int | None = None
|
|
457
|
+
) -> tuple[list[Task], int | None]:
|
|
458
|
+
"""Process a single page of results.
|
|
459
|
+
|
|
460
|
+
Args:
|
|
461
|
+
client: The client to use to execute the query.
|
|
462
|
+
query: The query to execute.
|
|
463
|
+
page_number: The page number to process.
|
|
464
|
+
timeout: The timeout to wait for the query to complete. Defaults to None.
|
|
465
|
+
|
|
466
|
+
Returns:
|
|
467
|
+
A tuple containing a list of tasks and the count of tasks.
|
|
468
|
+
"""
|
|
469
|
+
|
|
470
|
+
response = client.execute_graphql(
|
|
471
|
+
query=query.render(),
|
|
472
|
+
tracker=f"query-tasks-page{page_number}",
|
|
473
|
+
timeout=timeout,
|
|
474
|
+
)
|
|
475
|
+
count = response["InfrahubTask"].get("count", None)
|
|
476
|
+
return [Task.from_graphql(task["node"]) for task in response["InfrahubTask"]["edges"]], count
|
|
477
|
+
|
|
478
|
+
def process_batch(
|
|
479
|
+
self,
|
|
480
|
+
filters: TaskFilter | None = None,
|
|
481
|
+
timeout: int | None = None,
|
|
482
|
+
include_logs: bool = False,
|
|
483
|
+
include_related_nodes: bool = False,
|
|
484
|
+
) -> list[Task]:
|
|
485
|
+
"""Process queries in parallel mode."""
|
|
486
|
+
pagination_size = self.client.pagination_size
|
|
487
|
+
tasks = []
|
|
488
|
+
batch_process = self.client.create_batch()
|
|
489
|
+
count = self.count(filters=filters)
|
|
490
|
+
total_pages = (count + pagination_size - 1) // pagination_size
|
|
491
|
+
|
|
492
|
+
for page_number in range(1, total_pages + 1):
|
|
493
|
+
page_offset = (page_number - 1) * pagination_size
|
|
494
|
+
query = self._generate_query(
|
|
495
|
+
filters=filters,
|
|
496
|
+
offset=page_offset,
|
|
497
|
+
limit=pagination_size,
|
|
498
|
+
include_logs=include_logs,
|
|
499
|
+
include_related_nodes=include_related_nodes,
|
|
500
|
+
count=False,
|
|
501
|
+
)
|
|
502
|
+
batch_process.add(
|
|
503
|
+
task=self.process_page, client=self.client, query=query, page_number=page_number, timeout=timeout
|
|
504
|
+
)
|
|
505
|
+
|
|
506
|
+
for _, (new_tasks, _) in batch_process.execute():
|
|
507
|
+
tasks.extend(new_tasks)
|
|
508
|
+
|
|
509
|
+
return tasks
|
|
510
|
+
|
|
511
|
+
def process_non_batch(
|
|
512
|
+
self,
|
|
513
|
+
filters: TaskFilter | None = None,
|
|
514
|
+
offset: int | None = None,
|
|
515
|
+
limit: int | None = None,
|
|
516
|
+
timeout: int | None = None,
|
|
517
|
+
include_logs: bool = False,
|
|
518
|
+
include_related_nodes: bool = False,
|
|
519
|
+
) -> list[Task]:
|
|
520
|
+
"""Process queries without parallel mode."""
|
|
521
|
+
tasks = []
|
|
522
|
+
has_remaining_items = True
|
|
523
|
+
page_number = 1
|
|
524
|
+
|
|
525
|
+
while has_remaining_items:
|
|
526
|
+
page_offset = (page_number - 1) * self.client.pagination_size
|
|
527
|
+
query = self._generate_query(
|
|
528
|
+
filters=filters,
|
|
529
|
+
offset=page_offset,
|
|
530
|
+
limit=self.client.pagination_size,
|
|
531
|
+
include_logs=include_logs,
|
|
532
|
+
include_related_nodes=include_related_nodes,
|
|
533
|
+
)
|
|
534
|
+
new_tasks, count = self.process_page(
|
|
535
|
+
client=self.client, query=query, page_number=page_number, timeout=timeout
|
|
536
|
+
)
|
|
537
|
+
if count is None:
|
|
538
|
+
raise ValueError("Count is None, a value must be retrieve from the query")
|
|
539
|
+
|
|
540
|
+
tasks.extend(new_tasks)
|
|
541
|
+
remaining_items = count - (page_offset + self.client.pagination_size)
|
|
542
|
+
if remaining_items < 0 or offset is not None or limit is not None:
|
|
543
|
+
has_remaining_items = False
|
|
544
|
+
page_number += 1
|
|
545
|
+
return tasks
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
from enum import Enum
|
|
5
|
+
|
|
6
|
+
from pydantic import BaseModel, Field
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class TaskState(str, Enum):
    """Possible lifecycle states of an Infrahub task.

    NOTE(review): the member names match Prefect flow-run state names — confirm
    this enum is meant to stay in sync with the server's task states.
    """

    SCHEDULED = "SCHEDULED"
    PENDING = "PENDING"
    RUNNING = "RUNNING"
    COMPLETED = "COMPLETED"
    FAILED = "FAILED"
    CANCELLED = "CANCELLED"
    CRASHED = "CRASHED"
    PAUSED = "PAUSED"
    CANCELLING = "CANCELLING"
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class TaskLog(BaseModel):
    """A single log entry attached to a task."""

    # Free-text log message.
    message: str
    # Severity label as reported by the server — presumably a level name such
    # as "info" or "error"; TODO confirm the exact format.
    severity: str
    # When the log entry was emitted.
    timestamp: datetime
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class TaskRelatedNode(BaseModel):
    """Reference to an Infrahub node associated with a task."""

    # Unique identifier of the related node.
    id: str
    # Schema kind of the related node.
    kind: str
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class Task(BaseModel):
    """An Infrahub task as returned by the InfrahubTask GraphQL endpoint."""

    id: str
    title: str
    state: TaskState
    progress: float | None = None
    workflow: str | None = None
    branch: str | None = None
    # start_time: datetime # Is it still required
    created_at: datetime
    updated_at: datetime
    parameters: dict | None = None
    tags: list[str] | None = None
    related_nodes: list[TaskRelatedNode] = Field(default_factory=list)
    logs: list[TaskLog] = Field(default_factory=list)

    @classmethod
    def from_graphql(cls, data: dict) -> Task:
        """Build a Task from a GraphQL node payload.

        Args:
            data: The content of a single ``InfrahubTask`` edge node. The input
                dict is not modified.

        Returns:
            The corresponding Task instance.
        """
        # Work on a shallow copy so the caller's payload is never mutated.
        payload = dict(data)

        # Pop the nested structures unconditionally. The previous truthiness
        # check (`if data.get("related_nodes"):`) left an empty/None
        # "related_nodes" or "logs" key in the payload, which then collided
        # with the explicit keyword arguments below and raised
        # "got multiple values for keyword argument".
        raw_related = payload.pop("related_nodes", None) or []
        raw_logs = payload.pop("logs", None) or {}

        related_nodes = [TaskRelatedNode(**item) for item in raw_related]
        logs = [TaskLog(**item["node"]) for item in raw_logs.get("edges", [])]

        return cls(**payload, related_nodes=related_nodes, logs=logs)
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
class TaskFilter(BaseModel):
    """Filtering options for task queries; fields left as None are omitted."""

    # Restrict to tasks with these exact ids.
    ids: list[str] | None = None
    # Free-text search string.
    q: str | None = None
    # Restrict to tasks executed on this branch.
    branch: str | None = None
    # Restrict to tasks currently in one of these states.
    state: list[TaskState] | None = None
    # Restrict to tasks produced by one of these workflows.
    workflow: list[str] | None = None
    limit: int | None = None
    offset: int | None = None
    # Restrict to tasks related to any of these node ids.
    related_node__ids: list[str] | None = None

    def to_dict(self) -> dict:
        """Return only the explicitly set filters, dropping None values."""
        return self.model_dump(exclude_none=True)
|
infrahub_sdk/testing/docker.py
CHANGED
|
@@ -1,10 +1,40 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
|
|
1
5
|
import pytest
|
|
2
6
|
from infrahub_testcontainers.helpers import TestInfrahubDocker
|
|
7
|
+
from packaging.version import InvalidVersion, Version
|
|
3
8
|
|
|
4
9
|
from .. import Config, InfrahubClient, InfrahubClientSync
|
|
5
10
|
|
|
11
|
+
# Infrahub container image tag used for the test session; override with the
# INFRAHUB_TESTING_IMAGE_VER environment variable.
INFRAHUB_VERSION = os.getenv("INFRAHUB_TESTING_IMAGE_VER", "latest")
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def skip_version(min_infrahub_version: str | None = None, max_infrahub_version: str | None = None) -> bool:
    """Return True when a test should be skipped for the infrahub version under test.

    Args:
        min_infrahub_version: Lowest infrahub version the test supports, if any.
        max_infrahub_version: Highest infrahub version the test supports, if any.

    Returns:
        True when the current version falls outside the supported window.
    """
    try:
        current = Version(INFRAHUB_VERSION)
    except InvalidVersion:
        # We would typically end up here for development purpose while running a CI test against
        # unreleased versions of infrahub, like `stable` or `develop` branch.
        # For now, we consider this means we are testing against the most recent version of infrahub,
        # so we skip if the test should not be ran against a maximum version.
        return max_infrahub_version is None

    below_minimum = min_infrahub_version is not None and current < Version(min_infrahub_version)
    above_maximum = max_infrahub_version is not None and current > Version(max_infrahub_version)
    return below_minimum or above_maximum
|
|
31
|
+
|
|
6
32
|
|
|
7
33
|
class TestInfrahubDockerClient(TestInfrahubDocker):
|
|
34
|
+
@pytest.fixture(scope="class")
|
|
35
|
+
def infrahub_version(self) -> str:
|
|
36
|
+
return INFRAHUB_VERSION
|
|
37
|
+
|
|
8
38
|
@pytest.fixture(scope="class")
|
|
9
39
|
def client(self, infrahub_port: int) -> InfrahubClient:
|
|
10
40
|
return InfrahubClient(
|