port-ocean 0.26.3__tar.gz → 0.27.10__tar.gz
This diff shows the changes between publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- {port_ocean-0.26.3 → port_ocean-0.27.10}/PKG-INFO +5 -2
- {port_ocean-0.26.3 → port_ocean-0.27.10}/integrations/_infra/Dockerfile.Deb +4 -1
- {port_ocean-0.26.3 → port_ocean-0.27.10}/integrations/_infra/Dockerfile.local +3 -1
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/clients/port/authentication.py +2 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/clients/port/client.py +5 -2
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/clients/port/mixins/integrations.py +1 -1
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/config/settings.py +13 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/event_listener/kafka.py +14 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/entities_state_applier/port/applier.py +9 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/entity_processor/jq_entity_processor.py +12 -9
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/port_app_config/models.py +1 -0
- port_ocean-0.27.10/port_ocean/core/handlers/queue/__init__.py +5 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/queue/abstract_queue.py +8 -0
- port_ocean-0.27.10/port_ocean/core/handlers/queue/group_queue.py +138 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/queue/local_queue.py +3 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/resync_state_updater/updater.py +2 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/webhook/processor_manager.py +97 -78
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/webhook/webhook_event.py +2 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/integrations/mixins/sync_raw.py +22 -4
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/integrations/mixins/utils.py +26 -4
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/helpers/async_client.py +7 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/helpers/metric/metric.py +13 -11
- port_ocean-0.27.10/port_ocean/helpers/stream.py +71 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/log/handlers.py +3 -4
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/ocean.py +11 -12
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/core/conftest.py +7 -1
- port_ocean-0.27.10/port_ocean/tests/core/event_listener/test_kafka.py +70 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/core/handlers/entities_state_applier/test_applier.py +2 -2
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/core/handlers/entity_processor/test_jq_entity_processor.py +1 -1
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/core/handlers/mixins/test_live_events.py +23 -13
- port_ocean-0.27.10/port_ocean/tests/core/handlers/queue/test_group_queue.py +681 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/core/handlers/webhook/test_processor_manager.py +32 -27
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/helpers/port_client.py +1 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/pyproject.toml +6 -2
- port_ocean-0.26.3/port_ocean/core/handlers/queue/__init__.py +0 -4
- {port_ocean-0.26.3 → port_ocean-0.27.10}/LICENSE.md +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/README.md +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/integrations/_infra/Dockerfile.alpine +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/integrations/_infra/Dockerfile.base.builder +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/integrations/_infra/Dockerfile.base.runner +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/integrations/_infra/Dockerfile.dockerignore +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/integrations/_infra/Makefile +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/integrations/_infra/README.md +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/integrations/_infra/entry_local.sh +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/integrations/_infra/grpcio.sh +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/integrations/_infra/init.sh +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/bootstrap.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cache/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cache/base.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cache/disk.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cache/errors.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cache/memory.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cli.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/commands/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/commands/defaults/__init___.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/commands/defaults/clean.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/commands/defaults/dock.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/commands/defaults/group.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/commands/list_integrations.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/commands/main.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/commands/new.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/commands/pull.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/commands/sail.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/commands/version.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/cookiecutter.json +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/extensions.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/hooks/post_gen_project.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.env.example +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.gitignore +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/resources/.gitignore +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/resources/blueprints.json +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/resources/port-app-config.yml +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/.port/spec.yaml +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/CHANGELOG.md +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/CONTRIBUTING.md +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/README.md +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/changelog/.gitignore +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/debug.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/main.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/poetry.toml +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/pyproject.toml +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/sonar-project.properties +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/tests/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/cookiecutter/{{cookiecutter.integration_slug}}/tests/test_sample.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/cli/utils.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/clients/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/clients/auth/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/clients/auth/auth_client.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/clients/auth/oauth_client.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/clients/port/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/clients/port/mixins/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/clients/port/mixins/blueprints.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/clients/port/mixins/entities.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/clients/port/mixins/migrations.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/clients/port/mixins/organization.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/clients/port/retry_transport.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/clients/port/types.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/clients/port/utils.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/config/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/config/base.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/config/dynamic.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/consumers/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/consumers/kafka_consumer.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/context/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/context/event.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/context/metric_resource.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/context/ocean.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/context/resource.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/defaults/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/defaults/clean.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/defaults/common.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/defaults/initialize.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/event_listener/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/event_listener/base.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/event_listener/factory.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/event_listener/http.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/event_listener/once.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/event_listener/polling.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/event_listener/webhooks_only.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/base.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/entities_state_applier/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/entities_state_applier/base.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/entities_state_applier/port/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/entities_state_applier/port/get_related_entities.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/entities_state_applier/port/order_by_entities_dependencies.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/entity_processor/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/entity_processor/base.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/port_app_config/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/port_app_config/api.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/port_app_config/base.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/resync_state_updater/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/webhook/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/webhook/abstract_webhook_processor.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/integrations/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/integrations/base.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/integrations/mixins/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/integrations/mixins/events.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/integrations/mixins/handler.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/integrations/mixins/live_events.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/integrations/mixins/sync.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/models.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/ocean_types.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/utils/entity_topological_sorter.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/utils/utils.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/debug_cli.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/exceptions/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/exceptions/api.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/exceptions/base.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/exceptions/clients.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/exceptions/context.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/exceptions/core.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/exceptions/port_defaults.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/exceptions/utils.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/exceptions/webhook_processor.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/helpers/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/helpers/metric/utils.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/helpers/retry.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/log/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/log/logger_setup.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/log/sensetive.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/middlewares.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/py.typed +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/run.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/sonar-project.properties +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/cache/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/cache/test_disk_cache.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/cache/test_memory_cache.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/clients/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/clients/oauth/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/clients/oauth/test_oauth_client.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/clients/port/mixins/test_entities.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/clients/port/mixins/test_integrations.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/clients/port/mixins/test_organization_mixin.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/config/test_config.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/conftest.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/core/defaults/test_common.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/core/handlers/mixins/test_sync_raw.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/core/handlers/port_app_config/test_api.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/core/handlers/port_app_config/test_base.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/core/handlers/queue/test_local_queue.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/core/handlers/webhook/test_abstract_webhook_processor.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/core/handlers/webhook/test_webhook_event.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/core/test_utils.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/core/utils/test_entity_topological_sorter.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/core/utils/test_resolve_entities_diff.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/helpers/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/helpers/fake_port_api.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/helpers/fixtures.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/helpers/integration.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/helpers/ocean_app.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/helpers/smoke_test.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/log/test_handlers.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/test_metric.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/test_ocean.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/test_smoke.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/utils/test_async_iterators.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/tests/utils/test_cache.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/utils/__init__.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/utils/async_http.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/utils/async_iterators.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/utils/cache.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/utils/ipc.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/utils/misc.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/utils/queue_utils.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/utils/repeat.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/utils/signal.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/utils/time.py +0 -0
- {port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/version.py +0 -0
{port_ocean-0.26.3 → port_ocean-0.27.10}/PKG-INFO

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: port-ocean
-Version: 0.26.3
+Version: 0.27.10
 Summary: Port Ocean is a CLI tool for managing your Port projects.
 Home-page: https://app.getport.io
 Keywords: ocean,port-ocean,port
@@ -22,12 +22,15 @@ Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Classifier: Topic :: Utilities
 Provides-Extra: cli
+Requires-Dist: aiofiles (>=24.1.0,<25.0.0)
 Requires-Dist: aiostream (>=0.5.2,<0.7.0)
 Requires-Dist: click (>=8.1.3,<9.0.0) ; extra == "cli"
 Requires-Dist: confluent-kafka (>=2.10.1,<3.0.0)
 Requires-Dist: cookiecutter (>=2.1.1,<3.0.0) ; extra == "cli"
-Requires-Dist:
+Requires-Dist: cryptography (>=44.0.1,<45.0.0)
+Requires-Dist: fastapi (>=0.116.0,<0.117.0)
 Requires-Dist: httpx (>=0.28.1,<0.29.0)
+Requires-Dist: ijson (>=3.4.0,<4.0.0)
 Requires-Dist: jinja2 (>=3.1.6)
 Requires-Dist: jinja2-time (>=0.2.0,<0.3.0) ; extra == "cli"
 Requires-Dist: jq (>=1.8.0,<2.0.0)
```
{port_ocean-0.26.3 → port_ocean-0.27.10}/integrations/_infra/Dockerfile.Deb

```diff
@@ -28,11 +28,14 @@ ARG INTEGRATION_VERSION
 ARG BUILD_CONTEXT
 ARG PROMETHEUS_MULTIPROC_DIR=/tmp/ocean/prometheus/metrics
 ARG OAUTH_CONFIG_DIR=/app/.config
+ARG STREAMING_LOCATION=/tmp/ocean/streaming

 ENV LIBRDKAFKA_VERSION=2.8.2 \
-    PROMETHEUS_MULTIPROC_DIR=${PROMETHEUS_MULTIPROC_DIR}
+    PROMETHEUS_MULTIPROC_DIR=${PROMETHEUS_MULTIPROC_DIR} \
+    STREAMING_LOCATION=${STREAMING_LOCATION}

 RUN mkdir -p ${PROMETHEUS_MULTIPROC_DIR}
+RUN mkdir -p ${STREAMING_LOCATION}
 RUN chown -R ocean:appgroup /tmp/ocean && chmod -R 755 /tmp/ocean

 RUN mkdir -p ${OAUTH_CONFIG_DIR}
```
{port_ocean-0.26.3 → port_ocean-0.27.10}/integrations/_infra/Dockerfile.local

```diff
@@ -33,13 +33,15 @@ RUN apt-get update \

 ARG BUILD_CONTEXT
 ARG PROMETHEUS_MULTIPROC_DIR=/tmp/ocean/prometheus/metrics
+ARG STREAMING_LOCATION=/tmp/ocean/streaming

 ENV PROMETHEUS_MULTIPROC_DIR=${PROMETHEUS_MULTIPROC_DIR}
-
+ENV STREAMING_LOCATION=${STREAMING_LOCATION}
 # Create /tmp/ocean directory and set permissions


 RUN mkdir -p ${PROMETHEUS_MULTIPROC_DIR}
+RUN mkdir -p ${STREAMING_LOCATION}

 WORKDIR /app

```
{port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/clients/port/authentication.py

```diff
@@ -35,6 +35,7 @@ class PortAuthentication:
         integration_identifier: str,
         integration_type: str,
         integration_version: str,
+        ingest_url: str,
     ):
         self.client = client
         self.api_url = api_url
@@ -43,6 +44,7 @@ class PortAuthentication:
         self.integration_identifier = integration_identifier
         self.integration_type = integration_type
         self.integration_version = integration_version
+        self.ingest_url = ingest_url
         self.last_token_object: TokenResponse | None = None

     async def _get_token(self, client_id: str, client_secret: str) -> TokenResponse:
```
{port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/clients/port/client.py

```diff
@@ -1,3 +1,5 @@
+from typing import Any
+
 from loguru import logger

 from port_ocean.clients.port.authentication import PortAuthentication
@@ -10,11 +12,10 @@ from port_ocean.clients.port.types import (
     KafkaCreds,
 )
 from port_ocean.clients.port.utils import (
-    handle_port_status_code,
     get_internal_http_client,
+    handle_port_status_code,
 )
 from port_ocean.exceptions.clients import KafkaCredentialsNotFound
-from typing import Any


 class PortClient(
@@ -32,6 +33,7 @@ class PortClient(
         integration_identifier: str,
         integration_type: str,
         integration_version: str,
+        ingest_url: str,
     ):
         self.api_url = f"{base_url}/v1"
         self.client = get_internal_http_client(self)
@@ -43,6 +45,7 @@ class PortClient(
             integration_identifier,
             integration_type,
             integration_version,
+            ingest_url,
         )
         EntityClientMixin.__init__(self, self.auth, self.client)
         IntegrationClientMixin.__init__(
```
{port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/clients/port/mixins/integrations.py

```diff
@@ -296,7 +296,7 @@ class IntegrationClientMixin:
         logger.debug("starting POST raw data request", raw_data=raw_data)
         headers = await self.auth.headers()
         response = await self.client.post(
-            f"{self.auth.
+            f"{self.auth.ingest_url}/lakehouse/integration-type/{self.auth.integration_type}/integration/{self.integration_identifier}/sync/{sync_id}/kind/{kind}/items",
             headers=headers,
             json={
                 "items": raw_data,
```
{port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/config/settings.py

```diff
@@ -46,6 +46,7 @@ class PortSettings(BaseOceanModel, extra=Extra.allow):
     client_secret: str = Field(..., sensitive=True)
     base_url: AnyHttpUrl = parse_obj_as(AnyHttpUrl, "https://api.getport.io")
     port_app_config_cache_ttl: int = 60
+    ingest_url: AnyHttpUrl = parse_obj_as(AnyHttpUrl, "https://ingest.getport.io")


 class IntegrationSettings(BaseOceanModel, extra=Extra.allow):
@@ -72,6 +73,13 @@ class MetricsSettings(BaseOceanModel, extra=Extra.allow):
     webhook_url: str | None = Field(default=None)


+class StreamingSettings(BaseOceanModel, extra=Extra.allow):
+    enabled: bool = Field(default=False)
+    max_buffer_size_mb: int = Field(default=1024 * 1024 * 20)  # 20 mb
+    chunk_size: int = Field(default=1024 * 64)  # 64 kb
+    location: str = Field(default="/tmp/ocean/streaming")
+
+
 class IntegrationConfiguration(BaseOceanSettings, extra=Extra.allow):
     _integration_config_model: BaseModel | None = None

@@ -88,6 +96,7 @@ class IntegrationConfiguration(BaseOceanSettings, extra=Extra.allow):
     event_listener: EventListenerSettingsType = Field(
         default=cast(EventListenerSettingsType, {"type": "POLLING"})
     )
+    event_workers_count: int = 1
     # If an identifier or type is not provided, it will be generated based on the integration name
     integration: IntegrationSettings = Field(
         default_factory=lambda: IntegrationSettings(type="", identifier="")
@@ -109,6 +118,10 @@ class IntegrationConfiguration(BaseOceanSettings, extra=Extra.allow):
     upsert_entities_batch_max_length: int = 20
     upsert_entities_batch_max_size_in_bytes: int = 1024 * 1024
     lakehouse_enabled: bool = False
+    yield_items_to_parse: bool = False
+    yield_items_to_parse_batch_size: int = 10
+
+    streaming: StreamingSettings = Field(default_factory=lambda: StreamingSettings())

     @validator("process_execution_mode")
     def validate_process_execution_mode(
```
{port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/event_listener/kafka.py

```diff
@@ -16,6 +16,7 @@ from port_ocean.core.event_listener.base import (
     EventListenerEvents,
     EventListenerSettings,
 )
+from pydantic import validator


 class KafkaEventListenerSettings(EventListenerSettings):
@@ -46,6 +47,19 @@ class KafkaEventListenerSettings(EventListenerSettings):
     kafka_security_enabled: bool = True
     consumer_poll_timeout: int = 1

+    @validator("brokers")
+    @classmethod
+    def parse_brokers(cls, v: str) -> str:
+        # If it's a JSON array string, parse and join
+        if v.strip().startswith("[") and v.strip().endswith("]"):
+            try:
+                parsed = json.loads(v)
+                if isinstance(parsed, list):
+                    return ",".join(parsed)
+            except json.JSONDecodeError:
+                pass
+        return v
+
     def get_changelog_destination_details(self) -> dict[str, Any]:
         """
         Returns the changelog destination configuration for the Kafka event listener.
```
{port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/entities_state_applier/port/applier.py

```diff
@@ -87,6 +87,15 @@ class HttpEntitiesStateApplier(BaseEntitiesStateApplier):
         diff = get_port_diff(entities["before"], entities["after"])

         if not diff.deleted:
+            ocean.metrics.inc_metric(
+                name=MetricType.OBJECT_COUNT_NAME,
+                labels=[
+                    ocean.metrics.current_resource_kind(),
+                    MetricPhase.DELETE,
+                    MetricPhase.DeletionResult.DELETED,
+                ],
+                value=0,
+            )
             return

         kept_entities = diff.created + diff.modified
```
{port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/entity_processor/jq_entity_processor.py

```diff
@@ -242,19 +242,21 @@ class JQEntityProcessor(BaseEntityProcessor):
         data: dict[str, Any],
         raw_entity_mappings: dict[str, Any],
         items_to_parse: str | None,
+        items_to_parse_name: str,
         selector_query: str,
         parse_all: bool = False,
     ) -> tuple[list[MappedEntity], list[Exception]]:
         raw_data = [data.copy()]
-        if
-
-
-
-
-
-
-
-
+        if not ocean.config.yield_items_to_parse:
+            if items_to_parse:
+                items = await self._search(data, items_to_parse)
+                if not isinstance(items, list):
+                    logger.warning(
+                        f"Failed to parse items for JQ expression {items_to_parse}, Expected list but got {type(items)}."
+                        f" Skipping..."
+                    )
+                    return [], []
+                raw_data = [{items_to_parse_name: item, **data} for item in items]

         entities, errors = await gather_and_split_errors_from_results(
             [
@@ -303,6 +305,7 @@ class JQEntityProcessor(BaseEntityProcessor):
                 self._calculate_entity,
                 raw_entity_mappings,
                 mapping.port.items_to_parse,
+                mapping.port.items_to_parse_name,
                 mapping.selector.query,
                 parse_all,
             )
```
{port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/port_app_config/models.py

```diff
@@ -39,6 +39,7 @@ class MappingsConfig(BaseModel):
 class PortResourceConfig(BaseModel):
     entity: MappingsConfig
     items_to_parse: str | None = Field(alias="itemsToParse")
+    items_to_parse_name: str | None = Field(alias="itemsToParseName", default="item")


 class Selector(BaseModel):
```
{port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/queue/abstract_queue.py

```diff
@@ -7,6 +7,9 @@ T = TypeVar("T")
 class AbstractQueue(ABC, Generic[T]):
     """Abstract interface for queues"""

+    def __init__(self, name: str | None = None):
+        pass
+
     @abstractmethod
     async def put(self, item: T) -> None:
         """Put an item into the queue"""
@@ -22,6 +25,11 @@ class AbstractQueue(ABC, Generic[T]):
         """Wait for all items to be processed"""
        pass

+    @abstractmethod
+    async def size(self) -> int:
+        """Size of the queue"""
+        pass
+
     @abstractmethod
     async def commit(self) -> None:
         """Mark item as processed"""
```
port_ocean-0.27.10/port_ocean/core/handlers/queue/group_queue.py

```diff
@@ -0,0 +1,138 @@
+import asyncio
+from collections import defaultdict, deque
+import time
+from typing import Deque, Dict, Optional, Set, TypeVar, Any
+from contextvars import ContextVar
+
+from loguru import logger
+
+from .abstract_queue import AbstractQueue
+
+T = TypeVar("T")
+MaybeStr = str | None
+
+_NO_GROUP = object()
+_current_group: ContextVar[Any] = ContextVar("current_group", default=_NO_GROUP)
+
+
+class GroupQueue(AbstractQueue[T]):
+    """Queue with exclusive processing per group."""
+
+    def __init__(
+        self,
+        group_key: MaybeStr = None,
+        name: MaybeStr = None,
+        lock_timeout: float = 300,
+    ):
+        super().__init__(name)
+        self.group_key = group_key
+        self._queues: Dict[MaybeStr, Deque[T]] = defaultdict(deque)
+        self._locked: Set[MaybeStr] = set()
+        self._queue_not_empty = asyncio.Condition()
+        self.lock_timeout = lock_timeout
+        self._lock_timestamps: Dict[MaybeStr, float] = {}
+        self._timeout_task: Optional[asyncio.Task[None]] = None
+
+    async def _background_timeout_check(self) -> None:
+        """Periodically release locks that have timed out."""
+        while True:
+            try:
+                await asyncio.sleep(self.lock_timeout / 4)
+                async with self._queue_not_empty:
+                    await self._release_expired_locks()
+            except asyncio.CancelledError:
+                break
+
+    def _extract_group_key(self, item: T) -> MaybeStr:
+        """Extract the group key from an item."""
+        if self.group_key is None:
+            return None
+        if not hasattr(item, self.group_key):
+            raise ValueError(
+                f"Item {item!r} lacks attribute '{self.group_key}' required for grouping"
+            )
+        return getattr(item, self.group_key)
+
+    async def put(self, item: T) -> None:
+        """Add item to its group's queue."""
+        group_key = self._extract_group_key(item)
+        async with self._queue_not_empty:
+            self._queues[group_key].append(item)
+            self._queue_not_empty.notify_all()
+
+    async def _release_expired_locks(self) -> None:
+        """Release locks that have exceeded the timeout."""
+        now = time.time()
+        expired_groups = []
+
+        for group, timestamp in list(self._lock_timestamps.items()):
+            if now - timestamp > self.lock_timeout:
+                expired_groups.append(group)
+                logger.warning(f"Releasing expired lock for group {group}")
+                self._locked.discard(group)
+                del self._lock_timestamps[group]
+
+        if expired_groups:
+            self._queue_not_empty.notify_all()
+
+    async def get(self) -> T:
+        """Get the next item from an unlocked group, locking that group."""
+        if self._timeout_task is None or self._timeout_task.done():
+            self._timeout_task = asyncio.create_task(self._background_timeout_check())
+
+        async with self._queue_not_empty:
+            while True:
+                await self._release_expired_locks()
+
+                for group, queue in self._queues.items():
+                    if queue and group not in self._locked:
+                        self._locked.add(group)
+                        self._lock_timestamps[group] = time.time()
+                        _current_group.set(group)
+                        return queue[0]
+
+                await self._queue_not_empty.wait()
+
+    async def commit(self) -> None:
+        """Remove the current item and unlock its group."""
+        group = _current_group.get()
+        if group is _NO_GROUP:
+            logger.warning("commit() called without active get()")
+            return
+
+        async with self._queue_not_empty:
+            queue = self._queues.get(group)
+            if queue:
+                queue.popleft()
+                if not queue:
+                    del self._queues[group]
+
+            self._locked.discard(group)
+            self._lock_timestamps.pop(group, None)
+            _current_group.set(_NO_GROUP)
+            self._queue_not_empty.notify_all()
+
+    async def teardown(self) -> None:
+        """Wait until all queues are empty and no groups are locked."""
+        async with self._queue_not_empty:
+            while any(self._queues.values()) or self._locked:
+                await self._queue_not_empty.wait()
+
+        if self._timeout_task and not self._timeout_task.done():
+            self._timeout_task.cancel()
+            try:
+                await self._timeout_task
+            except asyncio.CancelledError:
+                pass
+
+    async def size(self) -> int:
+        """Return total number of items across all groups."""
+        async with self._queue_not_empty:
+            return sum(len(queue) for queue in self._queues.values())
+
+    async def force_unlock_all(self) -> None:
+        """Force unlock all groups."""
+        async with self._queue_not_empty:
+            self._locked.clear()
+            self._lock_timestamps.clear()
+            self._queue_not_empty.notify_all()
```
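The new `GroupQueue` hands out at most one in-flight item per group: `get()` locks the group derived from the configured `group_key` attribute and `commit()` pops the item and releases the lock, so two workers never process events of the same group concurrently while different groups proceed in parallel. A usage sketch with a made-up event type:

```python
# Usage sketch for GroupQueue with a made-up event type; attribute names here
# (repo, payload) are illustrative and not part of the package.
import asyncio
from dataclasses import dataclass

from port_ocean.core.handlers.queue.group_queue import GroupQueue


@dataclass
class FakeEvent:
    repo: str
    payload: dict


async def worker(name: str, queue: GroupQueue[FakeEvent]) -> None:
    while True:
        event = await queue.get()  # locks event.repo's group
        print(name, "processing", event.repo, event.payload)
        await asyncio.sleep(0.1)
        await queue.commit()  # pops the item and unlocks the group


async def main() -> None:
    queue: GroupQueue[FakeEvent] = GroupQueue(group_key="repo")
    for i in range(3):
        await queue.put(FakeEvent(repo="repo-a", payload={"i": i}))
        await queue.put(FakeEvent(repo="repo-b", payload={"i": i}))

    workers = [asyncio.create_task(worker(f"w{n}", queue)) for n in range(2)]
    await queue.teardown()  # returns once all items are committed
    for w in workers:
        w.cancel()
    await asyncio.gather(*workers, return_exceptions=True)


asyncio.run(main())
```

Because the current group is tracked in a `ContextVar`, `get()` and `commit()` must be called from the same task, which matches the per-worker loop used by the processor manager below.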
{port_ocean-0.26.3 → port_ocean-0.27.10}/port_ocean/core/handlers/webhook/processor_manager.py RENAMED

```diff
@@ -1,4 +1,5 @@
-from typing import Dict, Type, Set
+from typing import Dict, Tuple, Type, Set, List
+
 from fastapi import APIRouter, Request
 from loguru import logger
 import asyncio
@@ -6,6 +7,7 @@ import asyncio
 from port_ocean.context.ocean import ocean
 from port_ocean.context.event import EventType, event_context
 from port_ocean.core.handlers.port_app_config.models import ResourceConfig
+from port_ocean.core.handlers.queue.abstract_queue import AbstractQueue
 from port_ocean.core.integrations.mixins.events import EventsMixin
 from port_ocean.core.integrations.mixins.live_events import LiveEventsMixin
 from port_ocean.exceptions.webhook_processor import WebhookEventNotSupportedError
@@ -15,7 +17,7 @@ from port_ocean.context.event import event

 from .abstract_webhook_processor import AbstractWebhookProcessor
 from port_ocean.utils.signal import SignalHandler
-from port_ocean.core.handlers.queue import
+from port_ocean.core.handlers.queue import LocalQueue


 class LiveEventsProcessorManager(LiveEventsMixin, EventsMixin):
@@ -31,22 +33,103 @@ class LiveEventsProcessorManager(LiveEventsMixin, EventsMixin):
         self._router = router
         self._processors_classes: Dict[str, list[Type[AbstractWebhookProcessor]]] = {}
         self._event_queues: Dict[str, AbstractQueue[WebhookEvent]] = {}
-        self.
+        self._event_processor_tasks: Set[asyncio.Task[None]] = set()
         self._max_event_processing_seconds = max_event_processing_seconds
         self._max_wait_seconds_before_shutdown = max_wait_seconds_before_shutdown
         signal_handler.register(self.shutdown)

     async def start_processing_event_messages(self) -> None:
-        """Start processing events for all registered paths"""
+        """Start processing events for all registered paths with N workers each."""
         await self.initialize_handlers()
         loop = asyncio.get_event_loop()
+        config = ocean.integration.context.config
+
         for path in self._event_queues.keys():
+            for worker_id in range(0, config.event_workers_count):
+                task = loop.create_task(self._process_webhook_events(path, worker_id))
+                self._event_processor_tasks.add(task)
+                task.add_done_callback(self._event_processor_tasks.discard)
+
+    async def _process_webhook_events(self, path: str, worker_id: int) -> None:
+        """Process webhook events from the queue for a given path."""
+        queue = self._event_queues[path]
+        while True:
+            event = None
+            matching_processors: List[
+                Tuple[ResourceConfig, AbstractWebhookProcessor]
+            ] = []
             try:
-
-
-
+                event = await queue.get()
+                with logger.contextualize(
+                    worker=worker_id,
+                    webhook_path=path,
+                    trace_id=event.trace_id,
+                ):
+                    async with event_context(
+                        EventType.HTTP_REQUEST,
+                        trigger_type="machine",
+                    ):
+
+                        await ocean.integration.port_app_config_handler.get_port_app_config(
+                            use_cache=False
+                        )
+                        matching_processors = await self._extract_matching_processors(
+                            event, path
+                        )
+
+                        processing_results = await asyncio.gather(
+                            *(
+                                self._process_single_event(proc, path, res)
+                                for res, proc in matching_processors
+                            ),
+                            return_exceptions=True,
+                        )
+
+                        successful_results: List[WebhookEventRawResults] = []
+                        failed_exceptions: List[Exception] = []
+
+                        for result in processing_results:
+                            if isinstance(result, WebhookEventRawResults):
+                                successful_results.append(result)
+                            elif isinstance(result, Exception):
+                                failed_exceptions.append(result)
+
+                        if successful_results:
+                            logger.info(
+                                "Successfully processed webhook events",
+                                success_count=len(successful_results),
+                                failure_count=len(failed_exceptions),
+                            )
+
+                        if failed_exceptions:
+                            logger.warning(
+                                "Some webhook events failed processing",
+                                failures=[str(e) for e in failed_exceptions],
+                            )
+
+                        await self.sync_raw_results(successful_results)
+
+            except asyncio.CancelledError:
+                logger.info(f"Worker {worker_id} for {path} shutting down")
+                for _, proc in matching_processors:
+                    await proc.cancel()
+                    self._timestamp_event_error(proc.event)
+                break
             except Exception as e:
-                logger.exception(
+                logger.exception(
+                    f"Unexpected error in worker {worker_id} for {path}: {e}"
+                )
+                for _, proc in matching_processors:
+                    self._timestamp_event_error(proc.event)
+            finally:
+                try:
+                    if event is not None:
+                        await queue.commit()
+
+                except Exception as e:
+                    logger.exception(
+                        f"Unexpected error in queue commit in worker {worker_id} for {path}: {e}"
+                    )

     async def _extract_matching_processors(
         self, webhook_event: WebhookEvent, path: str
@@ -91,70 +174,6 @@ class LiveEventsProcessorManager(LiveEventsMixin, EventsMixin):
         )
         return created_processors

-    async def process_queue(self, path: str) -> None:
-        """Process events for a specific path in order"""
-        while True:
-            matching_processors_with_resource: list[
-                tuple[ResourceConfig, AbstractWebhookProcessor]
-            ] = []
-            webhook_event: WebhookEvent | None = None
-            try:
-                queue = self._event_queues[path]
-                webhook_event = await queue.get()
-                with logger.contextualize(
-                    webhook_path=path, trace_id=webhook_event.trace_id
-                ):
-                    async with event_context(
-                        EventType.HTTP_REQUEST,
-                        trigger_type="machine",
-                    ):
-                        # This forces the Processor manager to fetch the latest port app config for each event
-                        await ocean.integration.port_app_config_handler.get_port_app_config(
-                            use_cache=False
-                        )
-                        matching_processors_with_resource = (
-                            await self._extract_matching_processors(webhook_event, path)
-                        )
-                        webhook_event_raw_results_for_all_resources = await asyncio.gather(
-                            *(
-                                self._process_single_event(processor, path, resource)
-                                for resource, processor in matching_processors_with_resource
-                            ),
-                            return_exceptions=True,
-                        )
-
-                        successful_raw_results: list[WebhookEventRawResults] = [
-                            result
-                            for result in webhook_event_raw_results_for_all_resources
-                            if isinstance(result, WebhookEventRawResults)
-                        ]
-
-                        if successful_raw_results:
-                            logger.info(
-                                "Exporting raw event results to entities",
-                                webhook_event_raw_results_for_all_resources_length=len(
-                                    successful_raw_results
-                                ),
-                            )
-                            await self.sync_raw_results(successful_raw_results)
-            except asyncio.CancelledError:
-                logger.info(f"Queue processor for {path} is shutting down")
-                for _, processor in matching_processors_with_resource:
-                    await processor.cancel()
-                    self._timestamp_event_error(processor.event)
-                break
-            except Exception as e:
-                logger.exception(
-                    f"Unexpected error in queue processor for {path}: {str(e)}"
-                )
-                for _, processor in matching_processors_with_resource:
-                    self._timestamp_event_error(processor.event)
-            finally:
-                if webhook_event:
-                    await self._event_queues[path].commit()
-                    # Prevents committing empty events for cases where we shutdown while processing
-                    webhook_event = None
-
     def _timestamp_event_error(self, event: WebhookEvent) -> None:
         """Timestamp an event as having an error"""
         event.set_timestamp(LiveEventTimestamp.FinishedProcessingWithError)
@@ -279,12 +298,14 @@ class LiveEventsProcessorManager(LiveEventsMixin, EventsMixin):
             methods=["POST"],
         )

-    async def
-
-
+    async def _cancel_all_event_processors(
+        self,
+    ) -> None:
+        """Cancel all event processor tasks"""
+        for task in self._event_processor_tasks:
             task.cancel()

-        await asyncio.gather(*self.
+        await asyncio.gather(*self._event_processor_tasks, return_exceptions=True)

     async def shutdown(self) -> None:
         """Gracefully shutdown all queue processors"""
@@ -299,5 +320,3 @@
             )
         except asyncio.TimeoutError:
             logger.warning("Shutdown timed out waiting for queues to empty")
-
-        await self._cancel_all_tasks()
```
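The rewritten manager replaces the single `process_queue` loop per path with `event_workers_count` worker tasks per path, each pulling from the shared queue and always committing in a `finally` block. A standalone sketch of that fan-out pattern, using a plain `asyncio.Queue` in place of the manager's `AbstractQueue`; names here are illustrative, not the manager's real attributes.

```python
# Standalone sketch of the per-path worker fan-out: N tasks share one queue,
# each handles an event and acknowledges it in a finally block.
import asyncio
from typing import Any


async def webhook_worker(
    path: str, worker_id: int, queue: asyncio.Queue[dict[str, Any]]
) -> None:
    while True:
        event = None
        try:
            event = await queue.get()
            print(f"worker {worker_id} on {path} handling {event}")
        except asyncio.CancelledError:
            break
        finally:
            if event is not None:
                queue.task_done()  # commit-equivalent: mark the event processed


async def main() -> None:
    queue: asyncio.Queue[dict[str, Any]] = asyncio.Queue()
    event_workers_count = 3  # mirrors the new config field
    workers = [
        asyncio.create_task(webhook_worker("/webhook", i, queue))
        for i in range(event_workers_count)
    ]
    for i in range(5):
        await queue.put({"id": i})
    await queue.join()  # wait for all events to be acknowledged
    for w in workers:
        w.cancel()
    await asyncio.gather(*workers, return_exceptions=True)


asyncio.run(main())
```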
|