qalita 2.6.2__py3-none-any.whl → 2.8.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qalita/__main__.py +29 -19
- qalita/_frontend/.next/BUILD_ID +1 -0
- qalita/_frontend/.next/app-path-routes-manifest.json +17 -0
- qalita/_frontend/.next/build-manifest.json +20 -0
- qalita/_frontend/.next/package.json +1 -0
- qalita/_frontend/.next/prerender-manifest.json +109 -0
- qalita/_frontend/.next/required-server-files.json +164 -0
- qalita/_frontend/.next/routes-manifest.json +143 -0
- qalita/_frontend/.next/server/app/_global-error/page/app-paths-manifest.json +3 -0
- qalita/_frontend/.next/server/app/_global-error/page/build-manifest.json +17 -0
- qalita/_frontend/.next/server/app/_global-error/page/next-font-manifest.json +6 -0
- qalita/_frontend/.next/server/app/_global-error/page/react-loadable-manifest.json +1 -0
- qalita/_frontend/.next/server/app/_global-error/page/server-reference-manifest.json +4 -0
- qalita/_frontend/.next/server/app/_global-error/page.js +10 -0
- qalita/_frontend/.next/server/app/_global-error/page.js.map +5 -0
- qalita/_frontend/.next/server/app/_global-error/page.js.nft.json +1 -0
- qalita/_frontend/.next/server/app/_global-error/page_client-reference-manifest.js +2 -0
- qalita/_frontend/.next/server/app/_global-error.html +2 -0
- qalita/_frontend/.next/server/app/_global-error.meta +15 -0
- qalita/_frontend/.next/server/app/_global-error.rsc +12 -0
- qalita/_frontend/.next/server/app/_global-error.segments/__PAGE__.segment.rsc +5 -0
- qalita/_frontend/.next/server/app/_global-error.segments/_full.segment.rsc +12 -0
- qalita/_frontend/.next/server/app/_global-error.segments/_head.segment.rsc +5 -0
- qalita/_frontend/.next/server/app/_global-error.segments/_index.segment.rsc +4 -0
- qalita/_frontend/.next/server/app/_global-error.segments/_tree.segment.rsc +1 -0
- qalita/_frontend/.next/server/app/_not-found/page/app-paths-manifest.json +3 -0
- qalita/_frontend/.next/server/app/_not-found/page/build-manifest.json +17 -0
- qalita/_frontend/.next/server/app/_not-found/page/next-font-manifest.json +6 -0
- qalita/_frontend/.next/server/app/_not-found/page/react-loadable-manifest.json +1 -0
- qalita/_frontend/.next/server/app/_not-found/page/server-reference-manifest.json +4 -0
- qalita/_frontend/.next/server/app/_not-found/page.js +13 -0
- qalita/_frontend/.next/server/app/_not-found/page.js.map +5 -0
- qalita/_frontend/.next/server/app/_not-found/page.js.nft.json +1 -0
- qalita/_frontend/.next/server/app/_not-found/page_client-reference-manifest.js +2 -0
- qalita/_frontend/.next/server/app/_not-found.html +1 -0
- qalita/_frontend/.next/server/app/_not-found.meta +16 -0
- qalita/_frontend/.next/server/app/_not-found.rsc +15 -0
- qalita/_frontend/.next/server/app/_not-found.segments/_full.segment.rsc +15 -0
- qalita/_frontend/.next/server/app/_not-found.segments/_head.segment.rsc +5 -0
- qalita/_frontend/.next/server/app/_not-found.segments/_index.segment.rsc +7 -0
- qalita/_frontend/.next/server/app/_not-found.segments/_not-found/__PAGE__.segment.rsc +5 -0
- qalita/_frontend/.next/server/app/_not-found.segments/_not-found.segment.rsc +4 -0
- qalita/_frontend/.next/server/app/_not-found.segments/_tree.segment.rsc +3 -0
- qalita/_frontend/.next/server/app/api/context/[...path]/route/app-paths-manifest.json +3 -0
- qalita/_frontend/.next/server/app/api/context/[...path]/route/build-manifest.json +11 -0
- qalita/_frontend/.next/server/app/api/context/[...path]/route/server-reference-manifest.json +4 -0
- qalita/_frontend/.next/server/app/api/context/[...path]/route.js +6 -0
- qalita/_frontend/.next/server/app/api/context/[...path]/route.js.map +5 -0
- qalita/_frontend/.next/server/app/api/context/[...path]/route.js.nft.json +1 -0
- qalita/_frontend/.next/server/app/api/context/[...path]/route_client-reference-manifest.js +2 -0
- qalita/_frontend/.next/server/app/api/contexts/route/app-paths-manifest.json +3 -0
- qalita/_frontend/.next/server/app/api/contexts/route/build-manifest.json +11 -0
- qalita/_frontend/.next/server/app/api/contexts/route/server-reference-manifest.json +4 -0
- qalita/_frontend/.next/server/app/api/contexts/route.js +6 -0
- qalita/_frontend/.next/server/app/api/contexts/route.js.map +5 -0
- qalita/_frontend/.next/server/app/api/contexts/route.js.nft.json +1 -0
- qalita/_frontend/.next/server/app/api/contexts/route_client-reference-manifest.js +2 -0
- qalita/_frontend/.next/server/app/api/dashboard/route/app-paths-manifest.json +3 -0
- qalita/_frontend/.next/server/app/api/dashboard/route/build-manifest.json +11 -0
- qalita/_frontend/.next/server/app/api/dashboard/route/server-reference-manifest.json +4 -0
- qalita/_frontend/.next/server/app/api/dashboard/route.js +6 -0
- qalita/_frontend/.next/server/app/api/dashboard/route.js.map +5 -0
- qalita/_frontend/.next/server/app/api/dashboard/route.js.nft.json +1 -0
- qalita/_frontend/.next/server/app/api/dashboard/route_client-reference-manifest.js +2 -0
- qalita/_frontend/.next/server/app/api/pack/push/route/app-paths-manifest.json +3 -0
- qalita/_frontend/.next/server/app/api/pack/push/route/build-manifest.json +11 -0
- qalita/_frontend/.next/server/app/api/pack/push/route/server-reference-manifest.json +4 -0
- qalita/_frontend/.next/server/app/api/pack/push/route.js +6 -0
- qalita/_frontend/.next/server/app/api/pack/push/route.js.map +5 -0
- qalita/_frontend/.next/server/app/api/pack/push/route.js.nft.json +1 -0
- qalita/_frontend/.next/server/app/api/pack/push/route_client-reference-manifest.js +2 -0
- qalita/_frontend/.next/server/app/api/push/route/app-paths-manifest.json +3 -0
- qalita/_frontend/.next/server/app/api/push/route/build-manifest.json +11 -0
- qalita/_frontend/.next/server/app/api/push/route/server-reference-manifest.json +4 -0
- qalita/_frontend/.next/server/app/api/push/route.js +6 -0
- qalita/_frontend/.next/server/app/api/push/route.js.map +5 -0
- qalita/_frontend/.next/server/app/api/push/route.js.nft.json +1 -0
- qalita/_frontend/.next/server/app/api/push/route_client-reference-manifest.js +2 -0
- qalita/_frontend/.next/server/app/api/sources/[...path]/route/app-paths-manifest.json +3 -0
- qalita/_frontend/.next/server/app/api/sources/[...path]/route/build-manifest.json +11 -0
- qalita/_frontend/.next/server/app/api/sources/[...path]/route/server-reference-manifest.json +4 -0
- qalita/_frontend/.next/server/app/api/sources/[...path]/route.js +6 -0
- qalita/_frontend/.next/server/app/api/sources/[...path]/route.js.map +5 -0
- qalita/_frontend/.next/server/app/api/sources/[...path]/route.js.nft.json +1 -0
- qalita/_frontend/.next/server/app/api/sources/[...path]/route_client-reference-manifest.js +2 -0
- qalita/_frontend/.next/server/app/api/sources/[id]/route/app-paths-manifest.json +3 -0
- qalita/_frontend/.next/server/app/api/sources/[id]/route/build-manifest.json +11 -0
- qalita/_frontend/.next/server/app/api/sources/[id]/route/server-reference-manifest.json +4 -0
- qalita/_frontend/.next/server/app/api/sources/[id]/route.js +6 -0
- qalita/_frontend/.next/server/app/api/sources/[id]/route.js.map +5 -0
- qalita/_frontend/.next/server/app/api/sources/[id]/route.js.nft.json +1 -0
- qalita/_frontend/.next/server/app/api/sources/[id]/route_client-reference-manifest.js +2 -0
- qalita/_frontend/.next/server/app/api/sources/preview/route/app-paths-manifest.json +3 -0
- qalita/_frontend/.next/server/app/api/sources/preview/route/build-manifest.json +11 -0
- qalita/_frontend/.next/server/app/api/sources/preview/route/server-reference-manifest.json +4 -0
- qalita/_frontend/.next/server/app/api/sources/preview/route.js +6 -0
- qalita/_frontend/.next/server/app/api/sources/preview/route.js.map +5 -0
- qalita/_frontend/.next/server/app/api/sources/preview/route.js.nft.json +1 -0
- qalita/_frontend/.next/server/app/api/sources/preview/route_client-reference-manifest.js +2 -0
- qalita/_frontend/.next/server/app/api/validate/route/app-paths-manifest.json +3 -0
- qalita/_frontend/.next/server/app/api/validate/route/build-manifest.json +11 -0
- qalita/_frontend/.next/server/app/api/validate/route/server-reference-manifest.json +4 -0
- qalita/_frontend/.next/server/app/api/validate/route.js +6 -0
- qalita/_frontend/.next/server/app/api/validate/route.js.map +5 -0
- qalita/_frontend/.next/server/app/api/validate/route.js.nft.json +1 -0
- qalita/_frontend/.next/server/app/api/validate/route_client-reference-manifest.js +2 -0
- qalita/_frontend/.next/server/app/page/app-paths-manifest.json +3 -0
- qalita/_frontend/.next/server/app/page/build-manifest.json +17 -0
- qalita/_frontend/.next/server/app/page/next-font-manifest.json +6 -0
- qalita/_frontend/.next/server/app/page/react-loadable-manifest.json +1 -0
- qalita/_frontend/.next/server/app/page/server-reference-manifest.json +4 -0
- qalita/_frontend/.next/server/app/page.js +15 -0
- qalita/_frontend/.next/server/app/page.js.map +5 -0
- qalita/_frontend/.next/server/app/page.js.nft.json +1 -0
- qalita/_frontend/.next/server/app/page_client-reference-manifest.js +2 -0
- qalita/_frontend/.next/server/app/sources/add/page/app-paths-manifest.json +3 -0
- qalita/_frontend/.next/server/app/sources/add/page/build-manifest.json +17 -0
- qalita/_frontend/.next/server/app/sources/add/page/next-font-manifest.json +6 -0
- qalita/_frontend/.next/server/app/sources/add/page/react-loadable-manifest.json +1 -0
- qalita/_frontend/.next/server/app/sources/add/page/server-reference-manifest.json +4 -0
- qalita/_frontend/.next/server/app/sources/add/page.js +15 -0
- qalita/_frontend/.next/server/app/sources/add/page.js.map +5 -0
- qalita/_frontend/.next/server/app/sources/add/page.js.nft.json +1 -0
- qalita/_frontend/.next/server/app/sources/add/page_client-reference-manifest.js +2 -0
- qalita/_frontend/.next/server/app/sources/add.html +1 -0
- qalita/_frontend/.next/server/app/sources/add.meta +16 -0
- qalita/_frontend/.next/server/app/sources/add.rsc +19 -0
- qalita/_frontend/.next/server/app/sources/add.segments/_full.segment.rsc +19 -0
- qalita/_frontend/.next/server/app/sources/add.segments/_head.segment.rsc +5 -0
- qalita/_frontend/.next/server/app/sources/add.segments/_index.segment.rsc +7 -0
- qalita/_frontend/.next/server/app/sources/add.segments/_tree.segment.rsc +3 -0
- qalita/_frontend/.next/server/app/sources/add.segments/sources/add/__PAGE__.segment.rsc +9 -0
- qalita/_frontend/.next/server/app/sources/add.segments/sources/add.segment.rsc +4 -0
- qalita/_frontend/.next/server/app/sources/add.segments/sources.segment.rsc +4 -0
- qalita/_frontend/.next/server/app/sources/edit/[id]/page/app-paths-manifest.json +3 -0
- qalita/_frontend/.next/server/app/sources/edit/[id]/page/build-manifest.json +17 -0
- qalita/_frontend/.next/server/app/sources/edit/[id]/page/next-font-manifest.json +6 -0
- qalita/_frontend/.next/server/app/sources/edit/[id]/page/react-loadable-manifest.json +1 -0
- qalita/_frontend/.next/server/app/sources/edit/[id]/page/server-reference-manifest.json +4 -0
- qalita/_frontend/.next/server/app/sources/edit/[id]/page.js +15 -0
- qalita/_frontend/.next/server/app/sources/edit/[id]/page.js.map +5 -0
- qalita/_frontend/.next/server/app/sources/edit/[id]/page.js.nft.json +1 -0
- qalita/_frontend/.next/server/app/sources/edit/[id]/page_client-reference-manifest.js +2 -0
- qalita/_frontend/.next/server/app/sources/page/app-paths-manifest.json +3 -0
- qalita/_frontend/.next/server/app/sources/page/build-manifest.json +17 -0
- qalita/_frontend/.next/server/app/sources/page/next-font-manifest.json +6 -0
- qalita/_frontend/.next/server/app/sources/page/react-loadable-manifest.json +1 -0
- qalita/_frontend/.next/server/app/sources/page/server-reference-manifest.json +4 -0
- qalita/_frontend/.next/server/app/sources/page.js +15 -0
- qalita/_frontend/.next/server/app/sources/page.js.map +5 -0
- qalita/_frontend/.next/server/app/sources/page.js.nft.json +1 -0
- qalita/_frontend/.next/server/app/sources/page_client-reference-manifest.js +2 -0
- qalita/_frontend/.next/server/app/sources.html +1 -0
- qalita/_frontend/.next/server/app/sources.meta +15 -0
- qalita/_frontend/.next/server/app/sources.rsc +19 -0
- qalita/_frontend/.next/server/app/sources.segments/_full.segment.rsc +19 -0
- qalita/_frontend/.next/server/app/sources.segments/_head.segment.rsc +5 -0
- qalita/_frontend/.next/server/app/sources.segments/_index.segment.rsc +7 -0
- qalita/_frontend/.next/server/app/sources.segments/_tree.segment.rsc +3 -0
- qalita/_frontend/.next/server/app/sources.segments/sources/__PAGE__.segment.rsc +9 -0
- qalita/_frontend/.next/server/app/sources.segments/sources.segment.rsc +4 -0
- qalita/_frontend/.next/server/app-paths-manifest.json +17 -0
- qalita/_frontend/.next/server/chunks/[externals]_next_dist_f9e520a4._.js +3 -0
- qalita/_frontend/.next/server/chunks/[root-of-the-server]__2b503b6f._.js +3 -0
- qalita/_frontend/.next/server/chunks/[root-of-the-server]__2f426c3e._.js +3 -0
- qalita/_frontend/.next/server/chunks/[root-of-the-server]__574da4cd._.js +3 -0
- qalita/_frontend/.next/server/chunks/[root-of-the-server]__86997be6._.js +3 -0
- qalita/_frontend/.next/server/chunks/[root-of-the-server]__8f4e8c59._.js +3 -0
- qalita/_frontend/.next/server/chunks/[root-of-the-server]__bf0c3d33._.js +28 -0
- qalita/_frontend/.next/server/chunks/[root-of-the-server]__cf78fda6._.js +3 -0
- qalita/_frontend/.next/server/chunks/[root-of-the-server]__e868c9e1._.js +3 -0
- qalita/_frontend/.next/server/chunks/[root-of-the-server]__ebaae723._.js +3 -0
- qalita/_frontend/.next/server/chunks/[root-of-the-server]__ef006352._.js +3 -0
- qalita/_frontend/.next/server/chunks/[root-of-the-server]__f408c708._.js +21 -0
- qalita/_frontend/.next/server/chunks/[turbopack]_runtime.js +795 -0
- qalita/_frontend/.next/server/chunks/_next-internal_server_app_api_context_[___path]_route_actions_74b89a6c.js +3 -0
- qalita/_frontend/.next/server/chunks/_next-internal_server_app_api_contexts_route_actions_4ef2babb.js +3 -0
- qalita/_frontend/.next/server/chunks/_next-internal_server_app_api_dashboard_route_actions_310b0824.js +3 -0
- qalita/_frontend/.next/server/chunks/_next-internal_server_app_api_pack_push_route_actions_b4cb9223.js +3 -0
- qalita/_frontend/.next/server/chunks/_next-internal_server_app_api_push_route_actions_a54c7a3c.js +3 -0
- qalita/_frontend/.next/server/chunks/_next-internal_server_app_api_sources_[___path]_route_actions_9bd41193.js +3 -0
- qalita/_frontend/.next/server/chunks/_next-internal_server_app_api_sources_[id]_route_actions_2549e4c4.js +3 -0
- qalita/_frontend/.next/server/chunks/_next-internal_server_app_api_sources_preview_route_actions_382d6286.js +3 -0
- qalita/_frontend/.next/server/chunks/_next-internal_server_app_api_validate_route_actions_bdcfdeea.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__17f2c9b6._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__1d5b5394._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__21824174._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__296a25b5._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__336e4c46._.js +4 -0
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__3e948587._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__655f8d11._.js +10 -0
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__7876511a._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__8d98de57._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__96e5a947._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__9ca3089a._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__be91267c._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__bf3c5174._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/[root-of-the-server]__d15765f1._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/[turbopack]_runtime.js +795 -0
- qalita/_frontend/.next/server/chunks/ssr/_069a688e._.js +4 -0
- qalita/_frontend/.next/server/chunks/ssr/_16c750ac._.js +4 -0
- qalita/_frontend/.next/server/chunks/ssr/_37435df3._.js +4 -0
- qalita/_frontend/.next/server/chunks/ssr/_404f6e81._.js +5 -0
- qalita/_frontend/.next/server/chunks/ssr/_62a2ff63._.js +4 -0
- qalita/_frontend/.next/server/chunks/ssr/_6a67f6f0._.js +5 -0
- qalita/_frontend/.next/server/chunks/ssr/_cafb65ac._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/_cb7b44d6._.js +6 -0
- qalita/_frontend/.next/server/chunks/ssr/_d44c43ed._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/_next-internal_server_app__global-error_page_actions_75761787.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/_next-internal_server_app__not-found_page_actions_554ec2bf.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/_next-internal_server_app_page_actions_39d4fc33.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/_next-internal_server_app_sources_add_page_actions_8b66db20.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/_next-internal_server_app_sources_edit_[id]_page_actions_0c144362.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/_next-internal_server_app_sources_page_actions_bed02d10.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/components_DashboardContent_tsx_c3635665._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/node_modules_next_dist_08570d7f._.js +4 -0
- qalita/_frontend/.next/server/chunks/ssr/node_modules_next_dist_4b9a0874._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/node_modules_next_dist_77ec7569._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/node_modules_next_dist_client_components_9774470f._.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/node_modules_next_dist_client_components_builtin_forbidden_45780354.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/node_modules_next_dist_client_components_builtin_global-error_ece394eb.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/node_modules_next_dist_client_components_builtin_unauthorized_15817684.js +3 -0
- qalita/_frontend/.next/server/chunks/ssr/node_modules_next_dist_esm_build_templates_app-page_d92e159d.js +4 -0
- qalita/_frontend/.next/server/chunks/ssr/node_modules_next_dist_esm_eedfc1fd._.js +6 -0
- qalita/_frontend/.next/server/chunks/ssr/node_modules_next_dist_server_route-modules_app-page_vendored_a443a6bf._.js +3 -0
- qalita/_frontend/.next/server/functions-config-manifest.json +14 -0
- qalita/_frontend/.next/server/middleware-build-manifest.js +21 -0
- qalita/_frontend/.next/server/middleware-manifest.json +6 -0
- qalita/_frontend/.next/server/middleware.js +5 -0
- qalita/_frontend/.next/server/next-font-manifest.js +1 -0
- qalita/_frontend/.next/server/next-font-manifest.json +6 -0
- qalita/_frontend/.next/server/pages/404.html +1 -0
- qalita/_frontend/.next/server/pages/500.html +2 -0
- qalita/_frontend/.next/server/pages-manifest.json +4 -0
- qalita/_frontend/.next/server/server-reference-manifest.js +1 -0
- qalita/_frontend/.next/server/server-reference-manifest.json +5 -0
- qalita/_frontend/.next/static/chunks/02a64570f0a14789.js +1 -0
- qalita/_frontend/.next/static/chunks/{7340adf74ff47ec0.js → 0b082245f106d665.js} +1 -1
- qalita/_frontend/.next/static/chunks/27b3ba70c7ef50a8.js +1 -0
- qalita/_frontend/.next/static/chunks/517e9b74d1a3c0ce.js +1 -0
- qalita/_frontend/.next/static/chunks/58689c96b0676c41.js +1 -0
- qalita/_frontend/.next/static/chunks/{236f7e5abd6f09ff.js → 89ba62a8ba9b79ce.js} +2 -2
- qalita/_frontend/.next/static/chunks/acc5da18ff20daa1.js +3 -0
- qalita/_frontend/.next/static/chunks/bdc8a8e7721f5675.js +2 -0
- qalita/_frontend/.next/static/chunks/e0df86cbf44bbf9f.js +1 -0
- qalita/_frontend/.next/static/chunks/e4c3a252774ab7fd.css +1 -0
- qalita/_frontend/.next/static/chunks/e6ce59ba40b863f2.js +1 -0
- qalita/_frontend/.next/static/chunks/{30ea11065999f7ac.js → ec4b1f1e3cd3ae43.js} +1 -1
- qalita/_frontend/.next/static/chunks/{turbopack-25186fc8e1264445.js → turbopack-d21156d03715fafa.js} +1 -1
- qalita/_frontend/node_modules/@swc/helpers/package.json +225 -2
- qalita/_frontend/node_modules/next/node_modules/@swc/helpers/package.json +471 -0
- qalita/_frontend/package.json +12 -1
- qalita/commands/pack.py +61 -8
- qalita/commands/worker.py +46 -20
- qalita/commands/worker_grpc.py +941 -0
- qalita/grpc/__init__.py +8 -0
- qalita/grpc/client.py +693 -0
- qalita/grpc/protos/__init__.py +4 -0
- qalita/grpc/protos/qalita.proto +391 -0
- qalita/grpc/protos/qalita_pb2.py +112 -0
- qalita/grpc/protos/qalita_pb2_grpc.py +588 -0
- qalita/internal/data_preview.py +565 -0
- qalita/internal/request.py +2 -1
- qalita/internal/utils.py +1 -1
- qalita/web/app.py +6 -2
- qalita/web/blueprints/dashboard.py +12 -44
- qalita/web/blueprints/helpers.py +119 -46
- qalita/web/blueprints/sources.py +5 -99
- qalita/web/blueprints/workers.py +6 -6
- {qalita-2.6.2.dist-info → qalita-2.8.0.dist-info}/METADATA +7 -1
- {qalita-2.6.2.dist-info → qalita-2.8.0.dist-info}/RECORD +279 -33
- qalita/_frontend/.next/static/chunks/0f84739db4a8acc7.js +0 -1
- qalita/_frontend/.next/static/chunks/1107bdca1eff6d34.css +0 -1
- qalita/_frontend/.next/static/chunks/4b0c5de8d4cc313f.js +0 -1
- qalita/_frontend/.next/static/chunks/4dd28bc3f722184a.js +0 -2
- qalita/_frontend/.next/static/chunks/711d597b816a80c1.js +0 -1
- qalita/_frontend/.next/static/chunks/bb29c2be4df20a40.js +0 -1
- qalita/_frontend/.next/static/chunks/ecf559101be0ae12.js +0 -3
- /qalita/_frontend/.next/static/{BNkGe67QNZQE3gQg47i0I → oDJBrlQBPl3vggds1RNfL}/_buildManifest.js +0 -0
- /qalita/_frontend/.next/static/{BNkGe67QNZQE3gQg47i0I → oDJBrlQBPl3vggds1RNfL}/_clientMiddlewareManifest.json +0 -0
- /qalita/_frontend/.next/static/{BNkGe67QNZQE3gQg47i0I → oDJBrlQBPl3vggds1RNfL}/_ssgManifest.js +0 -0
- /qalita/_frontend/node_modules/{@swc → next/node_modules/@swc}/helpers/cjs/_interop_require_default.cjs +0 -0
- /qalita/_frontend/node_modules/{@swc → next/node_modules/@swc}/helpers/cjs/_interop_require_wildcard.cjs +0 -0
- {qalita-2.6.2.dist-info → qalita-2.8.0.dist-info}/WHEEL +0 -0
- {qalita-2.6.2.dist-info → qalita-2.8.0.dist-info}/entry_points.txt +0 -0
- {qalita-2.6.2.dist-info → qalita-2.8.0.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,941 @@
|
|
|
1
|
+
"""
|
|
2
|
+
# QALITA (c) COPYRIGHT 2025 - ALL RIGHTS RESERVED -
|
|
3
|
+
gRPC-based worker implementation for real-time communication with the backend
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import os
|
|
7
|
+
import sys
|
|
8
|
+
import asyncio
|
|
9
|
+
import json
|
|
10
|
+
import time
|
|
11
|
+
import random
|
|
12
|
+
import string
|
|
13
|
+
import tarfile
|
|
14
|
+
from datetime import datetime, timezone
|
|
15
|
+
from typing import Optional, Dict
|
|
16
|
+
from shutil import copy2
|
|
17
|
+
|
|
18
|
+
import semver
|
|
19
|
+
import croniter
|
|
20
|
+
|
|
21
|
+
from qalita.internal.utils import logger, get_version, validate_token
|
|
22
|
+
from qalita.internal.request import send_request
|
|
23
|
+
from qalita.internal.data_preview import preview_source, DataPreviewResult
|
|
24
|
+
from qalita.grpc import GrpcClient
|
|
25
|
+
from qalita.grpc.protos import qalita_pb2
|
|
26
|
+
from qalita.commands.pack import run_pack
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
# In-memory guard to avoid re-scheduling the same routine within the same cron window
|
|
30
|
+
ROUTINE_LAST_SCHEDULED_UTC: Dict[int, datetime] = {}
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def _safe_extractall(tar: tarfile.TarFile, path: str) -> None:
|
|
34
|
+
"""
|
|
35
|
+
Safely extract all members from a tarfile, preventing path traversal attacks.
|
|
36
|
+
Uses the 'data' filter on Python 3.12+ for built-in security,
|
|
37
|
+
falls back to manual validation on older versions.
|
|
38
|
+
"""
|
|
39
|
+
# Python 3.12+ has built-in filter support
|
|
40
|
+
if sys.version_info >= (3, 12):
|
|
41
|
+
tar.extractall(path, filter="data") # nosec B202
|
|
42
|
+
else:
|
|
43
|
+
# Manual validation for Python 3.10, 3.11
|
|
44
|
+
abs_path = os.path.abspath(path)
|
|
45
|
+
for member in tar.getmembers():
|
|
46
|
+
member_path = os.path.join(abs_path, member.name)
|
|
47
|
+
# Resolve the path and check it's within the target directory
|
|
48
|
+
resolved_path = os.path.realpath(member_path)
|
|
49
|
+
if not resolved_path.startswith(abs_path + os.sep) and resolved_path != abs_path:
|
|
50
|
+
raise ValueError(f"Attempted path traversal in tar file: {member.name}")
|
|
51
|
+
# Reject absolute paths and paths with ..
|
|
52
|
+
if os.path.isabs(member.name) or ".." in member.name.split(os.sep):
|
|
53
|
+
raise ValueError(f"Unsafe path in tar file: {member.name}")
|
|
54
|
+
# Members have been validated above, safe to extract
|
|
55
|
+
tar.extractall(path, members=tar.getmembers()) # nosec B202
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
class GrpcWorkerRunner:
|
|
59
|
+
"""
|
|
60
|
+
gRPC-based worker runner for real-time job execution.
|
|
61
|
+
|
|
62
|
+
Uses bidirectional streaming for:
|
|
63
|
+
- Keep-alive signals
|
|
64
|
+
- Job assignments (pushed from server)
|
|
65
|
+
- Job status updates
|
|
66
|
+
"""
|
|
67
|
+
|
|
68
|
+
def __init__(self, config, name: str, mode: str, token: str, url: str):
    """Store connection settings and reset all runtime state.

    Args:
        config: CLI configuration object providing worker/source config I/O.
        name: Display name under which the worker registers.
        mode: Execution mode ("worker" or "job").
        token: API token used for authentication.
        url: Backend URL for the gRPC connection.
    """
    # Static settings supplied by the caller.
    self.config = config
    self.name = name
    self.mode = mode
    self.token = token
    self.url = url

    # Populated during authenticate().
    self.grpc_client: Optional[GrpcClient] = None
    self.worker_id: Optional[int] = None
    self.partner_id: Optional[int] = None
    self.user_info: Optional[dict] = None
    self.registries: list = []

    # Mutable state driven by the streaming run loop.
    self._running = False
    self._jobs_path: Optional[str] = None
|
|
84
|
+
async def authenticate(self) -> bool:
    """Authenticate against the backend and register this worker.

    Steps:
      1. Validate the API token locally.
      2. Connect the gRPC client and authenticate the user.
      3. Register the worker and fetch the available registries.
      4. Persist the local/remote context into the worker config.

    Returns:
        True on success, False if connection, authentication or
        registration fails.
    """
    logger.info("------------- Worker Authenticate (gRPC) -------------")

    # Validate token before opening a connection.
    # NOTE(review): the returned payload (user_id, ...) was previously
    # bound but never used; the call is kept for its validation effect —
    # assumed to fail loudly on an invalid token, confirm in
    # qalita.internal.utils.validate_token.
    validate_token(self.token)

    # Create gRPC client
    self.grpc_client = GrpcClient(
        url=self.url,
        token=self.token,
    )

    # Connect and authenticate
    if not await self.grpc_client.connect():
        logger.error("Failed to connect to gRPC server")
        return False

    auth_response = await self.grpc_client.authenticate()
    if not auth_response or not auth_response.authenticated:
        error = auth_response.error if auth_response else "Unknown error"
        logger.error(f"Authentication failed: {error}")
        return False

    self.user_info = {
        "id": auth_response.user.id,
        "email": auth_response.user.email,
        "name": auth_response.user.name,
        "partner_id": auth_response.user.partner_id,
    }
    self.partner_id = auth_response.user.partner_id

    logger.success(f"Authenticated as {self.user_info['email']}")

    # Register worker
    worker = await self.grpc_client.register_worker(
        name=self.name,
        mode=self.mode,
        status="online",
        is_active=True,
    )

    if not worker:
        logger.error("Failed to register worker")
        return False

    self.worker_id = worker.id
    logger.success(f"Worker '{self.name}' registered with ID {self.worker_id}")

    # Registries are informational here; an empty list is not fatal.
    self.registries = await self.grpc_client.get_registries()
    if not self.registries:
        logger.warning("No registries found")

    # Persist local + remote context so later commands can reuse it.
    config_json = self.config.load_worker_config() if hasattr(self.config, 'load_worker_config') else {}
    config_json["user"] = self.user_info
    config_json["context"] = config_json.get("context", {})
    config_json["context"]["local"] = {
        "url": self.url,
        "token": self.token,
        "name": self.name,
        "mode": self.mode,
    }
    config_json["context"]["remote"] = {
        "id": self.worker_id,
        "name": worker.name,
        "mode": worker.mode,
        "status": worker.status,
    }
    config_json["registries"] = [
        {"id": r.id, "name": r.name, "url": r.url}
        for r in self.registries
    ]
    self.config.save_worker_config(config_json)

    return True
|
|
162
|
+
|
|
163
|
+
async def run(self) -> None:
    """Run the worker in the specified mode.

    Creates the local jobs folder, then dispatches to the mode-specific
    loop. "job" mode is not yet supported over gRPC and only logs a
    warning; unknown modes are logged as errors.
    """
    logger.info("------------- Worker Run (gRPC) -------------")
    logger.info(f"Worker ID: {self.worker_id}")
    logger.info(f"Worker Mode: {self.mode}")

    # Create jobs folder. exist_ok avoids the check-then-create race of
    # the previous os.path.exists() guard.
    self._jobs_path = self.config.get_worker_run_path()
    os.makedirs(self._jobs_path, exist_ok=True)

    if self.mode == "worker":
        await self._run_worker_mode()
    elif self.mode == "job":
        logger.warning("Job mode not implemented for gRPC yet, use REST")
    else:
        logger.error(f"Unknown mode: {self.mode}")
|
|
180
|
+
|
|
181
|
+
async def _run_worker_mode(self) -> None:
    """Run in continuous worker mode with gRPC streaming.

    Registers callbacks for server-pushed messages (job assignments,
    routine triggers, data-preview and add-source requests, disconnects),
    then runs the bidirectional stream with a background routine-check
    task alongside it. Returns when the stream ends or on error.
    """
    logger.info(f"Worker started at {time.strftime('%X %d-%m-%Y %Z')}")

    self._running = True
    self._agent_start_datetime = datetime.now(timezone.utc)

    # Load local source IDs for routine matching
    self._local_source_ids = self._get_local_source_ids()
    logger.info(f"Local source IDs: {self._local_source_ids}")

    # Set up callbacks for incoming messages
    self.grpc_client.on_job_received(self._handle_job_assignment)
    self.grpc_client.on_routine_received(self._handle_routine_trigger)
    self.grpc_client.on_data_preview_request(self._handle_data_preview_request)
    self.grpc_client.on_add_source_request(self._handle_add_source_request)
    self.grpc_client.on_disconnect(self._handle_disconnect)

    try:
        # Start routine checking task alongside the stream
        routine_check_task = asyncio.create_task(self._routine_check_loop())

        try:
            # Start the bidirectional stream; this await runs until the
            # stream terminates.
            await self.grpc_client.start_stream()
        finally:
            # Whatever ended the stream, stop the background task and
            # await it so its cancellation completes before returning.
            routine_check_task.cancel()
            try:
                await routine_check_task
            except asyncio.CancelledError:
                pass
    except KeyboardInterrupt:
        logger.warning("KILLSIG detected. Gracefully exiting.")
        await self._shutdown()
    except Exception as e:
        # Any other stream failure also triggers a graceful shutdown.
        logger.error(f"Worker error: {e}")
        await self._shutdown()
|
|
218
|
+
|
|
219
|
+
def _get_local_source_ids(self) -> list[int]:
|
|
220
|
+
"""Get list of source IDs from local configuration."""
|
|
221
|
+
try:
|
|
222
|
+
source_conf = self.config.load_source_config(verbose=False)
|
|
223
|
+
return [
|
|
224
|
+
source["id"] for source in source_conf.get("sources", [])
|
|
225
|
+
if "id" in source
|
|
226
|
+
]
|
|
227
|
+
except Exception as e:
|
|
228
|
+
logger.warning(f"Could not load local source config: {e}")
|
|
229
|
+
return []
|
|
230
|
+
|
|
231
|
+
async def _routine_check_loop(self) -> None:
    """Periodically check routines and create jobs if needed.

    Runs until ``self._running`` is cleared or the task is cancelled
    (``run`` cancels this task when the stream ends). Each cycle calls
    ``_check_routines`` and sleeps 10 seconds; unexpected errors are
    logged and the loop continues, so one bad cycle does not stop
    scheduling.
    """
    logger.info("Routine check loop started")

    # Wait a bit for the stream to be established
    await asyncio.sleep(2)

    while self._running:
        try:
            await self._check_routines()
            # Check routines every 10 seconds
            await asyncio.sleep(10)
        except asyncio.CancelledError:
            # Cooperative shutdown: exit the loop instead of re-raising.
            logger.info("Routine check loop cancelled")
            break
        except Exception as e:
            logger.error(f"Error in routine check loop: {e}")
            # Back off for a full cycle before retrying after a failure.
            await asyncio.sleep(10)
async def _check_routines(self) -> None:
    """
    Check routines from the platform and create jobs if needed.

    This mirrors the logic from the REST-based worker:
    1. Get all active routines
    2. Check if source_id is locally defined
    3. Check if a job is already running/pending for this routine
    4. Evaluate the schedule
    5. Create job if it's time

    NOTE(review): step 3 is not visibly implemented in this method —
    duplicate suppression appears to rely on ``ROUTINE_LAST_SCHEDULED_UTC``
    (read in ``_is_time_for_job``, written in ``_create_job_for_routine``);
    confirm that is the intended mechanism.
    """
    # Nothing to match against: no locally configured sources.
    if not self._local_source_ids:
        return

    # Get all routines
    routines = await self.grpc_client.get_routines()
    if not routines:
        return

    for routine in routines:
        try:
            # Only process active routines
            if routine.status != "active":
                continue

            # Check if source is locally defined
            if routine.source_id not in self._local_source_ids:
                continue

            # Check if target (if any) is locally defined
            if routine.HasField('target_id') and routine.target_id not in self._local_source_ids:
                continue

            # Check schedule using the is_time_for_job logic
            if self._is_time_for_job(routine):
                routine_name = routine.name if routine.name else f"routine-{routine.id}"
                logger.info(f"Routine {routine.id} ({routine_name}) is due, creating job...")
                await self._create_job_for_routine(routine)

        # Per-routine isolation: one failing routine must not stop the rest.
        except Exception as e:
            logger.error(f"Error processing routine {routine.id}: {e}")
def _is_time_for_job(self, routine: qalita_pb2.Routine) -> bool:
    """Decide whether the routine's cron schedule is currently due.

    The cron iterator is seeded with the last time this worker scheduled
    the routine (``ROUTINE_LAST_SCHEDULED_UTC``) or, on first evaluation,
    with the agent start time. The routine is due when the next computed
    fire time is in the past and its start_date (if set) has been reached.
    """
    rid = routine.id
    expression = routine.schedule

    # No schedule configured -> never due.
    if not expression:
        return False

    # Earliest moment the routine is allowed to fire; fall back to the
    # minimum datetime when the field is unset or unreadable.
    floor_dt = datetime.min.replace(tzinfo=timezone.utc)
    try:
        if routine.HasField('start_date') and routine.start_date.seconds > 0:
            floor_dt = routine.start_date.ToDatetime().replace(tzinfo=timezone.utc)
    except Exception:
        pass

    # Seed the cron evaluation from the last scheduling time when known.
    previous = ROUTINE_LAST_SCHEDULED_UTC.get(rid)
    seed_dt = previous if previous else self._agent_start_datetime

    try:
        iterator = croniter.croniter(expression, seed_dt)
        upcoming = iterator.get_next(datetime)
        # croniter may return a naive datetime; normalize to UTC.
        if upcoming.tzinfo is None:
            upcoming = upcoming.replace(tzinfo=timezone.utc)

        current = datetime.now(timezone.utc)
        return current >= upcoming and current >= floor_dt
    except Exception as e:
        logger.warning(f"Error evaluating cron for routine {rid}: {e}")
        return False
async def _create_job_for_routine(self, routine: qalita_pb2.Routine) -> None:
    """Create a job for a routine and execute it immediately.

    Flow: create the job on the platform, record the scheduling time
    (so ``_is_time_for_job`` does not re-fire), then claim the job for
    this worker and run it via ``_handle_job_assignment``. Any failure
    is logged; nothing is raised to the caller.
    """
    try:
        # Prepare pack config override
        pack_config = routine.config if routine.config else None

        # Create the job
        job = await self.grpc_client.create_job(
            source_id=routine.source_id,
            pack_id=routine.pack_id,
            source_version_id=routine.source_version_id if routine.HasField('source_version_id') else None,
            target_id=routine.target_id if routine.HasField('target_id') else None,
            target_version_id=routine.target_version_id if routine.HasField('target_version_id') else None,
            pack_version_id=routine.pack_version_id if routine.HasField('pack_version_id') else None,
            routine_id=routine.id,
            pack_config_override=pack_config,
            job_type="routine",
        )

        if job:
            logger.info(f"Created job {job.id} for routine {routine.id}")
            # Record scheduling time to avoid duplicate scheduling
            ROUTINE_LAST_SCHEDULED_UTC[routine.id] = datetime.now(timezone.utc)

            # Claim and execute the job immediately
            claimed_job = await self.grpc_client.claim_job(job.id, self.worker_id)
            if claimed_job:
                logger.info(f"Claimed job {job.id}, executing...")
                await self._handle_job_assignment(claimed_job)
            else:
                # Another worker won the claim race; it will run the job.
                logger.warning(f"Failed to claim job {job.id}, another worker may have taken it")
        else:
            logger.error(f"Failed to create job for routine {routine.id}")

    except Exception as e:
        logger.error(f"Error creating job for routine {routine.id}: {e}")
async def _handle_job_assignment(self, job: qalita_pb2.Job) -> None:
    """Handle a job assignment pushed from the server.

    Unpacks the proto (optional fields become ``None`` when unset) and
    delegates to ``_execute_job``; on any failure the job is reported
    back to the platform as "failed".
    """
    logger.info(f"Job assignment received: {job.id}")

    def _optional(field: str):
        # Optional proto fields: None when the field is not set.
        return getattr(job, field) if job.HasField(field) else None

    try:
        # Execute the job with details extracted from the proto.
        await self._execute_job(
            job_id=job.id,
            source_id=job.source_id,
            source_version_id=_optional('source_version_id'),
            target_id=_optional('target_id'),
            target_version_id=_optional('target_version_id'),
            pack_id=job.pack_id,
            pack_version_id=_optional('pack_version_id'),
            pack_config_override=_optional('pack_config_override'),
        )
    except Exception as e:
        logger.error(f"Error executing job {job.id}: {e}")
        # Update job status to failed
        await self.grpc_client.send_job_status(job.id, "failed", error_message=str(e))
async def _handle_routine_trigger(self, routine: qalita_pb2.Routine) -> None:
    """Handle a routine trigger pushed from the server.

    Creates a job for the routine on the platform; execution happens
    later when the job is assigned/claimed.
    """
    logger.info(f"Routine trigger received: {routine.id}")

    # Create a job for the routine
    trigger_target = routine.target_id if routine.HasField('target_id') else None
    config_override = routine.config if routine.config else None
    job = await self.grpc_client.create_job(
        source_id=routine.source_id,
        pack_id=routine.pack_id,
        target_id=trigger_target,
        routine_id=routine.id,
        pack_config_override=config_override,
        job_type="routine",
    )

    if not job:
        logger.error(f"Failed to create job for routine {routine.id}")
    else:
        logger.info(f"Created job {job.id} for routine {routine.id}")
async def _handle_disconnect(self) -> None:
    """Record a server-side disconnect.

    No recovery is attempted here: the GrpcClient handles reconnection
    automatically, so this callback only logs the event.
    """
    logger.warning("Disconnected from server")
async def _handle_data_preview_request(self, request: qalita_pb2.DataPreviewRequest) -> None:
    """
    Handle a data preview request from the platform.

    This is called when Studio requests a preview of a data source.
    The worker:
    1. Finds the source configuration locally
    2. Reads the data using the preview module
    3. Sends back a DataPreviewResponse via gRPC stream

    Every outcome (missing source, preview error, internal error) is
    reported back over the stream keyed by ``request_id``; this method
    never raises.
    """
    request_id = request.request_id
    source_id = request.source_id
    # Defaults when the optional proto fields are unset.
    limit = request.limit if request.HasField('limit') else 1000
    query = request.query if request.HasField('query') else None

    logger.info(f"Processing data preview request {request_id} for source {source_id}")

    try:
        # Find the source in local configuration
        source_conf = self.config.load_source_config(verbose=False)
        # str() on both sides tolerates int vs string IDs in the config.
        matching_sources = [
            s for s in source_conf.get("sources", [])
            if str(s.get("id")) == str(source_id)
        ]

        if not matching_sources:
            logger.warning(f"Source {source_id} not found in local config")
            await self.grpc_client.send_data_preview_response(
                request_id=request_id,
                ok=False,
                data_type="error",
                error=f"Source {source_id} not found in local configuration",
            )
            return

        source_config = matching_sources[0]
        logger.info(f"Found source config for {source_id}: type={source_config.get('type')}")

        # Generate the preview.
        # NOTE(review): preview_source is called without await, so it runs
        # synchronously and presumably blocks the event loop while reading
        # the source — confirm whether it should be offloaded to a thread.
        result: DataPreviewResult = preview_source(
            source_config=source_config,
            limit=limit,
            query=query,
        )

        # Send the response
        await self.grpc_client.send_data_preview_response(
            request_id=request_id,
            ok=result.ok,
            data_type=result.data_type,
            error=result.error,
            headers=result.headers if result.headers else None,
            rows=result.rows if result.rows else None,
            total_rows=result.total_rows,
            content=result.content,
            binary_base64=result.binary_base64,
            mime_type=result.mime_type,
        )

        if result.ok:
            logger.success(f"Data preview sent for source {source_id} (type={result.data_type})")
        else:
            logger.warning(f"Data preview error for source {source_id}: {result.error}")

    except Exception as e:
        logger.error(f"Error handling data preview request {request_id}: {e}")
        await self.grpc_client.send_data_preview_response(
            request_id=request_id,
            ok=False,
            data_type="error",
            error=f"Internal error: {str(e)}",
        )
async def _handle_add_source_request(self, request: qalita_pb2.AddSourceRequest) -> None:
    """
    Handle an add source request from the platform.

    This is called when the Platform requests to add a new source configuration.
    The worker:
    1. Parses the source configuration
    2. Validates connectivity to the source
    3. Adds the source to local configuration
    4. Sends back an AddSourceResponse via gRPC stream

    The source is saved even when the connectivity check fails (the
    failure is reported in the response so the user can fix it later);
    only a JSON parse error or an internal error prevents saving.
    """
    request_id = request.request_id
    source_name = request.name
    source_type = request.type

    logger.info(f"Processing add source request {request_id} for '{source_name}' (type={source_type})")

    try:
        # Parse the config JSON
        config_dict = json.loads(request.config_json)

        # Load current source configuration
        source_conf = self.config.load_source_config(verbose=False)
        sources = source_conf.get("sources", [])

        # Generate a new source ID (max existing ID + 1, or 1 if no sources)
        existing_ids = [s.get("id", 0) for s in sources if isinstance(s.get("id"), int)]
        new_source_id = max(existing_ids) + 1 if existing_ids else 1

        # Build the source configuration entry
        new_source = {
            "id": new_source_id,
            "name": source_name,
            "type": source_type,
            "description": request.description,
            "visibility": request.visibility,
            "reference": request.reference,
            "sensitive": request.sensitive,
            "config": config_dict,
        }

        # Validate connectivity before saving
        connectivity_verified = False
        validation_error = None

        try:
            # Try to validate the source using preview (a quick connectivity check)
            # NOTE(review): synchronous call inside an async handler —
            # presumably blocks the event loop during validation; confirm.
            result: DataPreviewResult = preview_source(
                source_config=new_source,
                limit=1,  # Just check connectivity, don't load much data
            )
            connectivity_verified = result.ok
            if not result.ok:
                validation_error = result.error
        except Exception as e:
            validation_error = str(e)
            logger.warning(f"Connectivity validation failed for '{source_name}': {e}")

        # If connectivity failed, report error but still add the source
        # (user may want to fix configuration later)
        if not connectivity_verified:
            logger.warning(f"Adding source '{source_name}' despite connectivity check failure")

        # Add the new source to the configuration
        sources.append(new_source)
        source_conf["sources"] = sources
        self.config.config = source_conf
        self.config.save_source_config()

        # Update local source IDs cache
        self._local_source_ids = self._get_local_source_ids()

        logger.success(f"Source '{source_name}' added with ID {new_source_id}")

        # Send success response
        await self.grpc_client.send_add_source_response(
            request_id=request_id,
            ok=True,
            source_id=new_source_id,
            connectivity_verified=connectivity_verified,
            error=validation_error if not connectivity_verified else None,
        )

    except json.JSONDecodeError as e:
        error_msg = f"Invalid configuration JSON: {str(e)}"
        logger.error(f"Error handling add source request {request_id}: {error_msg}")
        await self.grpc_client.send_add_source_response(
            request_id=request_id,
            ok=False,
            error=error_msg,
            connectivity_verified=False,
        )
    except Exception as e:
        error_msg = f"Internal error: {str(e)}"
        logger.error(f"Error handling add source request {request_id}: {error_msg}")
        await self.grpc_client.send_add_source_response(
            request_id=request_id,
            ok=False,
            error=error_msg,
            connectivity_verified=False,
        )
async def _execute_job(
    self,
    job_id: int,
    source_id: int,
    source_version_id: Optional[int],
    target_id: Optional[int],
    target_version_id: Optional[int],
    pack_id: int,
    pack_version_id: Optional[int],
    pack_config_override: Optional[str] = None,
) -> None:
    """Execute a job end to end.

    Steps: mark the job running, resolve source/pack/asset via gRPC,
    download and extract the pack archive into a fresh temp folder,
    write source/target/pack config files for the pack, run the pack
    with live log streaming, upload results over REST, then report the
    final status. On any failure the job and worker are marked "failed"
    and the exception is re-raised to the caller.

    Args:
        job_id: Platform job ID to execute and report against.
        source_id / source_version_id: Source to run the pack on.
        target_id / target_version_id: Optional comparison target.
        pack_id / pack_version_id: Pack to run; latest version is
            resolved here when pack_version_id is None.
        pack_config_override: Optional pack configuration (JSON string
            or mapping) written as pack_conf.json.

    Raises:
        ValueError: when a source/pack/version/asset cannot be resolved.
        Exception: any error from pack download, extraction, or upload.
    """
    logger.info("------------- Job Run -------------")
    start_time = datetime.now(timezone.utc)
    logger.info(f"Start Time: {start_time.strftime('%Y-%m-%d %H:%M:%S')}")
    logger.info(f"Source {source_id}:{source_version_id}")
    if target_id:
        logger.info(f"Target {target_id}:{target_version_id}")
    logger.info(f"Pack {pack_id}:{pack_version_id}")

    # Update job status to running
    await self.grpc_client.send_job_status(job_id, "running", start_date=start_time)
    await self.grpc_client.send_worker_status(self.worker_id, "busy")

    try:
        # Get source info
        source = await self.grpc_client.get_source(source_id)
        if not source:
            raise ValueError(f"Source {source_id} not found")

        # Get pack info
        pack = await self.grpc_client.get_pack(pack_id)
        if not pack:
            raise ValueError(f"Pack {pack_id} not found")

        # Get latest version if not specified
        if not pack_version_id and pack.versions:
            # Highest semantic version wins.
            latest = max(pack.versions, key=lambda v: semver.parse_version_info(v.sem_ver_id))
            pack_version_id = latest.id
            pack_asset_id = latest.asset_id
        else:
            # Find the version
            for v in pack.versions:
                if v.id == pack_version_id:
                    pack_asset_id = v.asset_id
                    break
            else:
                raise ValueError(f"Pack version {pack_version_id} not found")

        # Get asset URL
        asset = await self.grpc_client.get_asset_url(pack_asset_id)
        if not asset:
            raise ValueError(f"Asset {pack_asset_id} not found")

        # Pull and extract pack (uses REST for binary download)
        pack_file_path = await self._pull_pack(pack_id, asset)
        # Pack extracts into "<archive-basename>_pack" inside the temp folder.
        pack_folder = f"{pack_file_path.split('/')[-1].split('.')[0]}_pack"

        # Create temp folder for job (timestamp + random suffix keeps it unique)
        datetime_string = start_time.strftime("%Y%m%d%H%M%S")
        random_seed = ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(5))
        temp_folder_name = f"{self._jobs_path}/{datetime_string}_{random_seed}"
        os.makedirs(temp_folder_name)

        # Copy and extract pack
        copy2(pack_file_path, temp_folder_name)
        archive_name = pack_file_path.split("/")[-1]
        archive_path = os.path.join(temp_folder_name, archive_name)

        with tarfile.open(archive_path, "r:gz") as tar:
            _safe_extractall(tar, temp_folder_name)  # nosec B202
        os.remove(archive_path)

        # Setup source config
        source_conf = self.config.load_source_config()
        matching_sources = [s for s in source_conf.get("sources", []) if str(s.get("id")) == str(source_id)]
        if not matching_sources:
            raise ValueError(f"Source {source_id} not found in local config")

        source_local = matching_sources[0]
        with open(os.path.join(temp_folder_name, pack_folder, "source_conf.json"), "w") as f:
            json.dump(source_local, f, indent=4)

        # Setup target config if provided (missing target is not an error)
        if target_id:
            matching_targets = [s for s in source_conf.get("sources", []) if str(s.get("id")) == str(target_id)]
            if matching_targets:
                with open(os.path.join(temp_folder_name, pack_folder, "target_conf.json"), "w") as f:
                    json.dump(matching_targets[0], f, indent=4)

        # Setup pack config override
        if pack_config_override:
            config_data = json.loads(pack_config_override) if isinstance(pack_config_override, str) else pack_config_override
            with open(os.path.join(temp_folder_name, pack_folder, "pack_conf.json"), "w") as f:
                json.dump(config_data, f, indent=4)

        # Create a thread-safe callback for log streaming
        # We use asyncio.run_coroutine_threadsafe to call the async method from the sync callback
        # Capture the running loop from the current async context
        try:
            loop = asyncio.get_running_loop()
        except RuntimeError:
            loop = asyncio.get_event_loop()

        # Bind to locals so the callback does not capture self.
        grpc_client = self.grpc_client
        current_job_id = job_id

        def log_streaming_callback(line: str, level: str) -> None:
            """Callback to stream log lines via gRPC."""
            try:
                # Schedule the async send_log_line on the event loop
                future = asyncio.run_coroutine_threadsafe(
                    grpc_client.send_log_line(current_job_id, line, level),
                    loop
                )
                # Don't wait for the result to avoid blocking
            except Exception as e:
                logger.debug(f"Log streaming error: {e}")

        # Run the pack with log streaming callback
        logger.info(f"Starting pack execution with live log streaming for job {job_id}")
        status = run_pack(os.path.join(temp_folder_name, pack_folder), log_callback=log_streaming_callback)

        # Upload results (still uses REST for file uploads)
        logs_id = await self._post_run(
            os.path.join(temp_folder_name, pack_folder),
            f"{datetime_string}_{random_seed}",
            pack_id,
            pack_version_id,
            source_id,
            source_version_id,
        )

        # Update final status (pack exit code 0 means success)
        end_time = datetime.now(timezone.utc)
        final_status = "succeeded" if status == 0 else "failed"

        await self.grpc_client.send_job_status(
            job_id,
            final_status,
            end_date=end_time,
            logs_id=logs_id,
        )
        await self.grpc_client.send_worker_status(self.worker_id, final_status)

        elapsed_time = end_time - start_time
        logger.success(f"Job {job_id} finished with status {final_status}")
        logger.info(f"End Time: {end_time.strftime('%Y-%m-%d %H:%M:%S')}")
        logger.info(f"Elapsed Time: {elapsed_time}")

    except Exception as e:
        # Report failure to the platform, then let the caller see the error.
        logger.error(f"Job {job_id} failed: {e}")
        end_time = datetime.now(timezone.utc)
        await self.grpc_client.send_job_status(job_id, "failed", error_message=str(e), end_date=end_time)
        await self.grpc_client.send_worker_status(self.worker_id, "failed")
        raise
async def _pull_pack(self, pack_id: int, asset: qalita_pb2.AssetUrl) -> str:
    """Download a pack archive, reusing a local cache when available.

    The asset URL is split into bucket / folder / file components that
    mirror the storage layout; those components come from a remote
    response, so every one of them is validated before being joined
    into a filesystem path. The binary download itself still goes over
    REST, not gRPC.

    Args:
        pack_id: ID of the pack being pulled (context only).
        asset: AssetUrl proto holding the asset id and its storage URL.

    Returns:
        Local filesystem path of the cached or freshly downloaded archive.

    Raises:
        ValueError: when a path component is unsafe or the fetch fails.
    """
    logger.info("------------- Pack Pull -------------")

    # Build cache path from URL: .../<bucket>/<folder...>/<file>
    import re
    url_parts = asset.url.split("/")
    file_name = url_parts[-1] if url_parts else ""
    bucket_name = url_parts[3] if len(url_parts) > 3 else ""
    s3_folder = "/".join(url_parts[4:-1]) if len(url_parts) > 4 else ""

    # Validate path components. The original code validated only
    # file_name, and the character class accepts "." so the literal
    # segments "." and ".." would pass — every segment that ends up in
    # the local path must be checked, or a crafted URL could escape
    # self._jobs_path (path traversal).
    safe_pattern = re.compile(r'^[\w\-\.]+$')

    def _is_safe(segment: str) -> bool:
        # Reject empty, dot-only, and any segment with unexpected chars.
        return bool(segment) and segment not in (".", "..") and bool(safe_pattern.match(segment))

    if not _is_safe(file_name):
        raise ValueError(f"Invalid file name: {file_name}")
    path_segments = [bucket_name] + (s3_folder.split("/") if s3_folder else [])
    for segment in path_segments:
        # bucket_name may legitimately be "" when the URL is short.
        if segment and not _is_safe(segment):
            raise ValueError(f"Invalid path segment: {segment}")

    cache_folder = os.path.join(self._jobs_path, bucket_name, s3_folder) if s3_folder else os.path.join(self._jobs_path, bucket_name)
    local_path = os.path.join(cache_folder, file_name)

    # Cache hit: reuse the previously downloaded archive.
    if os.path.exists(local_path):
        logger.info(f"Using CACHED Pack at: {local_path}")
        return local_path

    # exist_ok avoids a race when two jobs pull the same pack concurrently.
    os.makedirs(cache_folder, exist_ok=True)

    # Download via REST (binary files still use HTTP)
    agent_conf = self.config.load_worker_config()
    api_url = agent_conf['context']['local']['url']

    response = send_request(
        request=f"{api_url}/api/v1/assets/{asset.id}/fetch",
        mode="get",
    )

    if response.status_code == 200:
        with open(local_path, "wb") as f:
            f.write(response.content)
        logger.info("Pack fetched successfully")
        return local_path
    raise ValueError(f"Failed to fetch pack: {response.text}")
async def _post_run(
    self,
    run_path: str,
    name: str,
    pack_id: int,
    pack_version_id: int,
    source_id: int,
    source_version_id: int,
) -> Optional[int]:
    """Upload job artifacts (logs, metrics, recommendations, schemas).

    File uploads still go through the REST API rather than gRPC. Each
    artifact is uploaded only if its file exists in ``run_path``;
    failures are logged but never raised.

    Args:
        run_path: Folder containing the pack run outputs.
        name: Unique run name used for the logs asset.
        pack_id: Pack identifier attached to the uploads.
        pack_version_id: Pack version attached to the uploads.
        source_id: Source identifier attached to the uploads.
        source_version_id: Source version attached to the uploads.

    Returns:
        The asset ID of the uploaded logs, or None when logs were
        missing or their upload failed.
    """
    logger.info("------------- Job Post Run -------------")

    agent_conf = self.config.load_worker_config()
    api_url = agent_conf['context']['local']['url']
    registry_id = agent_conf['registries'][0]['id']
    user_id = agent_conf['user']['id']

    logs_id = None

    # Upload logs — this one goes to the assets endpoint with its own
    # parameter set, and its returned ID is propagated to the job status.
    logs_path = os.path.join(run_path, "logs.txt")
    if os.path.exists(logs_path):
        logger.info("Uploading logs...")
        response = send_request(
            request=f"{api_url}/api/v1/assets/upload",
            mode="post-multipart",
            file_path=logs_path,
            query_params={
                "registry_id": registry_id,
                "name": name,
                "version": "1.0.0",
                "bucket": "logs",
                "type": "log",
                "description": "job logs",
                "user_id": user_id,
            },
        )
        if response.status_code == 200:
            logs_id = response.json().get("id")
            logger.success("Logs pushed")
        else:
            logger.error(f"Failed to push logs: {response.text}")

    # Metrics, recommendations, and schemas all follow the exact same
    # upload shape; drive them from a table instead of repeating the
    # block three times (the three copies had drifted-risk duplication).
    shared_params = {
        "source_id": source_id,
        "source_version_id": source_version_id,
        "pack_id": pack_id,
        "pack_version_id": pack_version_id,
    }
    artifacts = [
        ("metrics.json", "metrics", "Uploading metrics...", "Metrics pushed", "Failed to push metrics"),
        ("recommendations.json", "recommendations", "Uploading recommendations...", "Recommendations pushed", "Failed to push recommendations"),
        ("schemas.json", "schemas", "Uploading schemas...", "Schemas pushed", "Failed to push schemas"),
    ]
    for file_name, endpoint, start_msg, ok_msg, fail_msg in artifacts:
        artifact_path = os.path.join(run_path, file_name)
        if not os.path.exists(artifact_path):
            continue
        logger.info(start_msg)
        response = send_request(
            request=f"{api_url}/api/v1/{endpoint}/upload",
            mode="post-multipart",
            file_path=artifact_path,
            query_params=dict(shared_params),
        )
        if response.status_code == 200:
            logger.success(ok_msg)
        else:
            logger.error(f"{fail_msg}: {response.text}")

    return logs_id
async def _shutdown(self) -> None:
    """Gracefully stop the worker: flag the loop, notify, and disconnect."""
    logger.info("Shutting down worker...")
    # Stops the routine check loop on its next iteration.
    self._running = False

    client = self.grpc_client
    if client:
        # Send offline status (best effort) before tearing down the stream.
        if self.worker_id:
            await client.send_worker_status(self.worker_id, "offline")
        await client.stop_stream()
        await client.disconnect()

    logger.info("Worker shutdown complete")
async def run_worker_grpc(config, name: str, mode: str, token: str, url: str) -> None:
    """
    Entry point for running the worker in gRPC mode.

    Authenticates first and exits the process with status 1 on failure;
    otherwise hands control to the runner's main loop.

    Args:
        config: CLI config object
        name: Worker name
        mode: Worker mode (worker/job)
        token: Authentication token
        url: Backend URL
    """
    runner = GrpcWorkerRunner(config, name, mode, token, url)

    authenticated = await runner.authenticate()
    if not authenticated:
        sys.exit(1)

    await runner.run()