ws-bom-robot-app 0.0.10__tar.gz → 0.0.11__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/PKG-INFO +2 -2
  2. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/setup.py +1 -1
  3. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/config.py +29 -0
  4. ws_bom_robot_app-0.0.11/ws_bom_robot_app/cron_manager.py +250 -0
  5. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/utils/webhooks.py +1 -0
  6. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/vector_store/integration/sitemap.py +0 -1
  7. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/main.py +12 -2
  8. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/requirements.txt +1 -1
  9. ws_bom_robot_app-0.0.11/ws_bom_robot_app/task_manager.py +311 -0
  10. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app.egg-info/PKG-INFO +2 -2
  11. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app.egg-info/requires.txt +1 -1
  12. ws_bom_robot_app-0.0.10/ws_bom_robot_app/cron_manager.py +0 -99
  13. ws_bom_robot_app-0.0.10/ws_bom_robot_app/task_manager.py +0 -151
  14. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/README.md +0 -0
  15. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/pyproject.toml +0 -0
  16. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/setup.cfg +0 -0
  17. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/__init__.py +0 -0
  18. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/auth.py +0 -0
  19. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/__init__.py +0 -0
  20. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/agent_description.py +0 -0
  21. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/agent_handler.py +0 -0
  22. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/agent_lcel.py +0 -0
  23. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/api.py +0 -0
  24. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/defaut_prompt.py +0 -0
  25. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/main.py +0 -0
  26. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/models/__init__.py +0 -0
  27. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/models/api.py +0 -0
  28. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/models/base.py +0 -0
  29. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/models/kb.py +0 -0
  30. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/settings.py +0 -0
  31. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/tools/__init__.py +0 -0
  32. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/tools/models/__init__.py +0 -0
  33. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/tools/models/main.py +0 -0
  34. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/tools/tool_builder.py +0 -0
  35. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/tools/tool_manager.py +0 -0
  36. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/tools/utils.py +0 -0
  37. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/utils/__init__.py +0 -0
  38. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/utils/agent_utils.py +0 -0
  39. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/utils/download.py +0 -0
  40. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/utils/faiss_helper.py +0 -0
  41. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/utils/kb.py +0 -0
  42. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/utils/print.py +0 -0
  43. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/vector_store/__init__.py +0 -0
  44. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/vector_store/generator.py +0 -0
  45. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/vector_store/integration/__init__.py +0 -0
  46. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/vector_store/integration/base.py +0 -0
  47. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/vector_store/integration/manager.py +0 -0
  48. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/vector_store/loader/__init__.py +0 -0
  49. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/vector_store/loader/base.py +0 -0
  50. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/llm/vector_store/loader/json_loader.py +0 -0
  51. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app/util.py +0 -0
  52. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app.egg-info/SOURCES.txt +0 -0
  53. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app.egg-info/dependency_links.txt +0 -0
  54. {ws_bom_robot_app-0.0.10 → ws_bom_robot_app-0.0.11}/ws_bom_robot_app.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: ws_bom_robot_app
- Version: 0.0.10
+ Version: 0.0.11
  Summary: A FastAPI application serving ws bom/robot/llm platform ai.
  Home-page: https://github.com/websolutespa/bom
  Author: Websolute Spa
@@ -11,7 +11,7 @@ Classifier: Operating System :: OS Independent
  Requires-Python: >=3.12
  Description-Content-Type: text/markdown
  Requires-Dist: standardwebhooks==1.0.0
- Requires-Dist: schedule==1.2.2
+ Requires-Dist: apscheduler==3.11.0
  Requires-Dist: aiofiles==24.1.0
  Requires-Dist: pydantic==2.9.2
  Requires-Dist: pydantic-settings==2.6.0
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 
  setup(
  name="ws_bom_robot_app",
- version="0.0.10",
+ version="0.0.11",
  description="A FastAPI application serving ws bom/robot/llm platform ai.",
  long_description=open("README.md", encoding='utf-8').read(),
  long_description_content_type="text/markdown",
@@ -1,3 +1,4 @@
+ from typing import Optional
  from pydantic import BaseModel, ConfigDict
  from pydantic_settings import BaseSettings
 
@@ -25,10 +26,38 @@ class Settings(BaseSettings):
  )
 
  class RuntimeOptions(BaseModel):
+ def _is_multi_process() -> bool:
+ """
+ Checks if the application is running with multiple worker processes.
+
+ This function inspects the command-line arguments to determine if the
+ application is configured to run with more than one worker process. It
+ looks for the "--workers" argument and checks if the subsequent value
+ is greater than 1.
+ Sample of command-line arguments:
+ fastapi run main.py --port 6001 --workers 4
+ uvicorn main:app --port 6001 --workers 4
+
+ Returns:
+ bool: True if the application is running with multiple worker
+ processes, False otherwise.
+ """
+ import sys, os
+ try:
+ for i, arg in enumerate(sys.argv):
+ if arg == "--workers" and i + 1 < len(sys.argv):
+ workers = int(sys.argv[i + 1])
+ if workers > 1:
+ return True
+ except (ValueError, IndexError):
+ pass
+ # Fallback: Compare process and parent process IDs
+ return False #os.getpid() != os.getppid()
  debug: bool
  loader_strategy: str
  loader_show_progress: bool
  loader_silent_errors: bool
+ is_multi_process: bool = _is_multi_process()
 
 
  def runtime_options(self) -> RuntimeOptions:
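
The new `is_multi_process` flag in config.py is derived purely from the command line. A small self-contained sketch of the same `--workers` check, runnable on its own (the sample argv values come from the docstring above):

```python
# Standalone sketch of the "--workers" detection added to config.py.
import sys

def is_multi_process(argv: list[str]) -> bool:
    # Same idea as RuntimeOptions._is_multi_process: find "--workers N" and test N > 1.
    try:
        for i, arg in enumerate(argv):
            if arg == "--workers" and i + 1 < len(argv):
                return int(argv[i + 1]) > 1
    except ValueError:
        pass
    return False

print(is_multi_process(["uvicorn", "main:app", "--port", "6001", "--workers", "4"]))  # True
print(is_multi_process(["fastapi", "run", "main.py", "--port", "6001"]))              # False
```
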
@@ -0,0 +1,250 @@
+ from apscheduler.schedulers.background import BackgroundScheduler
+ #from apscheduler.schedulers.asyncio import AsyncIOScheduler
+ from apscheduler.jobstores.memory import MemoryJobStore
+ from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
+ from apscheduler.triggers.cron import CronTrigger
+ from apscheduler.triggers.interval import IntervalTrigger
+ from apscheduler.triggers.date import DateTrigger
+ from fastapi import APIRouter
+ from datetime import datetime
+ from ws_bom_robot_app.task_manager import task_manager
+ from ws_bom_robot_app.llm.utils.kb import kb_cleanup_data_file
+ from ws_bom_robot_app.util import _log
+ from ws_bom_robot_app.config import config
+
+ class JobstoreStrategy:
+ def get_jobstore(self):
+ raise NotImplementedError("Subclasses should implement this method")
+
+ class MemoryJobstoreStrategy(JobstoreStrategy):
+ def get_jobstore(self):
+ _log.info("Using in-memory cron jobstore.")
+ return {"default": MemoryJobStore()}
+
+ class PersistentJobstoreStrategy(JobstoreStrategy):
+ def get_jobstore(self, db_url: str = "sqlite:///.data/db/jobs.sqlite"):
+ _log.info(f"Using persistent crob jobstore with database URL: {db_url}.")
27
+ return {"default": SQLAlchemyJobStore(url=db_url)}
28
+
29
+ class Job:
30
+ def __init__(self, name: str, job_func, args: list = None, kwargs: dict = None, cron_expression: str = None, interval: int = None, run_at: datetime = None):
31
+ """
32
+ Job class that supports both recurring and one-time jobs.
33
+ :param job_func: The function to execute.
34
+ :param interval: Interval in seconds for recurring jobs.
35
+ :param run_at: Specific datetime for one-time jobs.
36
+ :param tags: Tags associated with the job.
37
+ """
38
+ if not (cron_expression or interval or run_at):
39
+ raise ValueError("Either 'interval' or 'run_at' must be provided.")
40
+ self.name = name
41
+ self.job_func = job_func
42
+ self.args: list = args or []
43
+ self.kwargs: dict = kwargs or {}
44
+ self.cron_expression = cron_expression
45
+ self.interval = interval
46
+ self.run_at = run_at
47
+
48
+ def create_trigger(self):
49
+ """Create the appropriate trigger based on the job type."""
50
+ if self.cron_expression:
51
+ return CronTrigger.from_crontab(self.cron_expression)
52
+ if self.interval:
53
+ return IntervalTrigger(seconds=self.interval)
54
+ elif self.run_at:
55
+ return DateTrigger(run_date=self.run_at)
56
+
57
+ class CronManager:
58
+ _list_default = [
59
+ Job('cleanup-task',task_manager.cleanup_task, interval=5 * 60),
60
+ Job('cleanup-data',kb_cleanup_data_file, interval=180 * 60),
61
+ ]
62
+ def __get_jobstore_strategy() -> JobstoreStrategy:
63
+ if config.runtime_options().is_multi_process:
64
+ return MemoryJobstoreStrategy()
65
+ return PersistentJobstoreStrategy()
66
+ def __init__(self, strategy: JobstoreStrategy = None, enable_defaults: bool = True):
67
+ self.enable_defaults = enable_defaults
68
+ if strategy is None:
69
+ strategy = CronManager.__get_jobstore_strategy()
70
+ jobstores = strategy.get_jobstore()
71
+ self.scheduler: BackgroundScheduler = BackgroundScheduler(jobstores=jobstores)
72
+ self.__scheduler_is_running = False
73
+
74
+ def add_job(self, job: Job):
75
+ """
76
+ Adds a job to the scheduler with the specified name and job details.
77
+ Args:
78
+ name (str): The unique identifier for the job.
79
+ job (Job): An instance of the Job class containing the job details.
80
+ The job details include:
81
+ - job_func: The function to be executed.
82
+ - args: The positional arguments to pass to the job function.
83
+ - kwargs: The keyword arguments to pass to the job function.
84
+ - trigger: The trigger that determines when the job should be executed.
85
+ The job will replace any existing job with the same name.
86
+ Sample usage:
87
+ recurring_job = Job(name="sample-recurring-job",job_func=example_job, interval=5, tags=tags, args=args, kwargs=kwargs)
88
+ cron_manager.add_job(recurring_job)
89
+ fire_once_job = Job(name="sample-fire-once-job",job_func=example_job, run_at=datetime.now(), tags=tags, args=args, kwargs=kwargs)
90
+ cron_manager.add_job(fire_once_job)
91
+ """
92
+ existing_job = self.scheduler.get_job(job.name)
93
+ if existing_job:
94
+ _log.info(f"Job with name '{job.name}' already exists. Skip creation.")
95
+ else:
96
+ trigger = job.create_trigger()
97
+ self.scheduler.add_job(
98
+ func=job.job_func,
99
+ args=job.args,
100
+ kwargs=job.kwargs,
101
+ trigger=trigger,
102
+ id=job.name,
103
+ name=job.name,
104
+ replace_existing=False
105
+ )
106
+
107
+ def start(self):
108
+ if not self.__scheduler_is_running:
109
+ self.__scheduler_is_running = True
110
+ self.scheduler.start()
111
+ if self.enable_defaults and CronManager._list_default:
112
+ for job in CronManager._list_default:
113
+ existing_job = self.scheduler.get_job(job.name)
114
+ if existing_job is None:
115
+ self.add_job(job)
116
+
117
+ def get_job(self, job_id: str):
118
+ return self.scheduler.get_job(job_id)
119
+
120
+ def get_jobs(self):
121
+ return self.scheduler.get_jobs()
122
+
123
+ def execute_job(self, job_id: str):
124
+ job = self.scheduler.get_job(job_id)
125
+ if job:
126
+ job.func()
127
+ else:
128
+ raise ValueError(f"Job with id '{job_id}' not found.")
129
+
130
+ def pause_job(self, job_id: str):
131
+ self.scheduler.pause_job(job_id)
132
+
133
+ def resume_job(self, job_id: str):
134
+ self.scheduler.resume_job(job_id)
135
+
136
+ def remove_job(self, job_id: str):
137
+ self.scheduler.remove_job(job_id)
138
+
139
+ def execute_recurring_jobs(self):
140
+ for job in self.scheduler.get_jobs():
141
+ if job.interval:
142
+ job.job_func()
143
+
144
+ def pause_recurring_jobs(self):
145
+ for job in self.scheduler.get_jobs():
146
+ if job.interval:
147
+ self.pause_job(job.id)
148
+
149
+ def resume_recurring_jobs(self):
150
+ for job in self.scheduler.get_jobs():
151
+ if job.interval:
152
+ self.resume_job(job.id)
153
+
154
+ def remove_recurring_jobs(self):
155
+ for job in self.scheduler.get_jobs():
156
+ if job.interval:
157
+ self.remove_job(job.id)
158
+
159
+ def clear(self):
160
+ self.__scheduler_is_running = False
161
+ self.scheduler.remove_all_jobs()
162
+
163
+ def shutdown(self):
164
+ self.scheduler.shutdown()
165
+
166
+ cron_manager = CronManager()
167
+
168
+ # FastAPI Routes
169
+ router = APIRouter(prefix="/api/cron", tags=["cron"])
170
+
171
+ @router.get("/list")
172
+ def _list():
173
+ def __format(job):
174
+ return {
175
+ "id": job.id,
176
+ "name": job.name,
177
+ "func": job.func_ref,
178
+ "pending": job.pending,
179
+ "trigger": str(job.trigger),
180
+ "next_run_time": job.next_run_time
181
+ }
182
+ return [__format(job) for job in cron_manager.get_jobs()]
183
+
184
+ @router.get("/default-jobs")
185
+ def _default_jobs():
186
+ def __format(job):
187
+ existing_job = cron_manager.scheduler.get_job(job.name)
188
+ return {
189
+ "name": job.name,
190
+ "status": "exists" if existing_job else "not added"
191
+ }
192
+ return [__format(job) for job in CronManager._list_default]
193
+
194
+ @router.post("/execute-job/{job_id}")
195
+ def _execute_job(job_id: str):
196
+ try:
197
+ cron_manager.execute_job(job_id)
198
+ return {"status": f"Job {job_id} executed"}
199
+ except ValueError as e:
200
+ return {"error": str(e)}
201
+
202
+ @router.post("/pause-job/{job_id}")
203
+ def _pause_job(job_id: str):
204
+ cron_manager.pause_job(job_id)
205
+ return {"status": f"Job {job_id} paused"}
206
+
207
+ @router.post("/resume-job/{job_id}")
208
+ def _resume_job(job_id: str):
209
+ cron_manager.resume_job(job_id)
210
+ return {"status": f"Job {job_id} resumed"}
211
+
212
+ @router.delete("/remove-job/{job_id}")
213
+ def _remove_job(job_id: str):
214
+ cron_manager.remove_job(job_id)
215
+ return {"status": f"Job {job_id} removed"}
216
+
217
+ @router.post("/execute-recurring")
218
+ def _execute_recurring():
219
+ cron_manager.execute_recurring_jobs()
220
+ return {"status": "All recurring jobs executed"}
221
+
222
+ @router.post("/pause-recurring")
223
+ def _pause_recurring():
224
+ cron_manager.pause_recurring_jobs()
225
+ return {"status": "All recurring jobs paused"}
226
+
227
+ @router.post("/resume-recurring")
228
+ def _resume_recurring():
229
+ cron_manager.resume_recurring_jobs()
230
+ return {"status": "All recurring jobs resumed"}
231
+
232
+ @router.delete("/remove-recurring")
233
+ def _remove_recurring():
234
+ cron_manager.remove_recurring_jobs()
235
+ return {"status": "All recurring jobs removed"}
236
+
237
+ @router.get("/start")
238
+ def _start():
239
+ cron_manager.start()
240
+ return {"status": "started"}
241
+
242
+ @router.delete("/stop")
243
+ def _stop():
244
+ cron_manager.clear()
245
+ return {"status": "stopped"}
246
+
247
+ @router.get("/shutdown")
248
+ def _shutdown():
249
+ cron_manager.shutdown()
250
+ return {"status": "shutdown"}
@@ -51,3 +51,4 @@ class WebhookNotifier:
  async with httpx.AsyncClient(headers=_headers,verify=False,timeout=timeout) as client:
  response = await client.post(endpoint, data=_data)
  response.raise_for_status()
+
@@ -5,7 +5,6 @@ from langchain_community.document_loaders.sitemap import SitemapLoader
  from langchain_community.document_transformers import MarkdownifyTransformer as markdownify
  from langchain_core.documents import Document
  from bs4 import BeautifulSoup, Tag
- import nest_asyncio, os
 
 
  class Sitemap(IntegrationStrategy):
@@ -1,5 +1,6 @@
  import datetime
  import platform
+ from fastapi.responses import FileResponse
  import uvicorn, os, sys
  from fastapi import FastAPI, Depends
  from fastapi.openapi.docs import get_swagger_ui_html
@@ -22,7 +23,10 @@ app.include_router(cron,dependencies=[Depends(authenticate)])
 
  @app.get("/")
  async def root():
- return {}
+ return health()
+ @app.get("/favicon.ico")
+ async def favicon():
+ return FileResponse("./favicon.ico")
 
  @app.get("/docs", include_in_schema=False)
  async def get_swagger_documentation(authenticate: bool = Depends(authenticate)):
@@ -31,7 +35,11 @@ async def get_swagger_documentation(authenticate: bool = Depends(authenticate)):
  async def openapi(authenticate: bool = Depends(authenticate)):
  return get_openapi(title=app.title, version=app.version, routes=app.routes)
 
- @app.get("/diag",tags=["diag"])
+ @app.get("/api/health",tags=["diag"])
+ def health():
+ return {"status": "ok"}
+
+ @app.get("/api/diag",tags=["diag"])
  def diag(authenticate: bool = Depends(authenticate)):
  import pkg_resources
  from ws_bom_robot_app.llm.vector_store.loader.base import Loader as wsll
@@ -57,6 +65,7 @@ def diag(authenticate: bool = Depends(authenticate)):
  "args": {k: arg for k, arg in enumerate(sys.argv)}
  },
  "os": {
+ "ppid": os.getppid(),
  "pid": os.getpid(),
  "cwd": os.getcwd(),
  "ws_bom_robot_app": pkg_resources.get_distribution("ws_bom_robot_app").version,
@@ -64,6 +73,7 @@ def diag(authenticate: bool = Depends(authenticate)):
  },
  },
  "config":config,
+ "runtime":config.runtime_options(),
  "extension": {
  "loader": ({item[0]: item[1].loader.__name__ if item[1] else None} for item in sorted(wsll._list.items(), key=lambda x: x[0]) if item[1]),
  "integration":({item[0]: type(item[1]).__name__} for item in wsim._list.items()),
@@ -1,6 +1,6 @@
  #app
  standardwebhooks==1.0.0
- schedule==1.2.2
+ apscheduler==3.11.0
  aiofiles==24.1.0
  pydantic==2.9.2
  pydantic-settings==2.6.0
@@ -0,0 +1,311 @@
+ import inspect
+ import asyncio, os
+ from datetime import datetime, timedelta
+ from enum import Enum
+ from typing import Annotated, Literal, TypeVar, Optional, Dict, Union, Any
+ from pydantic import BaseModel, ConfigDict, Field, computed_field
+ from uuid import uuid4
+ from fastapi import APIRouter, HTTPException
+ from ws_bom_robot_app.config import config
+ from ws_bom_robot_app.llm.models.base import IdentifiableEntity
+ from ws_bom_robot_app.llm.utils.webhooks import WebhookNotifier
+ from ws_bom_robot_app.util import _log
+ from sqlalchemy import create_engine, Column, String, JSON, DateTime, Enum
+ from sqlalchemy.ext.declarative import declarative_base
+ from sqlalchemy.orm import sessionmaker
+ from abc import ABC, abstractmethod
+
+ T = TypeVar('T')
+
+ #region models
+ class TaskStatistics(BaseModel):
+ class TaskStatisticExecutionTime(BaseModel):
+ min: str
+ max: str
+ avg: str
+ retention_days: float = config.robot_task_retention_days
+ total: int
+ pending: int
+ completed: int
+ failure: int
+ exec_time: TaskStatisticExecutionTime
+
+ class TaskHeader(BaseModel):
+ x_ws_bom_msg_type: Optional[str] = None
+ x_ws_bom_webhooks: Optional[str] = None
+ model_config = ConfigDict(
+ extra='allow'
+ )
+
+ class TaskMetaData(BaseModel):
+ start_time: str
+ end_time: Optional[str] = None
+ @computed_field
+ @property
+ def elapsed_time(self) -> Union[str, None]:
+ return str((datetime.now() if not self.end_time else datetime.fromisoformat(self.end_time)) - datetime.fromisoformat(self.start_time))
+ source: Optional[str] = None
+ pid: Optional[int] = None
+
+ class TaskStatus(IdentifiableEntity):
+ type: Optional[str] = None
+ status: Literal["pending", "completed", "failure"]
+ result: Optional[T] = None
+ metadata: TaskMetaData = None
+ error: Optional[str] = None
+ model_config = ConfigDict(
+ arbitrary_types_allowed=True
+ )
+
+ class TaskEntry(IdentifiableEntity):
+ task: Annotated[asyncio.Task, Field(default=None, validate_default=False)] = None
+ headers: TaskHeader | None = None
+ status: Union[TaskStatus, None] = None
+ def _get_coroutine_name(self, coroutine: asyncio.coroutines) -> str:
+ if inspect.iscoroutine(coroutine):
+ return coroutine.cr_code.co_name
+ return "<unknown>"
+ def __init__(self, **data):
+ #separate task from data to handle asyncio.Task
+ task = data.pop('task',None)
+ super().__init__(**data)
+ #bypass pydantic validation
+ object.__setattr__(self, 'task', task)
+ #init status
+ if not self.status:
+ self.status = TaskStatus(
+ id=self.id,
+ type=self.headers.x_ws_bom_msg_type if self.headers and self.headers.x_ws_bom_msg_type else self._get_coroutine_name(task._coro) if task else None,
+ status="pending",
+ metadata=TaskMetaData(
+ start_time=str(datetime.now().isoformat()),
+ source=self._get_coroutine_name(task._coro) if task else None,
+ pid=os.getpid())
+ )
+ model_config = ConfigDict(
+ arbitrary_types_allowed=True,
+ validate_assignment=True
+ )
+
+ #endregion
+
+ #region interface
+ class TaskManagerStrategy(ABC):
+ @abstractmethod
+ def create_task(self, coroutine, headers: TaskHeader | None = None) -> IdentifiableEntity:
+ pass
+
+ @abstractmethod
+ def update_task_status(self, task: TaskEntry) -> None:
+ """Hook for additional behavior, such as persisting the task status."""
+ pass
+
+ @abstractmethod
+ def get_task(self, id: str) -> TaskStatus | None:
+ pass
+
+ @abstractmethod
+ def get_tasks(self) -> list[TaskStatus] | None:
+ pass
+
+ @abstractmethod
+ def remove_task(self, id: str) -> None:
+ pass
+
+ @abstractmethod
+ def cleanup_task(self) -> None:
+ pass
+
+ @abstractmethod
+ def stats(self) -> TaskStatistics:
+ pass
+
+ def task_cleanup_rule(self, task: TaskEntry) -> bool:
+ return task.status.status in {"completed", "failure"} and datetime.fromisoformat(task.status.metadata.end_time) < datetime.now() - timedelta(days=config.robot_task_retention_days)
+
+ def task_done_callback(self, task_entry: TaskEntry, headers: TaskHeader | None = None) -> callable:
+ def callback(task: asyncio.Task):
+ try:
+ result = task_entry.task.result()
+ task_entry.status.status = "completed"
+ task_entry.status.result = result
+ except Exception as e:
+ task_entry.status.status = "failure"
+ task_entry.status.error = str(e)
+ finally:
+ task_entry.status.metadata.end_time = str(datetime.now().isoformat())
+ #strategy-specific behavior
+ self.update_task_status(task_entry)
+ #notify webhooks
+ if headers and headers.x_ws_bom_webhooks:
+ asyncio.create_task(
+ WebhookNotifier().notify_webhook(task_entry.status,headers.x_ws_bom_webhooks)
+ )
+ return callback
+ def create_task_entry(self, coroutine: asyncio.coroutines, headers: TaskHeader | None = None) -> TaskEntry:
+ task = TaskEntry(
+ id=str(uuid4()),
+ task=asyncio.create_task(coroutine),
+ headers=headers)
+ task.task.add_done_callback(self.task_done_callback(task, headers))
+ return task
+
+ def stats(self) -> TaskStatistics:
+ def __string_to_timedelta(value: str) -> timedelta:
+ if "." in value:
+ time_format = "%H:%M:%S.%f"
+ else:
+ time_format = "%H:%M:%S"
+ time_obj = datetime.strptime(value, time_format)
+ return timedelta(hours=time_obj.hour, minutes=time_obj.minute, seconds=time_obj.second, microseconds=time_obj.microsecond)
+ def __timedelta_to_string(td):
+ hours, remainder = divmod(td.total_seconds(), 3600)
+ minutes, seconds = divmod(remainder, 60)
+ return f"{int(hours):02d}:{int(minutes):02d}:{int(seconds):02d}.{td.microseconds}"
+ _all = self.get_tasks()
+ _not_pending = _all and [task for task in _all if task.status != "pending"]
+ _total_not_pending = len(_not_pending)
+ elapsed_times = [__string_to_timedelta(task.metadata.elapsed_time) for task in _not_pending]
+ _avg_exec_time = sum(elapsed_times, timedelta()) / _total_not_pending if _total_not_pending > 0 else timedelta()
+ _min_exec_time = min(elapsed_times) if _total_not_pending > 0 else timedelta()
+ _max_exec_time = max(elapsed_times) if _total_not_pending > 0 else timedelta()
+ return TaskStatistics(
+ total= _all and len(_all) or 0,
+ pending=_all and len([task for task in _all if task.status == "pending"]) or 0,
+ completed=_all and len([task for task in _all if task.status == "completed"]) or 0,
+ failure=_all and len([task for task in _all if task.status == "failure"]) or 0,
+ exec_time=TaskStatistics.TaskStatisticExecutionTime(
+ min=__timedelta_to_string(_min_exec_time),
+ max=__timedelta_to_string(_max_exec_time),
+ avg=__timedelta_to_string(_avg_exec_time)
+ )
+ )
+
+ #endregion
+
+ #memory implementation
+ class MemoryTaskManagerStrategy(TaskManagerStrategy):
+ def __init__(self):
+ self.tasks: Dict[str, TaskEntry] = {}
+
+ def create_task(self, coroutine: asyncio.coroutines, headers: TaskHeader | None = None) -> IdentifiableEntity:
+ task = self.create_task_entry(coroutine, headers)
+ self.tasks[task.id] = task
+ return IdentifiableEntity(id=task.id)
+
+ def update_task_status(self, task: TaskEntry) -> None:
+ """no-op for memory strategy."""
+ pass
+
+ def get_task(self, id: str) -> TaskStatus | None:
+ if _task := self.tasks.get(id):
+ return _task
+ return None
+
+ def get_tasks(self) -> list[TaskStatus] | None:
+ return [task.status for task in self.tasks.values()]
+
+ def remove_task(self, id: str) -> None:
+ if id in self.tasks:
+ del self.tasks[id]
+
+ def cleanup_task(self):
+ keys = [task.id for task in self.tasks.values() if self.task_cleanup_rule(task)]
+ for key in keys:
+ self.remove_task(key)
+
+ #endregion
+
+ #db implementation
+ Base = declarative_base()
+ class TaskEntryModel(Base):
+ __tablename__ = "entry"
+ id = Column(String, primary_key=True)
+ status = Column(JSON)
+ model_config = ConfigDict(
+ arbitrary_types_allowed=True
+ )
+ class DatabaseTaskManagerStrategy(TaskManagerStrategy):
+ def __init__(self, db_url: str = "sqlite:///.data/db/tasks.sqlite"):
+ self.engine = create_engine(db_url)
+ self.Session = sessionmaker(bind=self.engine)
+ Base.metadata.create_all(self.engine)
+
+ def create_task(self, coroutine: asyncio.coroutines, headers: TaskHeader | None = None) -> IdentifiableEntity:
+ task = self.create_task_entry(coroutine, headers)
+ with self.Session() as session:
+ session.add(TaskEntryModel(id=task.id, status=task.status.model_dump()))
+ session.commit()
+ return IdentifiableEntity(id=task.id)
+
+ def update_task_status(self, task: TaskEntry) -> None:
+ with self.Session() as session:
+ session.query(TaskEntryModel).filter_by(id=task.id).update(
+ {"status": task.status.model_dump()}
+ )
+ session.commit()
+
+ def get_task(self, id: str) -> TaskStatus | None:
+ with self.Session() as session:
+ task = session.query(TaskEntryModel).filter_by(id=id).first()
+ if task:
+ return TaskEntry(**task.__dict__)
+ return None
+
+ def get_tasks(self) -> list[TaskStatus] | None:
+ with self.Session() as session:
+ tasks = session.query(TaskEntryModel).all()
+ if tasks:
+ return [TaskEntry(**task.__dict__).status for task in tasks]
+ return None
+
+ def remove_task(self, id: str) -> None:
+ with self.Session() as session:
+ session.query(TaskEntryModel).filter_by(id=id).delete()
+ session.commit()
+
+ def cleanup_task(self):
+ with self.Session() as session:
+ for task in session.query(TaskEntryModel).all():
+ _task = TaskEntry(**task.__dict__)
+ if self.task_cleanup_rule(_task):
+ session.query(TaskEntryModel).filter_by(id=task.id).delete()
+ session.commit()
+ #endregion
+
+ # global instance
+ def __get_taskmanager_strategy() -> TaskManagerStrategy:
+ if config.runtime_options().is_multi_process:
+ return DatabaseTaskManagerStrategy()
+ return MemoryTaskManagerStrategy()
+ task_manager = __get_taskmanager_strategy()
+ _log.info(f"Task manager strategy: {task_manager.__class__.__name__}")
+
+ #region api
+ router = APIRouter(prefix="/api/task", tags=["task"])
+
+ @router.get("/status/{id}")
+ async def _status_task(id: str) -> TaskStatus:
+ task = task_manager.get_task(id)
+ if not task:
+ raise HTTPException(status_code=404, detail="Task not found")
+ return task.status
+
+ @router.get("/status")
+ async def _status_task_list() -> list[TaskStatus]:
+ return task_manager.get_tasks()
+
+ @router.delete("/status/{id}")
+ async def _remove_task(id: str):
+ task_manager.remove_task(id)
+ return {"success":"ok"}
+
+ @router.delete("/cleanup")
+ async def _remove_task_list():
+ task_manager.cleanup_task()
+ return {"success":"ok"}
+
+ @router.get("/stats")
+ async def _stats() -> TaskStatistics:
+ return task_manager.stats()
+ #endregion
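
A minimal sketch of submitting work through the rewritten task manager above, assuming the package and its config load normally; `slow_job` is illustrative and not part of the package:

```python
# Minimal sketch: submit a coroutine and let the done-callback record its result.
import asyncio
from ws_bom_robot_app.task_manager import task_manager

async def slow_job() -> str:
    # illustrative workload, not part of the package
    await asyncio.sleep(1)
    return "done"

async def main():
    entry = task_manager.create_task(slow_job())   # returns an IdentifiableEntity carrying the task id
    print(entry.id)                                # status is exposed via GET /api/task/status/{id}
    await asyncio.sleep(1.5)                       # give the done-callback time to mark it completed

asyncio.run(main())
```

When a `TaskHeader` with `x_ws_bom_webhooks` is supplied, the done-callback additionally posts the final `TaskStatus` to that URL via `WebhookNotifier`.
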
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: ws_bom_robot_app
- Version: 0.0.10
+ Version: 0.0.11
  Summary: A FastAPI application serving ws bom/robot/llm platform ai.
  Home-page: https://github.com/websolutespa/bom
  Author: Websolute Spa
@@ -11,7 +11,7 @@ Classifier: Operating System :: OS Independent
  Requires-Python: >=3.12
  Description-Content-Type: text/markdown
  Requires-Dist: standardwebhooks==1.0.0
- Requires-Dist: schedule==1.2.2
+ Requires-Dist: apscheduler==3.11.0
  Requires-Dist: aiofiles==24.1.0
  Requires-Dist: pydantic==2.9.2
  Requires-Dist: pydantic-settings==2.6.0
@@ -1,5 +1,5 @@
  standardwebhooks==1.0.0
- schedule==1.2.2
+ apscheduler==3.11.0
  aiofiles==24.1.0
  pydantic==2.9.2
  pydantic-settings==2.6.0
@@ -1,99 +0,0 @@
- from math import floor
- import schedule, time, threading
- import asyncio
- from fastapi import APIRouter
- from ws_bom_robot_app.task_manager import task_manager
- from ws_bom_robot_app.llm.utils.kb import kb_cleanup_data_file
- from ws_bom_robot_app.util import _log
- import random
-
- class RecurringJob():
- @staticmethod
- def __add_jitter(interval: int) -> int:
- #delay proportional with interval, min 10, max 100 sec
- jitter: int = max(10, min(100, floor(interval * 0.075)))
- return interval + random.randint(-jitter, jitter)
- def __init__(self, interval: int, job_func, tags: list[str]):
- #add a little jitter by default for better concurrency in case of multiple instances
- self.interval = RecurringJob.__add_jitter(interval)
- self.job_func = job_func
- self.is_coroutine = asyncio.iscoroutinefunction(job_func)
- self.job_func = job_func
- self.tags = tags
- def run(self):
- if self.is_coroutine:
- schedule.every(self.interval).seconds.do(self._run_async_job).tag(*self.tags)
- else:
- schedule.every(self.interval).seconds.do(self.job_func).tag(*self.tags)
- async def _run_async_job(self):
- await self.job_func()
-
- class CronManager:
-
- _list: dict[str, RecurringJob] = {
- 'cleanup-task': RecurringJob(5*60, task_manager.cleanup_task, tags=["cleanup","cleanup-task"]),
- 'cleanup-data': RecurringJob(180*60, kb_cleanup_data_file, tags=["cleanup","cleanup-data"]),
- }
-
- def __init__(self):
- self.jobs: dict[str, RecurringJob] = CronManager._list
- self.__scheduler_is_running = False
- def add_job(self, name:str, job: RecurringJob):
- job = {name: job}
- self.jobs.append(job)
- return job
- def run_pending(self):
- return schedule.run_pending()
- def run_all(self):
- return schedule.run_all()
- def clear(self):
- self.__scheduler_is_running = False
- return schedule.clear()
- def get_jobs(self):
- return schedule.get_jobs()
- def start(self):
- def _target():
- while self.__scheduler_is_running:
- time.sleep(1)
- self.run_pending()
- time.sleep(59)
- _log.info(f"__scheduler_is_running={self.__scheduler_is_running}")
- #clear all jobs
- self.clear()
- #prepare jobs
- for job in self.jobs.values():
- job.run()
- #start scheduler
- if not self.__scheduler_is_running:
- self.__scheduler_is_running = True
- t = threading.Thread(target=_target,args=(),daemon=True)
- t.start()
-
- cron_manager = CronManager()
-
- router = APIRouter(prefix="/api/cron", tags=["cron"])
- @router.get("/list")
- def _list():
- def __format(job: schedule.Job) -> dict:
- return {
- "job": {'module':job.job_func.__module__,'name':job.job_func.__name__},
- "at": job.at_time,
- "interval": job.interval,
- "last_run": job.last_run,
- "next_run": job.next_run,
- "tags": job.tags}
- _list = cron_manager.get_jobs()
- return [__format(_) for _ in _list]
-
- @router.get("/start")
- def _start():
- cron_manager.start()
- @router.delete("/stop")
- def _stop():
- return {"_": cron_manager.clear()}
- @router.get("/run/pending")
- def _run_pending():
- return {"_": cron_manager.run_pending()}
- @router.get("/run/all")
- def _run_all():
- return {"_": cron_manager.run_all()}
@@ -1,151 +0,0 @@
- import inspect
- import asyncio, os
- from datetime import datetime, timedelta
- from enum import Enum
- from typing import Annotated, TypeVar, Optional, Dict, Union, Any
- from pydantic import BaseModel, ConfigDict, Field, computed_field
- from uuid import uuid4
- from fastapi import APIRouter, HTTPException
- from ws_bom_robot_app.config import config
- from ws_bom_robot_app.llm.models.base import IdentifiableEntity
- from ws_bom_robot_app.llm.utils.webhooks import WebhookNotifier
-
- T = TypeVar('T')
-
- class TaskHeader(BaseModel):
- x_ws_bom_msg_type: Optional[str] = None
- x_ws_bom_webhooks: Optional[str] = None
- model_config = ConfigDict(
- extra='allow'
- )
-
- class TaskMetaData(BaseModel):
- start_time: datetime
- end_time: Optional[datetime] = None
- @computed_field
- @property
- def elapsed_time(self) -> Union[timedelta, None]:
- return (datetime.now() if not self.end_time else self.end_time) - self.start_time
- source: Optional[str] = None
- pid: Optional[int] = None
- model_config = ConfigDict(
- json_encoders={
- datetime: lambda v: v.isoformat(),
- timedelta: lambda v: str(v)
- }
- )
-
- class TaskStatus(IdentifiableEntity):
- class TaskStatusEnum(str, Enum):
- pending = "pending"
- completed = "completed"
- failure = "failure"
- type: Optional[str] = None
- status: TaskStatusEnum
- result: Optional[T] = None
- metadata: TaskMetaData = None
- error: Optional[str] = None
-
- class TaskEntry(IdentifiableEntity):
- task: Annotated[asyncio.Task, Field(default=None, validate_default=False)] = None
- headers: TaskHeader | None = None
- status: Union[TaskStatus, None] = None
- def _get_coroutine_name(self, coroutine: asyncio.coroutines) -> str:
- if inspect.iscoroutine(coroutine):
- return coroutine.cr_code.co_name
- return "<unknown>"
- def __init__(self, **data):
- #separate task from data to handle asyncio.Task
- task = data.pop('task',None)
- super().__init__(**data)
- #bypass pydantic validation
- object.__setattr__(self, 'task', task)
- #init status
- if not self.status:
- self.status = TaskStatus(
- id=self.id,
- type=self.headers.x_ws_bom_msg_type if self.headers and self.headers.x_ws_bom_msg_type else self._get_coroutine_name(task._coro) if task else None,
- status=TaskStatus.TaskStatusEnum.pending,
- metadata=TaskMetaData(
- start_time=datetime.now(),
- source=self._get_coroutine_name(task._coro) if task else None,
- pid=os.getpid())
- )
- model_config = ConfigDict(
- arbitrary_types_allowed=True,
- validate_assignment=True
- )
-
- class TaskManager:
- def __init__(self):
- self.tasks: Dict[str, TaskEntry] = {}
-
- def _task_done_callback(self, task_id: str, headers: TaskHeader | None = None):
- def callback(task: asyncio.Task):
- if _task := self.tasks.get(task_id):
- try:
- result = _task.task.result()
- _task.status.status = TaskStatus.TaskStatusEnum.completed
- _task.status.result = result
- except Exception as e:
- _task.status.status = TaskStatus.TaskStatusEnum.failure
- _task.status.error = str(e)
- finally:
- _task.status.metadata.end_time = datetime.now()
- if headers and headers.x_ws_bom_webhooks:
- asyncio.create_task(
- WebhookNotifier().notify_webhook(_task.status,headers.x_ws_bom_webhooks)
- )
- return callback
-
- def create_task(self, coroutine: asyncio.coroutines, headers: TaskHeader | None = None) -> IdentifiableEntity:
- _task = asyncio.create_task(coroutine)
- task = TaskEntry(
- id=str(uuid4()),
- task=_task,
- headers=headers)
- task.task.add_done_callback(self._task_done_callback(task.id, headers))
- self.tasks[task.id] = task
- return IdentifiableEntity(id=task.id)
-
- def get_task(self, task_id: str) -> TaskEntry | None:
- if _task := self.tasks.get(task_id):
- return _task
- return None
-
- def remove_task(self, task_id: str) -> None:
- if task_id in self.tasks:
- del self.tasks[task_id]
-
- def cleanup_task(self):
- for task_id in [task_id for task_id, task in self.tasks.items()
- if task.status.status in {TaskStatus.TaskStatusEnum.completed, TaskStatus.TaskStatusEnum.failure}
- and task.status.metadata.end_time < datetime.now() - timedelta(days=config.robot_task_retention_days)]:
- self.remove_task(task_id)
-
- # global instance
- task_manager = TaskManager()
-
- router = APIRouter(prefix="/api/task", tags=["task"])
- @router.get("/status/{task_id}", response_model=TaskStatus)
- async def _status_task(task_id: str) -> TaskStatus:
- task = task_manager.get_task(task_id)
- if not task:
- raise HTTPException(status_code=404, detail="Task not found")
- return task.status
- @router.get("/status")
- async def _status_task_list():
- _status_task_list = []
- for task in task_manager.tasks.values():
- _task = task_manager.get_task(task.id)
- _status_task_list.append(_task.status)
- return _status_task_list
- @router.delete("/status/{task_id}")
- async def _remove_task(task_id: str):
- task_manager.remove_task(task_id)
- return {"success":"ok"}
- @router.delete("/cleanup")
- async def _remove_task_list():
- task_manager.cleanup_task()
- return {"success":"ok"}
-