ws-bom-robot-app 0.0.83__py3-none-any.whl → 0.0.84__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

ws_bom_robot_app/config.py
@@ -23,6 +23,8 @@ class Settings(BaseSettings):
    robot_task_strategy: str = 'memory' # memory / db
    robot_task_mp_enable: bool = True
    robot_task_mp_method: str = 'spawn' # spawn / fork
+   robot_task_mp_max_retries: int = 1
+   robot_task_mp_retry_delay: float = 60 # seconds
    robot_cron_strategy: str = 'memory' # memory / db
    robot_cms_host: str = ''
    robot_cms_auth: str = ''
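
The two new settings drive the subprocess retry policy introduced in task_manager.py below. A minimal sketch of how they surface, assuming the usual pydantic BaseSettings behavior (the env var name is an assumption; pydantic matches field names case-insensitively):

import os
from pydantic_settings import BaseSettings

class Settings(BaseSettings):
    robot_task_mp_max_retries: int = 1    # retries per failed subprocess task
    robot_task_mp_retry_delay: float = 60 # seconds between retries

os.environ["ROBOT_TASK_MP_MAX_RETRIES"] = "3"  # hypothetical override
print(Settings().robot_task_mp_max_retries)    # -> 3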

ws_bom_robot_app/llm/utils/chunker.py
@@ -1,12 +1,17 @@
  from langchain_core.documents import Document
  from langchain_text_splitters import CharacterTextSplitter
+ import logging

  class DocumentChunker:
+   _MAX_CHUNK_SIZE = 10_000
    @staticmethod
    def chunk(documents: list[Document]) -> list[Document]:
-     text_splitter = CharacterTextSplitter(chunk_size=10_000, chunk_overlap=500)
+     text_splitter = CharacterTextSplitter(chunk_size=DocumentChunker._MAX_CHUNK_SIZE, chunk_overlap=int(DocumentChunker._MAX_CHUNK_SIZE * 0.02))
      chunked_documents = []
      for doc in documents:
+       if len(doc.page_content) <= DocumentChunker._MAX_CHUNK_SIZE:
+         chunked_documents.append(doc)
+         continue
        chunks = text_splitter.split_text(doc.page_content)
        for chunk in chunks:
          chunked_documents.append(
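
For orientation, a small usage sketch of the revised behavior, assuming the DocumentChunker shown above: documents at or under _MAX_CHUNK_SIZE characters now pass through unsplit, and the overlap is derived from the chunk size (int(10_000 * 0.02) = 200 characters, down from the hard-coded 500):

from langchain_core.documents import Document

short = Document(page_content="x" * 9_000)
long = Document(page_content="\n\n".join(["y" * 500] * 100))  # ~50k chars

out = DocumentChunker.chunk([short, long])
# `short` is appended as-is, without a splitter call;
# `long` is split into chunks of at most 10_000 characters.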

ws_bom_robot_app/llm/vector_store/db/base.py
@@ -50,8 +50,8 @@ class VectorDBStrategy(ABC):
    Asynchronously invokes multiple retrievers in parallel, then merges
    their results while removing duplicates.
    """
+   MAX_TOKENS_PER_BATCH = 300_000 * 0.8
    def __init__(self):
-     self.max_tokens_per_batch = 300_000 * 0.8 # conservative limit below 300k openai limit: https://platform.openai.com/docs/api-reference/embeddings/create
      try:
        self.encoding = tiktoken.get_encoding("cl100k_base") # text-embedding-3-small, text-embedding-3-large: https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken
      except Exception:
@@ -78,7 +78,7 @@ class VectorDBStrategy(ABC):
      for doc in documents:
        doc_tokens = self._count_tokens(doc.page_content)
        # check if adding this document exceeds the limit
-       if current_token_count + doc_tokens > self.max_tokens_per_batch:
+       if current_token_count + doc_tokens > VectorDBStrategy.MAX_TOKENS_PER_BATCH:
          # start new batch if current batch is not empty
          if current_batch:
            batches.append(current_batch)
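
Promoting the limit from an instance attribute to a class attribute lets other modules (the docling loader below) reference it without instantiating a strategy. The guarded loop implements a greedy batching rule; a self-contained sketch of the same logic, with count_tokens standing in for self._count_tokens:

MAX_TOKENS_PER_BATCH = 300_000 * 0.8  # 240k, 80% of OpenAI's 300k embeddings cap

def batch_by_tokens(docs: list[str], count_tokens) -> list[list[str]]:
    batches, current, current_tokens = [], [], 0
    for doc in docs:
        tokens = count_tokens(doc)
        if current_tokens + tokens > MAX_TOKENS_PER_BATCH and current:
            batches.append(current)  # flush and start a new batch
            current, current_tokens = [], 0
        current.append(doc)
        current_tokens += tokens
    if current:
        batches.append(current)
    return batches

assert len(batch_by_tokens(["doc"] * 5, lambda _: 100_000)) == 3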

ws_bom_robot_app/llm/vector_store/loader/docling.py
@@ -6,6 +6,7 @@ from langchain_core.runnables import run_in_executor
  from docling.document_converter import DocumentConverter, InputFormat, PdfFormatOption, ImageFormatOption
  from docling.datamodel.pipeline_options import PdfPipelineOptions, TableStructureOptions, TableFormerMode, TesseractCliOcrOptions
  from langchain_community.document_loaders import UnstructuredFileLoader
+ from ws_bom_robot_app.llm.vector_store.db.base import VectorDBStrategy

  class DoclingLoader(BaseLoader):
    def __init__(self, file_path: str | list[str], **kwargs: Any) -> None:
@@ -37,28 +38,37 @@ class DoclingLoader(BaseLoader):
        if doc is done:
          break
        yield doc # type: ignore[misc]
+   def _fallback_loader(self, source: str, error: Exception = None) -> Iterator[Document]:
+     if 'fallback' in self._kwargs:
+       if issubclass(self._kwargs['fallback'], (BaseLoader, UnstructuredFileLoader)):
+         logging.info(f"Using fallback loader {self._kwargs['fallback']} for {source}")
+         try:
+           loader: Union[BaseLoader, UnstructuredFileLoader] = self._kwargs['fallback'](
+             source,
+             **{k: v for k, v in self._kwargs.items() if k != 'fallback'}
+           )
+           yield from loader.lazy_load()
+         except Exception as e:
+           logging.warning(f"Failed to load document from {source}: {e} | {traceback.format_exc()}")
+       else:
+         logging.warning(f"Invalid fallback loader {self._kwargs['fallback']}[{type(self._kwargs['fallback'])}] for {source}")
+     else:
+       logging.warning(f"Failed to load document from {source}: {error}")
    def lazy_load(self) -> Iterator[Document]:
      for source in self._file_paths:
        try:
-         _result = self._converter.convert(
-           os.path.abspath(source),
-           raises_on_error=True)
-         doc = _result.document
-         text = doc.export_to_markdown(image_placeholder="")
-         yield Document(page_content=text, metadata={"source": source})
-       except Exception as e:
-         if 'fallback' in self._kwargs:
-           if issubclass(self._kwargs['fallback'], (BaseLoader, UnstructuredFileLoader)):
-             logging.info(f"Using fallback loader {self._kwargs['fallback']} for {source}")
-             try:
-               loader: Union[BaseLoader, UnstructuredFileLoader] = self._kwargs['fallback'](
-                 source,
-                 **{k: v for k, v in self._kwargs.items() if k != 'fallback'}
-               )
-               yield from loader.lazy_load()
-             except Exception as e:
-               logging.warning(f"Failed to load document from {source}: {e} | {traceback.format_exc()}")
-           else:
-             logging.warning(f"Invalid fallback loader {self._kwargs['fallback']}[{type(self._kwargs['fallback'])}] for {source}")
+         #manage only small file with header, preventing header stripping and improper chunking
+         if (source.endswith('.csv') or source.endswith('.xlsx')) \
+           and 'fallback' in self._kwargs \
+           and os.path.getsize(source) > (VectorDBStrategy.MAX_TOKENS_PER_BATCH // 4): #rough token estimate
+           yield from self._fallback_loader(source)
          else:
-           logging.warning(f"Failed to load document from {source}: {e} | {traceback.format_exc()}")
+           _result = self._converter.convert(
+             os.path.abspath(source),
+             raises_on_error=True)
+           doc = _result.document
+           text = doc.export_to_markdown(image_placeholder="")
+           yield Document(page_content=text, metadata={"source": source})
+       except Exception as e:
+         yield from self._fallback_loader(source,e)
+
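
Hypothetical usage of the refactored loader (the file path is illustrative): `fallback` is a loader class, instantiated with the source path plus the remaining kwargs. Per the inline comment, large .csv/.xlsx sources are now routed to the fallback up front, whenever their byte size exceeds a quarter of MAX_TOKENS_PER_BATCH (a rough token estimate), to avoid header stripping and improper chunking; any other conversion failure still falls back on exception.

from langchain_community.document_loaders import UnstructuredFileLoader

loader = DoclingLoader("data/catalog.xlsx", fallback=UnstructuredFileLoader)
for doc in loader.lazy_load():
    print(doc.metadata["source"], len(doc.page_content))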

ws_bom_robot_app/task_manager.py
@@ -69,6 +69,7 @@ class TaskStatus(IdentifiableEntity):
    result: Optional[T] = None
    metadata: TaskMetaData = None
    error: Optional[str] = None
+   retry: int = 0
    model_config = ConfigDict(
      arbitrary_types_allowed=True
    )
@@ -118,7 +119,8 @@ class TaskEntry(IdentifiableEntity):
  class TaskStatistics(BaseModel):
    class TaskStatisticExecutionInfo(BaseModel):
      retention_days: float = config.robot_task_retention_days
-     max_concurrent: int
+     max_parallelism: int
+     slot_available: int
      pid: int = os.getpid()
      running: list[TaskStatus]
      slowest: list
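
The rename makes the statistics payload report both the configured ceiling (max_parallelism) and the live free-slot count (slot_available, sampled from the semaphore). A hypothetical instance built against the model above, with illustrative values:

info = TaskStatistics.TaskStatisticExecutionInfo(
    max_parallelism=4,  # configured per-process ceiling
    slot_available=3,   # semaphore slots free at sampling time
    running=[],
    slowest=[],
)
print(info.model_dump(exclude={"running", "slowest"}))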
@@ -137,9 +139,12 @@ class TaskStatistics(BaseModel):

  #region interface
  class TaskManagerStrategy(ABC):
-   def __init__(self, max_concurrent_tasks: int = max(1,floor(config.robot_task_max_total_parallelism / config.runtime_options().number_of_workers))):
-     self.max_concurrent_tasks = max_concurrent_tasks
-     self.semaphore = asyncio.Semaphore(self.max_concurrent_tasks)
+   def __init__(self, max_concurrent_tasks: Optional[int] = None):
+     if max_concurrent_tasks is None:
+       workers = config.runtime_options().number_of_workers
+       max_concurrent_tasks = max(1, floor(config.robot_task_max_total_parallelism / max(1, workers)))
+     self.max_parallelism = max_concurrent_tasks
+     self.semaphore = asyncio.Semaphore(max_concurrent_tasks)
      self.running_tasks = dict[str, TaskEntry]()
      self.loop = asyncio.get_event_loop()

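The default is unchanged in spirit (total parallelism divided across worker processes), but it is now computed lazily inside __init__ rather than in the signature, and the divisor is guarded, so a zero worker count can no longer raise ZeroDivisionError when the class is defined. A worked example of the rule (the worker counts are illustrative):

from math import floor

def default_slots(total_parallelism: int, workers: int) -> int:
    return max(1, floor(total_parallelism / max(1, workers)))

assert default_slots(8, 3) == 2  # 8 total slots split over 3 workers
assert default_slots(8, 0) == 8  # workers=0 no longer divides by zero
assert default_slots(1, 4) == 1  # never below one slot per process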
@@ -201,14 +206,15 @@ class TaskManagerStrategy(ABC):
      #remove from running tasks
      if task_entry.id in self.running_tasks:
        del self.running_tasks[task_entry.id]
-     #notify webhooks
-     if task_entry.headers and task_entry.headers.x_ws_bom_webhooks:
-       try:
-         asyncio.create_task(
-           WebhookNotifier().notify_webhook(task_entry.status, task_entry.headers.x_ws_bom_webhooks)
-         )
-       except Exception as e:
-         _log.error(f"Failed to schedule webhook notification for task {task_entry.id}: {e}")
+     #notify webhooks: a task has completed or failed, if failed with retry policy the task remains in pending state, and will not be notified until complete/failure
+     if task_entry.status.status in ["completed","failure"]:
+       if task_entry.headers and task_entry.headers.x_ws_bom_webhooks:
+         try:
+           asyncio.create_task(
+             WebhookNotifier().notify_webhook(task_entry.status, task_entry.headers.x_ws_bom_webhooks)
+           )
+         except Exception as e:
+           _log.error(f"Failed to schedule webhook notification for task {task_entry.id}: {e}")

    def task_done_callback(self, task_entry: TaskEntry) -> Callable:
      def callback(task: asyncio.Task, context: Any | None = None):
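
With retries in play, "done" no longer implies "terminal": a retried task re-enters the pending state, so notification is now gated on the two terminal states. The guard in isolation:

TERMINAL_STATES = {"completed", "failure"}

def should_notify(status: str) -> bool:
    return status in TERMINAL_STATES  # a pending retry stays silent

assert should_notify("completed") and not should_notify("pending")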
@@ -260,6 +266,55 @@
      async with self.semaphore:
        await self._execute_task(task_entry)

+   async def _monitor_subprocess(self, task_entry: TaskEntry, proc, conn):
+     try:
+       # Wait for the worker to send bytes (this blocks, so run via executor wrapper)
+       data_bytes = await _recv_from_connection_async(conn)
+       # unpickle bytes to get payload
+       try:
+         payload = _pickler.loads(data_bytes)
+       except Exception:
+         # fallback if pickler fails
+         payload = ("err", {"error": "Failed to unpickle subprocess result"})
+       if isinstance(payload, tuple) and payload[0] == "ok":
+         result = payload[1]
+         # write results into task_entry
+         self._update_task_by_event(task_entry, "completed", result)
+       else:
+         # error
+         err_info = payload[1]["error"] if isinstance(payload, tuple) else str(payload)
+         self._update_task_by_event(task_entry, "failure", err_info) # give up, no retry
+     except Exception:
+       # maybe subprocess is no more alive / killed due to memory pressure
+       if task_entry.status.retry < config.robot_task_mp_max_retries:
+         task_entry.status.retry += 1
+         _log.warning(f"Task {task_entry.id} failure, retrying {task_entry.status.retry}...")
+         async def delayed_retry():
+           _delay = config.robot_task_mp_retry_delay # help to backpressure when overloaded
+           if self.semaphore._value > 0: # free semaphore slots available
+             _delay = 5 # small/no delay if retry can run immediately
+           await asyncio.sleep(_delay) # delay in seconds
+           await self._run_task_with_semaphore(task_entry)
+         asyncio.create_task(delayed_retry())
+         # semaphore is released, so new task can be executed
+         return
+       else:
+         self._update_task_by_event(task_entry, "failure", "subprocess monitor error: failed to receive data from connection")
+     finally:
+       # ensure process termination / cleanup
+       try:
+         conn.close()
+       except Exception:
+         pass
+       try:
+         if proc.is_alive():
+           proc.terminate()
+           proc.join(timeout=1)
+       except Exception:
+         pass
+       # callback
+       self._update_task_by_event(task_entry, "callback", None)
+
    async def _execute_task(self, task_entry: TaskEntry):
      """
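
The retry path schedules a delayed re-run instead of failing immediately: the configured robot_task_mp_retry_delay acts as backpressure when the process is saturated, shortened to 5 seconds when a semaphore slot is already free. A stripped-down sketch of that scheduling decision (names are illustrative; the read of the private semaphore._value mirrors the diff above):

import asyncio

async def schedule_retry(run_again, semaphore: asyncio.Semaphore,
                         retry_delay: float = 60.0) -> None:
    # free slot -> retry almost immediately; saturated -> back off
    delay = 5 if semaphore._value > 0 else retry_delay
    await asyncio.sleep(delay)
    await run_again()

Note that the early return in the except branch still executes the finally block (standard Python semantics), so the dead child is reaped and the callback event fires even when a retry is queued; the webhook gate above keeps the still-pending task from notifying.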
  Execute the task. Try to run it inside a subprocess (if serializable).
@@ -269,56 +324,19 @@ class TaskManagerStrategy(ABC):
269
324
  """
270
325
  self.running_tasks[task_entry.id]=task_entry
271
326
  task_entry.status.metadata.start_at = str(datetime.now().isoformat())
272
- # Try to spawn subprocess (non-blocking)
327
+ # try to spawn subprocess (non-blocking)
273
328
  can_use_subprocess = task_entry.status.metadata.extra.get("can_use_subprocess", False)
274
329
  if config.robot_task_mp_enable and can_use_subprocess:
275
330
  proc, conn, used_subprocess = _start_subprocess_for_coroutine(task_entry.coroutine)
276
331
  if used_subprocess and proc is not None and conn is not None:
277
- # We will monitor the subprocess asynchronously
332
+ # monitor subprocess asynchronously
278
333
  task_entry.status.status = "pending"
279
334
  task_entry.status.metadata.pid_child = proc.pid
280
335
  _log.info(f"Task {task_entry.id} started in subprocess (pid={proc.pid})")
281
-
282
- async def _monitor_subprocess():
283
- try:
284
- # Wait for the worker to send bytes (this blocks, so run via executor wrapper)
285
- data_bytes = await _recv_from_connection_async(conn)
286
- # unpickle bytes to get payload
287
- try:
288
- payload = _pickler.loads(data_bytes)
289
- except Exception:
290
- # fallback if pickler fails
291
- payload = ("err", {"error": "Failed to unpickle subprocess result"})
292
- if isinstance(payload, tuple) and payload[0] == "ok":
293
- result = payload[1]
294
- # write results into task_entry
295
- self._update_task_by_event(task_entry, "completed", result)
296
- else:
297
- # error
298
- err_info = payload[1]["error"] if isinstance(payload, tuple) else str(payload)
299
- self._update_task_by_event(task_entry, "failure", err_info)
300
- except Exception:
301
- # maybe subprocess is no more alive / killed
302
- self._update_task_by_event(task_entry, "failure", "subprocess monitor error: failed to receive data from connection")
303
- finally:
304
- # Ensure process termination / cleanup
305
- try:
306
- conn.close()
307
- except Exception:
308
- pass
309
- try:
310
- if proc.is_alive():
311
- proc.terminate()
312
- proc.join(timeout=1)
313
- except Exception:
314
- pass
315
- # callback
316
- self._update_task_by_event(task_entry, "callback", None)
317
-
318
- # schedule monitor task and return
319
- asyncio.create_task(_monitor_subprocess())
320
- return
321
- # fallback
336
+ # await monitor process, then return: important to acquire semaphore
337
+ await self._monitor_subprocess(task_entry, proc, conn)
338
+ return
339
+ # default fallback (in-process)
322
340
  try:
323
341
  async def _callable_to_coroutine(func: Any) -> Any:
324
342
  if callable(func) and not inspect.iscoroutine(func):
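
The behavioral change here is subtle: the monitor used to be fired off with asyncio.create_task(), which returned immediately and let the caller's semaphore slot go while the child process was still running. Awaiting the extracted method keeps the slot held for the subprocess's full lifetime, so max_parallelism now genuinely bounds concurrent work. In miniature (illustrative names):

import asyncio

async def run_with_slot(semaphore: asyncio.Semaphore, monitor) -> None:
    async with semaphore:
        # old: asyncio.create_task(monitor()) -> slot freed immediately
        await monitor()  # new: slot stays taken until the child finishes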
@@ -373,7 +391,8 @@ class TaskManagerStrategy(ABC):
        ),
        exec_info=TaskStatistics.TaskStatisticExecutionInfo(
          retention_days=config.robot_task_retention_days,
-         max_concurrent=self.max_concurrent_tasks,
+         max_parallelism=self.max_parallelism,
+         slot_available=self.semaphore._value,
          running=[task.status for task in self.running_task()],
          slowest=_slowest
        )
@@ -383,8 +402,8 @@ class TaskManagerStrategy(ABC):

  #region memory implementation
  class MemoryTaskManagerStrategy(TaskManagerStrategy):
-   def __init__(self):
-     super().__init__()
+   def __init__(self, max_concurrent_tasks: Optional[int] = None):
+     super().__init__(max_concurrent_tasks)
      self.tasks: Dict[str, TaskEntry] = {}

    def create_task(self, coroutine: Any, headers: TaskHeader | None = None) -> IdentifiableEntity:
@@ -425,8 +444,8 @@ class TaskEntryModel(Base):
      arbitrary_types_allowed=True
    )
  class DatabaseTaskManagerStrategy(TaskManagerStrategy):
-   def __init__(self, db_url: str = f"sqlite:///{config.robot_data_folder}/db/tasks.sqlite"):
-     super().__init__()
+   def __init__(self, db_url: str = f"sqlite:///{config.robot_data_folder}/db/tasks.sqlite", max_concurrent_tasks: Optional[int] = None):
+     super().__init__(max_concurrent_tasks)
      self.engine = create_engine(db_url)
      self.Session = sessionmaker(bind=self.engine)
      Base.metadata.create_all(self.engine)
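
Both concrete strategies now forward an optional slot count, so tests or embedders can pin concurrency instead of relying on the worker-derived default. Illustrative construction (the db_url value is hypothetical):

mem = MemoryTaskManagerStrategy(max_concurrent_tasks=4)
db = DatabaseTaskManagerStrategy(
    db_url="sqlite:///./data/db/tasks.sqlite",  # hypothetical path
    max_concurrent_tasks=2,
)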

ws_bom_robot_app-0.0.84.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ws_bom_robot_app
- Version: 0.0.83
+ Version: 0.0.84
  Summary: A FastAPI application serving ws bom/robot/llm platform ai.
  Home-page: https://github.com/websolutespa/bom
  Author: Websolute Spa

ws_bom_robot_app-0.0.84.dist-info/RECORD
@@ -1,10 +1,10 @@
  ws_bom_robot_app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ws_bom_robot_app/auth.py,sha256=84nIbmJsMrNs0sxIQGEHbjsjc2P6ZrZZGSn8dkiL6is,895
- ws_bom_robot_app/config.py,sha256=sNoJyxxZM7xTmqMdojjyjrlmV_7Pctl8ZKRNju_VtoE,5214
+ ws_bom_robot_app/config.py,sha256=CASD6fCCBp9YODBdlJWGN0vw7__hwrbq8WjrrLVRbTg,5307
  ws_bom_robot_app/cron_manager.py,sha256=HU_0CGqR7YUyI9MJ5CMTLp-s-A6HgdZCIFeV7SoGKvo,9361
  ws_bom_robot_app/main.py,sha256=LZH4z9BmVlxpFJf8TrIo_JxH1YhpeZRrrOYgKky7S7w,6712
  ws_bom_robot_app/subprocess_runner.py,sha256=sq06r3sdHc_1FdPcpN9Frf2-xKu7dM76FK9x8J7I0kc,4100
- ws_bom_robot_app/task_manager.py,sha256=lxCrF07kwmrPgam5dNZDzqRPrPC4m9GxJWfvNeyhenI,22748
+ ws_bom_robot_app/task_manager.py,sha256=jaxRnMCVMlxQzHyhNrt6duH4ov1zblf3-Sv8cwmesyI,24039
  ws_bom_robot_app/util.py,sha256=RjVD6B9sHje788Lndqq5DHy6TJM0KLs9qx3JYt81Wyk,4834
  ws_bom_robot_app/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ws_bom_robot_app/llm/agent_context.py,sha256=uatHJ8wcRly6h0S762BgfzDMpmcwCHwNzwo37aWjeE0,1305
@@ -32,7 +32,7 @@ ws_bom_robot_app/llm/tools/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeR
  ws_bom_robot_app/llm/tools/models/main.py,sha256=1hICqHs-KS2heenkH7b2eH0N2GrPaaNGBrn64cl_A40,827
  ws_bom_robot_app/llm/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ws_bom_robot_app/llm/utils/agent.py,sha256=_CY5Dji3UeAIi2iuU7ttz4fml1q8aCFgVWOv970x8Fw,1411
- ws_bom_robot_app/llm/utils/chunker.py,sha256=N7570xBYlObneg-fsvDhPAJ-Pv8C8OaYZOBK6q7LmMI,607
+ ws_bom_robot_app/llm/utils/chunker.py,sha256=zVXjRMloc3KbNEqiDcycYzy4N0Ey1g8XYeq6ftyvkyg,857
  ws_bom_robot_app/llm/utils/cleanup.py,sha256=0V-4CNucOIDlw7wu8zd-0_M4Xu5AVkPokCyuoys49qM,3059
  ws_bom_robot_app/llm/utils/cms.py,sha256=XhrLQyHQ2JUOInDCCf_uvR4Jiud0YvH2FwwiiuCnnsg,6352
  ws_bom_robot_app/llm/utils/download.py,sha256=yBrw9n6lbz1QlWhApIlEwuQ8kMa3u11OFXx84X_NRvA,7130
@@ -42,7 +42,7 @@ ws_bom_robot_app/llm/utils/webhooks.py,sha256=LAAZqyN6VhV13wu4X-X85TwdDgAV2rNvIw
  ws_bom_robot_app/llm/vector_store/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ws_bom_robot_app/llm/vector_store/generator.py,sha256=9_xdtCKJhmt1OP0GXDjvFERXMP7ozLZT92KuYEBDgC0,6314
  ws_bom_robot_app/llm/vector_store/db/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ws_bom_robot_app/llm/vector_store/db/base.py,sha256=t0Z1VCcg604evEzJENGNqYFBi_AZLTEUzmxA5wgoE_A,8419
+ ws_bom_robot_app/llm/vector_store/db/base.py,sha256=iQBY-1O8uJ1_dRzmCDmhWAMBYtpims0u5RelQMvOVLo,8310
  ws_bom_robot_app/llm/vector_store/db/chroma.py,sha256=2riMQvwe2T99X_NtO9yO9lpZ0zj2Nb06l9Hb1lWJ00E,4509
  ws_bom_robot_app/llm/vector_store/db/faiss.py,sha256=Y2LpMsU0Ce2RCaGM1n69BxMpXWXpBoj1T5aAAJpX2qE,3860
  ws_bom_robot_app/llm/vector_store/db/manager.py,sha256=5rqBvc0QKmHFUgVHqBAr1Y4FZRl-w-ylGMjgXZywrdA,533
@@ -66,9 +66,9 @@ ws_bom_robot_app/llm/vector_store/integration/slack.py,sha256=hiE1kkg7868mbP2wVW
  ws_bom_robot_app/llm/vector_store/integration/thron.py,sha256=PylagYLzhSY_wMu_hR4PzAwSm4Jp6zi2aymuF0XN4Hw,4271
  ws_bom_robot_app/llm/vector_store/loader/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ws_bom_robot_app/llm/vector_store/loader/base.py,sha256=b__KxrYql5j6I6vqO43gnEocN43Ic3o1OpTfV2n8pko,5656
- ws_bom_robot_app/llm/vector_store/loader/docling.py,sha256=TPLtqr4oG4tHZwv9db5KRb3vx4ZPo9Ez6mGpgC4dVkg,3477
+ ws_bom_robot_app/llm/vector_store/loader/docling.py,sha256=IOv1A0HSIWiHWQFzI4fdApfxrKgXOqwmC3mPXlKplqQ,4012
  ws_bom_robot_app/llm/vector_store/loader/json_loader.py,sha256=qo9ejRZyKv_k6jnGgXnu1W5uqsMMtgqK_uvPpZQ0p74,833
- ws_bom_robot_app-0.0.83.dist-info/METADATA,sha256=bBfHufKR-l-Aap7_nPrHAjP7jzEBD1VfM30XwzdMOak,9971
- ws_bom_robot_app-0.0.83.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- ws_bom_robot_app-0.0.83.dist-info/top_level.txt,sha256=Yl0akyHVbynsBX_N7wx3H3ZTkcMLjYyLJs5zBMDAKcM,17
- ws_bom_robot_app-0.0.83.dist-info/RECORD,,
+ ws_bom_robot_app-0.0.84.dist-info/METADATA,sha256=voKK2OL7tpgb8zEIHK8zUBFSJQE8-5aQYFFAWousmd0,9971
+ ws_bom_robot_app-0.0.84.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ ws_bom_robot_app-0.0.84.dist-info/top_level.txt,sha256=Yl0akyHVbynsBX_N7wx3H3ZTkcMLjYyLJs5zBMDAKcM,17
+ ws_bom_robot_app-0.0.84.dist-info/RECORD,,