PraisonAI 2.2.24__cp313-cp313-manylinux_2_39_x86_64.whl → 2.2.25__cp313-cp313-manylinux_2_39_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of PraisonAI might be problematic.
- praisonai/deploy.py +1 -1
- praisonai/ui/chat.py +1 -1
- praisonai/ui/code.py +1 -1
- praisonai/ui/realtime.py +20 -8
- praisonai/ui/realtimeclient/__init__.py +115 -27
- {praisonai-2.2.24.dist-info → praisonai-2.2.25.dist-info}/METADATA +2 -2
- {praisonai-2.2.24.dist-info → praisonai-2.2.25.dist-info}/RECORD +9 -10
- praisonai/ui/realtimeclient/realtimedocs.txt +0 -1484
- {praisonai-2.2.24.dist-info → praisonai-2.2.25.dist-info}/WHEEL +0 -0
- {praisonai-2.2.24.dist-info → praisonai-2.2.25.dist-info}/entry_points.txt +0 -0
praisonai/deploy.py
CHANGED
@@ -56,7 +56,7 @@ class CloudDeployer:
  file.write("FROM python:3.11-slim\n")
  file.write("WORKDIR /app\n")
  file.write("COPY . .\n")
- file.write("RUN pip install flask praisonai==2.2.24 gunicorn markdown\n")
+ file.write("RUN pip install flask praisonai==2.2.25 gunicorn markdown\n")
  file.write("EXPOSE 8080\n")
  file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')
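For context, the hunk above only bumps the praisonai pin inside the Dockerfile that CloudDeployer generates. A sketch of the emitted file, reconstructed from the write calls shown (assuming the surrounding lines are unchanged):

    # Hypothetical reconstruction of the Dockerfile text CloudDeployer writes after this change.
    EXPECTED_DOCKERFILE = (
        "FROM python:3.11-slim\n"
        "WORKDIR /app\n"
        "COPY . .\n"
        "RUN pip install flask praisonai==2.2.25 gunicorn markdown\n"
        "EXPOSE 8080\n"
        'CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n'
    )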
praisonai/ui/chat.py
CHANGED
praisonai/ui/code.py
CHANGED
@@ -12,7 +12,7 @@ from dotenv import load_dotenv
  from PIL import Image
  from context import ContextGatherer
  from tavily import TavilyClient
- from crawl4ai import
+ from crawl4ai import AsyncWebCrawler

  # Local application/library imports
  import chainlit as cl
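The import now pulls in crawl4ai's asynchronous crawler. A minimal usage sketch of AsyncWebCrawler, based on crawl4ai's documented async-context pattern rather than on code.py itself (the URL is a placeholder):

    import asyncio
    from crawl4ai import AsyncWebCrawler

    async def crawl_once(url):
        # Open the crawler as an async context manager and fetch a single page.
        async with AsyncWebCrawler() as crawler:
            result = await crawler.arun(url=url)
            return result.markdown  # markdown rendering of the fetched page

    # asyncio.run(crawl_once("https://example.com"))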
praisonai/ui/realtime.py
CHANGED
@@ -229,7 +229,7 @@ except Exception as e:
  @cl.on_chat_start
  async def start():
  initialize_db()
- model_name =
+ model_name = os.getenv("MODEL_NAME", "gpt-4o-mini-realtime-preview-2024-12-17")
  cl.user_session.set("model_name", model_name)
  cl.user_session.set("message_history", []) # Initialize message history
  logger.debug(f"Model name: {model_name}")
@@ -238,7 +238,7 @@ async def start():
  # TextInput(
  # id="model_name",
  # label="Enter the Model Name",
- # placeholder="e.g., gpt-4o-mini-realtime-preview",
+ # placeholder="e.g., gpt-4o-mini-realtime-preview-2024-12-17",
  # initial=model_name
  # )
  # ]
@@ -382,7 +382,8 @@ async def on_audio_start():
  openai_realtime = cl.user_session.get("openai_realtime")

  if not openai_realtime.is_connected():
-
+ model_name = cl.user_session.get("model_name", "gpt-4o-mini-realtime-preview-2024-12-17")
+ await openai_realtime.connect(model_name)

  logger.info("Connected to OpenAI realtime")
  return True
@@ -394,11 +395,22 @@ async def on_audio_start():

  @cl.on_audio_chunk
  async def on_audio_chunk(chunk: cl.InputAudioChunk):
- openai_realtime: RealtimeClient = cl.user_session.get("openai_realtime")
+ openai_realtime: RealtimeClient = cl.user_session.get("openai_realtime")
+
+ if not openai_realtime:
+ logger.debug("No realtime client available")
+ return
+
  if openai_realtime.is_connected():
-
+ try:
+ success = await openai_realtime.append_input_audio(chunk.data)
+ if not success:
+ logger.debug("Failed to append audio data - connection may be lost")
+ except Exception as e:
+ logger.debug(f"Error processing audio chunk: {e}")
+ # Optionally try to reconnect here if needed
  else:
- logger.
+ logger.debug("RealtimeClient is not connected - audio chunk ignored")

  @cl.on_audio_end
  @cl.on_chat_end
@@ -423,14 +435,14 @@ def auth_callback(username: str, password: str):
  @cl.on_chat_resume
  async def on_chat_resume(thread: ThreadDict):
  logger.info(f"Resuming chat: {thread['id']}")
- model_name =
+ model_name = os.getenv("MODEL_NAME") or "gpt-4o-mini-realtime-preview-2024-12-17"
  logger.debug(f"Model name: {model_name}")
  settings = cl.ChatSettings(
  [
  TextInput(
  id="model_name",
  label="Enter the Model Name",
- placeholder="e.g., gpt-4o-mini-realtime-preview",
+ placeholder="e.g., gpt-4o-mini-realtime-preview-2024-12-17",
  initial=model_name
  )
  ]
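Taken together, the realtime.py hunks replace a hard-coded model with a configurable one: start() and on_chat_resume() read MODEL_NAME from the environment (falling back to the dated preview model), store it in the Chainlit user session, and on_audio_start() passes the stored value to connect(). A sketch of that resolution order (hypothetical helper, not part of the module):

    import os

    DEFAULT_REALTIME_MODEL = "gpt-4o-mini-realtime-preview-2024-12-17"  # default used throughout these hunks

    def resolve_model_name(session_value=None):
        # on_audio_start() prefers the model already stored in the user session;
        # start() and on_chat_resume() seed that session value from MODEL_NAME.
        return session_value or os.getenv("MODEL_NAME") or DEFAULT_REALTIME_MODEL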
praisonai/ui/realtimeclient/__init__.py
CHANGED
@@ -6,6 +6,7 @@ import inspect
  import numpy as np
  import json
  import websockets
+ from websockets.exceptions import ConnectionClosed
  from datetime import datetime
  from collections import defaultdict
  import base64
@@ -97,29 +98,66 @@ class RealtimeAPI(RealtimeEventHandler):
  self.ws = None

  def is_connected(self):
-
+ if self.ws is None:
+ return False
+ # Some websockets versions don't have a closed attribute
+ try:
+ return not self.ws.closed
+ except AttributeError:
+ # Fallback: check if websocket is still alive by checking state
+ try:
+ return hasattr(self.ws, 'state') and self.ws.state.name == 'OPEN'
+ except:
+ # Last fallback: assume connected if ws exists
+ return True

  def log(self, *args):
  logger.debug(f"[Websocket/{datetime.utcnow().isoformat()}]", *args)

- async def connect(self, model='gpt-4o-realtime-preview-2024-
+ async def connect(self, model='gpt-4o-mini-realtime-preview-2024-12-17'):
  if self.is_connected():
  raise Exception("Already connected")
-
+
+ headers = {
  'Authorization': f'Bearer {self.api_key}',
  'OpenAI-Beta': 'realtime=v1'
- }
+ }
+
+ # Try different header parameter names for compatibility
+ try:
+ self.ws = await websockets.connect(f"{self.url}?model={model}", additional_headers=headers)
+ except TypeError:
+ # Fallback to older websockets versions
+ try:
+ self.ws = await websockets.connect(f"{self.url}?model={model}", extra_headers=headers)
+ except TypeError:
+ # Last fallback - some versions might not support headers parameter
+ raise Exception("Websockets library version incompatible. Please update websockets to version 11.0 or higher.")
+
  self.log(f"Connected to {self.url}")
  asyncio.create_task(self._receive_messages())

  async def _receive_messages(self):
-
-
-
-
-
-
-
+ try:
+ async for message in self.ws:
+ event = json.loads(message)
+ if event['type'] == "error":
+ logger.error(f"OpenAI Realtime API Error: {event}")
+ self.log("received:", event)
+ self.dispatch(f"server.{event['type']}", event)
+ self.dispatch("server.*", event)
+ except ConnectionClosed as e:
+ logger.info(f"WebSocket connection closed normally: {e}")
+ # Mark connection as closed
+ self.ws = None
+ # Dispatch disconnection event
+ self.dispatch("disconnected", {"reason": str(e)})
+ except Exception as e:
+ logger.warning(f"WebSocket receive loop ended: {e}")
+ # Mark connection as closed
+ self.ws = None
+ # Dispatch disconnection event
+ self.dispatch("disconnected", {"reason": str(e)})

  async def send(self, event_name, data=None):
  if not self.is_connected():
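The nested try/except in connect() exists because the keyword for extra request headers differs across websockets releases: the newer asyncio implementation accepts additional_headers, while older releases expose the same option as extra_headers. A standalone sketch of that shim (an assumption-labeled example, not taken from the package; it assumes OPENAI_API_KEY is set and uses the documented wss://api.openai.com/v1/realtime endpoint):

    import os
    import websockets

    async def open_realtime_socket(model: str):
        url = f"wss://api.openai.com/v1/realtime?model={model}"
        headers = {
            "Authorization": f"Bearer {os.environ['OPENAI_API_KEY']}",
            "OpenAI-Beta": "realtime=v1",
        }
        try:
            # newer websockets releases take additional_headers
            return await websockets.connect(url, additional_headers=headers)
        except TypeError:
            # older releases spell the same option extra_headers
            return await websockets.connect(url, extra_headers=headers)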
@@ -135,16 +173,33 @@
  self.dispatch(f"client.{event_name}", event)
  self.dispatch("client.*", event)
  self.log("sent:", event)
-
+
+ try:
+ await self.ws.send(json.dumps(event))
+ except ConnectionClosed as e:
+ logger.info(f"WebSocket connection closed during send: {e}")
+ # Mark connection as closed if send fails
+ self.ws = None
+ raise Exception(f"WebSocket connection lost: {e}")
+ except Exception as e:
+ logger.error(f"Failed to send WebSocket message: {e}")
+ # Mark connection as closed if send fails
+ self.ws = None
+ raise Exception(f"WebSocket connection lost: {e}")

  def _generate_id(self, prefix):
  return f"{prefix}{int(datetime.utcnow().timestamp() * 1000)}"

  async def disconnect(self):
  if self.ws:
-
-
-
+ try:
+ await self.ws.close()
+ logger.info(f"Disconnected from {self.url}")
+ except Exception as e:
+ logger.warning(f"Error during WebSocket close: {e}")
+ finally:
+ self.ws = None
+ self.log(f"WebSocket connection cleaned up")

  class RealtimeConversation:
  default_frequency = config.features.audio.sample_rate
@@ -341,8 +396,7 @@
  return None, None
  array_buffer = base64_to_array_buffer(delta)
  append_values = array_buffer.tobytes()
-
- # item['formatted']['audio'] = merge_int16_arrays(item['formatted']['audio'], append_values)
+ item['formatted']['audio'].append(append_values)
  return item, {'audio': append_values}

  def _process_text_delta(self, event):
@@ -381,7 +435,6 @@ class RealtimeClient(RealtimeEventHandler):
  "tools": [],
  "tool_choice": "auto",
  "temperature": 0.8,
- "max_response_output_tokens": 4096,
  }
  self.session_config = {}
  self.transcription_models = [{"model": "whisper-1"}]
@@ -431,8 +484,13 @@
  self.dispatch("realtime.event", realtime_event)

  def _on_session_created(self, event):
-
-
+ try:
+ session_id = event.get('session', {}).get('id', 'unknown')
+ model = event.get('session', {}).get('model', 'unknown')
+ logger.info(f"OpenAI Realtime session created - ID: {session_id}, Model: {model}")
+ except Exception as e:
+ logger.warning(f"Error processing session created event: {e}")
+ logger.debug(f"Session event details: {event}")
  self.session_created = True

  def _process_event(self, event, *args):
@@ -497,10 +555,15 @@
  self._add_api_event_handlers()
  return True

- async def connect(self):
+ async def connect(self, model=None):
  if self.is_connected():
  raise Exception("Already connected, use .disconnect() first")
-
+
+ # Use provided model or default
+ if model is None:
+ model = 'gpt-4o-mini-realtime-preview-2024-12-17'
+
+ await self.realtime.connect(model)
  await self.update_session()
  return True
@@ -516,6 +579,7 @@
  self.conversation.clear()
  if self.realtime.is_connected():
  await self.realtime.disconnect()
+ logger.info("RealtimeClient disconnected")

  def get_turn_detection_type(self):
  return self.session_config.get("turn_detection", {}).get("type")
@@ -579,11 +643,22 @@
  return True

  async def append_input_audio(self, array_buffer):
+ if not self.is_connected():
+ logger.warning("Cannot append audio: RealtimeClient is not connected")
+ return False
+
  if len(array_buffer) > 0:
-
-
-
-
+ try:
+ await self.realtime.send("input_audio_buffer.append", {
+ "audio": array_buffer_to_base64(np.array(array_buffer)),
+ })
+ self.input_audio_buffer.extend(array_buffer)
+ except Exception as e:
+ logger.error(f"Failed to append input audio: {e}")
+ # Connection might be lost, mark as disconnected
+ if "connection" in str(e).lower() or "closed" in str(e).lower():
+ logger.warning("WebSocket connection appears to be lost. Audio input will be queued until reconnection.")
+ return False
  return True

  async def create_response(self):
@@ -650,4 +725,17 @@
  logger.debug(f"Unhandled item type:\n{json.dumps(item, indent=2)}")

  # Additional debug logging
- logger.debug(f"Processed Chainlit message for item: {item.get('id', 'unknown')}")
+ logger.debug(f"Processed Chainlit message for item: {item.get('id', 'unknown')}")
+
+ async def ensure_connected(self):
+ """Check connection health and attempt reconnection if needed"""
+ if not self.is_connected():
+ try:
+ logger.info("Attempting to reconnect to OpenAI Realtime API...")
+ model = 'gpt-4o-mini-realtime-preview-2024-12-17'
+ await self.connect(model)
+ return True
+ except Exception as e:
+ logger.error(f"Failed to reconnect: {e}")
+ return False
+ return True
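With these changes, RealtimeClient.connect() accepts an optional model name, append_input_audio() returns False instead of raising when the socket is gone, and ensure_connected() retries the connection with the default model. A hypothetical caller-side sketch showing how the new return value and helper compose (client is assumed to be an already-constructed, connected RealtimeClient; this is not code from the package):

    async def push_audio(client, pcm_chunk):
        # client: a connected RealtimeClient; pcm_chunk: raw audio bytes from the UI
        ok = await client.append_input_audio(pcm_chunk)
        if not ok:
            # the socket was lost; try to re-establish it before resending
            if await client.ensure_connected():
                ok = await client.append_input_audio(pcm_chunk)
        return ok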
{praisonai-2.2.24.dist-info → praisonai-2.2.25.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: PraisonAI
- Version: 2.2.24
+ Version: 2.2.25
  Summary: PraisonAI is an AI Agents Framework with Self Reflection. PraisonAI application combines PraisonAI Agents, AutoGen, and CrewAI into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customisation, and efficient human-agent collaboration.
  Author: Mervin Praison
  Requires-Python: >=3.10
@@ -64,7 +64,7 @@ Requires-Dist: playwright (>=1.47.0) ; extra == "code"
  Requires-Dist: plotly (>=5.24.0) ; extra == "realtime"
  Requires-Dist: praisonai-tools (>=0.0.15) ; extra == "autogen"
  Requires-Dist: praisonai-tools (>=0.0.15) ; extra == "crewai"
- Requires-Dist: praisonaiagents (>=0.0.
+ Requires-Dist: praisonaiagents (>=0.0.96)
  Requires-Dist: pyautogen (>=0.2.19) ; extra == "autogen"
  Requires-Dist: pydantic (<=2.10.1) ; extra == "chat"
  Requires-Dist: pydantic (<=2.10.1) ; extra == "code"
{praisonai-2.2.24.dist-info → praisonai-2.2.25.dist-info}/RECORD
CHANGED
@@ -6,7 +6,7 @@ praisonai/api/call.py,sha256=-dV9DKNDi4w9vN6K63TUh15_PC0M5KzYOmBqHbuJqq0,11079
  praisonai/auto.py,sha256=0omuyIIuu-zBAXpsGo3JwuhX6zpjQg3ZtqbPtF5LZbg,12331
  praisonai/chainlit_ui.py,sha256=VKf_--cONLIBMymMY8j-oj6Pq_rw3pHtXOqF2wZ9gYI,12220
  praisonai/cli.py,sha256=LK6__iJP9jr1QAmG7E4kDbmlYqKIRivu9GedfBRz0_w,36311
- praisonai/deploy.py,sha256=
+ praisonai/deploy.py,sha256=t1vst9Qs_S0efj9y0CHxS6gh843MkYCbSv8s0cOISxc,6028
  praisonai/inbuilt_tools/__init__.py,sha256=mZOEximj3zCyJHq9Lz0bGXhQpBsa_QR-R-yA9UKC3zI,565
  praisonai/inbuilt_tools/autogen_tools.py,sha256=kJdEv61BTYvdHOaURNEpBcWq8Rs-oC03loNFTIjT-ak,4687
  praisonai/inc/__init__.py,sha256=sPDlYBBwdk0VlWzaaM_lG0_LD07lS2HRGvPdxXJFiYg,62
@@ -40,8 +40,8 @@ praisonai/train_vision.py,sha256=OLDtr5u9rszWQ80LC5iFy37yPuYguES6AQybm_2RtM4,125
  praisonai/ui/README.md,sha256=QG9yucvBieVjCjWFzu6hL9xNtYllkoqyJ_q1b0YYAco,1124
  praisonai/ui/agents.py,sha256=wWtVHCQAvLxAe3vtcnivM0JWGuxshbhhwbX8t5VYTD4,32817
  praisonai/ui/callbacks.py,sha256=V4_-GjxmjDFmugUZGfQHKtNSysx7rT6i1UblbM_8lIM,1968
- praisonai/ui/chat.py,sha256=
- praisonai/ui/code.py,sha256=
+ praisonai/ui/chat.py,sha256=mfNU-fmJt4-x3sKe10DuiieOTZYsP5046yGlZq3yVI0,13570
+ praisonai/ui/code.py,sha256=W4lNfbHTl6VeVYCdGi1T3qOL8VN4guUVKA68ZUCunJU,16665
  praisonai/ui/colab.py,sha256=A2NceDVazMy53mIpp-NIn5w3y8aQKwQu5LmHTepVwlo,19584
  praisonai/ui/colab_chainlit.py,sha256=wrB1O0ttRlmOH8aMxU8QdGpse-X54U87ZcEEA3R1aFg,2432
  praisonai/ui/components/aicoder.py,sha256=E2Tz3sWR9WKIPquO30T7aNzpe41XwYwy9UY3CXvSTlw,11165
@@ -67,15 +67,14 @@ praisonai/ui/public/logo_light.png,sha256=8cQRti_Ysa30O3_7C3ku2w40LnVUUlUok47H-3
  praisonai/ui/public/movie.svg,sha256=aJ2EQ8vXZusVsF2SeuAVxP4RFJzQ14T26ejrGYdBgzk,1289
  praisonai/ui/public/praison.css,sha256=fBYbJn4Uuv2AH6ThWkMmdAy_uBbw9a9ZeW0hIGsqotA,75
  praisonai/ui/public/thriller.svg,sha256=2dYY72EcgbEyTxS4QzjAm37Y4srtPWEW4vCMFki98ZI,3163
- praisonai/ui/realtime.py,sha256=
- praisonai/ui/realtimeclient/__init__.py,sha256=
- praisonai/ui/realtimeclient/realtimedocs.txt,sha256=hmgd8Uwy2SkjSndyyF_-ZOaNxiyHwGaQLGc67DvV-sI,26395
+ praisonai/ui/realtime.py,sha256=ucWMGftLoaFzqeIb9EUL5FBDq_Ho6_gHts0JHrmkLMo,18353
+ praisonai/ui/realtimeclient/__init__.py,sha256=0cGv1da-M0c23klikCM3gPFdsjODelK-pU64_XqDeSk,31488
  praisonai/ui/realtimeclient/tools.py,sha256=ujkTZQIha6DQBIfTkhInI-iYD3wi3do2r_EBJCddQy8,8364
  praisonai/ui/sql_alchemy.py,sha256=ilWAWicUGja7ADbXW9_OgIYeyKNuAQ1ZI_RMqjmMI9k,29667
  praisonai/ui/tools.md,sha256=Ad3YH_ZCLMWlz3mDXllQnQ_S5l55LWqLdcZSh-EXrHI,3956
  praisonai/upload_vision.py,sha256=lMpFn993UiYVJxRNZQTmcbPbEajQ5TFKCNGK1Icn_hg,5253
  praisonai/version.py,sha256=ugyuFliEqtAwQmH4sTlc16YXKYbFWDmfyk87fErB8-8,21
- praisonai-2.2.
- praisonai-2.2.
- praisonai-2.2.
- praisonai-2.2.
+ praisonai-2.2.25.dist-info/METADATA,sha256=RNwyw26LtSSllWmV8_Hx0-qSr2R8qU5JQIq7Y416zNc,4761
+ praisonai-2.2.25.dist-info/WHEEL,sha256=dCzwOzx-VmbmLA5u8QpkARaxx3rsePBxa1nmZphhNQk,110
+ praisonai-2.2.25.dist-info/entry_points.txt,sha256=QSSfuXjZMhf16FZ201I_oSoX_s1nWYbi_4_UXPE3S-o,145
+ praisonai-2.2.25.dist-info/RECORD,,