ai-parrot 0.3.3__tar.gz → 0.3.6__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ai-parrot might be problematic.
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/.github/workflows/release.yml +1 -1
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/.gitignore +3 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/Makefile +1 -1
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/PKG-INFO +9 -27
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/ai_parrot.egg-info/PKG-INFO +9 -27
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/ai_parrot.egg-info/SOURCES.txt +7 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/ai_parrot.egg-info/requires.txt +7 -26
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/app.py +2 -2
- ai_parrot-0.3.6/documents/AR_Certification_Skill_Practice_Scorecard_EXAMPLE.pdf +0 -0
- ai_parrot-0.3.6/documents/Day 1_Essentials_AR_PPT.pdf +0 -0
- ai_parrot-0.3.6/documents/video_2024-09-11_19-43-58.mp3 +0 -0
- ai_parrot-0.3.6/documents/video_2024-09-11_19-43-58.mp4 +0 -0
- ai_parrot-0.3.6/documents/video_2024-09-11_19-43-58.vtt +122 -0
- ai_parrot-0.3.6/examples/analyze_video.py +33 -0
- ai_parrot-0.3.6/examples/extract_frames.py +74 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/examples/load_pdf.py +3 -2
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/examples/test_bot.py +1 -1
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/pdf.py +1 -8
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/videolocal.py +27 -7
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/version.py +1 -1
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/pyproject.toml +1 -1
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/setup.py +9 -33
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/.flake8 +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/.github/dependabot.yml +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/.github/workflows/codeql-analysis.yml +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/.isort.cfg +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/.pylintrc +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/INSTALL +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/LICENSE +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/README.md +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/SECURITY.md +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/ai_parrot.egg-info/dependency_links.txt +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/ai_parrot.egg-info/top_level.txt +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/documents/ex-code-loaders.txt +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/etc/navigator-ssl.ini +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/etc/navigator.ini +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/etc/ssl/domain.ext +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/etc/ssl/navigator.local.crt +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/etc/ssl/navigator.local.csr +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/etc/ssl/navigator.local.key +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/etc/ssl/rootCA.crt +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/etc/ssl/rootCA.key +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/etc/ssl/rootCA.srl +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/examples/check_bot.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/examples/test_question.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/mypy.ini +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/__init__.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/chatbots/__init__.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/chatbots/abstract.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/chatbots/asktroc.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/chatbots/base.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/chatbots/basic.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/chatbots/bose.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/chatbots/cody.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/chatbots/copilot.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/chatbots/dataframe.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/chatbots/hragents.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/chatbots/odoo.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/chatbots/retrievals/__init__.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/chatbots/retrievals/constitutional.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/conf.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/crew/__init__.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/crew/tools/__init__.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/crew/tools/bing.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/crew/tools/config.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/crew/tools/duckgo.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/crew/tools/file.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/crew/tools/google.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/crew/tools/gtrends.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/crew/tools/md2pdf.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/crew/tools/rag.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/crew/tools/search.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/crew/tools/url.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/exceptions.c +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/exceptions.pyx +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/handlers/__init__.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/handlers/bots.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/handlers/chat.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/interfaces/__init__.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/interfaces/database.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/llms/__init__.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/llms/abstract.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/llms/anthropic.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/llms/google.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/llms/groq.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/llms/hf.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/llms/openai.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/llms/pipes.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/llms/vertex.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/__init__.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/abstract.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/audio.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/basepdf.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/basevideo.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/csv.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/dir.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/excel.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/github.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/handlers/__init__.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/handlers/data.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/image.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/json.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/pdfchapters.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/pdffn.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/pdfimages.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/pdfmark.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/pdftables.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/ppt.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/qa.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/repo.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/rtd.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/txt.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/utils/__init__.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/utils/models.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/video.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/vimeo.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/web.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/web_base.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/word.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/youtube.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/manager.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/models.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/py.typed +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/stores/__init__.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/stores/abstract.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/stores/milvus.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/stores/qdrant.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/tools/__init__.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/tools/abstract.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/tools/asknews.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/tools/bing.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/tools/duck.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/tools/google.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/tools/stack.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/tools/weather.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/tools/wikipedia.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/tools/zipcode.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/utils/__init__.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/utils/parsers/__init__.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/utils/parsers/toml.c +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/utils/parsers/toml.pyx +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/utils/toml.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/utils/types.cpp +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/utils/types.pyx +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/utils/uv.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/pytest.ini +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/requirements/requirements-dev.txt +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/resources/__init__.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/resources/quick.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/resources/users/__init__.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/resources/users/handlers.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/resources/users/models.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/run.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/settings/__init__.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/settings/settings.py +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/setup.cfg +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/templates/.compiled +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/templates/README.md +0 -0
- {ai_parrot-0.3.3 → ai_parrot-0.3.6}/tox.ini +0 -0

{ai_parrot-0.3.3 → ai_parrot-0.3.6}/.github/workflows/release.yml
@@ -26,7 +26,7 @@ jobs:
 
       - name: Build wheels on Ubuntu
         if: matrix.os == 'ubuntu-latest'
-        uses: RalfG/python-wheels-manylinux-build@v0.
+        uses: RalfG/python-wheels-manylinux-build@v0.7.1-manylinux2014_x86_64
        with:
          python-versions: 'cp39-cp39 cp310-cp310 cp311-cp311 cp312-cp312'
          build-requirements: 'cython numpy'

{ai_parrot-0.3.3 → ai_parrot-0.3.6}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ai-parrot
-Version: 0.3.
+Version: 0.3.6
 Summary: Live Chatbots based on Langchain chatbots and Agents Integrated into Navigator Framework or used into aiohttp applications.
 Home-page: https://github.com/phenobarbital/ai-parrot
 Author: Jesus Lara
@@ -26,7 +26,7 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Framework :: AsyncIO
-Requires-Python: >=3.
+Requires-Python: >=3.9.20
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: Cython==3.0.11
@@ -58,11 +58,8 @@ Requires-Dist: sentence-transformers==3.0.1
 Requires-Dist: tabulate==0.9.0
 Requires-Dist: tiktoken==0.7.0
 Requires-Dist: tokenizers==0.19.1
-Requires-Dist:
-Requires-Dist:
-Requires-Dist: youtube-transcript-api==0.6.2
-Requires-Dist: selenium==4.18.1
-Requires-Dist: webdriver_manager==4.0.1
+Requires-Dist: selenium>=4.18.1
+Requires-Dist: webdriver_manager>=4.0.1
 Requires-Dist: transitions==0.9.0
 Requires-Dist: sentencepiece==0.2.0
 Requires-Dist: duckduckgo-search==5.3.0
@@ -79,9 +76,12 @@ Requires-Dist: mediawikiapi==1.2
 Requires-Dist: pyowm==3.3.0
 Requires-Dist: O365==2.0.35
 Requires-Dist: stackapi==0.3.1
-Requires-Dist: timm==1.0.9
 Requires-Dist: torchvision==0.19.1
+Requires-Dist: tf-keras==2.17.0
 Provides-Extra: loaders
+Requires-Dist: unstructured==0.14.3; extra == "loaders"
+Requires-Dist: unstructured-client==0.18.0; extra == "loaders"
+Requires-Dist: youtube-transcript-api==0.6.2; extra == "loaders"
 Requires-Dist: pymupdf==1.24.4; extra == "loaders"
 Requires-Dist: pymupdf4llm==0.0.1; extra == "loaders"
 Requires-Dist: pdf4llm==0.0.6; extra == "loaders"
@@ -107,7 +107,7 @@ Requires-Dist: paddleocr==2.8.1; extra == "loaders"
 Requires-Dist: ftfy==6.2.3; extra == "loaders"
 Requires-Dist: librosa==0.10.1; extra == "loaders"
 Requires-Dist: XlsxWriter==3.2.0; extra == "loaders"
-Requires-Dist:
+Requires-Dist: timm==1.0.9; extra == "loaders"
 Provides-Extra: anthropic
 Requires-Dist: langchain-anthropic==0.1.11; extra == "anthropic"
 Requires-Dist: anthropic==0.25.2; extra == "anthropic"
@@ -142,24 +142,6 @@ Requires-Dist: gradio-client==0.2.9; extra == "analytics"
 Requires-Dist: streamlit==1.37.1; extra == "analytics"
 Requires-Dist: simsimd==4.3.1; extra == "analytics"
 Requires-Dist: opencv-python==4.10.0.84; extra == "analytics"
-Provides-Extra: all
-Requires-Dist: langchain-milvus==0.1.1; extra == "all"
-Requires-Dist: milvus==2.3.5; extra == "all"
-Requires-Dist: pymilvus==2.4.4; extra == "all"
-Requires-Dist: groq==0.11.0; extra == "all"
-Requires-Dist: langchain-groq==0.1.4; extra == "all"
-Requires-Dist: llama-index-llms-huggingface==0.2.7; extra == "all"
-Requires-Dist: langchain-google-vertexai==1.0.8; extra == "all"
-Requires-Dist: langchain-google-genai==1.0.8; extra == "all"
-Requires-Dist: google-generativeai==0.7.2; extra == "all"
-Requires-Dist: vertexai==1.60.0; extra == "all"
-Requires-Dist: google-cloud-aiplatform>=1.60.0; extra == "all"
-Requires-Dist: grpc-google-iam-v1==0.13.0; extra == "all"
-Requires-Dist: langchain-openai==0.1.21; extra == "all"
-Requires-Dist: openai==1.40.8; extra == "all"
-Requires-Dist: llama-index-llms-openai==0.1.11; extra == "all"
-Requires-Dist: langchain-anthropic==0.1.23; extra == "all"
-Requires-Dist: anthropic==0.34.0; extra == "all"
 
 # AI Parrot: Python package for creating Chatbots
 This is an open-source Python package for creating Chatbots based on Langchain and Navigator.

{ai_parrot-0.3.3 → ai_parrot-0.3.6}/ai_parrot.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ai-parrot
-Version: 0.3.
+Version: 0.3.6
 Summary: Live Chatbots based on Langchain chatbots and Agents Integrated into Navigator Framework or used into aiohttp applications.
 Home-page: https://github.com/phenobarbital/ai-parrot
 Author: Jesus Lara
@@ -26,7 +26,7 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Framework :: AsyncIO
-Requires-Python: >=3.
+Requires-Python: >=3.9.20
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: Cython==3.0.11
@@ -58,11 +58,8 @@ Requires-Dist: sentence-transformers==3.0.1
 Requires-Dist: tabulate==0.9.0
 Requires-Dist: tiktoken==0.7.0
 Requires-Dist: tokenizers==0.19.1
-Requires-Dist:
-Requires-Dist:
-Requires-Dist: youtube-transcript-api==0.6.2
-Requires-Dist: selenium==4.18.1
-Requires-Dist: webdriver_manager==4.0.1
+Requires-Dist: selenium>=4.18.1
+Requires-Dist: webdriver_manager>=4.0.1
 Requires-Dist: transitions==0.9.0
 Requires-Dist: sentencepiece==0.2.0
 Requires-Dist: duckduckgo-search==5.3.0
@@ -79,9 +76,12 @@ Requires-Dist: mediawikiapi==1.2
 Requires-Dist: pyowm==3.3.0
 Requires-Dist: O365==2.0.35
 Requires-Dist: stackapi==0.3.1
-Requires-Dist: timm==1.0.9
 Requires-Dist: torchvision==0.19.1
+Requires-Dist: tf-keras==2.17.0
 Provides-Extra: loaders
+Requires-Dist: unstructured==0.14.3; extra == "loaders"
+Requires-Dist: unstructured-client==0.18.0; extra == "loaders"
+Requires-Dist: youtube-transcript-api==0.6.2; extra == "loaders"
 Requires-Dist: pymupdf==1.24.4; extra == "loaders"
 Requires-Dist: pymupdf4llm==0.0.1; extra == "loaders"
 Requires-Dist: pdf4llm==0.0.6; extra == "loaders"
@@ -107,7 +107,7 @@ Requires-Dist: paddleocr==2.8.1; extra == "loaders"
 Requires-Dist: ftfy==6.2.3; extra == "loaders"
 Requires-Dist: librosa==0.10.1; extra == "loaders"
 Requires-Dist: XlsxWriter==3.2.0; extra == "loaders"
-Requires-Dist:
+Requires-Dist: timm==1.0.9; extra == "loaders"
 Provides-Extra: anthropic
 Requires-Dist: langchain-anthropic==0.1.11; extra == "anthropic"
 Requires-Dist: anthropic==0.25.2; extra == "anthropic"
@@ -142,24 +142,6 @@ Requires-Dist: gradio-client==0.2.9; extra == "analytics"
 Requires-Dist: streamlit==1.37.1; extra == "analytics"
 Requires-Dist: simsimd==4.3.1; extra == "analytics"
 Requires-Dist: opencv-python==4.10.0.84; extra == "analytics"
-Provides-Extra: all
-Requires-Dist: langchain-milvus==0.1.1; extra == "all"
-Requires-Dist: milvus==2.3.5; extra == "all"
-Requires-Dist: pymilvus==2.4.4; extra == "all"
-Requires-Dist: groq==0.11.0; extra == "all"
-Requires-Dist: langchain-groq==0.1.4; extra == "all"
-Requires-Dist: llama-index-llms-huggingface==0.2.7; extra == "all"
-Requires-Dist: langchain-google-vertexai==1.0.8; extra == "all"
-Requires-Dist: langchain-google-genai==1.0.8; extra == "all"
-Requires-Dist: google-generativeai==0.7.2; extra == "all"
-Requires-Dist: vertexai==1.60.0; extra == "all"
-Requires-Dist: google-cloud-aiplatform>=1.60.0; extra == "all"
-Requires-Dist: grpc-google-iam-v1==0.13.0; extra == "all"
-Requires-Dist: langchain-openai==0.1.21; extra == "all"
-Requires-Dist: openai==1.40.8; extra == "all"
-Requires-Dist: llama-index-llms-openai==0.1.11; extra == "all"
-Requires-Dist: langchain-anthropic==0.1.23; extra == "all"
-Requires-Dist: anthropic==0.34.0; extra == "all"
 
 # AI Parrot: Python package for creating Chatbots
 This is an open-source Python package for creating Chatbots based on Langchain and Navigator.

{ai_parrot-0.3.3 → ai_parrot-0.3.6}/ai_parrot.egg-info/SOURCES.txt
@@ -22,7 +22,12 @@ ai_parrot.egg-info/SOURCES.txt
 ai_parrot.egg-info/dependency_links.txt
 ai_parrot.egg-info/requires.txt
 ai_parrot.egg-info/top_level.txt
+documents/AR_Certification_Skill_Practice_Scorecard_EXAMPLE.pdf
+documents/Day 1_Essentials_AR_PPT.pdf
 documents/ex-code-loaders.txt
+documents/video_2024-09-11_19-43-58.mp3
+documents/video_2024-09-11_19-43-58.mp4
+documents/video_2024-09-11_19-43-58.vtt
 etc/navigator-ssl.ini
 etc/navigator.ini
 etc/ssl/domain.ext
@@ -32,7 +37,9 @@ etc/ssl/navigator.local.key
 etc/ssl/rootCA.crt
 etc/ssl/rootCA.key
 etc/ssl/rootCA.srl
+examples/analyze_video.py
 examples/check_bot.py
+examples/extract_frames.py
 examples/load_pdf.py
 examples/test_bot.py
 examples/test_question.py

{ai_parrot-0.3.3 → ai_parrot-0.3.6}/ai_parrot.egg-info/requires.txt
@@ -27,11 +27,8 @@ sentence-transformers==3.0.1
 tabulate==0.9.0
 tiktoken==0.7.0
 tokenizers==0.19.1
-
-
-youtube-transcript-api==0.6.2
-selenium==4.18.1
-webdriver_manager==4.0.1
+selenium>=4.18.1
+webdriver_manager>=4.0.1
 transitions==0.9.0
 sentencepiece==0.2.0
 duckduckgo-search==5.3.0
@@ -48,27 +45,8 @@ mediawikiapi==1.2
 pyowm==3.3.0
 O365==2.0.35
 stackapi==0.3.1
-timm==1.0.9
 torchvision==0.19.1
-
-[all]
-langchain-milvus==0.1.1
-milvus==2.3.5
-pymilvus==2.4.4
-groq==0.11.0
-langchain-groq==0.1.4
-llama-index-llms-huggingface==0.2.7
-langchain-google-vertexai==1.0.8
-langchain-google-genai==1.0.8
-google-generativeai==0.7.2
-vertexai==1.60.0
-google-cloud-aiplatform>=1.60.0
-grpc-google-iam-v1==0.13.0
-langchain-openai==0.1.21
-openai==1.40.8
-llama-index-llms-openai==0.1.11
-langchain-anthropic==0.1.23
-anthropic==0.34.0
+tf-keras==2.17.0
 
 [analytics]
 annoy==1.17.3
@@ -100,6 +78,9 @@ langchain-groq==0.1.9
 llama-index-llms-huggingface==0.2.7
 
 [loaders]
+unstructured==0.14.3
+unstructured-client==0.18.0
+youtube-transcript-api==0.6.2
 pymupdf==1.24.4
 pymupdf4llm==0.0.1
 pdf4llm==0.0.6
@@ -125,7 +106,7 @@ paddleocr==2.8.1
 ftfy==6.2.3
 librosa==0.10.1
 XlsxWriter==3.2.0
-
+timm==1.0.9
 
 [milvus]
 langchain-milvus>=0.1.4

{ai_parrot-0.3.3 → ai_parrot-0.3.6}/app.py
@@ -3,7 +3,7 @@ from navigator.handlers.types import AppHandler
 from navigator.background import BackgroundQueue
 from navigator_auth import AuthHandler
 from parrot.manager import ChatbotManager
-from parrot.loaders.handlers import DataManagement
+# from parrot.loaders.handlers import DataManagement
 from parrot.conf import STATIC_DIR
 from parrot.handlers.bots import (
     FeedbackTypeHandler,
@@ -53,7 +53,7 @@ class Main(AppHandler):
             ChatbotSharingQuestion
         )
         # Management APIs:
-        DataManagement.configure(self.app)
+        # DataManagement.configure(self.app)
 
 
     async def on_prepare(self, request, response):

Binary file
Binary file
Binary file

ai_parrot-0.3.6/documents/video_2024-09-11_19-43-58.vtt
@@ -0,0 +1,122 @@
+WEBVTT
+
+1
+00:00:00.000 --> 00:00:09.320
+Hi, I'm Kyle from the Bose Support Team and here on the Bose Team we take merchandising
+
+2
+00:00:09.320 --> 00:00:11.980
+and brand advocacy to the next level.
+
+3
+00:00:11.980 --> 00:00:16.540
+Every visit starts with arriving at the location and greeting an associate by the door.
+
+4
+00:00:16.540 --> 00:00:19.000
+We move on to the merchandising side of our visit.
+
+5
+00:00:19.000 --> 00:00:23.440
+We head over to our displays to ensure that they're clean, bright and fully stocked.
+
+6
+00:00:23.440 --> 00:00:27.800
+After our displays are clean and bright, we need to test the functionality of these displays. We go
+
+7
+00:00:27.800 --> 00:00:32.220
+through each display ensuring that the demo is functioning as intended. If we
+
+8
+00:00:32.220 --> 00:00:36.980
+run into an issue, we troubleshoot immediately or we call the call center
+
+9
+00:00:36.980 --> 00:00:41.300
+for further support. This can be more in-depth troubleshooting or to order
+
+10
+00:00:41.300 --> 00:00:48.400
+parts that are needed to resolve the issue. These can be simple fixes such as installing a wire, swapping out the media card, or even
+
+11
+00:00:48.400 --> 00:00:49.400
+a simple reboot.
+
+12
+00:00:49.400 --> 00:00:54.680
+They can even be more advanced fixes such as entire display rewiring, product changeouts,
+
+13
+00:00:54.680 --> 00:00:56.920
+or even full display changeouts.
+
+14
+00:00:56.920 --> 00:01:01.280
+Our ticketing system houses all of our tickets and allows for reps to properly manage their
+
+15
+00:01:01.280 --> 00:01:05.000
+market opening, closing tickets, as well as seeing when parts
+
+16
+00:01:05.000 --> 00:01:06.280
+have been shipped or delivered.
+
+17
+00:01:06.280 --> 00:01:11.120
+Once we've ensured that our displays are properly merchandised and fully functional,
+
+18
+00:01:11.120 --> 00:01:13.720
+we move on to the brand advocacy part of our visit.
+
+19
+00:01:13.720 --> 00:01:17.520
+We do this in several ways, by building the relationship with the store, this is done
+
+20
+00:01:17.520 --> 00:01:22.700
+through routine check-ins with staff and management in the field to cover common concerns or questions
+
+21
+00:01:22.700 --> 00:01:28.160
+that they may have, as well as fill them in on some new features that they may not know. Training events such
+
+22
+00:01:28.160 --> 00:01:32.880
+as morning huddles, lunch and learns, or customized training experiences where we
+
+23
+00:01:32.880 --> 00:01:37.360
+can take a full team sit down and do some in-depth training. Demoing new
+
+24
+00:01:37.360 --> 00:01:42.040
+products letting both customers and store associates get a hands-on
+
+25
+00:01:42.040 --> 00:01:49.640
+experience with a newly released product and promotion events where we have an opportunity to get on the front line and practice what we preach,
+
+26
+00:01:49.640 --> 00:01:51.780
+demoing and selling them ourselves.
+
+27
+00:01:51.780 --> 00:01:57.680
+Through these regular engagements, we are able to impact not just a store but an entire
+
+28
+00:01:57.680 --> 00:01:58.680
+market.
+
+29
+00:01:58.680 --> 00:02:02.960
+This is just a quick walkthrough of what we do day to day here at the Bose program.
+
+30
+00:02:02.960 --> 00:02:05.000
+I hope you enjoyed and have a great day.
+
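
For reference, the cue text above can be flattened back into a plain transcript with nothing but the standard library. This is an illustrative sketch, not part of the package; it assumes the .vtt sits at the path listed above relative to wherever the tarball is unpacked.

    # Illustrative only: flatten the WEBVTT cues above into one plain-text transcript.
    from pathlib import Path

    vtt_text = Path("documents/video_2024-09-11_19-43-58.vtt").read_text(encoding="utf-8")
    spoken_lines = []
    for line in vtt_text.splitlines():
        line = line.strip()
        # Drop the WEBVTT header, cue numbers, timestamp lines and blank separators.
        if not line or line == "WEBVTT" or line.isdigit() or "-->" in line:
            continue
        spoken_lines.append(line)
    print(" ".join(spoken_lines))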

ai_parrot-0.3.6/examples/analyze_video.py
@@ -0,0 +1,33 @@
+import asyncio
+from navconfig import BASE_DIR
+from parrot.llms.vertex import VertexLLM
+from parrot.loaders.videolocal import (
+    VideoLocalLoader
+)
+
+
+
+
+async def process_video(doc):
+    llm = VertexLLM(
+        model='gemini-1.5-pro',
+        temperature=0.1,
+        top_k=30,
+        Top_p=0.5,
+    )
+    print(':: Processing: ', doc)
+    loader = VideoLocalLoader(
+        doc,
+        source_type=f"Video {doc.name}",
+        llm=llm.get_llm(),
+        language="en"
+    )
+    docs = loader.extract()
+    print('DOCS > ', docs)
+
+
+if __name__ == '__main__':
+    doc = BASE_DIR.joinpath('documents', 'video_2024-09-11_19-43-58.mp4')
+    asyncio.run(
+        process_video(doc)
+    )

ai_parrot-0.3.6/examples/extract_frames.py
@@ -0,0 +1,74 @@
+import os
+from pathlib import PurePath, Path
+import cv2
+from navconfig import BASE_DIR
+
+def better_resolution(image_path, output_path):
+    # Load the super-resolution model (example using the EDSR model)
+    sr = cv2.dnn_superres.DnnSuperResImpl_create()
+
+    # Read the model
+    path_to_model = 'EDSR_x4.pb'
+    # You need to download this model from OpenCV's model zoo
+    sr.readModel(path_to_model)
+
+    # Set the model and scale
+    sr.setModel("edsr", 4)  # EDSR model with 4x upscaling
+
+    # Read the input image
+    image = cv2.imread(image_path)
+
+    # Upscale the image
+    upscaled_image = sr.upsample(image)
+
+    # Save the result
+    cv2.imwrite(output_path, upscaled_image)
+    print(f"Saved super-resolution image: {output_path}")
+
+def extract_frames(
+    video_path,
+    output_dir: PurePath,
+    interval=5,
+    upscale_factor=2
+):
+    if not output_dir.exists():
+        output_dir.mkdir(mode=0o777, parents=True, exist_ok=True)
+
+    cap = cv2.VideoCapture(str(video_path))
+
+    # Get frames per second (fps) of the video
+    fps = cap.get(cv2.CAP_PROP_FPS)
+    frame_interval = int(fps * interval)
+
+    frame_count = 0
+    success, frame = cap.read()
+
+    while success:
+        if frame_count % frame_interval == 0:
+            # Get the original dimensions
+            height, width = frame.shape[:2]
+            # Upscale the frame by the given factor
+            frame_upscaled = cv2.resize(
+                frame,
+                (width * upscale_factor, height * upscale_factor),
+                interpolation=cv2.INTER_CUBIC
+            )
+            frame_name = f"frame_{frame_count}.jpg"
+            upscaled_name = f"frame_{frame_count}_upscaled.jpg"
+            frame_path = os.path.join(output_dir, frame_name)
+            upscaled_path = output_dir.joinpath(upscaled_name)
+            cv2.imwrite(frame_path, frame_upscaled)
+            # better_resolution(frame_path, upscaled_path)
+            print(f"Extracted {frame_name}")
+
+        frame_count += 1
+        success, frame = cap.read()
+
+    cap.release()
+    print("Finished extracting frames.")
+
+# Usage
+if __name__ == '__main__':
+    video_file = BASE_DIR.joinpath('documents', 'video_2024-09-11_19-43-58.mp4')
+    output_folder = BASE_DIR.joinpath('documents', 'extracted_frames')
+    extract_frames(video_file, output_folder, interval=2)
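
As a quick sanity check on the sampling logic in extract_frames() above: a frame is written whenever frame_count % int(fps * interval) == 0, so one frame is kept roughly every `interval` seconds. A small worked sketch with an assumed 30 fps source (the real value comes from cap.get(cv2.CAP_PROP_FPS)):

    # Hypothetical numbers: a 30 fps source sampled every 2 seconds, as in the
    # usage block of extract_frames.py above.
    fps = 30.0
    interval = 2
    frame_interval = int(fps * interval)          # 60 frames between samples
    sampled = [n for n in range(0, 241) if n % frame_interval == 0]
    print(frame_interval, sampled)                # 60 [0, 60, 120, 180, 240]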

{ai_parrot-0.3.3 → ai_parrot-0.3.6}/examples/load_pdf.py
@@ -7,13 +7,14 @@ from parrot.loaders import (
 
 async def process_pdf():
     llm = VertexLLM(
-        model='gemini-1.5-
+        model='gemini-1.5-pro',
         temperature=0.1,
         top_k=30,
         Top_p=0.5,
     )
     # Add LLM
-    doc = BASE_DIR.joinpath('documents', 'AR_Certification_Skill_Practice_Scorecard_EXAMPLE.pdf')
+    # doc = BASE_DIR.joinpath('documents', 'AR_Certification_Skill_Practice_Scorecard_EXAMPLE.pdf')
+    doc = BASE_DIR.joinpath('documents', 'Day 1_Essentials_AR_PPT.pdf')
     print(':: Processing: ', doc)
     # PDF Files
     loader = PDFLoader(

{ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/pdf.py
@@ -49,14 +49,6 @@ class PDFLoader(BasePDF):
         self.parse_images = kwargs.get('parse_images', False)
         self.page_as_images = kwargs.get('page_as_images', False)
         if self.page_as_images is True:
-            # # Load the processor and model from Hugging Face
-            # self.image_processor = DonutProcessor.from_pretrained(
-            #     "naver-clova-ix/donut-base-finetuned-docvqa"
-            # )
-            # self.image_model = VisionEncoderDecoderModel.from_pretrained(
-            #     "naver-clova-ix/donut-base-finetuned-docvqa",
-
-            # )
             # Load the processor and model from Hugging Face
             self.image_processor = LayoutLMv3Processor.from_pretrained(
                 "microsoft/layoutlmv3-base",
@@ -388,6 +380,7 @@ class PDFLoader(BasePDF):
             # TODO passing the image to a AI visual to get explanation
             # Get the extracted text from the image
             text = self.extract_page_text(img_path)
+            print('TEXT EXTRACTED >> ', text)
             url = f'/static/images/{img_name}'
             image_meta = {
                 "url": url,
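
The hunks above drop the commented-out Donut code and keep LayoutLMv3 for the page_as_images path, adding a debug print of whatever extract_page_text() returns. How extract_page_text() actually uses the processor is not shown in this diff; the sketch below is only a rough illustration of a typical LayoutLMv3 OCR round-trip, assuming transformers, Pillow, pytesseract and a local Tesseract install, with page_1.png standing in as a hypothetical rendered page image.

    # Rough sketch, not the package's actual extract_page_text() implementation.
    from PIL import Image
    from transformers import LayoutLMv3Processor

    # The checkpoint matches the one loaded in pdf.py above; by default the
    # processor runs Tesseract OCR over the image to obtain words and boxes.
    processor = LayoutLMv3Processor.from_pretrained("microsoft/layoutlmv3-base")
    page = Image.open("page_1.png").convert("RGB")
    encoding = processor(page, return_tensors="pt")
    # Decoding the token ids gives back the OCR'd page text.
    text = processor.tokenizer.decode(encoding["input_ids"][0], skip_special_tokens=True)
    print(text)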

{ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/loaders/videolocal.py
@@ -1,10 +1,26 @@
 from typing import Any
 from collections.abc import Callable
+import math
 from pathlib import PurePath
 from langchain.docstore.document import Document
 from .basevideo import BaseVideoLoader
 
 
+def split_text(text, max_length):
+    """Split text into chunks of a maximum length, ensuring not to break words."""
+    chunks = []
+    while len(text) > max_length:
+        # Find the last space before the max_length
+        split_point = text.rfind(' ', 0, max_length)
+        # If no space found, split at max_length
+        if split_point == -1:
+            split_point = max_length
+        chunks.append(text[:split_point])
+        text = text[split_point:].strip()
+    chunks.append(text)
+    return chunks
+
+
 class VideoLocalLoader(BaseVideoLoader):
     """
     Generating Video transcripts from local Videos.
@@ -26,14 +42,15 @@ class VideoLocalLoader(BaseVideoLoader):
 
     def load_video(self, path: PurePath) -> list:
         metadata = {
-            "source": f"{path}",
             "url": f"{path.name}",
-            "
+            "source": f"{path}",
             "filename": f"{path}",
+            "index": path.stem,
             "question": '',
             "answer": '',
             'type': 'video_transcript',
             "source_type": self._source_type,
+            "data": {},
             "summary": '',
             "document_meta": {
                 "language": self._language,
@@ -53,14 +70,17 @@ class VideoLocalLoader(BaseVideoLoader):
             transcript = ''
         # Summarize the transcript
         if transcript:
+            # Split transcript into chunks
+            transcript_chunks = split_text(transcript, 32767)
             summary = self.get_summary_from_text(transcript)
             # Create Two Documents, one is for transcript, second is VTT:
             metadata['summary'] = summary
-
-
-
-
-
+            for chunk in transcript_chunks:
+                doc = Document(
+                    page_content=chunk,
+                    metadata=metadata
+                )
+                documents.append(doc)
         if transcript_whisper:
             # VTT version:
             transcript = self.transcript_to_vtt(transcript_whisper, transcript_path)
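
The main behavioural change in videolocal.py is that long transcripts are now split at word boundaries before being wrapped into Document objects, instead of producing a single oversized document. A minimal sketch of the new helper in use, assuming ai-parrot 0.3.6 and its loader dependencies are importable:

    # Minimal sketch of the chunker added in 0.3.6; 32767 is the limit that
    # VideoLocalLoader.load_video passes to split_text.
    from parrot.loaders.videolocal import split_text

    transcript = "Hi, I'm Kyle from the Bose Support Team. " * 2000  # roughly 80k characters
    chunks = split_text(transcript, 32767)

    # Chunks respect the limit and end on whole words, never mid-word.
    assert all(len(chunk) <= 32767 for chunk in chunks)
    print(len(chunks), [len(chunk) for chunk in chunks])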

{ai_parrot-0.3.3 → ai_parrot-0.3.6}/parrot/version.py
@@ -3,7 +3,7 @@
 __title__ = "ai-parrot"
 __description__ = "Live Chatbots based on Langchain chatbots and Agents \
 Integrated into Navigator Framework or used into aiohttp applications."
-__version__ = "0.3.
+__version__ = "0.3.6"
 __author__ = "Jesus Lara"
 __author_email__ = "jesuslarag@gmail.com"
 __license__ = "MIT"