sunholo 0.70.4__tar.gz → 0.70.6__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sunholo-0.70.4 → sunholo-0.70.6}/PKG-INFO +2 -2
- {sunholo-0.70.4 → sunholo-0.70.6}/setup.py +1 -1
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/agents/flask/qna_routes.py +4 -4
- sunholo-0.70.6/sunholo/vertex/extensions.py +326 -0
- sunholo-0.70.6/sunholo/vertex/extensions_class.py +241 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/vertex/memory_tools.py +2 -2
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo.egg-info/PKG-INFO +2 -2
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo.egg-info/SOURCES.txt +1 -0
- sunholo-0.70.4/sunholo/vertex/extensions.py +0 -138
- {sunholo-0.70.4 → sunholo-0.70.6}/LICENSE.txt +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/MANIFEST.in +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/README.md +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/setup.cfg +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/agents/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/agents/chat_history.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/agents/dispatch_to_qa.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/agents/fastapi/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/agents/fastapi/base.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/agents/fastapi/qna_routes.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/agents/flask/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/agents/flask/base.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/agents/flask/vac_routes.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/agents/langserve.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/agents/pubsub.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/agents/route.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/agents/special_commands.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/agents/swagger.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/archive/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/archive/archive.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/auth/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/auth/run.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/bots/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/bots/discord.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/bots/github_webhook.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/bots/webapp.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/chunker/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/chunker/data_to_embed_pubsub.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/chunker/doc_handling.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/chunker/images.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/chunker/loaders.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/chunker/message_data.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/chunker/pdfs.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/chunker/publish.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/chunker/splitter.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/cli/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/cli/chat_vac.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/cli/cli.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/cli/cli_init.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/cli/configs.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/cli/deploy.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/cli/embedder.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/cli/merge_texts.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/cli/run_proxy.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/cli/sun_rich.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/cli/swagger.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/components/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/components/llm.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/components/retriever.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/components/vectorstore.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/database/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/database/alloydb.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/database/alloydb_client.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/database/database.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/database/lancedb.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/database/sql/sb/create_function.sql +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/database/sql/sb/create_function_time.sql +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/database/sql/sb/create_table.sql +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/database/sql/sb/delete_source_row.sql +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/database/sql/sb/return_sources.sql +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/database/sql/sb/setup.sql +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/database/static_dbs.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/database/uuid.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/discovery_engine/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/discovery_engine/chunker_handler.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/discovery_engine/create_new.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/discovery_engine/discovery_engine_client.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/embedder/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/embedder/embed_chunk.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/gcs/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/gcs/add_file.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/gcs/download_url.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/gcs/metadata.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/langfuse/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/langfuse/callback.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/langfuse/prompts.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/llamaindex/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/llamaindex/generate.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/llamaindex/get_files.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/llamaindex/import_files.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/logging.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/lookup/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/lookup/model_lookup.yaml +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/patches/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/patches/langchain/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/patches/langchain/lancedb.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/patches/langchain/vertexai.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/pubsub/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/pubsub/process_pubsub.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/pubsub/pubsub_manager.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/qna/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/qna/parsers.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/qna/retry.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/streaming/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/streaming/content_buffer.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/streaming/langserve.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/streaming/stream_lookup.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/streaming/streaming.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/summarise/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/summarise/summarise.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/utils/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/utils/api_key.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/utils/big_context.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/utils/config.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/utils/config_schema.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/utils/gcp.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/utils/gcp_project.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/utils/parsers.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/utils/timedelta.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/utils/user_ids.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/utils/version.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/vertex/__init__.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/vertex/init.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo/vertex/safety.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo.egg-info/dependency_links.txt +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo.egg-info/entry_points.txt +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo.egg-info/requires.txt +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/sunholo.egg-info/top_level.txt +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/tests/test_chat_history.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/tests/test_chunker.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/tests/test_config.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/tests/test_dispatch_to_qa.py +0 -0
- {sunholo-0.70.4 → sunholo-0.70.6}/tests/test_swagger.py +0 -0

{sunholo-0.70.4 → sunholo-0.70.6}/PKG-INFO
@@ -1,9 +1,9 @@
 Metadata-Version: 2.1
 Name: sunholo
-Version: 0.70.4
+Version: 0.70.6
 Summary: Large Language Model DevOps - a package to help deploy LLMs to the Cloud.
 Home-page: https://github.com/sunholo-data/sunholo-py
-Download-URL: https://github.com/sunholo-data/sunholo-py/archive/refs/tags/v0.70.4.tar.gz
+Download-URL: https://github.com/sunholo-data/sunholo-py/archive/refs/tags/v0.70.6.tar.gz
 Author: Holosun ApS
 Author-email: multivac@sunholo.com
 License: Apache License, Version 2.0

{sunholo-0.70.4 → sunholo-0.70.6}/sunholo/agents/flask/qna_routes.py
@@ -46,7 +46,7 @@ cache_duration = timedelta(minutes=5) # Cache duration
 
 def make_openai_response(user_message, vector_name, answer):
     response_id = str(uuid.uuid4())
-    log.info("openai response: Q: {user_message} to VECTOR_NAME: {vector_name} - A: {answer}")
+    log.info(f"openai response: Q: {user_message} to VECTOR_NAME: {vector_name} - A: {answer}")
     openai_response = {
         "id": response_id,
         "object": "chat.completion",
@@ -63,9 +63,9 @@ def make_openai_response(user_message, vector_name, answer):
             "finish_reason": "stop"
         }],
         "usage": {
-            "prompt_tokens":
-            "completion_tokens":
-            "total_tokens":
+            "prompt_tokens": 0,
+            "completion_tokens": 0,
+            "total_tokens": 0
         }
     }
 
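
For orientation, a minimal sketch of how the patched helper behaves after this change (import path follows the file list above; argument values are illustrative and assume the module's Flask dependencies are installed):

# Sketch only: make_openai_response() builds an OpenAI-style chat.completion dict;
# the usage token counts are now hard-coded to 0, as the hunk above shows.
from sunholo.agents.flask.qna_routes import make_openai_response

resp = make_openai_response(
    user_message="What is Sunholo?",              # illustrative
    vector_name="my_vac",                         # illustrative
    answer="Sunholo is an LLM DevOps package.",   # illustrative
)
assert resp["object"] == "chat.completion"
assert resp["usage"]["total_tokens"] == 0
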
sunholo-0.70.6/sunholo/vertex/extensions.py
@@ -0,0 +1,326 @@
+# https://cloud.google.com/vertex-ai/generative-ai/docs/extensions/create-extension
+# https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/extension#python
+from vertexai.preview import extensions
+from .init import init_vertex
+from ..logging import log
+from ..utils.gcp_project import get_gcp_project
+from ..utils.parsers import validate_extension_id
+
+# https://github.com/GoogleCloudPlatform/applied-ai-engineering-samples/blob/main/genai-on-vertex-ai/vertex_ai_extensions/notebooks/pandas_code_interpreter.ipynb
+import base64
+import json
+import pprint
+import pandas
+from io import StringIO
+
+global CODE_INTERPRETER_WRITTEN_FILES
+CODE_INTERPRETER_WRITTEN_FILES = []
+
+def get_extension_import_config(
+        display_name: str,
+        description: str,
+        api_spec_gcs: dict,
+        service_account_name: dict,
+        tool_use_examples: list):
+
+    tool_use_examples = [
+        {
+            "extensionOperation": {
+                "operationId": "say_hello",
+            },
+            "displayName": "Say hello in the requested language",
+            "query": "Say hello in French",
+            "requestParams": {
+                "fields": [
+                    {
+                        "key": "apiServicePrompt",
+                        "value": {
+                            "string_value": "French",
+                        }
+                    }
+                ]
+            },
+            "responseParams": {
+                "fields": [
+                    {
+                        "key": "apiServiceOutput",
+                        "value": {
+                            "string_value": "bonjour",
+                        },
+                    }
+                ],
+            },
+            "responseSummary": "Bonjour"
+        }
+    ]
+
+
+    return {
+        "displayName": display_name,
+        "description": description,
+        "manifest": {
+            "name": "EXTENSION_NAME_LLM",
+            "description": "DESCRIPTION_LLM",
+            "apiSpec": {
+                "openApiGcsUri": api_spec_gcs,
+            },
+            "authConfig": {
+                "authType": "OAUTH",
+                "oauthConfig": {"service_account": service_account_name}
+            }
+        },
+        "toolUseExamples": tool_use_examples,
+    }
+
+# once an extension is available, call it in code here
+def create_extension_instance(
+        display_name: str,
+        description: str,
+        open_api_gcs_uri: str,
+        llm_name: str=None,
+        llm_description: str=None,
+        runtime_config: dict=None,
+        service_account: str=None,
+):
+    """
+    Args:
+    - display_name: for the human. parsed to be used as extension_name
+    - description: for the human
+    - open_api_gcs_uri: location on GCS where open_ai yaml spec is
+    - llm_name: for the model. If None, uses display_name
+    - llm_description: for the model. If None, uses description
+    - service_account: If not specified, the Vertex AI Extension Service Agent is used to execute the extension.
+
+    """
+    project_id = get_gcp_project()
+    extension_name = f"projects/{project_id}/locations/us-central1/extensions/{validate_extension_id(display_name)}"
+
+    extension = extensions.Extension.create(
+        extension_name=extension_name,
+        display_name=display_name,
+        description=description,
+        runtime_config=runtime_config or None,
+        manifest={
+            "name": llm_name or display_name,
+            "description": llm_description or description,
+            "api_spec": {
+                "open_api_gcs_uri": open_api_gcs_uri
+            },
+            "auth_config": {
+                "auth_type": "GOOGLE_SERVICE_ACCOUNT_AUTH",
+                "google_service_account_config": service_account or {},
+            },
+        },
+    )
+    log.info(f"Created Vertex Extension: {extension_name}")
+
+    return extension
+
+
+
+def create_extension_code_interpreter(
+        code_artifacts_bucket=None
+):
+
+    # only us-central for now
+    location = "us-central1"
+    init_vertex(location=location)
+
+    runtime_config=None
+    if code_artifacts_bucket:
+        runtime_config = {"codeInterpreterRuntimeConfig":
+            {
+                "fileInputGcsBucket": code_artifacts_bucket,
+                "fileOutputGcsBucket": code_artifacts_bucket
+            }
+        }
+
+    llm_description="""
+    Tool to generate and execute valid Python code from a natural
+    language description, or to execute custom Python code.
+    Use this tool to:
+    - generate and/or execute code for various tasks:
+      - perform a wide variety of mathematical calculations, for example, add,
+        subtract, multiply, divide, average, power, factorial, quotient,
+        formulae, logarithms, random numbers, trigonometric functions, and
+        equations;
+      - sort, filter, select top results, and otherwise analyze data (including
+        data acquired from other tools and Extensions);
+      - create visualizations, plot charts, draw graphs, shapes, print results,
+        etc.
+      - execute custom code and get results and output files.
+    """
+
+    code_extension = create_extension_instance(
+        display_name="Code Interpreter",
+        description="This extension generates and executes code in the specified language",
+        open_api_gcs_uri="gs://vertex-extension-public/code_interpreter.yaml",
+        llm_name="code_interpreter_tool",
+        llm_description=llm_description,
+        runtime_config=runtime_config
+    )
+    log.info(f"Created code extension: {code_extension=}")
+
+    return code_extension
+
+def execute_extension(operation_id: str,
+                      operation_params: dict,
+                      extension_id: str):
+
+    # only us-central for now
+    location = "us-central1"
+    init_vertex(location=location)
+
+    if not extension_id.startswith("projects/"):
+        project_id=get_gcp_project()
+        extension_name = f"projects/{project_id}/locations/{location}/extensions/{extension_id}"
+    else:
+        extension_name=extension_id
+
+    extension = extensions.Extension(extension_name)
+
+    response = extension.execute(
+        operation_id=operation_id,
+        # {"query": "find the max value in the list: [1,2,3,4,-5]"}
+        operation_params=operation_params,
+    )
+
+    return response
+
+def execute_code_extension(query:str, filenames: list[str]=None, gcs_files: list[str]=None):
+
+    if filenames and gcs_files:
+        raise ValueError("Can't specify both filenames and gcs_files")
+
+    extension_code_interpreter = extensions.Extension.from_hub("code_interpreter")
+
+    file_arr=None
+    if filenames:
+        file_arr = [
+            {
+                "name": filename,
+                "contents": base64.b64encode(open(filename, "rb").read()).decode()
+            }
+            for filename in filenames
+        ]
+
+    response = extension_code_interpreter.execute(
+        operation_id = "generate_and_execute",
+        operation_params={
+            "query": query,
+            "files": file_arr,
+            "file_gcs_uris": gcs_files
+        })
+
+    CODE_INTERPRETER_WRITTEN_FILES.extend(
+        [item['name'] for item in response['output_files']])
+
+    if response.get('execution_error'):
+        log.error(f"Code Execution Response failed with: {response.get('execution_error')} - maybe retry?")
+
+    return response
+
+css_styles = """
+<style>
+  .main_summary {
+    font-weight: bold;
+    font-size: 14px; color: #4285F4;
+    background-color:rgba(221, 221, 221, 0.5); padding:8px;}
+</style>
+"""
+
+# Parser to visualise the content of returned files as HTML.
+def parse_files_to_html(outputFiles, save_files_locally = True):
+    IMAGE_FILE_EXTENSIONS = set(["jpg", "jpeg", "png"])
+    file_list = []
+    details_tml = """<details><summary>{name}</summary><div>{html_content}</div></details>"""
+
+    if not outputFiles:
+        return "No Files generated from the code"
+    # Sort output_files so images are displayed before other files such as JSON.
+    for output_file in sorted(
+        outputFiles,
+        key=lambda x: x["name"].split(".")[-1] not in IMAGE_FILE_EXTENSIONS,
+    ):
+        file_name = output_file.get("name")
+        file_contents = base64.b64decode(output_file.get("contents"))
+        if save_files_locally:
+            open(file_name,"wb").write(file_contents)
+
+        if file_name.split(".")[-1] in IMAGE_FILE_EXTENSIONS:
+            # Render Image
+            file_html_content = ('<img src="data:image/png;base64, '
+                                 f'{output_file.get("contents")}" />')
+        elif file_name.endswith(".json"):
+            # Pretty print JSON
+            json_pp = pprint.pformat(
+                json.loads(file_contents.decode()),
+                compact=False,
+                width=160)
+            file_html_content = (f'<span>{json_pp}</span>')
+        elif file_name.endswith(".csv"):
+            # CSV
+            csv_md = pandas.read_csv(
+                StringIO(file_contents.decode())).to_markdown(index=False)
+            file_html_content = f'<span>{csv_md}</span>'
+        elif file_name.endswith(".pkl"):
+            # PKL
+            file_html_content = f'<span>Preview N/A</span>'
+        else:
+            file_html_content = f"<span>{file_contents.decode()}</span>"
+
+        file_list.append({'name': file_name, "html_content": file_html_content})
+
+    buffer_html = [ details_tml.format(**_file) for _file in file_list ]
+    return "".join(buffer_html)
+
+# Processing code interpreter response to html visualization.
+def process_response(response: dict, save_files_locally = None) -> None:
+
+    result_template = """
+    <details open>
+        <summary class='main_summary'>{summary}:</summary>
+        <div><pre>{content}</pre></div>
+    </details>
+    """
+
+    result = ""
+    code = response.get('generated_code')
+    if 'execution_result' in response and response['execution_result']!="":
+        result = result_template.format(
+            summary="Executed Code Output",
+            content=response.get('execution_result'))
+    else:
+        result = result_template.format(
+            summary="Executed Code Output",
+            content="Code does not produce printable output.")
+
+    if response.get('execution_error', None):
+        result += result_template.format(
+            summary="Generated Code Raised a (Possibly Non-Fatal) Exception",
+            content=response.get('execution_error', None))
+
+    result += result_template.format(
+        summary="Files Created <u>(Click on filename to view content)</u>",
+        content=parse_files_to_html(
+            response.get('output_files', []),
+            save_files_locally = True))
+
+    html_content = f"""
+    {css_styles}
+    <div id='main'>
+        <div id="right">
+            <h3>Generated Code by Code Interpreter</h3>
+            <pre><code>{code}</code></pre>
+        </div>
+        <div id="left">
+            <h3>Code Execution Results</h3>
+            {result}
+        </div>
+    </div>
+    """
+    if save_files_locally:
+        # write to local file
+        pass
+
+    return html_content
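
The new module can be driven on its own; a minimal usage sketch (assuming application-default GCP credentials and that the hub-hosted code_interpreter extension is available in the project; the query is illustrative):

# Sketch of calling the new sunholo.vertex.extensions helpers (illustrative query).
from sunholo.vertex.extensions import execute_code_extension, process_response

# Generate and run code via the Vertex AI Code Interpreter extension from the hub
response = execute_code_extension("find the max value in the list: [1, 2, 3, 4, -5]")

# Render the generated code, execution output and any produced files as an HTML snippet
html = process_response(response)
print(html)
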
sunholo-0.70.6/sunholo/vertex/extensions_class.py
@@ -0,0 +1,241 @@
+from vertexai.preview import extensions
+from .init import init_vertex
+from ..logging import log
+from ..utils.gcp_project import get_gcp_project
+from ..utils.parsers import validate_extension_id
+import base64
+import json
+from io import StringIO
+
+class VertexAIExtensions:
+    def __init__(self):
+        self.CODE_INTERPRETER_WRITTEN_FILES = []
+        self.css_styles = """
+        <style>
+          .main_summary {
+            font-weight: bold;
+            font-size: 14px; color: #4285F4;
+            background-color:rgba(221, 221, 221, 0.5); padding:8px;}
+        </style>
+        """
+        self.IMAGE_FILE_EXTENSIONS = set(["jpg", "jpeg", "png"])
+        self.location = "us-central1"
+
+    def get_extension_import_config(self, display_name: str, description: str,
+                                    api_spec_gcs: dict, service_account_name: dict, tool_use_examples: list):
+        tool_use_examples = [
+            {
+                "extensionOperation": {
+                    "operationId": "say_hello",
+                },
+                "displayName": "Say hello in the requested language",
+                "query": "Say hello in French",
+                "requestParams": {
+                    "fields": [
+                        {
+                            "key": "apiServicePrompt",
+                            "value": {
+                                "string_value": "French",
+                            }
+                        }
+                    ]
+                },
+                "responseParams": {
+                    "fields": [
+                        {
+                            "key": "apiServiceOutput",
+                            "value": {
+                                "string_value": "bonjour",
+                            },
+                        }
+                    ],
+                },
+                "responseSummary": "Bonjour"
+            }
+        ]
+
+        return {
+            "displayName": display_name,
+            "description": description,
+            "manifest": {
+                "name": "EXTENSION_NAME_LLM",
+                "description": "DESCRIPTION_LLM",
+                "apiSpec": {
+                    "openApiGcsUri": api_spec_gcs,
+                },
+                "authConfig": {
+                    "authType": "OAUTH",
+                    "oauthConfig": {"service_account": service_account_name}
+                }
+            },
+            "toolUseExamples": tool_use_examples,
+        }
+
+    def create_extension_instance(self, display_name: str, description: str, open_api_gcs_uri: str,
+                                  llm_name: str = None, llm_description: str = None, runtime_config: dict = None, service_account: str = None):
+        project_id = get_gcp_project()
+        extension_name = f"projects/{project_id}/locations/us-central1/extensions/{validate_extension_id(display_name)}"
+
+        extension = extensions.Extension.create(
+            extension_name=extension_name,
+            display_name=display_name,
+            description=description,
+            runtime_config=runtime_config or None,
+            manifest={
+                "name": llm_name or display_name,
+                "description": llm_description or description,
+                "api_spec": {
+                    "open_api_gcs_uri": open_api_gcs_uri
+                },
+                "auth_config": {
+                    "auth_type": "GOOGLE_SERVICE_ACCOUNT_AUTH",
+                    "google_service_account_config": service_account or {},
+                },
+            },
+        )
+        log.info(f"Created Vertex Extension: {extension_name}")
+
+        return extension
+
+    def execute_extension(self, operation_id: str, operation_params: dict, extension_id: str):
+        init_vertex(location=self.location)
+
+        if not extension_id.startswith("projects/"):
+            project_id = get_gcp_project()
+            extension_name = f"projects/{project_id}/locations/{self.location}/extensions/{extension_id}"
+        else:
+            extension_name = extension_id
+
+        extension = extensions.Extension(extension_name)
+
+        response = extension.execute(
+            operation_id=operation_id,
+            operation_params=operation_params,
+        )
+
+        return response
+
+    def execute_code_extension(self, query: str, filenames: list[str] = None, gcs_files: list[str] = None):
+        if filenames and gcs_files:
+            raise ValueError("Can't specify both filenames and gcs_files")
+
+        extension_code_interpreter = extensions.Extension.from_hub("code_interpreter")
+
+        file_arr = None
+        if filenames:
+            file_arr = [
+                {
+                    "name": filename,
+                    "contents": base64.b64encode(open(filename, "rb").read()).decode()
+                }
+                for filename in filenames
+            ]
+
+        response = extension_code_interpreter.execute(
+            operation_id="generate_and_execute",
+            operation_params={
+                "query": query,
+                "files": file_arr,
+                "file_gcs_uris": gcs_files
+            })
+
+        self.CODE_INTERPRETER_WRITTEN_FILES.extend(
+            [item['name'] for item in response['output_files']])
+
+        if response.get('execution_error'):
+            log.error(f"Code Execution Response failed with: {response.get('execution_error')} - maybe retry?")
+
+        return response
+
+    def parse_files_to_html(self, outputFiles, save_files_locally=True):
+        file_list = []
+        details_tml = """<details><summary>{name}</summary><div>{html_content}</div></details>"""
+
+        if not outputFiles:
+            return "No Files generated from the code"
+        # Sort output_files so images are displayed before other files such as JSON.
+        for output_file in sorted(
+            outputFiles,
+            key=lambda x: x["name"].split(".")[-1] not in self.IMAGE_FILE_EXTENSIONS,
+        ):
+            file_name = output_file.get("name")
+            file_contents = base64.b64decode(output_file.get("contents"))
+            if save_files_locally:
+                open(file_name, "wb").write(file_contents)
+
+            if file_name.split(".")[-1] in self.IMAGE_FILE_EXTENSIONS:
+                # Render Image
+                file_html_content = ('<img src="data:image/png;base64, '
+                                     f'{output_file.get("contents")}" />')
+            elif file_name.endswith(".json"):
+                import pprint
+                # Pretty print JSON
+                json_pp = pprint.pformat(
+                    json.loads(file_contents.decode()),
+                    compact=False,
+                    width=160)
+                file_html_content = (f'<span>{json_pp}</span>')
+            elif file_name.endswith(".csv"):
+                # CSV
+                try:
+                    import pandas
+                except ImportError:
+                    log.error("Need pandas for csv processing")
+                csv_md = pandas.read_csv(
+                    StringIO(file_contents.decode())).to_markdown(index=False)
+                file_html_content = f'<span>{csv_md}</span>'
+            elif file_name.endswith(".pkl"):
+                # PKL
+                file_html_content = f'<span>Preview N/A</span>'
+            else:
+                file_html_content = f"<span>{file_contents.decode()}</span>"
+
+            file_list.append({'name': file_name, "html_content": file_html_content})
+
+        buffer_html = [details_tml.format(**_file) for _file in file_list]
+        return "".join(buffer_html)
+
+    def process_response(self, response: dict, save_files_locally=None) -> str:
+        result_template = """
+        <details open>
+            <summary class='main_summary'>{summary}:</summary>
+            <div><pre>{content}</pre></div>
+        </details>
+        """
+
+        result = ""
+        code = response.get('generated_code')
+        if 'execution_result' in response and response['execution_result'] != "":
+            result = result_template.format(
+                summary="Executed Code Output",
+                content=response.get('execution_result'))
+        else:
+            result = result_template.format(
+                summary="Executed Code Output",
+                content="Code does not produce printable output.")
+
+        if response.get('execution_error', None):
+            result += result_template.format(
+                summary="Generated Code Raised a (Possibly Non-Fatal) Exception",
+                content=response.get('execution_error', None))
+
+        result += result_template.format(
+            summary="Files Created <u>(Click on filename to view content)</u>",
+            content=self.parse_files_to_html(
+                response.get('output_files', []),
+                save_files_locally=True))
+
+        html_content = f"""
+        {self.css_styles}
+        <div id='main'>
+            <h3>Generated Code by Code Interpreter</h3>
+            <pre><code>{code}</code></pre>
+            <h3>Code Execution Results</h3>
+            {result}
+        </div>
+        """
+        if save_files_locally:
+            with open('code_execution_results.html', 'w') as file:
+                file.write(html_content)
+
+        return html_content
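
extensions_class.py wraps the same functionality in a class; a short sketch of the new interface (method names as defined in the hunk above; the query is illustrative):

# Sketch of the class-based interface added in sunholo/vertex/extensions_class.py.
from sunholo.vertex.extensions_class import VertexAIExtensions

vex = VertexAIExtensions()
response = vex.execute_code_extension("plot y = x**2 for x in range(10)")  # illustrative query

# When save_files_locally is truthy, process_response() also writes
# code_execution_results.html to the working directory.
html = vex.process_response(response, save_files_locally=True)
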
{sunholo-0.70.4 → sunholo-0.70.6}/sunholo/vertex/memory_tools.py
@@ -90,13 +90,13 @@ def get_vertex_memories(vector_name):
             de = DiscoveryEngineClient(vector_name, project_id=get_gcp_project())
             log.info(f"Found vectorstore {vectorstore}")
 
-            data_store_path = de.data_store_path()
+            data_store_path = f"{de.data_store_path()}/dataStores/{vector_name}"
             corpus_tool = Tool.from_retrieval(
                 grounding.Retrieval(grounding.VertexAISearch(datastore=data_store_path))
             )
             tools.append(corpus_tool)
         except Exception as err:
-            log.error(f"Failed to fetch DiscoveryEngine
+            log.error(f"Failed to fetch DiscoveryEngine grounding - {str(err)} - skipping")
             continue
 
 
{sunholo-0.70.4 → sunholo-0.70.6}/sunholo.egg-info/PKG-INFO
@@ -1,9 +1,9 @@
 Metadata-Version: 2.1
 Name: sunholo
-Version: 0.70.4
+Version: 0.70.6
 Summary: Large Language Model DevOps - a package to help deploy LLMs to the Cloud.
 Home-page: https://github.com/sunholo-data/sunholo-py
-Download-URL: https://github.com/sunholo-data/sunholo-py/archive/refs/tags/v0.70.4.tar.gz
+Download-URL: https://github.com/sunholo-data/sunholo-py/archive/refs/tags/v0.70.6.tar.gz
 Author: Holosun ApS
 Author-email: multivac@sunholo.com
 License: Apache License, Version 2.0