semantic-kernel 0.3.1.dev0__tar.gz → 0.3.2.dev0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/PKG-INFO +2 -1
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/pyproject.toml +9 -5
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/__init__.py +2 -0
- semantic_kernel-0.3.2.dev0/semantic_kernel/connectors/memory/pinecone/__init__.py +7 -0
- semantic_kernel-0.3.2.dev0/semantic_kernel/connectors/memory/pinecone/pinecone_memory_store.py +401 -0
- semantic_kernel-0.3.2.dev0/semantic_kernel/connectors/memory/pinecone/utils.py +37 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/core_skills/file_io_skill.py +1 -1
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/core_skills/math_skill.py +1 -1
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/core_skills/text_skill.py +1 -1
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/core_skills/time_skill.py +13 -1
- semantic_kernel-0.3.2.dev0/semantic_kernel/core_skills/wait_skill.py +23 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/memory/semantic_text_memory.py +9 -5
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/orchestration/sk_function.py +5 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/planning/basic_planner.py +9 -1
- semantic_kernel-0.3.2.dev0/semantic_kernel/text/text_chunker.py +333 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/utils/settings.py +29 -0
- semantic_kernel-0.3.1.dev0/semantic_kernel/text/text_chunker.py +0 -250
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/pip/README.md +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/ai/__init__.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/ai/ai_exception.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/ai/chat_completion_client_base.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/ai/chat_request_settings.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/ai/complete_request_settings.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/ai/embeddings/embedding_generator_base.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/ai/hugging_face/__init__.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/ai/hugging_face/services/hf_text_completion.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/ai/hugging_face/services/hf_text_embedding.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/ai/open_ai/__init__.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/ai/open_ai/services/azure_chat_completion.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/ai/open_ai/services/azure_text_completion.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/ai/open_ai/services/azure_text_embedding.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/ai/open_ai/services/open_ai_chat_completion.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_completion.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/ai/open_ai/services/open_ai_text_embedding.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/ai/text_completion_client_base.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/memory/chroma/__init__.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/memory/chroma/chroma_memory_store.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/memory/chroma/utils.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/connectors/memory/weaviate/weaviate_memory_store.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/core_skills/__init__.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/core_skills/conversation_summary_skill.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/core_skills/http_skill.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/core_skills/text_memory_skill.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/kernel.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/kernel_exception.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/memory/__init__.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/memory/memory_query_result.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/memory/memory_record.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/memory/memory_store_base.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/memory/null_memory.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/memory/semantic_text_memory_base.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/memory/volatile_memory_store.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/orchestration/context_variables.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/orchestration/delegate_handlers.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/orchestration/delegate_inference.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/orchestration/delegate_types.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/orchestration/sk_context.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/orchestration/sk_function_base.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/planning/__init__.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/planning/plan.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/reliability/pass_through_without_retry.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/reliability/retry_mechanism_base.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/semantic_functions/chat_prompt_template.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/semantic_functions/prompt_template.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/semantic_functions/prompt_template_base.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/semantic_functions/prompt_template_config.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/semantic_functions/semantic_function_config.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/skill_definition/__init__.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/skill_definition/function_view.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/skill_definition/functions_view.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/skill_definition/parameter_view.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/skill_definition/read_only_skill_collection.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/skill_definition/read_only_skill_collection_base.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/skill_definition/sk_function_context_parameter_decorator.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/skill_definition/sk_function_decorator.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/skill_definition/skill_collection.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/skill_definition/skill_collection_base.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/template_engine/README.md +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/template_engine/blocks/block.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/template_engine/blocks/block_types.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/template_engine/blocks/code_block.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/template_engine/blocks/function_id_block.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/template_engine/blocks/symbols.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/template_engine/blocks/text_block.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/template_engine/blocks/val_block.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/template_engine/blocks/var_block.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/template_engine/code_tokenizer.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/template_engine/prompt_template_engine.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/template_engine/protocols/code_renderer.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/template_engine/protocols/prompt_templating_engine.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/template_engine/protocols/text_renderer.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/template_engine/template_tokenizer.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/text/__init__.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/text/function_extension.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/utils/null_logger.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/utils/static_property.py +0 -0
- {semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/utils/validation.py +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: semantic-kernel
|
|
3
|
-
Version: 0.3.
|
|
3
|
+
Version: 0.3.2.dev0
|
|
4
4
|
Summary:
|
|
5
5
|
Author: Microsoft
|
|
6
6
|
Author-email: SK-Support@microsoft.com
|
|
@@ -14,6 +14,7 @@ Requires-Dist: aiofiles (>=23.1.0,<24.0.0)
|
|
|
14
14
|
Requires-Dist: numpy (>=1.24.2,<2.0.0)
|
|
15
15
|
Requires-Dist: openai (>=0.27.0,<0.28.0)
|
|
16
16
|
Requires-Dist: python-dotenv (==1.0.0)
|
|
17
|
+
Requires-Dist: regex (>=2023.6.3,<2024.0.0)
|
|
17
18
|
Description-Content-Type: text/markdown
|
|
18
19
|
|
|
19
20
|
# About Semantic Kernel
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
[tool.poetry]
|
|
2
2
|
name = "semantic-kernel"
|
|
3
|
-
version = "0.3.
|
|
3
|
+
version = "0.3.2.dev"
|
|
4
4
|
description = ""
|
|
5
5
|
authors = ["Microsoft <SK-Support@microsoft.com>"]
|
|
6
6
|
readme = "pip/README.md"
|
|
@@ -12,16 +12,16 @@ numpy = "^1.24.2"
|
|
|
12
12
|
openai = "^0.27.0"
|
|
13
13
|
aiofiles = "^23.1.0"
|
|
14
14
|
python-dotenv = "1.0.0"
|
|
15
|
+
regex = "^2023.6.3"
|
|
15
16
|
|
|
16
17
|
[tool.poetry.group.dev.dependencies]
|
|
17
|
-
pre-commit = "
|
|
18
|
+
pre-commit = "3.3.3"
|
|
18
19
|
black = {version = "23.3.0", allow-prereleases = true}
|
|
19
20
|
ipykernel = "^6.21.1"
|
|
20
|
-
pytest = "7.
|
|
21
|
-
ruff = "0.0.
|
|
21
|
+
pytest = "7.4.0"
|
|
22
|
+
ruff = "0.0.277"
|
|
22
23
|
pytest-asyncio = "0.21.0"
|
|
23
24
|
|
|
24
|
-
|
|
25
25
|
[tool.poetry.group.hugging_face.dependencies]
|
|
26
26
|
transformers = "^4.28.1"
|
|
27
27
|
sentence-transformers = "^2.2.2"
|
|
@@ -34,10 +34,14 @@ chromadb = "^0.3.23"
|
|
|
34
34
|
[tool.poetry.group.weaviate.dependencies]
|
|
35
35
|
weaviate-client = "^3.18.0"
|
|
36
36
|
|
|
37
|
+
[tool.poetry.group.pinecone.dependencies]
|
|
38
|
+
pinecone-client = "^2.2.2"
|
|
39
|
+
|
|
37
40
|
[tool.isort]
|
|
38
41
|
profile = "black"
|
|
39
42
|
|
|
40
43
|
[tool.ruff]
|
|
44
|
+
select = ["E", "F", "I"]
|
|
41
45
|
line-length = 120
|
|
42
46
|
|
|
43
47
|
[build-system]
|
|
@@ -17,6 +17,7 @@ from semantic_kernel.utils.null_logger import NullLogger
|
|
|
17
17
|
from semantic_kernel.utils.settings import (
|
|
18
18
|
azure_openai_settings_from_dot_env,
|
|
19
19
|
openai_settings_from_dot_env,
|
|
20
|
+
pinecone_settings_from_dot_env,
|
|
20
21
|
)
|
|
21
22
|
|
|
22
23
|
__all__ = [
|
|
@@ -24,6 +25,7 @@ __all__ = [
|
|
|
24
25
|
"NullLogger",
|
|
25
26
|
"openai_settings_from_dot_env",
|
|
26
27
|
"azure_openai_settings_from_dot_env",
|
|
28
|
+
"pinecone_settings_from_dot_env",
|
|
27
29
|
"PromptTemplateConfig",
|
|
28
30
|
"PromptTemplate",
|
|
29
31
|
"ChatPromptTemplate",
|
semantic_kernel-0.3.2.dev0/semantic_kernel/connectors/memory/pinecone/pinecone_memory_store.py
ADDED
|
@@ -0,0 +1,401 @@
|
|
|
1
|
+
# Copyright (c) Microsoft. All rights reserved.
|
|
2
|
+
|
|
3
|
+
from logging import Logger
|
|
4
|
+
from typing import List, Optional, Tuple
|
|
5
|
+
|
|
6
|
+
from numpy import ndarray
|
|
7
|
+
|
|
8
|
+
import pinecone
|
|
9
|
+
from pinecone import FetchResponse, IndexDescription
|
|
10
|
+
from semantic_kernel.connectors.memory.pinecone.utils import (
|
|
11
|
+
build_payload,
|
|
12
|
+
parse_payload,
|
|
13
|
+
)
|
|
14
|
+
from semantic_kernel.memory.memory_record import MemoryRecord
|
|
15
|
+
from semantic_kernel.memory.memory_store_base import MemoryStoreBase
|
|
16
|
+
from semantic_kernel.utils.null_logger import NullLogger
|
|
17
|
+
|
|
18
|
+
# Limitations set by Pinecone at https://docs.pinecone.io/docs/limits
|
|
19
|
+
MAX_DIMENSIONALITY = 20000
|
|
20
|
+
MAX_UPSERT_BATCH_SIZE = 100
|
|
21
|
+
MAX_QUERY_WITHOUT_METADATA_BATCH_SIZE = 10000
|
|
22
|
+
MAX_QUERY_WITH_METADATA_BATCH_SIZE = 1000
|
|
23
|
+
MAX_FETCH_BATCH_SIZE = 1000
|
|
24
|
+
MAX_DELETE_BATCH_SIZE = 1000
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class PineconeMemoryStore(MemoryStoreBase):
    """A memory store backed by Pinecone.

    Each "collection" is a Pinecone index. Batch operations are chunked to
    respect Pinecone's documented per-request limits (see the MAX_* module
    constants). All operations use the global ``pinecone`` client configured
    in ``__init__``.
    """

    _logger: Logger
    _pinecone_api_key: str
    _pinecone_environment: str
    _default_dimensionality: int

    def __init__(
        self,
        api_key: str,
        environment: str,
        default_dimensionality: int,
        logger: Optional[Logger] = None,
    ) -> None:
        """Initializes a new instance of the PineconeMemoryStore class.

        Arguments:
            api_key {str} -- The Pinecone API key.
            environment {str} -- The Pinecone environment.
            default_dimensionality {int} -- The default dimensionality to use for new collections.
            logger {Optional[Logger]} -- The logger to use. (default: {None})

        Raises:
            ValueError -- If default_dimensionality exceeds MAX_DIMENSIONALITY.
        """
        if default_dimensionality > MAX_DIMENSIONALITY:
            raise ValueError(
                f"Dimensionality of {default_dimensionality} exceeds "
                + f"the maximum allowed value of {MAX_DIMENSIONALITY}."
            )
        self._pinecone_api_key = api_key
        self._pinecone_environment = environment
        self._default_dimensionality = default_dimensionality
        self._logger = logger or NullLogger()

        # Configures the module-global pinecone client; every subsequent
        # pinecone.* call in this class relies on this.
        pinecone.init(
            api_key=self._pinecone_api_key, environment=self._pinecone_environment
        )

    def get_collections(self) -> List[str]:
        """Synchronous variant of get_collections_async: lists all indexes."""
        return pinecone.list_indexes()

    async def create_collection_async(
        self,
        collection_name: str,
        dimension_num: Optional[int] = None,
        distance_type: Optional[str] = "cosine",
        num_of_pods: Optional[int] = 1,
        replica_num: Optional[int] = 0,
        type_of_pod: Optional[str] = "p1.x1",
        metadata_config: Optional[dict] = None,
    ) -> None:
        """Creates a new collection in Pinecone if it does not exist.

        This function creates an index, by default the following index
        settings are used: metric = cosine, pods = 1, replicas = 0,
        pod_type = p1.x1, metadata_config = None.

        Arguments:
            collection_name {str} -- The name of the collection to create.
            In Pinecone, a collection is represented as an index. The concept
            of "collection" in Pinecone is just a static copy of an index.

        Raises:
            ValueError -- If the requested dimensionality exceeds MAX_DIMENSIONALITY.

        Returns:
            None
        """
        if dimension_num is None:
            dimension_num = self._default_dimensionality
        if dimension_num > MAX_DIMENSIONALITY:
            raise ValueError(
                f"Dimensionality of {dimension_num} exceeds "
                + f"the maximum allowed value of {MAX_DIMENSIONALITY}."
            )

        if collection_name not in pinecone.list_indexes():
            pinecone.create_index(
                name=collection_name,
                dimension=dimension_num,
                metric=distance_type,
                pods=num_of_pods,
                replicas=replica_num,
                pod_type=type_of_pod,
                metadata_config=metadata_config,
            )

    async def describe_collection_async(
        self, collection_name: str
    ) -> Optional[IndexDescription]:
        """Gets the description of the index.

        Arguments:
            collection_name {str} -- The name of the index to get.

        Returns:
            Optional[IndexDescription] -- The index description, or None if
            the index does not exist.
        """
        if collection_name in pinecone.list_indexes():
            return pinecone.describe_index(collection_name)
        return None

    async def get_collections_async(
        self,
    ) -> List[str]:
        """Gets the list of collections.

        Returns:
            List[str] -- The list of collections.
        """
        return list(pinecone.list_indexes())

    async def delete_collection_async(self, collection_name: str) -> None:
        """Deletes a collection.

        Arguments:
            collection_name {str} -- The name of the collection to delete.

        Returns:
            None
        """
        if collection_name in pinecone.list_indexes():
            pinecone.delete_index(collection_name)

    async def does_collection_exist_async(self, collection_name: str) -> bool:
        """Checks if a collection exists.

        Arguments:
            collection_name {str} -- The name of the collection to check.

        Returns:
            bool -- True if the collection exists; otherwise, False.
        """
        return collection_name in pinecone.list_indexes()

    async def upsert_async(self, collection_name: str, record: MemoryRecord) -> str:
        """Upserts a record.

        Arguments:
            collection_name {str} -- The name of the collection to upsert the record into.
            record {MemoryRecord} -- The record to upsert.

        Returns:
            str -- The unique database key of the record. In Pinecone, this is the record ID.
        """
        if collection_name not in pinecone.list_indexes():
            raise Exception(f"Collection '{collection_name}' does not exist")

        collection = pinecone.Index(collection_name)

        upsert_response = collection.upsert(
            vectors=[(record._id, record.embedding.tolist(), build_payload(record))],
            namespace="",
        )

        if upsert_response.upserted_count is None:
            raise Exception(f"Error upserting record: {upsert_response.message}")

        return record._id

    async def upsert_batch_async(
        self, collection_name: str, records: List[MemoryRecord]
    ) -> List[str]:
        """Upserts a batch of records.

        Arguments:
            collection_name {str} -- The name of the collection to upsert the records into.
            records {List[MemoryRecord]} -- The records to upsert.

        Returns:
            List[str] -- The unique database keys of the records.
        """
        if collection_name not in pinecone.list_indexes():
            raise Exception(f"Collection '{collection_name}' does not exist")

        collection = pinecone.Index(collection_name)

        vectors = [
            (
                record._id,
                record.embedding.tolist(),
                build_payload(record),
            )
            for record in records
        ]

        # The client chunks the upsert for us when batch_size is given.
        upsert_response = collection.upsert(
            vectors, namespace="", batch_size=MAX_UPSERT_BATCH_SIZE
        )

        if upsert_response.upserted_count is None:
            raise Exception(f"Error upserting record: {upsert_response.message}")
        else:
            return [record._id for record in records]

    async def get_async(
        self, collection_name: str, key: str, with_embedding: bool = False
    ) -> MemoryRecord:
        """Gets a record.

        Arguments:
            collection_name {str} -- The name of the collection to get the record from.
            key {str} -- The unique database key of the record.
            with_embedding {bool} -- Whether to include the embedding in the result. (default: {False})

        Raises:
            KeyError -- If no record with the given key exists.

        Returns:
            MemoryRecord -- The record.
        """
        if collection_name not in pinecone.list_indexes():
            raise Exception(f"Collection '{collection_name}' does not exist")

        collection = pinecone.Index(collection_name)
        fetch_response = collection.fetch([key])

        if len(fetch_response.vectors) == 0:
            raise KeyError(f"Record with key '{key}' does not exist")

        return parse_payload(fetch_response.vectors[key], with_embedding)

    async def get_batch_async(
        self, collection_name: str, keys: List[str], with_embeddings: bool = False
    ) -> List[MemoryRecord]:
        """Gets a batch of records.

        Arguments:
            collection_name {str} -- The name of the collection to get the records from.
            keys {List[str]} -- The unique database keys of the records.
            with_embeddings {bool} -- Whether to include the embeddings in the results. (default: {False})

        Returns:
            List[MemoryRecord] -- The records.
        """
        if collection_name not in pinecone.list_indexes():
            raise Exception(f"Collection '{collection_name}' does not exist")

        fetch_response = await self.__get_batch_async(
            collection_name, keys, with_embeddings
        )
        return [
            parse_payload(fetch_response.vectors[key], with_embeddings)
            for key in fetch_response.vectors.keys()
        ]

    async def remove_async(self, collection_name: str, key: str) -> None:
        """Removes a record.

        Arguments:
            collection_name {str} -- The name of the collection to remove the record from.
            key {str} -- The unique database key of the record to remove.

        Returns:
            None
        """
        if collection_name not in pinecone.list_indexes():
            raise Exception(f"Collection '{collection_name}' does not exist")

        collection = pinecone.Index(collection_name)
        collection.delete([key])

    async def remove_batch_async(self, collection_name: str, keys: List[str]) -> None:
        """Removes a batch of records.

        Arguments:
            collection_name {str} -- The name of the collection to remove the records from.
            keys {List[str]} -- The unique database keys of the records to remove.

        Returns:
            None
        """
        if collection_name not in pinecone.list_indexes():
            raise Exception(f"Collection '{collection_name}' does not exist")

        collection = pinecone.Index(collection_name)
        # Delete in chunks so no single request exceeds Pinecone's delete
        # batch limit. (A previous version issued an extra unconditional
        # collection.delete(keys) after this loop, deleting every key twice
        # and exceeding the limit for large key lists — removed.)
        for i in range(0, len(keys), MAX_DELETE_BATCH_SIZE):
            collection.delete(keys[i : i + MAX_DELETE_BATCH_SIZE])

    async def get_nearest_match_async(
        self,
        collection_name: str,
        embedding: ndarray,
        min_relevance_score: float = 0.0,
        with_embedding: bool = False,
    ) -> Tuple[MemoryRecord, float]:
        """Gets the nearest match to an embedding using cosine similarity.

        Arguments:
            collection_name {str} -- The name of the collection to get the nearest match from.
            embedding {ndarray} -- The embedding to find the nearest match to.
            min_relevance_score {float} -- The minimum relevance score of the match. (default: {0.0})
            with_embedding {bool} -- Whether to include the embedding in the result. (default: {False})

        Returns:
            Tuple[MemoryRecord, float] -- The record and the relevance score.
        """
        matches = await self.get_nearest_matches_async(
            collection_name=collection_name,
            embedding=embedding,
            limit=1,
            min_relevance_score=min_relevance_score,
            with_embeddings=with_embedding,
        )
        return matches[0]

    async def get_nearest_matches_async(
        self,
        collection_name: str,
        embedding: ndarray,
        limit: int,
        min_relevance_score: float = 0.0,
        with_embeddings: bool = False,
    ) -> List[Tuple[MemoryRecord, float]]:
        """Gets the nearest matches to an embedding using cosine similarity.

        Arguments:
            collection_name {str} -- The name of the collection to get the nearest matches from.
            embedding {ndarray} -- The embedding to find the nearest matches to.
            limit {int} -- The maximum number of matches to return.
            min_relevance_score {float} -- The minimum relevance score of the matches. (default: {0.0})
            with_embeddings {bool} -- Whether to include the embeddings in the results. (default: {False})

        Returns:
            List[Tuple[MemoryRecord, float]] -- The records and their relevance scores.
        """
        if collection_name not in pinecone.list_indexes():
            raise Exception(f"Collection '{collection_name}' does not exist")

        collection = pinecone.Index(collection_name)

        if limit > MAX_QUERY_WITHOUT_METADATA_BATCH_SIZE:
            raise Exception(
                "Limit must be less than or equal to "
                + f"{MAX_QUERY_WITHOUT_METADATA_BATCH_SIZE}"
            )
        elif limit > MAX_QUERY_WITH_METADATA_BATCH_SIZE:
            # Queries this large cannot include metadata, so query for ids
            # only, then fetch the metadata/embeddings in a second pass.
            query_response = collection.query(
                vector=embedding.tolist(),
                top_k=limit,
                include_values=False,
                include_metadata=False,
            )
            keys = [match.id for match in query_response.matches]
            fetch_response = await self.__get_batch_async(
                collection_name, keys, with_embeddings
            )
            vectors = fetch_response.vectors
            # Merge each query match (which carries the score) into the
            # fetched vector data.
            for match in query_response.matches:
                vectors[match.id].update(match)
            matches = [vectors[key] for key in vectors.keys()]
        else:
            query_response = collection.query(
                vector=embedding.tolist(),
                top_k=limit,
                include_values=with_embeddings,
                include_metadata=True,
            )
            matches = query_response.matches
        if min_relevance_score is not None:
            matches = [match for match in matches if match.score >= min_relevance_score]
        return (
            [
                (
                    parse_payload(match, with_embeddings),
                    match["score"],
                )
                for match in matches
            ]
            if len(matches) > 0
            else []
        )

    async def __get_batch_async(
        self, collection_name: str, keys: List[str], with_embeddings: bool = False
    ) -> "FetchResponse":
        """Fetches vectors for the given keys, chunked to MAX_FETCH_BATCH_SIZE.

        Results from every chunk are merged into the first chunk's response.
        """
        index = pinecone.Index(collection_name)
        if len(keys) > MAX_FETCH_BATCH_SIZE:
            fetch_response = index.fetch(keys[0:MAX_FETCH_BATCH_SIZE])
            for i in range(MAX_FETCH_BATCH_SIZE, len(keys), MAX_FETCH_BATCH_SIZE):
                fetch_response.vectors.update(
                    index.fetch(keys[i : i + MAX_FETCH_BATCH_SIZE]).vectors
                )
        else:
            fetch_response = index.fetch(keys)
        return fetch_response
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
# Copyright (c) Microsoft. All rights reserved.
|
|
2
|
+
|
|
3
|
+
import numpy
|
|
4
|
+
|
|
5
|
+
from pinecone import Vector
|
|
6
|
+
from semantic_kernel.memory.memory_record import MemoryRecord
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def build_payload(record: MemoryRecord) -> dict:
    """
    Builds a metadata payload to be sent to Pinecone from a MemoryRecord.

    Only truthy fields are included, so None and empty strings are omitted
    from the payload.
    """
    candidate_fields = {
        "text": record._text,
        "description": record._description,
        "additional_metadata": record._additional_metadata,
    }
    return {name: value for name, value in candidate_fields.items() if value}
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def parse_payload(record: Vector, with_embeddings: bool) -> MemoryRecord:
    """
    Parses a record from Pinecone into a MemoryRecord.

    Metadata keys that are absent come back as None; the embedding is an
    empty array unless with_embeddings is True.
    """
    metadata = record.metadata
    if with_embeddings:
        embedding = record.values
    else:
        embedding = numpy.array([])
    return MemoryRecord.local_record(
        id=record.id,
        description=metadata.get("description"),
        text=metadata.get("text"),
        additional_metadata=metadata.get("additional_metadata"),
        embedding=embedding,
    )
|
{semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/core_skills/math_skill.py
RENAMED
|
@@ -9,7 +9,7 @@ class MathSkill:
|
|
|
9
9
|
Description: MathSkill provides a set of functions to make Math calculations.
|
|
10
10
|
|
|
11
11
|
Usage:
|
|
12
|
-
kernel.import_skill("math"
|
|
12
|
+
kernel.import_skill(MathSkill(), skill_name="math")
|
|
13
13
|
|
|
14
14
|
Examples:
|
|
15
15
|
{{math.Add}} => Returns the sum of initial_value_text and Amount (provided in the SKContext)
|
{semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/core_skills/time_skill.py
RENAMED
|
@@ -9,11 +9,12 @@ class TimeSkill:
|
|
|
9
9
|
to get the current time and date.
|
|
10
10
|
|
|
11
11
|
Usage:
|
|
12
|
-
kernel.import_skill("time"
|
|
12
|
+
kernel.import_skill(TimeSkill(), skill_name="time")
|
|
13
13
|
|
|
14
14
|
Examples:
|
|
15
15
|
{{time.date}} => Sunday, 12 January, 2031
|
|
16
16
|
{{time.today}} => Sunday, 12 January, 2031
|
|
17
|
+
{{time.iso_date}} => 2031-01-12
|
|
17
18
|
{{time.now}} => Sunday, January 12, 2031 9:15 PM
|
|
18
19
|
{{time.utcNow}} => Sunday, January 13, 2031 5:15 AM
|
|
19
20
|
{{time.time}} => 09:15:07 PM
|
|
@@ -55,6 +56,17 @@ class TimeSkill:
|
|
|
55
56
|
"""
|
|
56
57
|
return self.date()
|
|
57
58
|
|
|
59
|
+
@sk_function(description="Get the current date in iso format.")
def iso_date(self) -> str:
    """
    Get the current date in ISO 8601 format (YYYY-MM-DD).

    Example:
        {{time.iso_date}} => 2031-01-12
    """
    return datetime.date.today().isoformat()
|
|
69
|
+
|
|
58
70
|
@sk_function(description="Get the current date and time in the local time zone")
|
|
59
71
|
def now(self) -> str:
|
|
60
72
|
"""
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
|
|
3
|
+
from semantic_kernel.skill_definition import sk_function
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class WaitSkill:
    """
    WaitSkill provides a set of functions to wait for a certain amount of time.

    Usage:
        kernel.import_skill(WaitSkill(), skill_name="wait")

    Examples:
        {{wait.wait 5}} => Wait for 5 seconds
    """

    @sk_function(description="Wait for a certain number of seconds.")
    async def wait(self, seconds_text: str):
        """
        Pause execution for the given number of seconds.

        Arguments:
            seconds_text {str} -- Number of seconds to wait; negative
                values are clamped to zero.

        Raises:
            ValueError -- If seconds_text cannot be parsed as a number.
        """
        try:
            seconds = max(float(seconds_text), 0)
        except ValueError:
            # Surface a clearer message than the bare float() error.
            raise ValueError("seconds text must be a number")
        await asyncio.sleep(seconds)
|
|
@@ -56,7 +56,9 @@ class SemanticTextMemory(SemanticTextMemoryBase):
|
|
|
56
56
|
):
|
|
57
57
|
await self._storage.create_collection_async(collection_name=collection)
|
|
58
58
|
|
|
59
|
-
embedding =
|
|
59
|
+
embedding = (
|
|
60
|
+
await self._embeddings_generator.generate_embeddings_async([text])
|
|
61
|
+
)[0]
|
|
60
62
|
data = MemoryRecord.local_record(
|
|
61
63
|
id=id,
|
|
62
64
|
text=text,
|
|
@@ -94,7 +96,9 @@ class SemanticTextMemory(SemanticTextMemoryBase):
|
|
|
94
96
|
):
|
|
95
97
|
await self._storage.create_collection_async(collection_name=collection)
|
|
96
98
|
|
|
97
|
-
embedding =
|
|
99
|
+
embedding = (
|
|
100
|
+
await self._embeddings_generator.generate_embeddings_async([text])
|
|
101
|
+
)[0]
|
|
98
102
|
data = MemoryRecord.reference_record(
|
|
99
103
|
external_id=external_id,
|
|
100
104
|
source_name=external_source_name,
|
|
@@ -142,9 +146,9 @@ class SemanticTextMemory(SemanticTextMemoryBase):
|
|
|
142
146
|
Returns:
|
|
143
147
|
List[MemoryQueryResult] -- The list of MemoryQueryResult found.
|
|
144
148
|
"""
|
|
145
|
-
query_embedding =
|
|
146
|
-
[query]
|
|
147
|
-
)
|
|
149
|
+
query_embedding = (
|
|
150
|
+
await self._embeddings_generator.generate_embeddings_async([query])
|
|
151
|
+
)[0]
|
|
148
152
|
results = await self._storage.get_nearest_matches_async(
|
|
149
153
|
collection_name=collection,
|
|
150
154
|
embedding=query_embedding,
|
|
@@ -1,6 +1,8 @@
|
|
|
1
1
|
# Copyright (c) Microsoft. All rights reserved.
|
|
2
2
|
|
|
3
3
|
import asyncio
|
|
4
|
+
import platform
|
|
5
|
+
import sys
|
|
4
6
|
import threading
|
|
5
7
|
from enum import Enum
|
|
6
8
|
from logging import Logger
|
|
@@ -36,6 +38,9 @@ from semantic_kernel.skill_definition.read_only_skill_collection_base import (
|
|
|
36
38
|
)
|
|
37
39
|
from semantic_kernel.utils.null_logger import NullLogger
|
|
38
40
|
|
|
41
|
+
# On Windows, Python 3.8+ made the ProactorEventLoop the default; switch
# back to the selector-based policy — presumably for compatibility with
# libraries the kernel uses that do not support the proactor loop
# (NOTE(review): confirm the motivating issue).
if platform.system() == "Windows" and sys.version_info >= (3, 8, 0):
    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
|
|
43
|
+
|
|
39
44
|
|
|
40
45
|
class SKFunction(SKFunctionBase):
|
|
41
46
|
"""
|
{semantic_kernel-0.3.1.dev0 → semantic_kernel-0.3.2.dev0}/semantic_kernel/planning/basic_planner.py
RENAMED
|
@@ -3,6 +3,8 @@
|
|
|
3
3
|
"""A basic JSON-based planner for the Python Semantic Kernel"""
|
|
4
4
|
import json
|
|
5
5
|
|
|
6
|
+
import regex
|
|
7
|
+
|
|
6
8
|
from semantic_kernel.kernel import Kernel
|
|
7
9
|
from semantic_kernel.orchestration.context_variables import ContextVariables
|
|
8
10
|
from semantic_kernel.planning.plan import Plan
|
|
@@ -187,7 +189,13 @@ class BasicPlanner:
|
|
|
187
189
|
Given a plan, execute each of the functions within the plan
|
|
188
190
|
from start to finish and output the result.
|
|
189
191
|
"""
|
|
190
|
-
|
|
192
|
+
|
|
193
|
+
# Filter out good JSON from the result in case additional text is present
|
|
194
|
+
json_regex = r"\{(?:[^{}]|(?R))*\}"
|
|
195
|
+
generated_plan_string = regex.search(
|
|
196
|
+
json_regex, plan.generated_plan.result
|
|
197
|
+
).group()
|
|
198
|
+
generated_plan = json.loads(generated_plan_string)
|
|
191
199
|
|
|
192
200
|
context = ContextVariables()
|
|
193
201
|
context["input"] = generated_plan["input"]
|