glchat-plugin 0.3.0__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
glchat_plugin/config/constant.py
@@ -78,5 +78,32 @@ class ReferenceFormatterType(StrEnum):
     NONE = "none"
 
 
+class TopicSafetyMode(StrEnum):
+    """Topic safety mode enumeration for guardrail configuration."""
+
+    ALLOWLIST = "allowlist"
+    DENYLIST = "denylist"
+    HYBRID = "hybrid"
+    DISABLED = "disabled"
+
+
+class GuardrailMode(StrEnum):
+    """Guardrail mode enumeration for guardrail configuration."""
+
+    DISABLED = "disabled"
+    INPUT_ONLY = "input_only"
+    OUTPUT_ONLY = "output_only"
+    BOTH = "both"
+
+
 WEB_SEARCH_BLACKLIST_DEFAULT = "[]"
 WEB_SEARCH_WHITELIST_DEFAULT = "[]"
+
+GUARDRAIL_ERR_MSG = (
+    "I apologize, but I cannot process your request as it appears to violate our content guidelines. "
+    "This could be due to:\n"
+    "- Inappropriate or harmful content\n"
+    "- Requests that go against our safety policies\n"
+    "- Content that may violate legal or ethical standards\n\n"
+    "Please rephrase your question or ask about a different topic that complies with our guidelines."
+)
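The two new enums control where guardrail checks run (GuardrailMode) and how topic safety is enforced (TopicSafetyMode), with GUARDRAIL_ERR_MSG as the canned fallback reply. The sketch below is a minimal, standalone illustration of how a pipeline might interpret GuardrailMode; is_blocked and apply_guardrail are hypothetical helpers, not glchat-plugin APIs, and only the enum members and the (abridged) fallback message come from the diff above. StrEnum needs Python 3.11+, which matches the package's Requires-Python of <3.13,>=3.11.

```python
# Hedged sketch: how GuardrailMode might gate guardrail checks.
# is_blocked() and apply_guardrail() are hypothetical helpers, not
# part of glchat-plugin; only the enum members and the abridged
# fallback message mirror the diff above.
from enum import StrEnum


class GuardrailMode(StrEnum):
    DISABLED = "disabled"
    INPUT_ONLY = "input_only"
    OUTPUT_ONLY = "output_only"
    BOTH = "both"


# Abridged stand-in; the full message is defined in the hunk above.
GUARDRAIL_ERR_MSG = (
    "I apologize, but I cannot process your request as it appears to "
    "violate our content guidelines."
)


def is_blocked(text: str) -> bool:
    """Toy stand-in for a real guardrail classifier."""
    return "forbidden" in text.lower()


def apply_guardrail(mode: GuardrailMode, user_input: str, model_output: str) -> str:
    """Return the model output, or the fallback message if a check trips."""
    # Input-side checks run for INPUT_ONLY and BOTH.
    if mode in (GuardrailMode.INPUT_ONLY, GuardrailMode.BOTH) and is_blocked(user_input):
        return GUARDRAIL_ERR_MSG
    # Output-side checks run for OUTPUT_ONLY and BOTH.
    if mode in (GuardrailMode.OUTPUT_ONLY, GuardrailMode.BOTH) and is_blocked(model_output):
        return GUARDRAIL_ERR_MSG
    return model_output


if __name__ == "__main__":
    print(apply_guardrail(GuardrailMode.INPUT_ONLY, "tell me a joke", "Why did the ..."))
    print(apply_guardrail(GuardrailMode.BOTH, "something forbidden", "blocked anyway"))
```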
glchat_plugin/pipeline/base_pipeline_preset_config.py
@@ -12,10 +12,13 @@ from typing import Any
 from pydantic import BaseModel, Field
 
 from glchat_plugin.config.constant import (
+    GUARDRAIL_ERR_MSG,
     WEB_SEARCH_BLACKLIST_DEFAULT,
     WEB_SEARCH_WHITELIST_DEFAULT,
+    GuardrailMode,
     ReferenceFormatterType,
     SearchType,
+    TopicSafetyMode,
 )
 
 
@@ -30,12 +33,17 @@ class BasePipelinePresetConfig(BaseModel):
         support_multimodal (bool): Whether the pipeline supports multimodal.
         use_docproc (bool): Whether to use the document processor.
         search_types (list[SearchType]): The supported search types.
+        allowed_topics (str): The allowed topics.
         anonymize_em (bool): Whether to anonymize before using the embedding model.
         anonymize_lm (bool): Whether to anonymize before using the language model.
         augment_context (bool): Whether context augmentation from the knowledge base is allowed.
+        banned_phrases (str): The banned phrases.
         chat_history_limit (int): The chat history limit. If the value is negative, no limit will be applied.
         enable_guardrails (bool): Whether to enable guardrails.
+        enable_memory (bool): Whether to enable memory.
         enable_smart_search_integration (bool): Whether to enable smart search integration.
+        guardrail_fallback_message (str): The guardrail fallback message.
+        guardrail_mode (GuardrailMode): The guardrail mode.
         normal_search_top_k (int): The top k for normal search. Must be greater than or equal to 1.
         prompt_context_char_threshold (int): The character limit above which the prompt is assumed
             to have contained the context.
@@ -44,7 +52,10 @@ class BasePipelinePresetConfig(BaseModel):
         reference_formatter_type (ReferenceFormatterType): The reference formatter type.
         rerank_kwargs (str): The rerank kwargs.
        rerank_type (str): The rerank type.
+        retrieve_memory_threshold (float): The retrieve memory threshold.
+        retrieve_memory_top_k (int): The retrieve memory top k.
         smart_search_top_k (int): The top k for smart search. Must be greater than or equal to 1.
+        topic_safety_mode (TopicSafetyMode): The topic safety mode.
         use_cache (bool): Whether to use cache.
         use_model_knowledge (bool): Whether to use model knowledge.
         vector_weight (float): The vector weight. Must be between 0 and 1 (inclusive).
@@ -60,12 +71,17 @@ class BasePipelinePresetConfig(BaseModel):
     support_multimodal: bool
     use_docproc: bool
     search_types: list[SearchType]
+    allowed_topics: str = "[]"
     anonymize_em: bool
     anonymize_lm: bool
     augment_context: bool
+    banned_phrases: str = "[]"
     chat_history_limit: int
     enable_guardrails: bool = False
+    enable_memory: bool = False
     enable_smart_search_integration: bool = False
+    guardrail_fallback_message: str = Field(default=GUARDRAIL_ERR_MSG)
+    guardrail_mode: GuardrailMode = Field(default=GuardrailMode.INPUT_ONLY)
     normal_search_top_k: int = Field(ge=1)
     prompt_context_char_threshold: int = 32000
     reference_formatter_batch_size: int = Field(ge=1)
@@ -73,7 +89,10 @@ class BasePipelinePresetConfig(BaseModel):
     reference_formatter_type: ReferenceFormatterType
     rerank_kwargs: str = "{}"
     rerank_type: str = ""
+    retrieve_memory_threshold: float = Field(default=0.3, ge=0, le=1)
+    retrieve_memory_top_k: int = Field(default=10, ge=1)
     smart_search_top_k: int = Field(ge=1)
+    topic_safety_mode: TopicSafetyMode = Field(default=TopicSafetyMode.HYBRID)
     use_cache: bool
     use_model_knowledge: bool
     vector_weight: float = Field(ge=0, le=1)
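All of the new BasePipelinePresetConfig fields carry defaults, so existing presets keep validating without changes. Below is a hedged, trimmed-down pydantic model that mirrors only the fields added in 0.3.1; GuardrailPresetSketch is a hypothetical name and it omits the many required fields of the real class.

```python
# Hedged sketch: only the 0.3.1 additions, with defaults and constraints
# copied from the diff. GuardrailPresetSketch is a hypothetical model;
# the real BasePipelinePresetConfig has many more required fields.
from enum import StrEnum

from pydantic import BaseModel, Field, ValidationError


class TopicSafetyMode(StrEnum):
    ALLOWLIST = "allowlist"
    DENYLIST = "denylist"
    HYBRID = "hybrid"
    DISABLED = "disabled"


class GuardrailMode(StrEnum):
    DISABLED = "disabled"
    INPUT_ONLY = "input_only"
    OUTPUT_ONLY = "output_only"
    BOTH = "both"


GUARDRAIL_ERR_MSG = "I apologize, but I cannot process your request ..."  # abridged


class GuardrailPresetSketch(BaseModel):
    allowed_topics: str = "[]"   # string default "[]", same style as the WEB_SEARCH_* defaults
    banned_phrases: str = "[]"
    enable_guardrails: bool = False
    enable_memory: bool = False
    guardrail_fallback_message: str = Field(default=GUARDRAIL_ERR_MSG)
    guardrail_mode: GuardrailMode = Field(default=GuardrailMode.INPUT_ONLY)
    retrieve_memory_threshold: float = Field(default=0.3, ge=0, le=1)
    retrieve_memory_top_k: int = Field(default=10, ge=1)
    topic_safety_mode: TopicSafetyMode = Field(default=TopicSafetyMode.HYBRID)


if __name__ == "__main__":
    preset = GuardrailPresetSketch()  # every new field has a default
    print(preset.guardrail_mode, preset.topic_safety_mode)  # input_only hybrid

    # Enum fields also accept their string values.
    print(GuardrailPresetSketch(guardrail_mode="both").guardrail_mode)  # both

    # Constraint violations raise ValidationError (threshold must be in [0, 1]).
    try:
        GuardrailPresetSketch(retrieve_memory_threshold=1.5)
    except ValidationError as exc:
        print(len(exc.errors()), "validation error")
```

Given the defaults above, enabling enable_guardrails alone presumably yields input-side screening (guardrail_mode=INPUT_ONLY) with the stock GUARDRAIL_ERR_MSG fallback, though the diff itself does not show the runtime behavior.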
glchat_plugin-0.3.0.dist-info/METADATA → glchat_plugin-0.3.1.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: glchat-plugin
-Version: 0.3.0
+Version: 0.3.1
 Author-email: GenAI SDK Team <gat-sdk@gdplabs.id>
 Requires-Python: <3.13,>=3.11
 Description-Content-Type: text/markdown
glchat_plugin-0.3.0.dist-info/RECORD → glchat_plugin-0.3.1.dist-info/RECORD
@@ -1,13 +1,13 @@
 glchat_plugin/__init__.py,sha256=SHSBMz7JDU6MecyIrhHu5-3NVs89JkXhyvD3ZGOkWOE,37
 glchat_plugin/config/__init__.py,sha256=DNnX8B_TvAN89oyMgq32zG1DeaezODrihiAXTwOPT5o,39
 glchat_plugin/config/app_config.py,sha256=9_ShYtaQ7Rp14sSkrIFLoOAMlbwVlm13EuCxzOn4NCI,426
-glchat_plugin/config/constant.py,sha256=iRiY-wN7dhIHmxl8zzmYtOtE9B-oBFadThqALcnSoEE,2744
+glchat_plugin/config/constant.py,sha256=Hsex2hjz4yV9lSFeLWMp5i9lRTUOiwBXYx5FZu584Nk,3577
 glchat_plugin/context/__init__.py,sha256=3Wx_apMIS6z-m6eRs6hoyOsJFLJfKmMFOkrPDkPQfJI,40
 glchat_plugin/context/context_manager.py,sha256=0lhO0w_hd5dUdIEJQ2LOJFZsgpzitQU_aPZfTfQK3vw,1302
 glchat_plugin/handler/__init__.py,sha256=H5DJaAfwwtRsvMcOaEzHfGMQk25H7la0E7uPfksWtoQ,40
 glchat_plugin/handler/base_post_login_handler.py,sha256=48xSbe_LwTCjRY-lCuzWXqbnEr1ql8bAhQih1Xeh8f8,2835
 glchat_plugin/pipeline/__init__.py,sha256=Sk-NfIGyA9VKIg0Bt5OHatNUYyWVPh9i5xhE5DFAfbo,41
-glchat_plugin/pipeline/base_pipeline_preset_config.py,sha256=F-u4WzMuNlzEmKE-xgeAG-mI8TB6d4JLTsjnh_89-Y0,3677
+glchat_plugin/pipeline/base_pipeline_preset_config.py,sha256=QkFDllWXwj4bKyH4sF9yjknhVNrSR9ZPiXHIS_hANSc,4683
 glchat_plugin/pipeline/pipeline_handler.py,sha256=aCRvhS6Dkhmqsx_Ya-2t2PbMseacw1VI6PUEOQq0RsM,25620
 glchat_plugin/pipeline/pipeline_plugin.py,sha256=fozvxVrOphgwLIF7uPrEkF8ZQcu8xgifYAQyuxj9628,4393
 glchat_plugin/service/__init__.py,sha256=9T4qzyYL052qLqva5el1F575OTRNaaf9tb9UvW-leTc,47
@@ -19,7 +19,7 @@ glchat_plugin/storage/base_anonymizer_storage.py,sha256=oFwovWrsjM7v1YjeN-4p-M3O
 glchat_plugin/storage/base_chat_history_storage.py,sha256=JvUUFMu_9jRBQ9yug_x7S4rQjZEA1vM5ombDvz-7zCE,11095
 glchat_plugin/tools/__init__.py,sha256=OFotHbgQ8mZEbdlvlv5aVMdxfubPvkVWAcTwhIPdIqQ,542
 glchat_plugin/tools/decorators.py,sha256=AvQBV18wzXWdC483RSSmpfh92zsqTyp8SzDLIkreIGU,3925
-glchat_plugin-0.3.0.dist-info/METADATA,sha256=hl5d1CqDpK27hGQCDKYX8Uw2JlWWCshBr9rphZRvDsE,2063
-glchat_plugin-0.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-glchat_plugin-0.3.0.dist-info/top_level.txt,sha256=fzKSXmct5dY4CAKku4-mkdHX-QPAyQVvo8vpQj8qizY,14
-glchat_plugin-0.3.0.dist-info/RECORD,,
+glchat_plugin-0.3.1.dist-info/METADATA,sha256=lUYcNuRxb6cJitJlkaLH5dMuO9F1vWyczplOYi7LsFM,2063
+glchat_plugin-0.3.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+glchat_plugin-0.3.1.dist-info/top_level.txt,sha256=fzKSXmct5dY4CAKku4-mkdHX-QPAyQVvo8vpQj8qizY,14
+glchat_plugin-0.3.1.dist-info/RECORD,,