kweaver-dolphin 0.1.0 (kweaver_dolphin-0.1.0-py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (199)
  1. DolphinLanguageSDK/__init__.py +58 -0
  2. dolphin/__init__.py +62 -0
  3. dolphin/cli/__init__.py +20 -0
  4. dolphin/cli/args/__init__.py +9 -0
  5. dolphin/cli/args/parser.py +567 -0
  6. dolphin/cli/builtin_agents/__init__.py +22 -0
  7. dolphin/cli/commands/__init__.py +4 -0
  8. dolphin/cli/interrupt/__init__.py +8 -0
  9. dolphin/cli/interrupt/handler.py +205 -0
  10. dolphin/cli/interrupt/keyboard.py +82 -0
  11. dolphin/cli/main.py +49 -0
  12. dolphin/cli/multimodal/__init__.py +34 -0
  13. dolphin/cli/multimodal/clipboard.py +327 -0
  14. dolphin/cli/multimodal/handler.py +249 -0
  15. dolphin/cli/multimodal/image_processor.py +214 -0
  16. dolphin/cli/multimodal/input_parser.py +149 -0
  17. dolphin/cli/runner/__init__.py +8 -0
  18. dolphin/cli/runner/runner.py +989 -0
  19. dolphin/cli/ui/__init__.py +10 -0
  20. dolphin/cli/ui/console.py +2795 -0
  21. dolphin/cli/ui/input.py +340 -0
  22. dolphin/cli/ui/layout.py +425 -0
  23. dolphin/cli/ui/stream_renderer.py +302 -0
  24. dolphin/cli/utils/__init__.py +8 -0
  25. dolphin/cli/utils/helpers.py +135 -0
  26. dolphin/cli/utils/version.py +49 -0
  27. dolphin/core/__init__.py +107 -0
  28. dolphin/core/agent/__init__.py +10 -0
  29. dolphin/core/agent/agent_state.py +69 -0
  30. dolphin/core/agent/base_agent.py +970 -0
  31. dolphin/core/code_block/__init__.py +0 -0
  32. dolphin/core/code_block/agent_init_block.py +0 -0
  33. dolphin/core/code_block/assign_block.py +98 -0
  34. dolphin/core/code_block/basic_code_block.py +1865 -0
  35. dolphin/core/code_block/explore_block.py +1327 -0
  36. dolphin/core/code_block/explore_block_v2.py +712 -0
  37. dolphin/core/code_block/explore_strategy.py +672 -0
  38. dolphin/core/code_block/judge_block.py +220 -0
  39. dolphin/core/code_block/prompt_block.py +32 -0
  40. dolphin/core/code_block/skill_call_deduplicator.py +291 -0
  41. dolphin/core/code_block/tool_block.py +129 -0
  42. dolphin/core/common/__init__.py +17 -0
  43. dolphin/core/common/constants.py +176 -0
  44. dolphin/core/common/enums.py +1173 -0
  45. dolphin/core/common/exceptions.py +133 -0
  46. dolphin/core/common/multimodal.py +539 -0
  47. dolphin/core/common/object_type.py +165 -0
  48. dolphin/core/common/output_format.py +432 -0
  49. dolphin/core/common/types.py +36 -0
  50. dolphin/core/config/__init__.py +16 -0
  51. dolphin/core/config/global_config.py +1289 -0
  52. dolphin/core/config/ontology_config.py +133 -0
  53. dolphin/core/context/__init__.py +12 -0
  54. dolphin/core/context/context.py +1580 -0
  55. dolphin/core/context/context_manager.py +161 -0
  56. dolphin/core/context/var_output.py +82 -0
  57. dolphin/core/context/variable_pool.py +356 -0
  58. dolphin/core/context_engineer/__init__.py +41 -0
  59. dolphin/core/context_engineer/config/__init__.py +5 -0
  60. dolphin/core/context_engineer/config/settings.py +402 -0
  61. dolphin/core/context_engineer/core/__init__.py +7 -0
  62. dolphin/core/context_engineer/core/budget_manager.py +327 -0
  63. dolphin/core/context_engineer/core/context_assembler.py +583 -0
  64. dolphin/core/context_engineer/core/context_manager.py +637 -0
  65. dolphin/core/context_engineer/core/tokenizer_service.py +260 -0
  66. dolphin/core/context_engineer/example/incremental_example.py +267 -0
  67. dolphin/core/context_engineer/example/traditional_example.py +334 -0
  68. dolphin/core/context_engineer/services/__init__.py +5 -0
  69. dolphin/core/context_engineer/services/compressor.py +399 -0
  70. dolphin/core/context_engineer/utils/__init__.py +6 -0
  71. dolphin/core/context_engineer/utils/context_utils.py +441 -0
  72. dolphin/core/context_engineer/utils/message_formatter.py +270 -0
  73. dolphin/core/context_engineer/utils/token_utils.py +139 -0
  74. dolphin/core/coroutine/__init__.py +15 -0
  75. dolphin/core/coroutine/context_snapshot.py +154 -0
  76. dolphin/core/coroutine/context_snapshot_profile.py +922 -0
  77. dolphin/core/coroutine/context_snapshot_store.py +268 -0
  78. dolphin/core/coroutine/execution_frame.py +145 -0
  79. dolphin/core/coroutine/execution_state_registry.py +161 -0
  80. dolphin/core/coroutine/resume_handle.py +101 -0
  81. dolphin/core/coroutine/step_result.py +101 -0
  82. dolphin/core/executor/__init__.py +18 -0
  83. dolphin/core/executor/debug_controller.py +630 -0
  84. dolphin/core/executor/dolphin_executor.py +1063 -0
  85. dolphin/core/executor/executor.py +624 -0
  86. dolphin/core/flags/__init__.py +27 -0
  87. dolphin/core/flags/definitions.py +49 -0
  88. dolphin/core/flags/manager.py +113 -0
  89. dolphin/core/hook/__init__.py +95 -0
  90. dolphin/core/hook/expression_evaluator.py +499 -0
  91. dolphin/core/hook/hook_dispatcher.py +380 -0
  92. dolphin/core/hook/hook_types.py +248 -0
  93. dolphin/core/hook/isolated_variable_pool.py +284 -0
  94. dolphin/core/interfaces.py +53 -0
  95. dolphin/core/llm/__init__.py +0 -0
  96. dolphin/core/llm/llm.py +495 -0
  97. dolphin/core/llm/llm_call.py +100 -0
  98. dolphin/core/llm/llm_client.py +1285 -0
  99. dolphin/core/llm/message_sanitizer.py +120 -0
  100. dolphin/core/logging/__init__.py +20 -0
  101. dolphin/core/logging/logger.py +526 -0
  102. dolphin/core/message/__init__.py +8 -0
  103. dolphin/core/message/compressor.py +749 -0
  104. dolphin/core/parser/__init__.py +8 -0
  105. dolphin/core/parser/parser.py +405 -0
  106. dolphin/core/runtime/__init__.py +10 -0
  107. dolphin/core/runtime/runtime_graph.py +926 -0
  108. dolphin/core/runtime/runtime_instance.py +446 -0
  109. dolphin/core/skill/__init__.py +14 -0
  110. dolphin/core/skill/context_retention.py +157 -0
  111. dolphin/core/skill/skill_function.py +686 -0
  112. dolphin/core/skill/skill_matcher.py +282 -0
  113. dolphin/core/skill/skillkit.py +700 -0
  114. dolphin/core/skill/skillset.py +72 -0
  115. dolphin/core/trajectory/__init__.py +10 -0
  116. dolphin/core/trajectory/recorder.py +189 -0
  117. dolphin/core/trajectory/trajectory.py +522 -0
  118. dolphin/core/utils/__init__.py +9 -0
  119. dolphin/core/utils/cache_kv.py +212 -0
  120. dolphin/core/utils/tools.py +340 -0
  121. dolphin/lib/__init__.py +93 -0
  122. dolphin/lib/debug/__init__.py +8 -0
  123. dolphin/lib/debug/visualizer.py +409 -0
  124. dolphin/lib/memory/__init__.py +28 -0
  125. dolphin/lib/memory/async_processor.py +220 -0
  126. dolphin/lib/memory/llm_calls.py +195 -0
  127. dolphin/lib/memory/manager.py +78 -0
  128. dolphin/lib/memory/sandbox.py +46 -0
  129. dolphin/lib/memory/storage.py +245 -0
  130. dolphin/lib/memory/utils.py +51 -0
  131. dolphin/lib/ontology/__init__.py +12 -0
  132. dolphin/lib/ontology/basic/__init__.py +0 -0
  133. dolphin/lib/ontology/basic/base.py +102 -0
  134. dolphin/lib/ontology/basic/concept.py +130 -0
  135. dolphin/lib/ontology/basic/object.py +11 -0
  136. dolphin/lib/ontology/basic/relation.py +63 -0
  137. dolphin/lib/ontology/datasource/__init__.py +27 -0
  138. dolphin/lib/ontology/datasource/datasource.py +66 -0
  139. dolphin/lib/ontology/datasource/oracle_datasource.py +338 -0
  140. dolphin/lib/ontology/datasource/sql.py +845 -0
  141. dolphin/lib/ontology/mapping.py +177 -0
  142. dolphin/lib/ontology/ontology.py +733 -0
  143. dolphin/lib/ontology/ontology_context.py +16 -0
  144. dolphin/lib/ontology/ontology_manager.py +107 -0
  145. dolphin/lib/skill_results/__init__.py +31 -0
  146. dolphin/lib/skill_results/cache_backend.py +559 -0
  147. dolphin/lib/skill_results/result_processor.py +181 -0
  148. dolphin/lib/skill_results/result_reference.py +179 -0
  149. dolphin/lib/skill_results/skillkit_hook.py +324 -0
  150. dolphin/lib/skill_results/strategies.py +328 -0
  151. dolphin/lib/skill_results/strategy_registry.py +150 -0
  152. dolphin/lib/skillkits/__init__.py +44 -0
  153. dolphin/lib/skillkits/agent_skillkit.py +155 -0
  154. dolphin/lib/skillkits/cognitive_skillkit.py +82 -0
  155. dolphin/lib/skillkits/env_skillkit.py +250 -0
  156. dolphin/lib/skillkits/mcp_adapter.py +616 -0
  157. dolphin/lib/skillkits/mcp_skillkit.py +771 -0
  158. dolphin/lib/skillkits/memory_skillkit.py +650 -0
  159. dolphin/lib/skillkits/noop_skillkit.py +31 -0
  160. dolphin/lib/skillkits/ontology_skillkit.py +89 -0
  161. dolphin/lib/skillkits/plan_act_skillkit.py +452 -0
  162. dolphin/lib/skillkits/resource/__init__.py +52 -0
  163. dolphin/lib/skillkits/resource/models/__init__.py +6 -0
  164. dolphin/lib/skillkits/resource/models/skill_config.py +109 -0
  165. dolphin/lib/skillkits/resource/models/skill_meta.py +127 -0
  166. dolphin/lib/skillkits/resource/resource_skillkit.py +393 -0
  167. dolphin/lib/skillkits/resource/skill_cache.py +215 -0
  168. dolphin/lib/skillkits/resource/skill_loader.py +395 -0
  169. dolphin/lib/skillkits/resource/skill_validator.py +406 -0
  170. dolphin/lib/skillkits/resource_skillkit.py +11 -0
  171. dolphin/lib/skillkits/search_skillkit.py +163 -0
  172. dolphin/lib/skillkits/sql_skillkit.py +274 -0
  173. dolphin/lib/skillkits/system_skillkit.py +509 -0
  174. dolphin/lib/skillkits/vm_skillkit.py +65 -0
  175. dolphin/lib/utils/__init__.py +9 -0
  176. dolphin/lib/utils/data_process.py +207 -0
  177. dolphin/lib/utils/handle_progress.py +178 -0
  178. dolphin/lib/utils/security.py +139 -0
  179. dolphin/lib/utils/text_retrieval.py +462 -0
  180. dolphin/lib/vm/__init__.py +11 -0
  181. dolphin/lib/vm/env_executor.py +895 -0
  182. dolphin/lib/vm/python_session_manager.py +453 -0
  183. dolphin/lib/vm/vm.py +610 -0
  184. dolphin/sdk/__init__.py +60 -0
  185. dolphin/sdk/agent/__init__.py +12 -0
  186. dolphin/sdk/agent/agent_factory.py +236 -0
  187. dolphin/sdk/agent/dolphin_agent.py +1106 -0
  188. dolphin/sdk/api/__init__.py +4 -0
  189. dolphin/sdk/runtime/__init__.py +8 -0
  190. dolphin/sdk/runtime/env.py +363 -0
  191. dolphin/sdk/skill/__init__.py +10 -0
  192. dolphin/sdk/skill/global_skills.py +706 -0
  193. dolphin/sdk/skill/traditional_toolkit.py +260 -0
  194. kweaver_dolphin-0.1.0.dist-info/METADATA +521 -0
  195. kweaver_dolphin-0.1.0.dist-info/RECORD +199 -0
  196. kweaver_dolphin-0.1.0.dist-info/WHEEL +5 -0
  197. kweaver_dolphin-0.1.0.dist-info/entry_points.txt +27 -0
  198. kweaver_dolphin-0.1.0.dist-info/licenses/LICENSE.txt +201 -0
  199. kweaver_dolphin-0.1.0.dist-info/top_level.txt +2 -0
@@ -0,0 +1,446 @@
+ from enum import Enum
+ import time
+ from typing import List, Optional, TYPE_CHECKING
+ import uuid
+
+ from dolphin.core.common.enums import Messages, SkillInfo, Status, TypeStage
+ from dolphin.core.common.constants import estimate_tokens_from_chars
+
+ if TYPE_CHECKING:
+     from dolphin.core.agent.base_agent import BaseAgent
+     from dolphin.core.code_block.basic_code_block import BasicCodeBlock
+
+
+ class TypeRuntimeInstance(Enum):
+     AGENT = "agent"
+     BLOCK = "block"
+     PROGRESS = "progress"
+     STAGE = "stage"
+
+
+ class RuntimeInstance:
+     def __init__(self, type: TypeRuntimeInstance):
+         self.type = type
+         self.id = str(uuid.uuid4())
+         self.parent: RuntimeInstance = None
+         self.children: List[RuntimeInstance] = []
+         # Provide time fields uniformly for all instances to avoid missing attributes when accessing RuntimeGraph
+         self.start_time = time.time()
+         self.end_time = None
+
+     def set_parent(self, parent: "RuntimeInstance"):
+         self.parent = parent
+         parent.children.append(self)
+
+     def get_parent(self):
+         return self.parent
+
+     def get_children(self):
+         return self.children
+
+     def get_type(self):
+         return self.type
+
+
+ class AgentInstance(RuntimeInstance):
+     def __init__(self, name: str, agent: "BaseAgent"):
+         super().__init__(type=TypeRuntimeInstance.AGENT)
+         self.name = name
+         self.agent = agent
+
+
+ class BlockInstance(RuntimeInstance):
+     def __init__(self, name: str, block: "BasicCodeBlock"):
+         super().__init__(type=TypeRuntimeInstance.BLOCK)
+         self.name = name
+         self.block = block
+
+
+ class LLMInput:
+     def __init__(
+         self, content: Optional[str] = None, messages: Optional[Messages] = None
+     ) -> None:
+         self.content = content
+         self.messages = messages
+
+
+ class LLMOutput:
+     def __init__(
+         self,
+         raw_output: Optional[str] = None,
+         answer: Optional[str] = None,
+         think: Optional[str] = None,
+         block_answer: Optional[str] = None,
+     ) -> None:
+         self.raw_output = raw_output
+         self.answer = answer
+         self.think = think
+         self.block_answer = block_answer
+
+
+ class StageInstance(RuntimeInstance):
+     def __init__(
+         self,
+         agent_name: str = "",
+         stage: TypeStage = TypeStage.LLM,
+         answer: Optional[str] = None,
+         think: Optional[str] = None,
+         raw_output: Optional[str] = None,
+         status: Status = Status.PROCESSING,
+         skill_info: Optional[SkillInfo] = None,
+         block_answer: Optional[str] = None,
+         input_content: Optional[str] = None,
+         input_messages: Optional[Messages] = None,
+         interrupted: bool = False,
+         flags: str = "",
+     ):
+         super().__init__(type=TypeRuntimeInstance.STAGE)
+
+         self.agent_name = agent_name
+         self.stage = stage
+         self.input = LLMInput(
+             content=input_content,
+             messages=input_messages.copy() if input_messages is not None else None,
+         )
+
+         if not self.input.content and self.input.messages:
+             self.input.content = self.input.messages[-1].content
+
+         self.output = LLMOutput(
+             answer=answer, think=think, raw_output=raw_output, block_answer=block_answer
+         )
+         self.status = status
+         self.skill_info = skill_info
+         self.interrupted = interrupted
+         self.flags = flags
+
+         self.start_time = time.time()
+         self.end_time = self.start_time
+
+         self.token_usage = {}
+
+     def get_agent_name(self):
+         return self.agent_name
+
+     def get_answer(self):
+         return self.output.answer
+
+     def get_think(self):
+         return self.output.think
+
+     def get_raw_output(self):
+         return self.output.raw_output
+
+     def get_block_answer(self):
+         return self.output.block_answer
+
+     def set_end_time(self):
+         self.end_time = time.time()
+
+     def get_estimated_input_tokens(self):
+         if self.stage != TypeStage.LLM:
+             return 0
+
+         # First try to get from whole_messages
+         if self.input.messages:
+             return self.input.messages.estimated_tokens()
+         return 0
+
+     def get_estimated_output_tokens(self):
+         if self.stage != TypeStage.LLM:
+             return 0
+
+         if self.output.raw_output:
+             tokens = estimate_tokens_from_chars(self.output.raw_output)
+             return tokens if tokens is not None else 0
+
+         return 0
+
+     def get_estimated_ratio_tokens(self) -> float:
+         if self.stage != TypeStage.LLM or self.input.messages is None:
+             return 0
+
+         total_tokens = (
+             self.get_estimated_input_tokens() + self.get_estimated_output_tokens()
+         )
+         return (
+             (float)(total_tokens) / (float)(self.input.messages.get_max_tokens())
+             if self.input.messages.get_max_tokens() > 0
+             else 0
+         )
+
+     def update(
+         self,
+         stage: Optional[TypeStage] = None,
+         answer: Optional[str] = None,
+         think: Optional[str] = None,
+         raw_output: Optional[str] = None,
+         status: Optional[str] = None,
+         skill_info: Optional[SkillInfo] = None,
+         block_answer: Optional[str] = None,
+         input_messages: Optional[Messages] = None,
+         **kwargs,
+     ):
+         if stage is not None:
+             self.stage = stage
+         if answer is not None:
+             self.output.answer = answer
+         if think is not None:
+             self.output.think = think
+         if raw_output is not None:
+             self.output.raw_output = raw_output
+         if status is not None:
+             self.status = status
+         if skill_info is not None:
+             self.skill_info = skill_info
+         if block_answer is not None:
+             self.output.block_answer = block_answer
+         if input_messages is not None:
+             self.input.messages = input_messages.copy()
+
+         if kwargs:
+             for key, value in kwargs.items():
+                 if key not in [
+                     "stage",
+                     "answer",
+                     "think",
+                     "raw_output",
+                     "status",
+                     "skill_info",
+                     "block_answer",
+                     "input_messages",
+                 ]:
+                     setattr(self, key, value)
+
+     def get_traditional_dict(self):
+         # Safe access to enum values with fallback
+         stage_value = (
+             self.stage.value if hasattr(self.stage, "value") else str(self.stage)
+         )
+         status_value = (
+             self.status.value
+             if self.status and hasattr(self.status, "value")
+             else str(self.status)
+         )
+
+         # Unified answer field: prefer block_answer if answer is empty
+         # This ensures answer field always contains the streaming text output
+         # while maintaining backward compatibility with block_answer field
+         answer_value = self.output.answer
+         if not answer_value and self.output.block_answer:
+             answer_value = self.output.block_answer
+
+         return {
+             "id": self.id,
+             "agent_name": self.agent_name,
+             "stage": stage_value,
+             "answer": answer_value,  # Unified streaming text output
+             "think": self.output.think,
+             "status": status_value,
+             "skill_info": self.skill_info.to_dict() if self.skill_info else None,
+             "block_answer": self.output.block_answer,  # Kept for backward compatibility (deprecated)
+             "input_message": self.input.content,
+             "interrupted": self.interrupted,
+             "flags": self.flags,
+             "start_time": self.start_time,
+             "end_time": self.end_time,
+             "estimated_input_tokens": self.get_estimated_input_tokens(),
+             "estimated_output_tokens": self.get_estimated_output_tokens(),
+             "estimated_ratio_tokens": self.get_estimated_ratio_tokens(),
+             "token_usage": self.token_usage,
+         }
+
+     def get_triditional_dict(self):
+         """Deprecated: Use get_traditional_dict() instead.
+
+         This method is kept for backward compatibility and will be removed in v3.0.
+         The method name was a typo ('triditional' instead of 'traditional').
+
+         .. deprecated:: 2.1
+             Use :meth:`get_traditional_dict` instead.
+         """
+         import warnings
+         warnings.warn(
+             "get_triditional_dict() is deprecated due to typo. "
+             "Use get_traditional_dict() instead. "
+             "This method will be removed in v3.0.",
+             DeprecationWarning,
+             stacklevel=2
+         )
+         return self.get_traditional_dict()
+
+     def llm_empty_answer(self):
+         return (
+             self.stage == TypeStage.LLM
+             and not self.output.answer
+             and not self.output.think
+             and not self.output.block_answer
+         )
+
+     def empty_answer(self):
+         return (
+             not self.output.answer
+             and not self.output.think
+             and not self.output.block_answer
+         )
+
+
+ class ProgressInstance(RuntimeInstance):
+     def __init__(self, context, parent: Optional["ProgressInstance"] = None, flags=""):
+         super().__init__(type=TypeRuntimeInstance.PROGRESS)
+
+         self.context = context
+         self.stages: List[StageInstance] = []
+         self.flags = flags
+
+     def add_stage(
+         self,
+         agent_name: str = "",
+         stage: TypeStage = TypeStage.LLM,
+         answer: str = "",
+         think: str = "",
+         raw_output: str = "",
+         status: Status = Status.PROCESSING,
+         skill_info: Optional[SkillInfo] = None,
+         block_answer: str = "",
+         input_content: str = "",
+         input_messages: Optional[Messages] = None,
+         interrupted: bool = False,
+     ):
+         pop_last_stage = False
+         if len(self.stages) > 0 and self.stages[-1].llm_empty_answer():
+             pop_last_stage = True
+
+         stage_instance = StageInstance(
+             agent_name=agent_name,
+             stage=stage,
+             answer=answer,
+             think=think,
+             raw_output=raw_output,
+             status=status,
+             skill_info=skill_info,
+             block_answer=block_answer,
+             input_content=input_content,
+             input_messages=input_messages,
+             interrupted=interrupted,
+             flags=self.flags,
+         )
+         self.add_stage_instance(stage_instance, pop_last_stage)
+
+     def add_stage_instance(
+         self, stage_instance: StageInstance, pop_last_stage: bool = False
+     ):
+         stage_instance.set_parent(self)
+         if pop_last_stage:
+             self.stages.pop()
+         self.stages.append(stage_instance)
+
+         # Register stage instance to runtime_graph if available
+         if (
+             self.context
+             and hasattr(self.context, "runtime_graph")
+             and self.context.runtime_graph
+         ):
+             self.context.runtime_graph.set_stage(stage_instance, pop_last_stage)
+
+         self.set_variable()
+
+     def set_last_stage(
+         self,
+         stage: Optional[TypeStage] = None,
+         answer: Optional[str] = None,
+         think: Optional[str] = None,
+         raw_output: Optional[str] = None,
+         status: Status = Status.PROCESSING,
+         skill_info: Optional[SkillInfo] = None,
+         block_answer: Optional[str] = None,
+         input_messages: Optional[Messages] = None,
+         **kwargs,
+     ):
+         # If no stages exist, create a new one
+         if len(self.stages) == 0:
+             # If stage is None and we have no stages, default to LLM
+             default_stage = stage if stage is not None else TypeStage.LLM
+             self.add_stage(
+                 stage=default_stage,
+                 answer=answer,
+                 think=think,
+                 raw_output=raw_output,
+                 status=status,
+                 skill_info=skill_info,
+                 block_answer=block_answer,
+                 input_messages=input_messages,
+             )
+             return
+
+         # Check if we need to create a new stage (when stage type changes)
+         last_stage = self.stages[-1]
+
+         # Create new stage if stage type is changing (and it's not None)
+         if stage is not None and stage != last_stage.stage:
+             self.add_stage(
+                 stage=stage,
+                 answer=answer,
+                 think=think,
+                 raw_output=raw_output,
+                 status=status,
+                 skill_info=skill_info,
+                 block_answer=block_answer,
+                 input_messages=input_messages,
+             )
+             return
+
+         last_stage.update(
+             stage=stage,
+             answer=answer,
+             think=think,
+             raw_output=raw_output,
+             status=status,
+             skill_info=skill_info,
+             block_answer=block_answer,
+             input_messages=input_messages,
+             **kwargs,
+         )
+         last_stage.set_end_time()
+         self.set_variable()
+
+     def get_last_stage(self):
+         return self.stages[-1] if len(self.stages) > 0 else None
+
+     def get_last_answer(self) -> dict:
+         return self.stages[-1].get_traditional_dict()
+
+     def get_step_answers(self):
+         last_stage = self.get_last_stage()
+         if last_stage is None:
+             return ""
+
+         last_answer = last_stage.get_answer()
+         if isinstance(last_answer, str) and len(last_answer.strip()) != 0:
+             return last_answer
+         elif not isinstance(last_answer, str):
+             return last_answer
+         else:
+             block_answer = last_stage.get_block_answer()
+             think = last_stage.get_think()
+             answer = last_stage.get_answer()
+             return str(block_answer) + "\n\n" + str(think) + "\n\n" + str(answer)
+
+     def get(self):
+         """
+         Get stages as serializable dictionaries instead of raw objects
+         This ensures compatibility when stages are used as variable values
+         """
+         return [stage.get_traditional_dict() for stage in self.stages]
+
+     def get_raw_stages(self):
+         """
+         Get raw StageInstance objects for internal use
+         Use this method when you need direct access to StageInstance objects
+         """
+         return self.stages
+
+     def set_variable(self):
+         self.context.set_variable(
+             "_progress", [stage.get_traditional_dict() for stage in self.stages]
+         )
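
The runtime_instance.py hunk above defines the tree that dolphin agents record execution into: RuntimeInstance nodes linked by parent/child pointers, with ProgressInstance collecting StageInstance entries and serializing them into a "_progress" variable. Below is a minimal usage sketch, not taken from the package documentation, assuming kweaver-dolphin 0.1.0 is installed; the _StubContext class is a hypothetical stand-in, since ProgressInstance only calls set_variable() on its context here and probes runtime_graph with hasattr, so omitting it is safe.

    from dolphin.core.common.enums import Status, TypeStage
    from dolphin.core.runtime.runtime_instance import ProgressInstance

    class _StubContext:
        # Hypothetical stand-in for dolphin's Context: stores variables in a plain dict.
        def __init__(self):
            self.variables = {}

        def set_variable(self, name, value):
            self.variables[name] = value

    ctx = _StubContext()
    progress = ProgressInstance(context=ctx)
    # Record an LLM stage, then update it in place (stage type unchanged, so no new stage is created).
    progress.add_stage(agent_name="demo", stage=TypeStage.LLM,
                       answer="hello", raw_output="hello", status=Status.PROCESSING)
    progress.set_last_stage(answer="hello world")

    print(progress.get_last_answer()["answer"])        # "hello world"
    print(ctx.variables["_progress"][0]["agent_name"])  # "demo"

Every mutation goes through get_traditional_dict() before landing in "_progress", so consumers of that variable see plain dicts rather than raw StageInstance objects.
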
@@ -0,0 +1,14 @@
+ # -*- coding: utf-8 -*-
+ """Skill module - Skill core"""
+
+ from dolphin.core.skill.skillkit import Skillkit
+ from dolphin.core.skill.skillset import Skillset
+ from dolphin.core.skill.skill_function import SkillFunction
+ from dolphin.core.skill.skill_matcher import SkillMatcher
+
+ __all__ = [
+     "Skillkit",
+     "Skillset",
+     "SkillFunction",
+     "SkillMatcher",
+ ]
@@ -0,0 +1,157 @@
+ from abc import ABC, abstractmethod
+ from typing import Any, Optional, List, Dict
+ from dataclasses import dataclass
+ from enum import Enum
+
+ class ContextRetentionMode(Enum):
+     """Context retention mode for skill results"""
+     SUMMARY = "summary"  # Keep head and tail, truncate middle
+     FULL = "full"  # Keep everything, no processing (default)
+     PIN = "pin"  # Keep full, skip compression, persist to history
+     REFERENCE = "reference"  # Keep only reference_id, fetch full via cache
+
+
+ @dataclass
+ class SkillContextRetention:
+     """Skill context retention configuration"""
+     mode: ContextRetentionMode = ContextRetentionMode.FULL
+     max_length: int = 2000  # Only used by SUMMARY mode
+     summary_prompt: Optional[str] = None
+     ttl_turns: int = -1
+     reference_hint: Optional[str] = None  # Hint text for REFERENCE mode
+
+
+ class ContextRetentionStrategy(ABC):
+     """Base class for context retention strategies"""
+
+     @abstractmethod
+     def process(self, result: str, config: SkillContextRetention,
+                 reference_id: str = None) -> str:
+         """Process result and return content for context
+
+         Args:
+             result: Original result
+             config: Retention configuration
+             reference_id: Result reference ID (for REFERENCE mode)
+         """
+         pass
+
+
+ class SummaryContextStrategy(ContextRetentionStrategy):
+     """Summary strategy - keep head and tail, truncate middle"""
+
+     def process(self, result: str, config: SkillContextRetention,
+                 reference_id: str = None) -> str:
+         if len(result) <= config.max_length:
+             return result
+
+         # Keep head and tail, truncate middle
+         head_ratio = 0.6
+         tail_ratio = 0.2
+         head_chars = int(config.max_length * head_ratio)
+         tail_chars = int(config.max_length * tail_ratio)
+
+         # Provide reference_id so LLM can fetch full content if needed
+         ref_hint = ""
+         if reference_id:
+             ref_hint = f"\n[For full content, call _get_result_detail('{reference_id}')]"
+
+         omitted = len(result) - head_chars - tail_chars
+         # Ensure we don't have negative omission if rounding puts us over
+         if omitted <= 0:
+             return result
+
+         return (f"{result[:head_chars]}\n"
+                 f"... ({omitted} chars omitted) ...\n"
+                 f"{result[-tail_chars:]}"
+                 f"{ref_hint}")
+
+
+ class FullContextStrategy(ContextRetentionStrategy):
+     """Full strategy - keep everything without any processing
+
+     Note: This strategy does NOT truncate. If the result is too large,
+     it will be handled by the Compression Strategy at LLM call time.
+     """
+
+     def process(self, result: str, config: SkillContextRetention,
+                 reference_id: str = None) -> str:
+         # No processing, return as-is
+         # Compression Strategy will handle if context is too large
+         return result
+
+
+ class PinContextStrategy(ContextRetentionStrategy):
+     """Pin strategy - keep full, mark as non-compressible, persist to history"""
+
+     def process(self, result: str, config: SkillContextRetention,
+                 reference_id: str = None) -> str:
+         # Keep full, compression behavior controlled by metadata
+         # PIN_MARKER is recognized by _update_history_and_cleanup
+         from dolphin.core.common.constants import PIN_MARKER
+         # If result already has PIN_MARKER, don't add it again
+         if PIN_MARKER in result:
+             return result
+         return f"{PIN_MARKER}{result}"
+
+
+ class ReferenceContextStrategy(ContextRetentionStrategy):
+     """Reference strategy - keep only reference_id, fetch full via cache
+
+     Use cases:
+     - Very large results (datasets, full web pages)
+     - Results that may need to be fetched later via reference_id
+     - Minimize context usage as much as possible
+     """
+
+     def process(self, result: str, config: SkillContextRetention,
+                 reference_id: str = None) -> str:
+         if not reference_id:
+             # Fallback to SUMMARY if no reference_id
+             return SummaryContextStrategy().process(result, config, reference_id)
+
+         # Build short reference info with fetch instructions
+         hint = config.reference_hint or "Full result stored"
+         return (f"[{hint}]\n"
+                 f"Original length: {len(result)} chars\n"
+                 f"Get full content: _get_result_detail('{reference_id}')\n"
+                 f"Get range: _get_result_detail('{reference_id}', offset=0, limit=2000)")
+
+
+ # Strategy mapping
+ CONTEXT_RETENTION_STRATEGIES: Dict[ContextRetentionMode, ContextRetentionStrategy] = {
+     ContextRetentionMode.SUMMARY: SummaryContextStrategy(),
+     ContextRetentionMode.FULL: FullContextStrategy(),
+     ContextRetentionMode.PIN: PinContextStrategy(),
+     ContextRetentionMode.REFERENCE: ReferenceContextStrategy(),
+ }
+
+
+ def get_context_retention_strategy(mode: ContextRetentionMode) -> ContextRetentionStrategy:
+     """Get context retention strategy"""
+     return CONTEXT_RETENTION_STRATEGIES.get(mode, FullContextStrategy())
+
+
+ def context_retention(
+     mode: str = "full",
+     max_length: int = 2000,
+     summary_prompt: str = None,
+     ttl_turns: int = -1,
+     reference_hint: str = None,
+ ):
+     """Skill context retention strategy decorator"""
+     def decorator(func):
+         try:
+             retention_mode = ContextRetentionMode(mode)
+         except ValueError:
+             retention_mode = ContextRetentionMode.FULL
+
+         func._context_retention = SkillContextRetention(
+             mode=retention_mode,
+             max_length=max_length,
+             summary_prompt=summary_prompt,
+             ttl_turns=ttl_turns,
+             reference_hint=reference_hint,
+         )
+         return func
+     return decorator
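
The context_retention.py hunk above pairs a decorator with a strategy registry: @context_retention attaches a SkillContextRetention config to a skill function, and get_context_retention_strategy() maps the configured mode to a strategy object. A minimal round-trip sketch, assuming kweaver-dolphin 0.1.0 is installed; the search_web skill and the "res-001" reference id are hypothetical, chosen only to exercise the SUMMARY path.

    from dolphin.core.skill.context_retention import (
        context_retention,
        get_context_retention_strategy,
    )

    @context_retention(mode="summary", max_length=200)
    def search_web(query: str) -> str:
        # Hypothetical skill whose result is far longer than max_length
        return "result line\n" * 100

    raw = search_web("dolphin runtime")
    cfg = search_web._context_retention               # SkillContextRetention attached by the decorator
    strategy = get_context_retention_strategy(cfg.mode)
    trimmed = strategy.process(raw, cfg, reference_id="res-001")
    print(len(raw), "->", len(trimmed))               # head and tail kept, middle elided with a fetch hint

Unknown mode strings degrade gracefully: the decorator falls back to ContextRetentionMode.FULL on ValueError, and get_context_retention_strategy() defaults to FullContextStrategy(), so a misconfigured skill keeps its full result rather than raising.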