krons 0.1.0__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
- krons/__init__.py +49 -0
- krons/agent/__init__.py +144 -0
- krons/agent/mcps/__init__.py +14 -0
- krons/agent/mcps/loader.py +287 -0
- krons/agent/mcps/wrapper.py +799 -0
- krons/agent/message/__init__.py +20 -0
- krons/agent/message/action.py +69 -0
- krons/agent/message/assistant.py +52 -0
- krons/agent/message/common.py +49 -0
- krons/agent/message/instruction.py +130 -0
- krons/agent/message/prepare_msg.py +187 -0
- krons/agent/message/role.py +53 -0
- krons/agent/message/system.py +53 -0
- krons/agent/operations/__init__.py +82 -0
- krons/agent/operations/act.py +100 -0
- krons/agent/operations/generate.py +145 -0
- krons/agent/operations/llm_reparse.py +89 -0
- krons/agent/operations/operate.py +247 -0
- krons/agent/operations/parse.py +243 -0
- krons/agent/operations/react.py +286 -0
- krons/agent/operations/specs.py +235 -0
- krons/agent/operations/structure.py +151 -0
- krons/agent/operations/utils.py +79 -0
- krons/agent/providers/__init__.py +17 -0
- krons/agent/providers/anthropic_messages.py +146 -0
- krons/agent/providers/claude_code.py +276 -0
- krons/agent/providers/gemini.py +268 -0
- krons/agent/providers/match.py +75 -0
- krons/agent/providers/oai_chat.py +174 -0
- krons/agent/third_party/__init__.py +2 -0
- krons/agent/third_party/anthropic_models.py +154 -0
- krons/agent/third_party/claude_code.py +682 -0
- krons/agent/third_party/gemini_models.py +508 -0
- krons/agent/third_party/openai_models.py +295 -0
- krons/agent/tool.py +291 -0
- krons/core/__init__.py +127 -0
- krons/core/base/__init__.py +121 -0
- {kronos/core → krons/core/base}/broadcaster.py +7 -3
- {kronos/core → krons/core/base}/element.py +15 -7
- {kronos/core → krons/core/base}/event.py +41 -8
- {kronos/core → krons/core/base}/eventbus.py +4 -2
- {kronos/core → krons/core/base}/flow.py +14 -7
- {kronos/core → krons/core/base}/graph.py +27 -11
- {kronos/core → krons/core/base}/node.py +47 -22
- {kronos/core → krons/core/base}/pile.py +26 -12
- {kronos/core → krons/core/base}/processor.py +23 -9
- {kronos/core → krons/core/base}/progression.py +5 -3
- {kronos → krons/core}/specs/__init__.py +0 -5
- {kronos → krons/core}/specs/adapters/dataclass_field.py +16 -8
- {kronos → krons/core}/specs/adapters/pydantic_adapter.py +11 -5
- {kronos → krons/core}/specs/adapters/sql_ddl.py +16 -10
- {kronos → krons/core}/specs/catalog/__init__.py +2 -2
- {kronos → krons/core}/specs/catalog/_audit.py +3 -3
- {kronos → krons/core}/specs/catalog/_common.py +2 -2
- {kronos → krons/core}/specs/catalog/_content.py +5 -5
- {kronos → krons/core}/specs/catalog/_enforcement.py +4 -4
- {kronos → krons/core}/specs/factory.py +7 -7
- {kronos → krons/core}/specs/operable.py +9 -3
- {kronos → krons/core}/specs/protocol.py +4 -2
- {kronos → krons/core}/specs/spec.py +25 -13
- {kronos → krons/core}/types/base.py +7 -5
- {kronos → krons/core}/types/db_types.py +2 -2
- {kronos → krons/core}/types/identity.py +1 -1
- {kronos → krons}/errors.py +13 -13
- {kronos → krons}/protocols.py +9 -4
- krons/resource/__init__.py +89 -0
- {kronos/services → krons/resource}/backend.py +50 -24
- {kronos/services → krons/resource}/endpoint.py +28 -14
- {kronos/services → krons/resource}/hook.py +22 -9
- {kronos/services → krons/resource}/imodel.py +50 -32
- {kronos/services → krons/resource}/registry.py +27 -25
- {kronos/services → krons/resource}/utilities/rate_limited_executor.py +10 -6
- {kronos/services → krons/resource}/utilities/rate_limiter.py +4 -2
- {kronos/services → krons/resource}/utilities/resilience.py +17 -7
- krons/resource/utilities/token_calculator.py +185 -0
- {kronos → krons}/session/__init__.py +12 -17
- krons/session/constraints.py +70 -0
- {kronos → krons}/session/exchange.py +14 -6
- {kronos → krons}/session/message.py +4 -2
- krons/session/registry.py +35 -0
- {kronos → krons}/session/session.py +165 -174
- krons/utils/__init__.py +85 -0
- krons/utils/_function_arg_parser.py +99 -0
- krons/utils/_pythonic_function_call.py +249 -0
- {kronos → krons}/utils/_to_list.py +9 -3
- {kronos → krons}/utils/_utils.py +9 -5
- {kronos → krons}/utils/concurrency/__init__.py +38 -38
- {kronos → krons}/utils/concurrency/_async_call.py +6 -4
- {kronos → krons}/utils/concurrency/_errors.py +3 -1
- {kronos → krons}/utils/concurrency/_patterns.py +3 -1
- {kronos → krons}/utils/concurrency/_resource_tracker.py +6 -2
- krons/utils/display.py +257 -0
- {kronos → krons}/utils/fuzzy/__init__.py +6 -1
- {kronos → krons}/utils/fuzzy/_fuzzy_match.py +14 -8
- {kronos → krons}/utils/fuzzy/_string_similarity.py +3 -1
- {kronos → krons}/utils/fuzzy/_to_dict.py +3 -1
- krons/utils/schemas/__init__.py +26 -0
- krons/utils/schemas/_breakdown_pydantic_annotation.py +131 -0
- krons/utils/schemas/_formatter.py +72 -0
- krons/utils/schemas/_minimal_yaml.py +151 -0
- krons/utils/schemas/_typescript.py +153 -0
- {kronos → krons}/utils/sql/_sql_validation.py +1 -1
- krons/utils/validators/__init__.py +3 -0
- krons/utils/validators/_validate_image_url.py +56 -0
- krons/work/__init__.py +126 -0
- krons/work/engine.py +333 -0
- krons/work/form.py +305 -0
- {kronos → krons/work}/operations/__init__.py +7 -4
- {kronos → krons/work}/operations/builder.py +4 -4
- {kronos/enforcement → krons/work/operations}/context.py +37 -6
- {kronos → krons/work}/operations/flow.py +17 -9
- krons/work/operations/node.py +103 -0
- krons/work/operations/registry.py +103 -0
- {kronos/specs → krons/work}/phrase.py +131 -14
- {kronos/enforcement → krons/work}/policy.py +3 -3
- krons/work/report.py +268 -0
- krons/work/rules/__init__.py +47 -0
- {kronos/enforcement → krons/work/rules}/common/boolean.py +3 -1
- {kronos/enforcement → krons/work/rules}/common/choice.py +9 -3
- {kronos/enforcement → krons/work/rules}/common/number.py +3 -1
- {kronos/enforcement → krons/work/rules}/common/string.py +9 -3
- {kronos/enforcement → krons/work/rules}/rule.py +2 -2
- {kronos/enforcement → krons/work/rules}/validator.py +21 -6
- {kronos/enforcement → krons/work}/service.py +16 -7
- krons/work/worker.py +266 -0
- {krons-0.1.0.dist-info → krons-0.2.0.dist-info}/METADATA +19 -5
- krons-0.2.0.dist-info/RECORD +154 -0
- kronos/core/__init__.py +0 -145
- kronos/enforcement/__init__.py +0 -57
- kronos/operations/node.py +0 -101
- kronos/operations/registry.py +0 -92
- kronos/services/__init__.py +0 -81
- kronos/specs/adapters/__init__.py +0 -0
- kronos/utils/__init__.py +0 -40
- krons-0.1.0.dist-info/RECORD +0 -101
- {kronos → krons/core/specs/adapters}/__init__.py +0 -0
- {kronos → krons/core}/specs/adapters/_utils.py +0 -0
- {kronos → krons/core}/specs/adapters/factory.py +0 -0
- {kronos → krons/core}/types/__init__.py +0 -0
- {kronos → krons/core}/types/_sentinel.py +0 -0
- {kronos → krons}/py.typed +0 -0
- {kronos/services → krons/resource}/utilities/__init__.py +0 -0
- {kronos/services → krons/resource}/utilities/header_factory.py +0 -0
- {kronos → krons}/utils/_hash.py +0 -0
- {kronos → krons}/utils/_json_dump.py +0 -0
- {kronos → krons}/utils/_lazy_init.py +0 -0
- {kronos → krons}/utils/_to_num.py +0 -0
- {kronos → krons}/utils/concurrency/_cancel.py +0 -0
- {kronos → krons}/utils/concurrency/_primitives.py +0 -0
- {kronos → krons}/utils/concurrency/_priority_queue.py +0 -0
- {kronos → krons}/utils/concurrency/_run_async.py +0 -0
- {kronos → krons}/utils/concurrency/_task.py +0 -0
- {kronos → krons}/utils/concurrency/_utils.py +0 -0
- {kronos → krons}/utils/fuzzy/_extract_json.py +0 -0
- {kronos → krons}/utils/fuzzy/_fuzzy_json.py +0 -0
- {kronos → krons}/utils/sql/__init__.py +0 -0
- {kronos/enforcement → krons/work/rules}/common/__init__.py +0 -0
- {kronos/enforcement → krons/work/rules}/common/mapping.py +0 -0
- {kronos/enforcement → krons/work/rules}/common/model.py +0 -0
- {kronos/enforcement → krons/work/rules}/registry.py +0 -0
- {krons-0.1.0.dist-info → krons-0.2.0.dist-info}/WHEEL +0 -0
- {krons-0.1.0.dist-info → krons-0.2.0.dist-info}/licenses/LICENSE +0 -0
krons/utils/__init__.py
ADDED
@@ -0,0 +1,85 @@
from ._hash import (
    GENESIS_HASH,
    MAX_HASH_INPUT_BYTES,
    HashAlgorithm,
    compute_chain_hash,
    compute_hash,
    hash_obj,
)
from ._json_dump import json_dump, json_dumpb, json_lines_iter
from ._to_list import to_list
from ._to_num import to_num
from ._utils import (
    async_synchronized,
    coerce_created_at,
    create_path,
    extract_types,
    get_bins,
    import_module,
    is_import_installed,
    load_type_from_string,
    now_utc,
    register_type_prefix,
    synchronized,
    to_uuid,
)
from .concurrency import alcall, is_coro_func
from .fuzzy import (
    SimilarityAlgo,
    extract_json,
    fuzzy_json,
    fuzzy_match_keys,
    string_similarity,
    to_dict,
)
from .sql._sql_validation import (
    MAX_IDENTIFIER_LENGTH,
    SAFE_IDENTIFIER_PATTERN,
    sanitize_order_by,
    validate_identifier,
)

__all__ = (
    # _hash
    "GENESIS_HASH",
    "MAX_HASH_INPUT_BYTES",
    "HashAlgorithm",
    "compute_chain_hash",
    "compute_hash",
    "hash_obj",
    # _json_dump
    "json_dump",
    "json_dumpb",
    "json_lines_iter",
    # _to_list, _to_num
    "to_list",
    "to_num",
    # _utils
    "async_synchronized",
    "coerce_created_at",
    "create_path",
    "extract_types",
    "get_bins",
    "import_module",
    "is_import_installed",
    "load_type_from_string",
    "now_utc",
    "register_type_prefix",
    "synchronized",
    "to_uuid",
    # concurrency
    "alcall",
    "is_coro_func",
    # fuzzy
    "SimilarityAlgo",
    "extract_json",
    "fuzzy_json",
    "fuzzy_match_keys",
    "string_similarity",
    "to_dict",
    # sql
    "MAX_IDENTIFIER_LENGTH",
    "SAFE_IDENTIFIER_PATTERN",
    "sanitize_order_by",
    "validate_identifier",
)
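A short, hedged sketch of what this new facade gives callers: everything listed in __all__ above becomes importable from krons.utils directly. Only the import surface is exercised here; no behavior beyond what the export list states is assumed.

from krons.utils import fuzzy_match_keys, to_list, to_num, validate_identifier

# The names resolve to the implementation modules listed in the imports above.
for fn in (to_list, to_num, fuzzy_match_keys, validate_identifier):
    print(fn.__name__, "->", fn.__module__)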
krons/utils/_function_arg_parser.py
ADDED
@@ -0,0 +1,99 @@
import types
from typing import Any, Union, get_args, get_origin

from pydantic import BaseModel


def map_positional_args(
    arguments: dict[str, Any], param_names: list[str]
) -> dict[str, Any]:
    """Map positional arguments (_pos_0, _pos_1, ...) to actual parameter names."""
    mapped = {}
    pos_count = 0

    for key, value in arguments.items():
        if key.startswith("_pos_"):
            if pos_count >= len(param_names):
                raise ValueError(
                    f"Too many positional arguments (expected {len(param_names)})"
                )
            mapped[param_names[pos_count]] = value
            pos_count += 1
        else:
            # Keep keyword arguments as-is
            mapped[key] = value

    return mapped


def _get_nested_fields_from_annotation(annotation) -> set[str]:
    """Extract all field names from an annotation that may be a Pydantic model or Union."""
    origin = get_origin(annotation)

    # Handle Union types (typing.Union or types.UnionType)
    if origin is Union or isinstance(annotation, types.UnionType):
        union_members = get_args(annotation)
        all_fields = set()
        for member in union_members:
            if member is type(None):
                continue
            # Recursively check nested unions or models
            nested = _get_nested_fields_from_annotation(member)
            all_fields.update(nested)
        return all_fields

    # Handle direct Pydantic model
    if isinstance(annotation, type) and issubclass(annotation, BaseModel):
        return set(annotation.model_fields.keys())

    # Handle Pydantic model class (not instance)
    if hasattr(annotation, "model_fields"):
        return set(annotation.model_fields.keys())

    return set()


def nest_arguments_by_schema(arguments: dict[str, Any], schema_cls) -> dict[str, Any]:
    """Restructure flat arguments into nested format based on schema structure."""
    if not schema_cls or not hasattr(schema_cls, "model_fields"):
        return arguments

    # Get top-level field names
    top_level_fields = set(schema_cls.model_fields.keys())

    # Find fields that are nested objects (Pydantic models or unions)
    nested_field_mappings = {}
    for field_name, field_info in schema_cls.model_fields.items():
        annotation = field_info.annotation
        nested_fields = _get_nested_fields_from_annotation(annotation)
        if nested_fields:
            nested_field_mappings[field_name] = nested_fields

    # If no nested fields detected, return as-is
    if not nested_field_mappings:
        return arguments

    # Separate top-level args from nested args
    result: dict[str, Any] = {}
    nested_args: dict[str, dict[str, Any]] = {}

    for key, value in arguments.items():
        if key in top_level_fields:
            # This is a top-level field
            result[key] = value
        else:
            # Check if this belongs to a nested field
            for nested_field, nested_keys in nested_field_mappings.items():
                if key in nested_keys:
                    if nested_field not in nested_args:
                        nested_args[nested_field] = {}
                    nested_args[nested_field][key] = value
                    break
            else:
                # Unknown field - keep at top level (will fail validation later)
                result[key] = value

    # Add nested structures to result
    result.update(nested_args)

    return result
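Together these helpers let a flat, parser-produced argument dict be reshaped to fit a nested Pydantic schema before validation. A short sketch of how they compose; the Profile/CreateUser models are hypothetical and used only for illustration:

from pydantic import BaseModel

from krons.utils._function_arg_parser import map_positional_args, nest_arguments_by_schema


# Hypothetical schema: one top-level string field plus a nested model.
class Profile(BaseModel):
    bio: str
    location: str


class CreateUser(BaseModel):
    name: str
    profile: Profile


# Positional slots produced by the call parser are keyed _pos_0, _pos_1, ...
flat = map_positional_args(
    {"_pos_0": "Ada", "bio": "Mathematician", "location": "London"},
    param_names=["name"],
)
# -> {"name": "Ada", "bio": "Mathematician", "location": "London"}

nested = nest_arguments_by_schema(flat, CreateUser)
# -> {"name": "Ada", "profile": {"bio": "Mathematician", "location": "London"}}

print(CreateUser.model_validate(nested))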
krons/utils/_pythonic_function_call.py
ADDED
@@ -0,0 +1,249 @@
# Copyright (c) 2025, HaiyangLi <quantocean.li at gmail dot com>
# SPDX-License-Identifier: Apache-2.0

"""Function call parser with khive-mcp extensions for unified tool paradigm.

Core parsing with support for:
- Service namespacing: cognition.remember_episodic(...)
- Batch parsing: [call1(...), call2(...)]
- Reserved keyword handling: from= -> from_= (Python keywords as args)
"""

from __future__ import annotations

import ast
import re
from typing import Any

# Python reserved keywords that might be used as field names
# These get mapped to underscore versions for parsing
RESERVED_KEYWORDS = {
    "from",
    "import",
    "class",
    "def",
    "return",
    "yield",
    "async",
    "await",
}

# Regex to match keyword arguments with reserved names
# Matches: from="value" or from='value' at word boundary
_RESERVED_KWARG_PATTERN = re.compile(
    r"\b(" + "|".join(RESERVED_KEYWORDS) + r")\s*=", re.MULTILINE
)

__all__ = (
    "parse_function_call",
    "parse_batch_function_calls",
)


def _escape_reserved_keywords(call_str: str) -> str:
    """Escape Python reserved keywords used as argument names.

    Converts `from=` to `from_=` so ast.parse can handle it.
    The underscore version is what Pydantic expects for aliased fields.

    Args:
        call_str: Function call string that may contain reserved keywords

    Returns:
        String with reserved keywords escaped
    """
    return _RESERVED_KWARG_PATTERN.sub(r"\1_=", call_str)


def _ast_to_value(node: ast.AST) -> Any:
    """Convert AST node to Python value with recursive dict/list processing.

    Handles nested dicts, lists, tuples, and JSON-style literals (true/false/null).
    Normalizes JSON literals: true->True, false->False, null->None.

    Args:
        node: AST node to convert

    Returns:
        Python value

    Raises:
        ValueError: If node cannot be converted to a value
    """
    # Handle JSON-style boolean/null names: true, false, null
    if isinstance(node, ast.Name):
        if node.id in ("true", "false", "null"):
            return {"true": True, "false": False, "null": None}[node.id]
        raise ValueError(f"Name '{node.id}' is not a valid literal")

    # Handle dict nodes: {key1: val1, key2: val2, ...}
    if isinstance(node, ast.Dict):
        return {
            _ast_to_value(k): _ast_to_value(v) for k, v in zip(node.keys, node.values)
        }

    # Handle list nodes: [elem1, elem2, ...]
    if isinstance(node, ast.List):
        return [_ast_to_value(elem) for elem in node.elts]

    # Handle tuple nodes: (elem1, elem2, ...)
    if isinstance(node, ast.Tuple):
        return tuple(_ast_to_value(elem) for elem in node.elts)

    # Handle simple literals (str, int, float, bool, None) via ast.literal_eval
    try:
        return ast.literal_eval(node)
    except (ValueError, TypeError) as e:
        raise ValueError(f"Cannot convert AST node: {type(node).__name__}") from e


def parse_function_call(call_str: str) -> dict[str, Any]:
    """Parse Python function call syntax into unified tool format.

    Supports service namespacing for unified tool paradigm:
    - Simple: search("query") -> {operation: "search", arguments: {...}}
    - Namespaced: cognition.remember("...") -> {service: "cognition", ...}
    - Deep: recall.search("...") -> {service: "recall", operation: "search", ...}

    Examples:
        >>> parse_function_call('search("AI news")')
        {'operation': 'search', 'arguments': {'query': 'AI news'}}

        >>> parse_function_call('cognition.remember_episodic(content="...")')
        {'service': 'cognition', 'operation': 'remember_episodic', 'arguments': {'content': '...'}}

    Args:
        call_str: Python function call as string

    Returns:
        Dict with 'operation', optional 'service', and 'arguments' keys
        Legacy 'tool' key also included for backward compatibility

    Raises:
        ValueError: If the string is not a valid function call
    """
    try:
        # Escape reserved keywords before parsing (e.g., from= -> from_=)
        escaped_str = _escape_reserved_keywords(call_str)

        # Parse the call as a Python expression
        tree = ast.parse(escaped_str, mode="eval")
        call = tree.body

        if not isinstance(call, ast.Call):
            raise ValueError("Not a function call")

        # Extract function name and service namespace
        service = None
        operation = None

        if isinstance(call.func, ast.Name):
            # Simple call: search(...)
            operation = call.func.id
        elif isinstance(call.func, ast.Attribute):
            # Namespaced call: cognition.remember(...) or recall.search(...)
            operation = call.func.attr

            # Walk up the attribute chain to get service name
            node = call.func.value
            if isinstance(node, ast.Name):
                service = node.id
            elif isinstance(node, ast.Attribute):
                # Multi-level: could be module.service.operation
                # For now, take the last attribute as service
                service = node.attr
        else:
            raise ValueError(f"Unsupported function type: {type(call.func)}")

        # Extract arguments
        arguments = {}

        # Positional arguments (will be mapped by parameter order in schema)
        for i, arg in enumerate(call.args):
            # For now, use position-based keys; will be mapped to param names later
            arguments[f"_pos_{i}"] = _ast_to_value(arg)

        # Keyword arguments
        for keyword in call.keywords:
            if keyword.arg is None:
                # **kwargs syntax
                raise ValueError("**kwargs not supported")
            arguments[keyword.arg] = _ast_to_value(keyword.value)

        # Build result with new unified format
        result = {
            "operation": operation,
            "arguments": arguments,
            "tool": operation,  # Backward compatibility
        }

        if service:
            result["service"] = service

        return result

    except (SyntaxError, ValueError) as e:
        raise ValueError(f"Invalid function call syntax: {e}") from e


def parse_batch_function_calls(batch_str: str) -> list[dict[str, Any]]:
    """Parse batch function calls (array of function calls).

    Supports:
    - Same service batch: [remember(...), recall(...)]
    - Cross-service batch: [cognition.remember(...), waves.check_in()]

    Examples:
        >>> parse_batch_function_calls('[search("A"), search("B")]')
        [
            {'operation': 'search', 'arguments': {'query': 'A'}},
            {'operation': 'search', 'arguments': {'query': 'B'}}
        ]

        >>> parse_batch_function_calls('[cognition.remember(...), waves.check_in()]')
        [
            {'service': 'cognition', 'operation': 'remember', 'arguments': {...}},
            {'service': 'waves', 'operation': 'check_in', 'arguments': {}}
        ]

    Args:
        batch_str: String containing array of function calls

    Returns:
        List of parsed function call dicts

    Raises:
        ValueError: If the string is not a valid array of function calls
    """
    try:
        # Remove whitespace for easier parsing
        batch_str = batch_str.strip()

        # Must start with [ and end with ]
        if not (batch_str.startswith("[") and batch_str.endswith("]")):
            raise ValueError("Batch call must be enclosed in [ ]")

        # Escape reserved keywords before parsing (e.g., from= -> from_=)
        escaped_str = _escape_reserved_keywords(batch_str)

        # Parse as Python list expression
        tree = ast.parse(escaped_str, mode="eval")
        if not isinstance(tree.body, ast.List):
            raise ValueError("Not a list expression")

        results = []
        for element in tree.body.elts:
            if not isinstance(element, ast.Call):
                raise ValueError(
                    f"List element is not a function call: {ast.dump(element)}"
                )

            # Convert the Call node back to source code and parse it
            call_str = ast.unparse(element)
            parsed = parse_function_call(call_str)
            results.append(parsed)

        return results

    except (SyntaxError, ValueError) as e:
        raise ValueError(f"Invalid batch function call syntax: {e}") from e
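A quick usage sketch following the docstrings above (module path taken from the file list; note that, per the implementation, positional arguments come back under _pos_N keys until a schema maps them to parameter names):

from krons.utils._pythonic_function_call import (
    parse_batch_function_calls,
    parse_function_call,
)

# Simple call: the positional argument is keyed _pos_0.
print(parse_function_call('search("AI news")'))
# e.g. {'operation': 'search', 'arguments': {'_pos_0': 'AI news'}, 'tool': 'search'}

# Namespaced call with a reserved keyword argument; from= is escaped to from_=.
print(parse_function_call('mail.send(to="a@b.c", from="noreply")'))
# e.g. {'operation': 'send', 'arguments': {'to': 'a@b.c', 'from_': 'noreply'},
#       'tool': 'send', 'service': 'mail'}

# Batch of calls across services.
print(parse_batch_function_calls('[cognition.remember(content="x"), waves.check_in()]'))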
{kronos → krons}/utils/_to_list.py
RENAMED
@@ -29,7 +29,7 @@ def _do_init() -> None:
     from pydantic import BaseModel
     from pydantic_core import PydanticUndefinedType
 
-    from
+    from krons.core.types import UndefinedType, UnsetType
 
     global _MODEL_LIKE, _MAP_LIKE, _SINGLETONE_TYPES, _SKIP_TYPE, _SKIP_TUPLE_SET
     _MODEL_LIKE = (BaseModel,)
@@ -117,7 +117,11 @@ def to_list(
     if isinstance(input_, _BYTE_LIKE):
         return list(input_) if use_values else [input_]
     if isinstance(input_, Mapping):
-        return
+        return (
+            list(input_.values())
+            if use_values and hasattr(input_, "values")
+            else [input_]
+        )
     if isinstance(input_, _MODEL_LIKE):
         return [input_]
     if isinstance(input_, Iterable) and not isinstance(input_, _BYTE_LIKE):
@@ -129,7 +133,9 @@ def to_list(
 
     initial_list = _to_list_type(input_, use_values=use_values)
     skip_types: tuple[type, ...] = _SKIP_TYPE if flatten_tuple_set else _SKIP_TUPLE_SET
-    processed = _process_list(
+    processed = _process_list(
+        initial_list, flatten=flatten, dropna=dropna, skip_types=skip_types
+    )
 
     if unique:
         seen = set()
{kronos → krons}/utils/_utils.py
RENAMED
@@ -13,7 +13,7 @@ from uuid import UUID, uuid4
 
 from anyio import Path as AsyncPath
 
-from
+from krons.protocols import Observable
 
 __all__ = (
     "create_path",
@@ -163,10 +163,14 @@ def import_module(
         ImportError: If module or attribute not found.
     """
     try:
-        full_import_path =
+        full_import_path = (
+            f"{package_name}.{module_name}" if module_name else package_name
+        )
 
         if import_name:
-            import_name =
+            import_name = (
+                [import_name] if not isinstance(import_name, list) else import_name
+            )
         a = __import__(
             full_import_path,
             fromlist=import_name,
@@ -188,7 +192,7 @@ def is_import_installed(package_name: str) -> bool:
 
 _TYPE_CACHE: dict[str, type] = {}
 
-_DEFAULT_ALLOWED_PREFIXES: frozenset[str] = frozenset({"
+_DEFAULT_ALLOWED_PREFIXES: frozenset[str] = frozenset({"krons."})
 _ALLOWED_MODULE_PREFIXES: set[str] = set(_DEFAULT_ALLOWED_PREFIXES)
 
 
@@ -210,7 +214,7 @@ def register_type_prefix(prefix: str) -> None:
 
 
 def load_type_from_string(type_str: str) -> type:
-    """Load type from fully qualified path (e.g., '
+    """Load type from fully qualified path (e.g., 'krons.core.Node').
 
     Security: Only allowlisted module prefixes can be loaded.
 
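The new default allowlist, frozenset({"krons."}), means load_type_from_string only resolves krons modules unless callers register additional prefixes. A small sketch, reusing the docstring's own 'krons.core.Node' example; the 'myapp.' prefix is hypothetical:

from krons.utils import load_type_from_string, register_type_prefix

# The docstring's own example: types under the default "krons." prefix resolve.
node_cls = load_type_from_string("krons.core.Node")
print(node_cls)

# Other code bases must opt in. "myapp." is a hypothetical prefix; after this,
# load_type_from_string("myapp.models.Document") would pass the allowlist check
# (the module still has to be real and importable).
register_type_prefix("myapp.")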
{kronos → krons}/utils/concurrency/__init__.py
RENAMED
@@ -37,63 +37,63 @@ from typing import TYPE_CHECKING
 
 _LAZY_IMPORTS: dict[str, tuple[str, str]] = {
     # _cancel
-    "CancelScope": ("
-    "effective_deadline": ("
-    "fail_after": ("
-    "fail_at": ("
-    "move_on_after": ("
-    "move_on_at": ("
+    "CancelScope": ("krons.utils.concurrency._cancel", "CancelScope"),
+    "effective_deadline": ("krons.utils.concurrency._cancel", "effective_deadline"),
+    "fail_after": ("krons.utils.concurrency._cancel", "fail_after"),
+    "fail_at": ("krons.utils.concurrency._cancel", "fail_at"),
+    "move_on_after": ("krons.utils.concurrency._cancel", "move_on_after"),
+    "move_on_at": ("krons.utils.concurrency._cancel", "move_on_at"),
     # _errors
     "get_cancelled_exc_class": (
-        "
+        "krons.utils.concurrency._errors",
         "get_cancelled_exc_class",
     ),
-    "is_cancelled": ("
+    "is_cancelled": ("krons.utils.concurrency._errors", "is_cancelled"),
     "non_cancel_subgroup": (
-        "
+        "krons.utils.concurrency._errors",
         "non_cancel_subgroup",
     ),
-    "shield": ("
+    "shield": ("krons.utils.concurrency._errors", "shield"),
     # _patterns
-    "CompletionStream": ("
-    "bounded_map": ("
-    "gather": ("
-    "race": ("
-    "retry": ("
+    "CompletionStream": ("krons.utils.concurrency._patterns", "CompletionStream"),
+    "bounded_map": ("krons.utils.concurrency._patterns", "bounded_map"),
+    "gather": ("krons.utils.concurrency._patterns", "gather"),
+    "race": ("krons.utils.concurrency._patterns", "race"),
+    "retry": ("krons.utils.concurrency._patterns", "retry"),
     # _primitives
-    "CapacityLimiter": ("
-    "Condition": ("
-    "Event": ("
-    "Lock": ("
-    "Queue": ("
-    "Semaphore": ("
+    "CapacityLimiter": ("krons.utils.concurrency._primitives", "CapacityLimiter"),
+    "Condition": ("krons.utils.concurrency._primitives", "Condition"),
+    "Event": ("krons.utils.concurrency._primitives", "Event"),
+    "Lock": ("krons.utils.concurrency._primitives", "Lock"),
+    "Queue": ("krons.utils.concurrency._primitives", "Queue"),
+    "Semaphore": ("krons.utils.concurrency._primitives", "Semaphore"),
     # _priority_queue
-    "PriorityQueue": ("
-    "QueueEmpty": ("
-    "QueueFull": ("
+    "PriorityQueue": ("krons.utils.concurrency._priority_queue", "PriorityQueue"),
+    "QueueEmpty": ("krons.utils.concurrency._priority_queue", "QueueEmpty"),
+    "QueueFull": ("krons.utils.concurrency._priority_queue", "QueueFull"),
     # _resource_tracker
-    "LeakInfo": ("
-    "LeakTracker": ("
+    "LeakInfo": ("krons.utils.concurrency._resource_tracker", "LeakInfo"),
+    "LeakTracker": ("krons.utils.concurrency._resource_tracker", "LeakTracker"),
     "track_resource": (
-        "
+        "krons.utils.concurrency._resource_tracker",
         "track_resource",
     ),
     "untrack_resource": (
-        "
+        "krons.utils.concurrency._resource_tracker",
         "untrack_resource",
     ),
     # _run_async
-    "run_async": ("
+    "run_async": ("krons.utils.concurrency._run_async", "run_async"),
     # _task
-    "TaskGroup": ("
-    "create_task_group": ("
+    "TaskGroup": ("krons.utils.concurrency._task", "TaskGroup"),
+    "create_task_group": ("krons.utils.concurrency._task", "create_task_group"),
     # _utils
-    "current_time": ("
-    "is_coro_func": ("
-    "run_sync": ("
-    "sleep": ("
-    "alcall": ("
-    "bcall": ("
+    "current_time": ("krons.utils.concurrency._utils", "current_time"),
+    "is_coro_func": ("krons.utils.concurrency._utils", "is_coro_func"),
+    "run_sync": ("krons.utils.concurrency._utils", "run_sync"),
+    "sleep": ("krons.utils.concurrency._utils", "sleep"),
+    "alcall": ("krons.utils.concurrency._async_call", "alcall"),
+    "bcall": ("krons.utils.concurrency._async_call", "bcall"),
 }
 
 _LOADED: dict[str, object] = {}
@@ -122,7 +122,7 @@ def __getattr__(name: str) -> object:
         _LOADED[name] = value
         return value
 
-    raise AttributeError(f"module '
+    raise AttributeError(f"module 'krons.utils.concurrency' has no attribute {name!r}")
 
 
 def __dir__() -> list[str]:
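The _LAZY_IMPORTS table above feeds a PEP 562 module-level __getattr__: nothing is imported until an attribute is first requested, and the resolved object is cached in _LOADED. A generic, self-contained sketch of that pattern (standard-library modules stand in for the krons submodules; this shows the technique, not the krons source):

# lazy_pkg/__init__.py -- generic sketch of the lazy-export pattern above.
import importlib

_LAZY_IMPORTS: dict[str, tuple[str, str]] = {
    # public name -> (module path, attribute name)
    "sqrt": ("math", "sqrt"),
    "dumps": ("json", "dumps"),
}
_LOADED: dict[str, object] = {}


def __getattr__(name: str) -> object:
    """Resolve and cache an export the first time it is requested."""
    if name in _LOADED:
        return _LOADED[name]
    try:
        module_path, attr = _LAZY_IMPORTS[name]
    except KeyError:
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}") from None
    value = getattr(importlib.import_module(module_path), attr)
    _LOADED[name] = value
    return value


def __dir__() -> list[str]:
    return sorted(_LAZY_IMPORTS)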
{kronos → krons}/utils/concurrency/_async_call.py
RENAMED
@@ -11,9 +11,9 @@ Primary exports:
 from collections.abc import AsyncGenerator, Callable
 from typing import Any, ParamSpec, TypeVar
 
-from
-from
-from
+from krons.core.types._sentinel import Unset, not_sentinel
+from krons.utils._lazy_init import LazyInit
+from krons.utils._to_list import to_list
 
 from ._cancel import move_on_after
 from ._errors import get_cancelled_exc_class
@@ -66,7 +66,9 @@ def _validate_func(func: Any) -> Callable:
     try:
         func_list = list(func)
     except TypeError:
-        raise ValueError(
+        raise ValueError(
+            "func must be callable or an iterable containing one callable."
+        )
 
     if len(func_list) != 1 or not callable(func_list[0]):
         raise ValueError("Only one callable function is allowed.")
{kronos → krons}/utils/concurrency/_errors.py
RENAMED
@@ -48,7 +48,9 @@ def is_cancelled(exc: BaseException) -> bool:
     return isinstance(exc, anyio.get_cancelled_exc_class())
 
 
-async def shield(
+async def shield(
+    func: Callable[P, Awaitable[T]], *args: P.args, **kwargs: P.kwargs
+) -> T:
     """Execute async function protected from outer cancellation.
 
     Args:
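shield's expanded signature takes the coroutine function plus its arguments and, per the docstring, runs it protected from outer cancellation. A hedged usage sketch (release_lease is hypothetical; the protection semantics are assumed from the docstring):

import anyio

from krons.utils.concurrency import shield


async def release_lease(lease_id: str) -> None:
    # Hypothetical cleanup that should finish even under cancellation.
    await anyio.sleep(0.1)
    print(f"released {lease_id}")


async def main() -> None:
    with anyio.move_on_after(0.05):
        try:
            await anyio.sleep(1)  # cancelled when the deadline hits
        finally:
            # Assumed semantics per the docstring: the shielded call is
            # protected from the outer cancellation and runs to completion.
            await shield(release_lease, "lease-42")


anyio.run(main)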
{kronos → krons}/utils/concurrency/_patterns.py
RENAMED
@@ -39,7 +39,9 @@ __all__ = (
 )
 
 
-async def gather(
+async def gather(
+    *aws: Awaitable[T], return_exceptions: bool = False
+) -> list[T | BaseException]:
     """Run awaitables concurrently and collect results in input order.
 
     Args: