mycode_sdk-0.4.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mycode/__init__.py +72 -0
- mycode/agent.py +616 -0
- mycode/messages.py +182 -0
- mycode/models.py +161 -0
- mycode/models_catalog.json +2547 -0
- mycode/providers/__init__.py +86 -0
- mycode/providers/anthropic_like.py +387 -0
- mycode/providers/base.py +387 -0
- mycode/providers/gemini.py +316 -0
- mycode/providers/openai_chat.py +368 -0
- mycode/providers/openai_responses.py +372 -0
- mycode/py.typed +0 -0
- mycode/session.py +562 -0
- mycode/tools.py +1275 -0
- mycode/utils.py +38 -0
- mycode_sdk-0.4.2.dist-info/METADATA +114 -0
- mycode_sdk-0.4.2.dist-info/RECORD +19 -0
- mycode_sdk-0.4.2.dist-info/WHEEL +4 -0
- mycode_sdk-0.4.2.dist-info/licenses/LICENSE +21 -0
mycode/messages.py
ADDED
@@ -0,0 +1,182 @@
"""Internal conversation model shared by the runtime, session store, CLI, and UI.

The runtime persists a single message shape everywhere:

- user message: text blocks, image blocks, document blocks, and tool_result blocks
- assistant message: thinking blocks, text blocks, and tool_use blocks

Provider adapters translate between this internal shape and provider-specific wire
formats. The agent loop and session store should never need to know provider wire
details.

Metadata contract:

- assistant message `meta` keeps normalized top-level fields only:
  `provider`, `model`, `provider_message_id`, `stop_reason`, `usage`
- provider-specific assistant message extras live under `meta.native`
- provider-specific block replay hints live under `block.meta.native`
"""

from __future__ import annotations

from typing import Any

from mycode.utils import omit_none

ContentBlock = dict[str, Any]
ConversationMessage = dict[str, Any]


def text_block(text: str, *, meta: dict[str, Any] | None = None) -> ContentBlock:
    block: ContentBlock = {"type": "text", "text": text}
    if meta:
        block["meta"] = dict(meta)
    return block


def thinking_block(text: str, *, meta: dict[str, Any] | None = None) -> ContentBlock:
    block: ContentBlock = {"type": "thinking", "text": text}
    if meta:
        block["meta"] = dict(meta)
    return block


def image_block(
    data: str,
    *,
    mime_type: str,
    name: str | None = None,
    meta: dict[str, Any] | None = None,
) -> ContentBlock:
    block: ContentBlock = {"type": "image", "data": data, "mime_type": mime_type}
    if name:
        block["name"] = name
    if meta:
        block["meta"] = dict(meta)
    return block


def document_block(
    data: str,
    *,
    mime_type: str,
    name: str | None = None,
    meta: dict[str, Any] | None = None,
) -> ContentBlock:
    block: ContentBlock = {"type": "document", "data": data, "mime_type": mime_type}
    if name:
        block["name"] = name
    if meta:
        block["meta"] = dict(meta)
    return block


def tool_use_block(
    *,
    tool_id: str,
    name: str,
    input: dict[str, Any] | None = None,
    meta: dict[str, Any] | None = None,
) -> ContentBlock:
    block: ContentBlock = {
        "type": "tool_use",
        "id": tool_id,
        "name": name,
        "input": dict(input or {}),
    }
    if meta:
        block["meta"] = dict(meta)
    return block


def tool_result_block(
    *,
    tool_use_id: str,
    model_text: str,
    display_text: str,
    is_error: bool = False,
    content: list[ContentBlock] | None = None,
    meta: dict[str, Any] | None = None,
) -> ContentBlock:
    """Build a tool-result block.

    `model_text` is replayed back to providers on later turns.
    `display_text` is the user-facing text shown by CLI and web UI.
    """

    block: ContentBlock = {
        "type": "tool_result",
        "tool_use_id": tool_use_id,
        "model_text": model_text,
        "display_text": display_text,
        "is_error": is_error,
    }
    if content:
        block["content"] = [dict(item) for item in content]
    if meta:
        block["meta"] = dict(meta)
    return block


def user_text_message(text: str, *, meta: dict[str, Any] | None = None) -> ConversationMessage:
    return build_message("user", [text_block(text)], meta=meta)


def build_message(
    role: str,
    blocks: list[ContentBlock],
    *,
    meta: dict[str, Any] | None = None,
) -> ConversationMessage:
    message: ConversationMessage = {"role": role, "content": blocks}
    if meta:
        message["meta"] = dict(meta)
    return message


def assistant_message(
    blocks: list[ContentBlock],
    *,
    provider: str | None = None,
    model: str | None = None,
    provider_message_id: str | None = None,
    stop_reason: str | None = None,
    usage: Any = None,
    native_meta: dict[str, Any] | None = None,
) -> ConversationMessage:
    """Build a normalized assistant message with shared metadata fields."""

    meta: dict[str, Any] = {}
    if provider:
        meta["provider"] = provider
    if model:
        meta["model"] = model
    if provider_message_id:
        meta["provider_message_id"] = provider_message_id
    if stop_reason:
        meta["stop_reason"] = stop_reason
    if usage is not None:
        meta["usage"] = usage
    if native_meta:
        native = omit_none(native_meta)
        if native:
            meta["native"] = native
    return build_message("assistant", blocks, meta=meta or None)


def flatten_message_text(message: ConversationMessage, *, include_thinking: bool = True) -> str:
    """Flatten readable text while skipping synthetic attachment payload blocks."""

    parts: list[str] = []
    for block in message.get("content") or []:
        if not isinstance(block, dict):
            continue
        raw_meta = block.get("meta")
        meta = raw_meta if isinstance(raw_meta, dict) else {}
        # Attached file snapshots should not become session titles or history labels.
        if meta.get("attachment"):
            continue
        btype = block.get("type")
        if btype == "text" or (include_thinking and btype == "thinking"):
            parts.append(str(block.get("text") or ""))
    return " ".join(part.strip() for part in parts if part and part.strip()).strip()
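To make the message shape and metadata contract above concrete, here is a minimal usage sketch (not part of the package diff) that builds one tool-use turn with the helpers from mycode/messages.py. The tool name, model id, and the usage / native_meta dict shapes are illustrative assumptions, not values the SDK prescribes.

from mycode.messages import (
    assistant_message,
    build_message,
    flatten_message_text,
    text_block,
    tool_result_block,
    tool_use_block,
    user_text_message,
)

# Turn 1: plain user text.
history = [user_text_message("List the files in the project root.")]

# Turn 2: normalized assistant reply carrying a tool_use block.
# Normalized fields go to top-level meta; provider extras go under meta.native.
history.append(
    assistant_message(
        [
            text_block("Let me check."),
            tool_use_block(tool_id="call_1", name="ls", input={"path": "."}),  # hypothetical tool
        ],
        provider="anthropic",
        model="claude-sonnet-4-5",  # illustrative model id
        stop_reason="tool_use",
        usage={"input_tokens": 12, "output_tokens": 34},  # assumed usage shape
        native_meta={"cache_creation_input_tokens": 0},   # assumed provider-specific extra
    )
)

# Turn 3: the tool result travels back on a user-role message, matching the
# block split in the module docstring. model_text is replayed to the provider;
# display_text is what the CLI / web UI renders.
history.append(
    build_message(
        "user",
        [
            tool_result_block(
                tool_use_id="call_1",
                model_text="README.md\npyproject.toml\nsrc/",
                display_text="3 entries",
            )
        ],
    )
)

print(flatten_message_text(history[1]))  # -> "Let me check."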
mycode/models.py
ADDED
@@ -0,0 +1,161 @@
"""Load and query the bundled model metadata catalog."""

from __future__ import annotations

import functools
import json
from dataclasses import dataclass, replace
from pathlib import Path
from typing import Any

from mycode.utils import as_bool, as_int

_MODELS_CATALOG_PATH = Path(__file__).with_name("models_catalog.json")

# Catalogs consulted only for capability bits (context window, image / pdf
# support, …) when the requested provider has no entry for the model. They
# are NOT registered providers; the metadata returned from a fallback hit is
# always attributed to a real provider type the caller already has in hand.
_FALLBACK_CAPABILITY_CATALOGS: tuple[str, ...] = ("aihubmix",)


@dataclass(frozen=True)
class ModelMetadata:
    """Normalized metadata used by provider resolution."""

    provider: str
    model: str
    context_window: int | None = None
    max_output_tokens: int | None = None
    supports_reasoning: bool | None = None
    supports_image_input: bool | None = None
    supports_pdf_input: bool | None = None


@functools.cache
def load_models_catalog() -> dict[str, Any] | None:
    """Load the bundled model catalog from disk once per process."""

    try:
        data = json.loads(_MODELS_CATALOG_PATH.read_text(encoding="utf-8"))
    except Exception:
        data = None
    return data if isinstance(data, dict) else None


def infer_provider_from_model(model: str | None) -> str | None:
    """Return the canonical built-in provider id for a known model id, else None.

    Recognizes well-known prefixes on bare model ids and ``provider/model``
    ids alike. Returns ``None`` for unknown ids — callers should require an
    explicit provider in that case rather than guess.
    """

    bare = (model or "").strip().split("/", 1)[-1].strip().lower()
    if bare.startswith("claude-"):
        return "anthropic"
    if bare.startswith("deepseek-"):
        return "deepseek"
    if bare.startswith("gemini-"):
        return "google"
    if bare.startswith("glm-"):
        return "zai"
    if bare.startswith(("gpt-", "o1", "o3", "o4")):
        return "openai"
    if bare.startswith("kimi-"):
        return "moonshotai"
    if bare.startswith("minimax-"):
        return "minimax"
    return None


def resolve_model_metadata(
    *,
    provider: str,
    model: str,
    **overrides: Any,
) -> ModelMetadata:
    """Return catalog metadata for ``(provider, model)`` with non-None overrides layered on top.

    Override keys must match :class:`ModelMetadata` fields. Missing overrides
    and an absent catalog entry both leave the corresponding fields at ``None``
    so callers can apply their own fallback defaults.
    """

    base = lookup_model_metadata(provider_type=provider, model=model) or ModelMetadata(provider=provider, model=model)
    return replace(base, **{k: v for k, v in overrides.items() if v is not None})


def lookup_model_metadata(
    *,
    provider_type: str | None,
    model: str | None,
) -> ModelMetadata | None:
    """Resolve metadata for one provider type and model.

    Lookup tiers, in order:

    1. Exact ``(provider_type, model)`` entry.
    2. Canonical provider inferred from the model id prefix (``gpt-`` →
       ``openai``, ``claude-`` → ``anthropic``, …) when the requested
       provider had no hit.
    3. Capability fallback from a secondary catalog (currently
       ``aihubmix``), attributed to the caller's real provider — never
       to the secondary catalog id.
    """

    raw = (model or "").strip()
    if not raw:
        return None
    catalog = load_models_catalog()
    if not catalog:
        return None

    bare = raw.split("/", 1)[1].strip() if "/" in raw else raw
    inferred = infer_provider_from_model(bare)

    if provider_type:
        hit = _match(catalog, lookup=provider_type, model_id=raw, attributed=provider_type)
        if hit is not None:
            return hit

    if inferred and inferred != provider_type:
        hit = _match(catalog, lookup=inferred, model_id=bare, attributed=inferred)
        if hit is not None:
            return hit

    attributed = provider_type or inferred
    if attributed is None:
        return None
    for source in _FALLBACK_CAPABILITY_CATALOGS:
        hit = _match(catalog, lookup=source, model_id=bare, attributed=attributed)
        if hit is not None:
            return hit

    return None


def _match(
    catalog: dict[str, Any],
    *,
    lookup: str,
    model_id: str,
    attributed: str,
) -> ModelMetadata | None:
    """Look up one model in a catalog section and build metadata if present."""

    section = catalog.get(lookup)
    if not isinstance(section, dict):
        return None
    raw = section.get(model_id)
    if not isinstance(raw, dict):
        return None
    return ModelMetadata(
        provider=attributed,
        model=model_id,
        context_window=as_int(raw.get("context_window")),
        max_output_tokens=as_int(raw.get("max_output_tokens")),
        supports_reasoning=as_bool(raw.get("supports_reasoning")),
        supports_image_input=as_bool(raw.get("supports_image_input")),
        supports_pdf_input=as_bool(raw.get("supports_pdf_input")),
    )
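A rough sketch (not part of the package diff) of how a caller might walk these lookup tiers; it assumes the bundled models_catalog.json carries entries for the example ids, which may not match the shipped catalog.

from mycode.models import (
    infer_provider_from_model,
    lookup_model_metadata,
    resolve_model_metadata,
)

# Prefix inference only; unknown ids return None so callers must pass a provider.
assert infer_provider_from_model("gpt-4.1") == "openai"
assert infer_provider_from_model("unknown-model") is None

# Tier 1/2 lookup: the exact provider section first, then the inferred canonical provider.
meta = lookup_model_metadata(provider_type="openai", model="gpt-4.1")
if meta is not None:
    print(meta.context_window, meta.supports_image_input)

# resolve_model_metadata layers non-None caller overrides over the catalog hit;
# fields with no catalog entry and no override stay None.
meta = resolve_model_metadata(provider="openai", model="gpt-4.1", max_output_tokens=8192)
print(meta.max_output_tokens)  # 8192, regardless of what the catalog says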