modelsdotdev 0.20260514.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,261 @@
1
+ """Package distribution utilities."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import dataclasses
6
+ import functools
7
+ import importlib.metadata
8
+ import json
9
+ import os
10
+ import pathlib
11
+ import re
12
+ import subprocess
13
+ import urllib.parse
14
+ from typing import TYPE_CHECKING, Any, cast
15
+
16
+ if TYPE_CHECKING:
17
+ from collections.abc import Sequence
18
+
19
+
20
# Distribution name of this project's own package, used to look up its
# installation metadata (version, Direct URL Origin) below.
_PROJECT_DIST_NAME = "modelsdotdev"
21
+
22
+
23
@dataclasses.dataclass(kw_only=True, frozen=True)
class DirectURLOrigin:
    """Parsed subset of a distribution's ``direct_url.json`` metadata.

    See :func:`_get_direct_url_origin` for how the fields are extracted.
    """

    # The requirement URL the distribution was installed from ("url" key).
    url: str
    # True for editable installs ("dir_info"/"editable" key).
    editable: bool = False
    # VCS commit hash when installed from a VCS URL ("vcs_info"/"commit_id").
    commit_id: str | None = None
28
+
29
+
30
@functools.cache
def get_direct_url_origin(
    dist_name: str,
    path: Sequence[str] | None = None,
) -> DirectURLOrigin | None:
    """Return PEP 660 Direct URL Origin metadata for package if present.

    ``path`` restricts the metadata search locations; when None the default
    search path is used. Note ``path`` must be hashable (e.g. a tuple)
    because results are memoized with ``functools.cache``.
    """
    kwargs: dict[str, Any] = {"name": dist_name}
    if path is not None:
        kwargs["path"] = [*path]
    dists = importlib.metadata.distributions(**kwargs)

    # The metadata finder yields one Distribution per matching entry on the
    # search path, duplicates included. Take the first candidate that
    # actually carries a direct_url.json payload.
    return next(
        (
            origin
            for dist in dists
            if (origin := _get_direct_url_origin(dist)) is not None
        ),
        None,
    )
51
+
52
+
53
@functools.cache
def get_dist_version(
    dist_name: str,
    path: Sequence[str] | None = None,
) -> str | None:
    """Return version of a given distribution, if present.

    ``path`` restricts the metadata search locations; it must be hashable
    (e.g. a tuple) because results are memoized with ``functools.cache``.
    """
    if path is None:
        dists = importlib.metadata.distributions(name=dist_name)
    else:
        dists = importlib.metadata.distributions(name=dist_name, path=[*path])

    # Only the first match matters; any duplicates on the search path
    # are ignored.
    return next((dist.version for dist in dists), None)
68
+
69
+
70
+ def _get_direct_url_origin(
71
+ dist: importlib.metadata.Distribution,
72
+ ) -> DirectURLOrigin | None:
73
+ try:
74
+ data = dist.read_text("direct_url.json")
75
+ except OSError:
76
+ return None
77
+ if data is None:
78
+ return None
79
+ try:
80
+ info = json.loads(data)
81
+ except ValueError:
82
+ return None
83
+ if not isinstance(info, dict):
84
+ return None
85
+
86
+ info = cast("dict[str, Any]", info)
87
+ url = info.get("url")
88
+ if not url:
89
+ # URL must be present, metadata is corrupt
90
+ return None
91
+
92
+ dir_info = info.get("dir_info")
93
+ if isinstance(dir_info, dict):
94
+ dir_info = cast("dict[str, Any]", dir_info)
95
+ editable = dir_info.get("editable", False)
96
+ else:
97
+ editable = False
98
+ vcs_info = info.get("vcs_info")
99
+ if isinstance(vcs_info, dict):
100
+ vcs_info = cast("dict[str, Any]", vcs_info)
101
+ commit_id = vcs_info.get("commit_id")
102
+ else:
103
+ commit_id = None
104
+
105
+ return DirectURLOrigin(
106
+ url=url,
107
+ editable=editable,
108
+ commit_id=commit_id,
109
+ )
110
+
111
+
112
def get_origin_source_dir(dist_name: str) -> pathlib.Path | None:
    """Return the local source directory a distribution was installed from.

    Resolves the distribution's Direct URL Origin and, when it is a
    ``file://`` URL pointing at an existing directory, returns that
    directory. Returns None for missing metadata, malformed or non-file
    URLs, empty paths, or paths that are not directories.
    """
    url_origin = get_direct_url_origin(dist_name)
    if url_origin is None:
        return None

    try:
        dir_url = urllib.parse.urlparse(url_origin.url)
    except ValueError:
        return None

    if dir_url.scheme != "file":
        # Non-local URL (https, VCS, ...)?
        return None

    if not dir_url.path:
        # No path?
        return None

    # The path component of a file:// URL is percent-encoded (PEP 610 /
    # RFC 8089) -- e.g. a space is recorded as %20 -- so it must be
    # decoded before being used as a filesystem path.
    path = pathlib.Path(urllib.parse.unquote(dir_url.path))
    if not path.is_dir():
        # Not a directory (wheel?)
        return None

    return path
136
+
137
+
138
@functools.cache
def get_project_source_root() -> pathlib.Path | None:
    """Return this project's local source directory, if installed from one.

    Cached for the life of the process; delegates to
    :func:`get_origin_source_dir` for the project's own distribution.
    """
    return get_origin_source_dir(_PROJECT_DIST_NAME)
141
+
142
+
143
def is_project_editable() -> bool:
    """Report whether this project's distribution is an editable install."""
    origin = get_direct_url_origin(_PROJECT_DIST_NAME)
    return origin is not None and origin.editable
146
+
147
+
148
def find_project_root() -> pathlib.Path:
    """Find the project root directory.

    Preference order: the GITHUB_WORKSPACE checkout directory (set on CI),
    then the Direct URL source directory recorded for this project's own
    distribution, and finally the grandparent of this module's file.
    """
    workspace = os.environ.get("GITHUB_WORKSPACE")
    if workspace:
        return pathlib.Path(workspace)
    source_root = get_project_source_root()
    if source_root:
        return source_root
    return pathlib.Path(__file__).parent.parent
156
+
157
+
158
# Resolved once at import time; see find_project_root() for the lookup order.
PROJECT_ROOT = find_project_root()
159
+
160
+
161
+ def _is_revision_sha(s: str) -> bool:
162
+ return bool(re.match(r"^\b[0-9a-f]{5,40}\b$", s))
163
+
164
+
165
+ def _get_head_commit_id_from_git(source_dir: pathlib.Path) -> str | None:
166
+ """Ask Git to resolve HEAD across different ref storage backends."""
167
+ try:
168
+ proc = subprocess.run(
169
+ ["git", "rev-parse", "HEAD"],
170
+ check=True,
171
+ capture_output=True,
172
+ cwd=source_dir,
173
+ text=True,
174
+ )
175
+ except (FileNotFoundError, subprocess.CalledProcessError):
176
+ return None
177
+
178
+ stdout = proc.stdout
179
+ if not isinstance(stdout, str):
180
+ return None
181
+
182
+ commit_id = stdout.strip()
183
+ if _is_revision_sha(commit_id):
184
+ return commit_id
185
+ return None
186
+
187
+
188
def get_origin_commit_id(dist_name: str) -> str | None:
    """Return the VCS commit id a distribution was installed from, if known.

    Resolution order:

    1. The ``commit_id`` recorded in the Direct URL Origin metadata.
    2. ``git rev-parse HEAD`` run in the recorded source directory.
    3. Manual parsing of ``.git/HEAD`` (and, for symbolic refs,
       ``.git/refs/...`` or ``.git/packed-refs``).

    Returns None when no origin metadata exists, the source directory is
    not a git checkout, or the resolved value does not look like a SHA.
    """
    url_origin = get_direct_url_origin(dist_name)
    if url_origin is None:
        return None

    if url_origin.commit_id is not None:
        return url_origin.commit_id

    source_dir = get_origin_source_dir(dist_name)
    if source_dir is None:
        return None

    git_dir = source_dir / ".git"
    if not git_dir.exists():
        return None

    # Prefer asking git itself; it understands all ref storage backends.
    if commit_id := _get_head_commit_id_from_git(source_dir):
        return commit_id

    # Fallback: parse the ref files by hand (works without a git binary).
    try:
        head = (git_dir / "HEAD").read_text().strip()
    except OSError:
        return None

    if not head:
        return None

    if m := re.match(r"ref:\s*(.*)", head):
        # Symbolic ref: HEAD points at a branch ref such as refs/heads/main.
        head_ref_path = m.group(1)
        head_ref = git_dir / pathlib.Path(head_ref_path)
        if not head_ref.is_relative_to(git_dir):
            # Ref path escapes the .git directory -- refuse to follow it.
            return None

        if head_ref.exists():
            # Loose ref file: its content is the commit SHA.
            try:
                commit_id = head_ref.read_text().strip()
            except OSError:
                return None
        else:
            # Check packed refs
            try:
                packed_refs = (git_dir / "packed-refs").read_text()
            except OSError:
                return None

            # packed-refs lines are "<sha> <refname>"; '#' lines are
            # comments, and peeled '^<sha>' lines have no refname so the
            # empty-ref check skips them.
            for line in packed_refs.splitlines():
                if line.startswith("#"):
                    continue
                sha, _, ref = line.partition(" ")
                ref = ref.strip()
                if not ref:
                    continue
                if ref == head_ref_path:
                    commit_id = sha
                    break
            else:
                # HEAD's ref was in neither the loose nor the packed refs.
                return None
    else:
        # Detached HEAD: the file holds the commit SHA directly.
        commit_id = head

    if not _is_revision_sha(commit_id):
        return None
    else:
        return commit_id
253
+
254
+
255
@functools.cache
def get_project_version_key() -> str:
    """Version string for this project, suffixed with a short commit id.

    Falls back to "0.0.0" when no distribution version can be found; when
    an origin commit id is known, appends ".dev" plus its first 9 hex
    digits.
    """
    version = get_dist_version(_PROJECT_DIST_NAME) or "0.0.0"
    if commit_id := get_origin_commit_id(_PROJECT_DIST_NAME):
        return f"{version}.dev{commit_id[:9]}"
    return version
@@ -0,0 +1,205 @@
1
+ """Shared SQLite schema definitions."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from dataclasses import dataclass
6
+
7
+
8
@dataclass(frozen=True, kw_only=True, slots=True)
class Column:
    """SQLite table column definition."""

    # Column name as it appears in generated DDL and column lists.
    name: str
    # Remainder of the column's DDL: type plus inline constraints,
    # e.g. "TEXT NOT NULL".
    definition: str
14
+
15
+
16
@dataclass(frozen=True, kw_only=True, slots=True)
class Table:
    """SQLite table definition."""

    # Table name used in generated DDL and DML.
    name: str
    # Ordered column definitions.
    columns: tuple[Column, ...]
    # Table-level constraint clauses appended after the columns in
    # CREATE TABLE (e.g. PRIMARY KEY / FOREIGN KEY specifications).
    constraints: tuple[str, ...] = ()
    # Complete CREATE INDEX statements emitted after the CREATE TABLE.
    indexes: tuple[str, ...] = ()
24
+
25
+
26
+ def _column_list(table: Table) -> str:
27
+ return ", ".join(column.name for column in table.columns)
28
+
29
+
30
def _insert_sql(table: Table) -> str:
    """Parameterised INSERT statement covering all of *table*'s columns."""
    names = ", ".join(column.name for column in table.columns)
    marks = ", ".join("?" for _ in table.columns)
    return f"INSERT INTO {table.name} ({names}) VALUES ({marks})"
34
+
35
+
36
def _create_table_sql(table: Table) -> str:
    """Render the CREATE TABLE statement for *table*.

    Column definitions come first, followed by any table-level
    constraints, one entry per line.
    """
    definitions = [
        f"{column.name} {column.definition}" for column in table.columns
    ]
    definitions.extend(table.constraints)
    joined_definitions = ",\n    ".join(definitions)
    return f"CREATE TABLE {table.name} (\n    {joined_definitions}\n);"
43
+
44
+
45
def _schema_sql(tables: tuple[Table, ...]) -> str:
    """Full schema DDL: each table's CREATE TABLE followed by its indexes."""
    statements = [
        statement
        for table in tables
        for statement in (_create_table_sql(table), *table.indexes)
    ]
    return "\n\n".join(statements)
51
+
52
+
53
# Per-token cost columns, spliced into the pricing table. All nullable
# REALs here; the PRICING table adds its own NOT NULL CHECKs.
PRICING_COLUMNS = (
    Column(name="cost_input", definition="REAL"),
    Column(name="cost_output", definition="REAL"),
    Column(name="cost_reasoning", definition="REAL"),
    Column(name="cost_cache_read", definition="REAL"),
    Column(name="cost_cache_write", definition="REAL"),
    Column(name="cost_input_audio", definition="REAL"),
    Column(name="cost_output_audio", definition="REAL"),
)

# Provider configuration override columns, spliced into both the models
# and experimental_modes tables; all optional TEXT.
PROVIDER_CONFIG_COLUMNS = (
    Column(name="provider_npm", definition="TEXT"),
    Column(name="provider_api", definition="TEXT"),
    Column(name="provider_api_shape", definition="TEXT"),
    Column(name="provider_body_json", definition="TEXT"),
    Column(name="provider_headers_json", definition="TEXT"),
)
70
+
71
# Simple key/value store for database-level metadata.
METADATA = Table(
    name="metadata",
    columns=(
        Column(name="key", definition="TEXT PRIMARY KEY"),
        Column(name="value", definition="TEXT NOT NULL"),
    ),
)

# One row per provider; names are additionally unique case-insensitively
# via the NOCASE index.
PROVIDERS = Table(
    name="providers",
    columns=(
        Column(name="id", definition="TEXT PRIMARY KEY"),
        Column(name="name", definition="TEXT NOT NULL"),
        Column(name="npm", definition="TEXT NOT NULL"),
        Column(name="api", definition="TEXT"),
        Column(name="doc", definition="TEXT NOT NULL"),
        Column(name="env", definition="TEXT NOT NULL"),
    ),
    indexes=(
        "CREATE UNIQUE INDEX providers_name_nocase_idx "
        "ON providers(name COLLATE NOCASE);",
    ),
)
94
+
95
# One row per model. full_id is the primary key; id is only non-uniquely
# indexed (the same id may presumably recur across providers -- see
# models_id_idx). Rows are removed with their owning provider via
# ON DELETE CASCADE.
MODELS = Table(
    name="models",
    columns=(
        Column(name="full_id", definition="TEXT PRIMARY KEY"),
        Column(
            name="provider_id",
            definition=(
                "TEXT NOT NULL REFERENCES providers(id) ON DELETE CASCADE"
            ),
        ),
        Column(name="id", definition="TEXT NOT NULL"),
        Column(name="name", definition="TEXT NOT NULL"),
        Column(name="family", definition="TEXT"),
        # NOTE(review): the INTEGER capability columns look like boolean
        # flags stored as 0/1 -- confirm against the ingestion code.
        Column(name="attachment", definition="INTEGER NOT NULL"),
        Column(name="reasoning", definition="INTEGER NOT NULL"),
        Column(name="tool_call", definition="INTEGER NOT NULL"),
        Column(name="interleaved_field", definition="TEXT"),
        Column(name="structured_output", definition="INTEGER"),
        Column(name="temperature", definition="INTEGER"),
        Column(name="knowledge", definition="TEXT"),
        Column(name="open_weights", definition="INTEGER NOT NULL"),
        Column(name="limit_context", definition="INTEGER NOT NULL"),
        Column(name="limit_input", definition="INTEGER"),
        Column(name="limit_output", definition="INTEGER NOT NULL"),
        Column(name="status", definition="TEXT"),
        # Per-model provider configuration overrides.
        *PROVIDER_CONFIG_COLUMNS,
    ),
    indexes=(
        "CREATE INDEX models_provider_id_idx ON models(provider_id);",
        "CREATE INDEX models_id_idx ON models(id);",
    ),
)
127
+
128
# Per-model modality lists, keyed by (model, direction, position) so the
# entries for each direction keep a stable order.
MODEL_MODALITIES = Table(
    name="model_modalities",
    columns=(
        Column(
            name="model_full_id",
            definition=(
                "TEXT NOT NULL REFERENCES models(full_id) ON DELETE CASCADE"
            ),
        ),
        Column(name="direction", definition="TEXT NOT NULL"),
        Column(name="position", definition="INTEGER NOT NULL"),
        Column(name="value", definition="TEXT NOT NULL"),
    ),
    constraints=("PRIMARY KEY (model_full_id, direction, position)",),
)

# Named experimental modes per model, each carrying its own provider
# configuration overrides.
EXPERIMENTAL_MODES = Table(
    name="experimental_modes",
    columns=(
        Column(
            name="model_full_id",
            definition=(
                "TEXT NOT NULL REFERENCES models(full_id) ON DELETE CASCADE"
            ),
        ),
        Column(name="name", definition="TEXT NOT NULL"),
        *PROVIDER_CONFIG_COLUMNS,
    ),
    constraints=("PRIMARY KEY (model_full_id, name)",),
)
158
+
159
# Context-tiered pricing rows per model, optionally scoped to an
# experimental mode (experimental_mode_name NULL means the base model).
# The unique index COALESCEs NULL mode names to '' so each owner gets at
# most one row per min_context tier.
PRICING = Table(
    name="pricing",
    columns=(
        Column(
            name="model_full_id",
            definition=(
                "TEXT NOT NULL REFERENCES models(full_id) ON DELETE CASCADE"
            ),
        ),
        Column(name="experimental_mode_name", definition="TEXT"),
        Column(name="min_context", definition="INTEGER NOT NULL"),
        # Shared per-token cost columns; input/output made mandatory by
        # the CHECKs below.
        *PRICING_COLUMNS,
    ),
    constraints=(
        "CHECK (min_context >= 0)",
        "CHECK (cost_input IS NOT NULL)",
        "CHECK (cost_output IS NOT NULL)",
        "FOREIGN KEY (model_full_id, experimental_mode_name) "
        "REFERENCES experimental_modes(model_full_id, name) "
        "ON DELETE CASCADE",
    ),
    indexes=(
        "CREATE UNIQUE INDEX pricing_owner_min_context_idx "
        "ON pricing(model_full_id, "
        "COALESCE(experimental_mode_name, ''), min_context);",
        "CREATE INDEX pricing_experimental_mode_idx "
        "ON pricing(model_full_id, experimental_mode_name);",
    ),
)
188
+
189
# All tables in dependency order (parents before children) so the DDL in
# CREATE_SCHEMA_SQL can be executed top to bottom.
TABLES = (
    METADATA,
    PROVIDERS,
    MODELS,
    MODEL_MODALITIES,
    EXPERIMENTAL_MODES,
    PRICING,
)

# Pre-rendered column lists and SQL statements derived from the table
# definitions above.
PROVIDER_COLUMNS = _column_list(PROVIDERS)
MODEL_COLUMNS = _column_list(MODELS)
PROVIDER_INSERT_SQL = _insert_sql(PROVIDERS)
MODEL_INSERT_SQL = _insert_sql(MODELS)
MODEL_MODALITY_INSERT_SQL = _insert_sql(MODEL_MODALITIES)
EXPERIMENTAL_MODE_INSERT_SQL = _insert_sql(EXPERIMENTAL_MODES)
PRICING_INSERT_SQL = _insert_sql(PRICING)
CREATE_SCHEMA_SQL = _schema_sql(TABLES)