nemo-evaluator-launcher 0.1.41__py3-none-any.whl → 0.1.67__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. nemo_evaluator_launcher/api/functional.py +55 -5
  2. nemo_evaluator_launcher/api/types.py +21 -14
  3. nemo_evaluator_launcher/cli/ls_task.py +280 -0
  4. nemo_evaluator_launcher/cli/ls_tasks.py +208 -55
  5. nemo_evaluator_launcher/cli/main.py +17 -2
  6. nemo_evaluator_launcher/cli/run.py +43 -52
  7. nemo_evaluator_launcher/common/container_metadata/__init__.py +61 -0
  8. nemo_evaluator_launcher/common/container_metadata/intermediate_repr.py +530 -0
  9. nemo_evaluator_launcher/common/container_metadata/loading.py +1126 -0
  10. nemo_evaluator_launcher/common/container_metadata/registries.py +824 -0
  11. nemo_evaluator_launcher/common/container_metadata/utils.py +63 -0
  12. nemo_evaluator_launcher/common/helpers.py +44 -28
  13. nemo_evaluator_launcher/common/mapping.py +166 -177
  14. nemo_evaluator_launcher/common/printing_utils.py +18 -12
  15. nemo_evaluator_launcher/configs/deployment/nim.yaml +3 -1
  16. nemo_evaluator_launcher/executors/lepton/executor.py +26 -8
  17. nemo_evaluator_launcher/executors/local/executor.py +6 -2
  18. nemo_evaluator_launcher/executors/slurm/executor.py +270 -22
  19. nemo_evaluator_launcher/package_info.py +1 -1
  20. nemo_evaluator_launcher/resources/all_tasks_irs.yaml +17016 -0
  21. nemo_evaluator_launcher/resources/mapping.toml +62 -354
  22. {nemo_evaluator_launcher-0.1.41.dist-info → nemo_evaluator_launcher-0.1.67.dist-info}/METADATA +2 -1
  23. {nemo_evaluator_launcher-0.1.41.dist-info → nemo_evaluator_launcher-0.1.67.dist-info}/RECORD +27 -20
  24. {nemo_evaluator_launcher-0.1.41.dist-info → nemo_evaluator_launcher-0.1.67.dist-info}/WHEEL +0 -0
  25. {nemo_evaluator_launcher-0.1.41.dist-info → nemo_evaluator_launcher-0.1.67.dist-info}/entry_points.txt +0 -0
  26. {nemo_evaluator_launcher-0.1.41.dist-info → nemo_evaluator_launcher-0.1.67.dist-info}/licenses/LICENSE +0 -0
  27. {nemo_evaluator_launcher-0.1.41.dist-info → nemo_evaluator_launcher-0.1.67.dist-info}/top_level.txt +0 -0
nemo_evaluator_launcher/common/container_metadata/utils.py
@@ -0,0 +1,63 @@
+ # SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION. All rights reserved.
+ # SPDX-License-Identifier: Apache-2.0
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #
+ """Utility functions for container metadata processing."""
+
+
+ def parse_container_image(container_image: str) -> tuple[str, str, str, str]:
+     """Parse a container image string into registry type, registry URL, repository, and tag.
+
+     Args:
+         container_image: Container image string (e.g., "nvcr.io/nvidia/eval-factory/simple-evals:25.10")
+
+     Returns:
+         Tuple of (registry_type, registry_url, repository, tag)
+     """
+     # Split tag from image
+     if ":" in container_image:
+         image_part, tag = container_image.rsplit(":", 1)
+     else:
+         image_part = container_image
+         tag = "latest"
+
+     # Parse registry and repository
+     parts = image_part.split("/")
+     if len(parts) < 2:
+         raise ValueError(f"Invalid container image format: {container_image}")
+
+     # Check if first part is a registry (contains '.' or is 'localhost')
+     if "." in parts[0] or parts[0] == "localhost":
+         registry_host = parts[0]
+         # Determine registry type
+         if "gitlab" in registry_host.lower():
+             registry_type = "gitlab"
+         elif "nvcr.io" in registry_host:
+             registry_type = "nvcr"
+         else:
+             registry_type = "nvcr"  # Default to nvcr for other registries
+
+         # Check if registry has a port
+         if ":" in registry_host:
+             registry_url = registry_host
+         else:
+             registry_url = registry_host
+         repository = "/".join(parts[1:])
+     else:
+         # Default registry (Docker Hub)
+         registry_type = "nvcr"
+         registry_url = "registry-1.docker.io"
+         repository = image_part
+
+     return registry_type, registry_url, repository, tag
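
For orientation, a hedged sketch of how the new helper behaves; the expected tuples follow directly from the parsing rules above, and the second image string is an illustrative placeholder:

```python
# Illustrative usage of the new helper (module path from the file list above).
from nemo_evaluator_launcher.common.container_metadata.utils import parse_container_image

# Registry host present, tag present:
assert parse_container_image("nvcr.io/nvidia/eval-factory/simple-evals:25.10") == (
    "nvcr", "nvcr.io", "nvidia/eval-factory/simple-evals", "25.10"
)

# No registry host and no tag: Docker Hub defaults plus "latest".
assert parse_container_image("library/ubuntu") == (
    "nvcr", "registry-1.docker.io", "library/ubuntu", "latest"
)
```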
nemo_evaluator_launcher/common/helpers.py
@@ -14,7 +14,6 @@
  # limitations under the License.
  #
  import base64
- import copy
  import datetime
  from dataclasses import dataclass
  from typing import Optional
@@ -71,6 +70,47 @@ def _set_nested_optionally_overriding(
      temp[keys[-1]] = val
 
 
+ _MIGRATION_MESSAGE = """
+ `overrides` field is no longer supported. Use `nemo_evaluator_config` field instead, e.g.:
+
+ 1. If you are using overrides in your yaml config, replace:
+
+ ```yaml
+ evaluation:
+   overrides:
+     config.params.temperature: 0.6
+     config.params.top_p: 0.95
+ ```
+
+ with:
+
+ ```yaml
+ evaluation:
+   nemo_evaluator_config:
+     config:
+       params:
+         temperature: 0.6
+         top_p: 0.95
+ ```
+
+ 2. If you are using overrides in your cli command, replace:
+
+ ```bash
+ nemo-evaluator-launcher run --config my_config.yaml \\
+   -o evaluation.overrides.config.params.temperature=0.6 \\
+   -o evaluation.overrides.config.params.top_p=0.95
+ ```
+
+ with:
+
+ ```bash
+ nemo-evaluator-launcher run --config my_config.yaml \\
+   -o evaluation.nemo_evaluator_config.config.params.temperature=0.6 \\
+   -o evaluation.nemo_evaluator_config.config.params.top_p=0.95
+ ```
+ """
+
+
  def get_eval_factory_config(
      cfg: DictConfig,
      user_task_config: DictConfig,
@@ -79,17 +119,11 @@ def get_eval_factory_config(
 
      This function extracts the config field similar to how overrides are handled.
 
-     Overrides will start to be deprecated (or not, but at least a warning will be logged).
+     It applies task-level overrides to the global overrides.
      """
 
      if cfg.evaluation.get("overrides") or user_task_config.get("overrides"):
-         # TODO(agronskiy): start removing overrides, test `test_start_deprecating_overrides`
-         # will start failing soon.
-         logger.warning(
-             "We are deprecating using old-style dot-delimited overrides "
-             "in favour of `nemo_evaluator_config` field. Please check "
-             "the documentation."
-         )
+         raise ValueError(_MIGRATION_MESSAGE)
 
      logger.debug("Getting nemo evaluator merged config")
      # Extract config fields similar to overrides - convert to basic Python types first
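
The practical effect of this hunk: configs that still set old-style `overrides` now fail fast instead of logging a deprecation warning. A minimal sketch, assuming the `DictConfig` shapes used elsewhere in this file; the call is illustrative:

```python
from omegaconf import OmegaConf

# Old-style config: get_eval_factory_config(cfg, user_task_config) now raises
# ValueError(_MIGRATION_MESSAGE) instead of logging a warning.
cfg = OmegaConf.create(
    {"evaluation": {"overrides": {"config.params.temperature": 0.6}}}
)

# Migrated config that passes the guard:
cfg = OmegaConf.create(
    {
        "evaluation": {
            "nemo_evaluator_config": {"config": {"params": {"temperature": 0.6}}}
        }
    }
)
```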
@@ -163,6 +197,7 @@ def get_eval_factory_command(
          ["config", "output_dir"],
          "/results",
      )
+     # FIXME(martas): update to api_key_name after 25.12 is released
      _set_nested_optionally_overriding(
          merged_nemo_evaluator_config,
          ["target", "api_endpoint", "api_key"],
@@ -216,18 +251,6 @@ def get_eval_factory_command(
          + "&& $cmd run_eval --run_config config_ef.yaml"
      )
 
-     # NOTE: see note and test about deprecating that.
-     overrides = copy.deepcopy(dict(cfg.evaluation.get("overrides", {})))
-     overrides.update(dict(user_task_config.get("overrides", {})))
-     # NOTE(dfridman): Temporary fix to make sure that the overrides arg is not split into multiple lines.
-     # Consider passing a JSON object on Eval Factory side
-     overrides = {
-         k: (v.strip("\n") if isinstance(v, str) else v) for k, v in overrides.items()
-     }
-     overrides_str = ",".join([f"{k}={v}" for k, v in overrides.items()])
-     if overrides_str:
-         eval_command = f"{eval_command} --overrides {overrides_str}"
-
      # We return both the command and the debugging base64-decoded strings, useful
      # for exposing when building scripts.
      return CmdAndReadableComment(
@@ -257,13 +280,6 @@ def get_endpoint_url(
          if nemo_evaluator_config_url:
              return nemo_evaluator_config_url
 
-         # Being deprecated, see `get_eval_factory_config` message.
-         overrides_old_style_url = merged_nemo_evaluator_config.get("overrides", {}).get(
-             "target.api_endpoint.url", None
-         )
-         if overrides_old_style_url:
-             return overrides_old_style_url
-
          return url
 
      if cfg.deployment.type == "none":
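
With the old-style URL override gone, the endpoint URL can only come from `nemo_evaluator_config` (or the deployment-derived default). A hedged sketch of the surviving configuration path; the key layout is inferred from the removed `target.api_endpoint.url` override key and the URL is a placeholder:

```python
from omegaconf import OmegaConf

# Assumed shape: the URL now travels under nemo_evaluator_config rather than
# the removed overrides["target.api_endpoint.url"] lookup.
cfg = OmegaConf.create(
    {
        "evaluation": {
            "nemo_evaluator_config": {
                "target": {
                    "api_endpoint": {"url": "http://localhost:8000/v1/chat/completions"}
                }
            }
        }
    }
)
```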
nemo_evaluator_launcher/common/mapping.py
@@ -13,222 +13,163 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
  #
- import importlib
  import pathlib
- import sys
- from importlib import resources
- from typing import Any, Optional
-
- import requests
-
- if sys.version_info >= (3, 11):
-     import tomllib
- else:
-     import tomli as tomllib
+ from typing import Any
 
+ from nemo_evaluator_launcher.common.container_metadata import (
+     TaskIntermediateRepresentation,
+     load_tasks_from_tasks_file,
+ )
  from nemo_evaluator_launcher.common.logging_utils import logger
 
- # Configuration constants
- # For below, see docs: https://docs.github.com/en/rest/repos/contents
- MAPPING_URL = "https://raw.githubusercontent.com/NVIDIA-NeMo/Eval/main/packages/nemo-evaluator-launcher/src/nemo_evaluator_launcher/resources/mapping.toml"
- CACHE_DIR = pathlib.Path.home() / ".nemo-evaluator" / "cache"
- CACHE_FILENAME = "mapping.toml"
- INTERNAL_RESOURCES_PKG = "nemo_evaluator_launcher_internal.resources"
-
-
- def _ensure_cache_dir() -> None:
-     """Ensure the cache directory exists."""
-     CACHE_DIR.mkdir(parents=True, exist_ok=True)
-
-
- def _get_cache_file() -> pathlib.Path:
-     """Get the cache file path.
-
-     Returns:
-         pathlib.Path: Path to the cache file.
-     """
-     return CACHE_DIR / CACHE_FILENAME
-
-
- def _download_latest_mapping() -> Optional[bytes]:
-     """Download latest mapping from MAPPING_URL and return raw bytes.
-
-     Returns:
-         Optional[bytes]: Downloaded mapping bytes, or None if download fails.
-     """
-     try:
-         response = requests.get(MAPPING_URL, timeout=10)
-         response.raise_for_status()
-
-         # For GitHub raw URLs, the response content is the file content directly
-         mapping_bytes = response.content
-         assert isinstance(mapping_bytes, bytes)
-
-         logger.debug("Successfully downloaded mapping from remote URL")
-         return mapping_bytes
-     except (requests.RequestException, OSError) as e:
-         logger.warning("Failed to download mapping from remote URL", error=str(e))
-         return None
 
+ def _convert_irs_to_mapping_format(
+     tasks: list[TaskIntermediateRepresentation],
+ ) -> dict[tuple[str, str], dict]:
+     """Convert list of TaskIntermediateRepresentation objects to mapping dict format.
 
- def _load_cached_mapping() -> Optional[dict[Any, Any]]:
-     """Load mapping from cache file.
+     Args:
+         tasks: List of TaskIntermediateRepresentation objects.
+         harnesses_by_name: Optional mapping of harness name -> Harness IR. If provided,
+             adds harness-level metadata (e.g., arch) to each task mapping entry.
 
      Returns:
-         Optional[dict]: Loaded mapping data, or None if loading fails.
+         dict: Mapping of (harness_name, task_name) to dict holding their configuration.
      """
-     cache_file = _get_cache_file()
-     if not cache_file.exists():
-         return None
-
-     try:
-         with open(cache_file, "rb") as f:
-             mapping = tomllib.load(f)
-         logger.debug("Loaded mapping from cache")
-         return mapping  # type: ignore[no-any-return]
-     except (OSError, tomllib.TOMLDecodeError) as e:
-         logger.warning("Failed to load mapping from cache", error=str(e))
-         return None
-
+     mapping: dict[tuple[str, str], dict] = {}
+
+     for task_ir in tasks:
+         harness_name = task_ir.harness
+         task_name = task_ir.name
+         key = (harness_name, task_name)
+
+         if key in mapping:
+             logger.warning(
+                 "Duplicate task key found in IRs, keeping first occurrence",
+                 harness=harness_name,
+                 task=task_name,
+             )
+             continue
+
+         # Extract endpoint_type from defaults.config.supported_endpoint_types
+         defaults = task_ir.defaults or {}
+         config = defaults.get("config", {})
+         supported_endpoint_types = config.get("supported_endpoint_types", ["chat"])
+         endpoint_type = (
+             supported_endpoint_types[0] if supported_endpoint_types else "chat"
+         )
 
- def _save_mapping_to_cache(mapping_bytes: bytes) -> None:
-     """Save mapping to cache file.
+         # Extract type from defaults.config.type
+         task_type = config.get("type", "")
 
-     Args:
-         mapping_bytes: Mapping data to save.
-     """
-     try:
-         _ensure_cache_dir()
-         cache_file = _get_cache_file()
+         # Build mapping entry
+         mapping[key] = {
+             "task": task_name,
+             "harness": harness_name,
+             "endpoint_type": endpoint_type,
+             "container": task_ir.container,
+         }
 
-         # Save the mapping data
-         with open(cache_file, "wb") as f:
-             f.write(mapping_bytes)
+         if task_ir.container_arch:
+             mapping[key]["arch"] = task_ir.container_arch
 
-     except OSError as e:
-         logger.warning("Failed to save mapping to cache", error=str(e))
+         # Backwards-compatible enhancement: keep full IR defaults available.
+         # Existing code uses flattened defaults (excluding `config`) below; this adds a
+         # new field without changing any existing keys.
+         mapping[key]["defaults"] = defaults
 
+         # Backwards-compatible enhancement: surface command explicitly if present.
+         # Note: `command` is already included via flattened defaults merge, but
+         # keeping it explicit makes downstream usage simpler.
+         if "command" in defaults and "command" not in mapping[key]:
+             mapping[key]["command"] = defaults["command"]
 
- def _load_packaged_resource(
-     resource_name: str, pkg_name: str = "nemo_evaluator_launcher.resources"
- ) -> dict[str, Any]:
-     """Load a resource from the packaged resources.
+         # Add description if available
+         if task_ir.description:
+             mapping[key]["description"] = task_ir.description
 
-     Args:
-         resource_name: The name of the resource to load.
-     """
-     try:
-         resource_toml: dict[str, Any] = {}
-         with resources.files(pkg_name).joinpath(resource_name).open("rb") as f:
-             resource_toml = tomllib.load(f)
-         logger.info(
-             "Loaded resource from packaged file", resource=resource_name, pkg=pkg_name
-         )
-         return resource_toml
-     except (OSError, tomllib.TOMLDecodeError) as e:
-         logger.error(
-             "Failed to load from packaged file",
-             resource=resource_name,
-             pkg=pkg_name,
-             error=str(e),
-         )
-         raise RuntimeError(f"Failed to load {resource_name} from packaged file") from e
+         # Add type if available
+         if task_type:
+             mapping[key]["type"] = task_type
 
+         # Add container_digest if available
+         if task_ir.container_digest:
+             mapping[key]["container_digest"] = task_ir.container_digest
 
- def _process_mapping(mapping_toml: dict) -> dict:
-     """Process the raw mapping TOML into the expected format.
+         # Merge defaults (flattened, excluding config which is already processed)
+         defaults_copy = {k: v for k, v in defaults.items() if k != "config"}
+         mapping[key].update(defaults_copy)
 
-     Args:
-         mapping_toml: Raw mapping TOML data.
-     Returns:
-         dict: Processed mapping in the expected format.
-     """
-     mapping = {}
-     for harness_name, harness_data in mapping_toml.items():
-         assert isinstance(harness_data["tasks"], dict)
-         for endpoint_type, harness_tasks in harness_data["tasks"].items():
-             assert isinstance(harness_tasks, dict)
-             for task_name, task_data in harness_tasks.items():
-                 assert isinstance(task_data, dict)
-                 key = (harness_name, task_name)
-                 if key in mapping:
-                     raise KeyError(
-                         f"(harness,task)-tuple key {repr(key)} already exists in the mapping"
-                     )
-                 mapping[key] = {
-                     "task": task_name,
-                     "harness": harness_name,
-                     "container": harness_data["container"],
-                     "endpoint_type": endpoint_type,
-                 }
-                 for task_data_key in task_data.keys():
-                     if task_data_key in mapping[key]:
-                         raise KeyError(
-                             f"{repr(task_data_key)} is not allowed as key under {repr(key)} in the mapping"
-                         )
-                 mapping[key].update(task_data)
      return mapping
 
 
  def load_tasks_mapping(
-     latest: bool = False,
      mapping_toml: pathlib.Path | str | None = None,
+     *,
+     from_container: str | None = None,
  ) -> dict[tuple[str, str], dict]:
      """Load tasks mapping.
 
      The function obeys the following priority rules:
-     1. (Default) If latest==False and mapping_toml is None -> load packaged mapping.
-     2. If latest==True -> fetch MAPPING_URL, save to cache, load it.
-     3. If mapping_toml is not None -> load mapping from this path.
+     1. If from_container is not None -> extract framework.yml from that container and build mapping from the resulting IRs.
+     2. Otherwise -> load packaged IRs (all_tasks_irs.yaml) and build mapping from those IRs.
+
+     Args:
+         mapping_toml: Deprecated. mapping.toml is no longer supported (IR-only mode).
+         from_container: Optional container image identifier. If provided, tasks are loaded on-the-fly from that container.
 
      Returns:
          dict: Mapping of (harness_name, task_name) to dict holding their configuration.
 
      """
-     local_mapping: dict = {}
-     if latest:
-         mapping_bytes = _download_latest_mapping()
-         if mapping_bytes:
-             _save_mapping_to_cache(mapping_bytes)
-             local_mapping = _process_mapping(
-                 tomllib.loads(mapping_bytes.decode("utf-8"))
+     if mapping_toml is not None:
+         raise ValueError(
+             "mapping_toml is no longer supported. This project has switched to packaged IRs (all_tasks_irs.yaml)."
+         )
+
+     # Explicit container path: extract tasks from container and return mapping built from IRs.
+     # This bypasses packaged IRs.
+     if from_container is not None:
+         try:
+             # Optional dependency path; importing may fail in "IR-only" environments.
+             from nemo_evaluator_launcher.common.container_metadata import (
+                 load_tasks_from_container,
+             )
+         except ModuleNotFoundError as e:
+             raise RuntimeError(
+                 "Loading tasks from a container requires optional dependencies. "
+                 "Install nemo-evaluator-launcher with the full runtime dependencies."
+             ) from e
+
+         tasks = load_tasks_from_container(from_container)
+         if not tasks:
+             logger.warning(
+                 "No tasks loaded from container via container-metadata",
+                 container=from_container,
              )
          else:
-             # Fallback to cached mapping; raise only if cache is missing/invalid
-             cached = _load_cached_mapping()
-             if cached:
-                 local_mapping = _process_mapping(cached)
-             else:
-                 raise RuntimeError("could not download latest mapping")
-
-     elif mapping_toml is not None:
-         with open(mapping_toml, "rb") as f:
-             local_mapping = _process_mapping(tomllib.load(f))
-     else:
-         local_mapping = _process_mapping(_load_packaged_resource(CACHE_FILENAME))
+             logger.info(
+                 "Loaded tasks from container via container-metadata",
+                 container=from_container,
+                 num_tasks=len(tasks),
+             )
 
-     # TODO: make more elegant. We consider it ok to avoid a fully-blown plugin system.
-     # Check if nemo_evaluator_launcher_internal is available and load its mapping.toml
-     # CAVEAT: lazy-loading here, not somewhere top level, is important, to ensure
-     # order of package initialization.
-     try:
-         importlib.import_module("nemo_evaluator_launcher_internal")
-         logger.debug("Internal package available, loading internal mapping")
-         internal_mapping = _process_mapping(
-             _load_packaged_resource(CACHE_FILENAME, INTERNAL_RESOURCES_PKG)
-         )
+         return _convert_irs_to_mapping_format(tasks)
 
-         # Merge internal mapping with local mapping (internal takes precedence)
-         local_mapping.update(internal_mapping)
-         logger.info(
-             "Successfully merged internal mapping", internal_tasks=len(internal_mapping)
-         )
-     except ImportError:
-         logger.debug("Internal package not available, using external mapping only")
+     try:
+         tasks, mapping_verified = load_tasks_from_tasks_file()
      except Exception as e:
-         logger.warning("Failed to load internal mapping", error=str(e))
+         raise RuntimeError("Failed to load tasks from packaged IRs") from e
+
+     if not tasks:
+         raise RuntimeError("No tasks available in packaged IRs (all_tasks_irs.yaml)")
 
-     return local_mapping
+     logger.info(
+         "Loaded tasks from packaged IRs",
+         num_tasks=len(tasks),
+         mapping_verified=mapping_verified,
+     )
+     return _convert_irs_to_mapping_format(tasks)
 
 
  def get_task_from_mapping(query: str, mapping: dict[Any, Any]) -> dict[Any, Any]:
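
Both loading paths of the rewritten `load_tasks_mapping` in one hedged sketch; the signature comes from the hunk above and the container image is a placeholder:

```python
from nemo_evaluator_launcher.common.mapping import load_tasks_mapping

# Default: build the mapping from the packaged IRs (all_tasks_irs.yaml).
mapping = load_tasks_mapping()

# Container path: extract framework.yml from a specific image on the fly.
container_mapping = load_tasks_mapping(
    from_container="nvcr.io/nvidia/eval-factory/simple-evals:25.10"  # placeholder image
)

# Passing mapping_toml now raises ValueError (IR-only mode).
```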
@@ -293,3 +234,51 @@ def get_task_from_mapping(query: str, mapping: dict[Any, Any]) -> dict[Any, Any]
          f"invalid query={repr(query)} for task mapping,"
          " it must contain exactly zero or one occurrence of '.' character"
      )
+
+
+ def _minimal_task_definition(task_query: str, *, container: str) -> dict[str, Any]:
+     """Create a minimal task definition when task is not known in any mapping."""
+     if task_query.count(".") == 1:
+         harness, task = task_query.split(".")
+     else:
+         harness, task = "", task_query
+
+     # Default to chat; most configs and endpoints use chat unless explicitly known.
+     return {
+         "task": task,
+         "harness": harness,
+         "endpoint_type": "chat",
+         "container": container,
+     }
+
+
+ def get_task_definition_for_job(
+     *,
+     task_query: str,
+     base_mapping: dict[Any, Any],
+     container: str | None = None,
+ ) -> dict[str, Any]:
+     """Resolve task definition for a job.
+
+     If a container is provided, tasks are loaded from that container (using
+     container-metadata) and we attempt to resolve the task from that mapping.
+     If the task isn't found in the container, we warn and return a minimal
+     task definition so submission can proceed.
+     """
+     if not container:
+         return get_task_from_mapping(task_query, base_mapping)
+
+     # `load_tasks_mapping(from_container=...)` uses container-metadata extraction,
+     # which already has its own caching (e.g., caching extracted framework.yml).
+     container_mapping = load_tasks_mapping(from_container=container)
+
+     try:
+         return get_task_from_mapping(task_query, container_mapping)
+     except ValueError as e:
+         logger.warning(
+             "Task not found in provided container; proceeding with minimal task definition",
+             task=task_query,
+             container=container,
+             error=str(e),
+         )
+         return _minimal_task_definition(task_query, container=container)
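
A hedged usage sketch of the new resolution helper; the keyword-only signature is taken from the hunk above, while the task query and image are placeholders:

```python
from nemo_evaluator_launcher.common.mapping import (
    get_task_definition_for_job,
    load_tasks_mapping,
)

base = load_tasks_mapping()

# Without a container: plain lookup in the packaged mapping.
task_def = get_task_definition_for_job(
    task_query="my_harness.my_task",  # placeholder query
    base_mapping=base,
)

# With a container: resolved against that image's tasks; unknown tasks fall back
# to a minimal chat-endpoint definition so submission can proceed.
task_def = get_task_definition_for_job(
    task_query="my_harness.my_task",
    base_mapping=base,
    container="nvcr.io/nvidia/eval-factory/simple-evals:25.10",  # placeholder image
)
```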
nemo_evaluator_launcher/common/printing_utils.py
@@ -62,39 +62,45 @@ _CODES: dict[str, str] = dict(
      reset="\033[0m",
  )
 
- # If the colors are disabled, we null-out all the codes.
- if _is_color_disabled():
-     for c in _CODES.keys():
-         _CODES[c] = ""
+
+ def _apply(code_key: str, s: str) -> str:
+     """Apply an ANSI code if colors are enabled.
+
+     Note: Color enablement is evaluated at call time so tests that redirect
+     stdout (e.g. to StringIO) correctly disable ANSI sequences.
+     """
+     if _is_color_disabled():
+         return s
+     return _CODES[code_key] + s + _CODES["reset"]
 
 
  def green(s: str) -> str:
-     return _CODES["green"] + s + _CODES["reset"]
+     return _apply("green", s)
 
 
  def red(s: str) -> str:
-     return _CODES["red"] + s + _CODES["reset"]
+     return _apply("red", s)
 
 
  def red_bg(s: str) -> str:
-     return _CODES["red_bg"] + s + _CODES["reset"]
+     return _apply("red_bg", s)
 
 
  def cyan(s: str) -> str:
-     return _CODES["cyan"] + s + _CODES["reset"]
+     return _apply("cyan", s)
 
 
  def yellow(s: str) -> str:
-     return _CODES["yellow"] + s + _CODES["reset"]
+     return _apply("yellow", s)
 
 
  def magenta(s: str) -> str:
-     return _CODES["magenta"] + s + _CODES["reset"]
+     return _apply("magenta", s)
 
 
  def grey(s: str) -> str:
-     return _CODES["grey"] + s + _CODES["reset"]
+     return _apply("grey", s)
 
 
  def bold(s: str) -> str:
-     return _CODES["bold"] + s + _CODES["reset"]
+     return _apply("bold", s)
nemo_evaluator_launcher/configs/deployment/nim.yaml
@@ -18,13 +18,15 @@ image: ??? # e.g., nvcr.io/nim/meta/llama-3.1-8b-instruct:1.8.6
  served_model_name: ???
  port: 8000
 
+ command: /opt/nim/start_server.sh
+
  # NIM containers use default entrypoint - no custom command needed
  # Configuration is done via environment variables in lepton_config
 
  endpoints:
    chat: /v1/chat/completions
    completions: /v1/completions
-   health: /health
+   health: /v1/health/ready
  # Note: Environment variables should be configured in lepton_config.envs
  # Auto-derived environment variables from deployment config:
  # - SERVED_MODEL_NAME (from served_model_name)
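
The readiness path moves from `/health` to NIM's `/v1/health/ready`. An illustrative probe against a locally deployed endpoint on the configured port; host and port are placeholders:

```python
import requests

# Returns 200 once the NIM server is ready; the path matches the updated config above.
resp = requests.get("http://localhost:8000/v1/health/ready", timeout=5)
resp.raise_for_status()
```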