nemo-evaluator-launcher 0.1.19__py3-none-any.whl → 0.1.56__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
Files changed (38)
  1. nemo_evaluator_launcher/api/functional.py +159 -5
  2. nemo_evaluator_launcher/cli/logs.py +102 -0
  3. nemo_evaluator_launcher/cli/ls_task.py +280 -0
  4. nemo_evaluator_launcher/cli/ls_tasks.py +208 -55
  5. nemo_evaluator_launcher/cli/main.py +29 -2
  6. nemo_evaluator_launcher/cli/run.py +114 -16
  7. nemo_evaluator_launcher/cli/version.py +26 -23
  8. nemo_evaluator_launcher/common/container_metadata/__init__.py +61 -0
  9. nemo_evaluator_launcher/common/container_metadata/intermediate_repr.py +530 -0
  10. nemo_evaluator_launcher/common/container_metadata/loading.py +1126 -0
  11. nemo_evaluator_launcher/common/container_metadata/registries.py +824 -0
  12. nemo_evaluator_launcher/common/container_metadata/utils.py +63 -0
  13. nemo_evaluator_launcher/common/helpers.py +200 -51
  14. nemo_evaluator_launcher/common/logging_utils.py +16 -5
  15. nemo_evaluator_launcher/common/mapping.py +341 -155
  16. nemo_evaluator_launcher/common/printing_utils.py +25 -12
  17. nemo_evaluator_launcher/configs/deployment/sglang.yaml +4 -2
  18. nemo_evaluator_launcher/configs/deployment/trtllm.yaml +2 -3
  19. nemo_evaluator_launcher/configs/deployment/vllm.yaml +0 -1
  20. nemo_evaluator_launcher/configs/execution/slurm/default.yaml +14 -0
  21. nemo_evaluator_launcher/executors/base.py +31 -1
  22. nemo_evaluator_launcher/executors/lepton/deployment_helpers.py +36 -1
  23. nemo_evaluator_launcher/executors/lepton/executor.py +107 -9
  24. nemo_evaluator_launcher/executors/local/executor.py +383 -24
  25. nemo_evaluator_launcher/executors/local/run.template.sh +54 -2
  26. nemo_evaluator_launcher/executors/slurm/executor.py +559 -64
  27. nemo_evaluator_launcher/executors/slurm/proxy.cfg.template +26 -0
  28. nemo_evaluator_launcher/exporters/utils.py +32 -46
  29. nemo_evaluator_launcher/package_info.py +1 -1
  30. nemo_evaluator_launcher/resources/all_tasks_irs.yaml +17016 -0
  31. nemo_evaluator_launcher/resources/mapping.toml +64 -315
  32. {nemo_evaluator_launcher-0.1.19.dist-info → nemo_evaluator_launcher-0.1.56.dist-info}/METADATA +4 -3
  33. nemo_evaluator_launcher-0.1.56.dist-info/RECORD +69 -0
  34. {nemo_evaluator_launcher-0.1.19.dist-info → nemo_evaluator_launcher-0.1.56.dist-info}/entry_points.txt +1 -0
  35. nemo_evaluator_launcher-0.1.19.dist-info/RECORD +0 -60
  36. {nemo_evaluator_launcher-0.1.19.dist-info → nemo_evaluator_launcher-0.1.56.dist-info}/WHEEL +0 -0
  37. {nemo_evaluator_launcher-0.1.19.dist-info → nemo_evaluator_launcher-0.1.56.dist-info}/licenses/LICENSE +0 -0
  38. {nemo_evaluator_launcher-0.1.19.dist-info → nemo_evaluator_launcher-0.1.56.dist-info}/top_level.txt +0 -0
--- a/nemo_evaluator_launcher/common/mapping.py
+++ b/nemo_evaluator_launcher/common/mapping.py
@@ -13,222 +13,360 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-import importlib
 import pathlib
-import sys
-from importlib import resources
-from typing import Any, Optional
+from typing import Any
 
-import requests
-
-if sys.version_info >= (3, 11):
-    import tomllib
-else:
-    import tomli as tomllib
+import yaml
 
+from nemo_evaluator_launcher.common.container_metadata import (
+    TaskIntermediateRepresentation,
+    load_tasks_from_tasks_file,
+)
 from nemo_evaluator_launcher.common.logging_utils import logger
 
-# Configuration constants
-# For below, see docs: https://docs.github.com/en/rest/repos/contents
-MAPPING_URL = "https://raw.githubusercontent.com/NVIDIA-NeMo/Eval/main/packages/nemo-evaluator-launcher/src/nemo_evaluator_launcher/resources/mapping.toml"
-CACHE_DIR = pathlib.Path.home() / ".nemo-evaluator" / "cache"
-CACHE_FILENAME = "mapping.toml"
-INTERNAL_RESOURCES_PKG = "nemo_evaluator_launcher_internal.resources"
-
 
-def _ensure_cache_dir() -> None:
-    """Ensure the cache directory exists."""
-    CACHE_DIR.mkdir(parents=True, exist_ok=True)
+def _load_packaged_resource(*_args: Any, **_kwargs: Any) -> dict[str, Any]:
+    """Deprecated: mapping.toml support was removed in favor of packaged IRs."""
+    raise RuntimeError(
+        "mapping.toml is no longer supported. Use packaged IRs (all_tasks_irs.yaml) instead."
+    )
 
 
-def _get_cache_file() -> pathlib.Path:
-    """Get the cache file path.
+def _process_mapping(mapping_toml: dict) -> dict:
+    """Process the raw mapping TOML into the expected format.
 
+    Args:
+        mapping_toml: Raw mapping TOML data.
     Returns:
-        pathlib.Path: Path to the cache file.
+        dict: Processed mapping in the expected format.
     """
-    return CACHE_DIR / CACHE_FILENAME
+    mapping = {}
+    for harness_name, harness_data in mapping_toml.items():
+        # Skip entries that don't have the expected structure
+        if not isinstance(harness_data, dict):
+            logger.warning(
+                "Skipping invalid harness entry",
+                harness_name=harness_name,
+                reason="harness_data is not a dict",
+            )
+            continue
 
+        # Check if tasks field exists
+        if "tasks" not in harness_data:
+            logger.warning(
+                "Skipping harness entry without tasks",
+                harness_name=harness_name,
+            )
+            continue
 
-def _download_latest_mapping() -> Optional[bytes]:
-    """Download latest mapping from MAPPING_URL and return raw bytes.
+        if not isinstance(harness_data["tasks"], dict):
+            logger.warning(
+                "Skipping invalid harness entry",
+                harness_name=harness_name,
+                reason="tasks is not a dict",
+            )
+            continue
+
+        # Get container, which may be optional
+        container = harness_data.get("container")
+        if not container:
+            logger.debug(
+                "Harness entry without container",
+                harness_name=harness_name,
+            )
 
-    Returns:
-        Optional[bytes]: Downloaded mapping bytes, or None if download fails.
-    """
-    try:
-        response = requests.get(MAPPING_URL, timeout=10)
-        response.raise_for_status()
+        for endpoint_type, harness_tasks in harness_data["tasks"].items():
+            if not isinstance(harness_tasks, dict):
+                logger.warning(
+                    "Skipping invalid endpoint type",
+                    harness_name=harness_name,
+                    endpoint_type=endpoint_type,
+                    reason="harness_tasks is not a dict",
+                )
+                continue
 
-        # For GitHub raw URLs, the response content is the file content directly
-        mapping_bytes = response.content
-        assert isinstance(mapping_bytes, bytes)
+            for task_name, task_data in harness_tasks.items():
+                if not isinstance(task_data, dict):
+                    logger.warning(
+                        "Skipping invalid task entry",
+                        harness_name=harness_name,
+                        task_name=task_name,
+                        reason="task_data is not a dict",
+                    )
+                    continue
 
-        logger.debug("Successfully downloaded mapping from remote URL")
-        return mapping_bytes
-    except (requests.RequestException, OSError) as e:
-        logger.warning("Failed to download mapping from remote URL", error=str(e))
-        return None
+                key = (harness_name, task_name)
+                if key in mapping:
+                    raise KeyError(
+                        f"(harness,task)-tuple key {repr(key)} already exists in the mapping"
+                    )
 
+                # Validate required fields exist in task_data
+                # task_name and harness_name are already validated above
+                # endpoint_type is validated as a key in harness_tasks
+                # task_data must be a dict (validated above)
 
-def _load_cached_mapping() -> Optional[dict[Any, Any]]:
-    """Load mapping from cache file.
+                mapping[key] = {
+                    "task": task_name,
+                    "harness": harness_name,
+                    "endpoint_type": endpoint_type,
+                }
+                # Only add container if it exists
+                if container:
+                    mapping[key]["container"] = container
 
-    Returns:
-        Optional[dict]: Loaded mapping data, or None if loading fails.
-    """
-    cache_file = _get_cache_file()
-    if not cache_file.exists():
-        return None
+                # Validate task_data keys before updating
+                for task_data_key in task_data.keys():
+                    if task_data_key in mapping[key]:
+                        raise KeyError(
+                            f"{repr(task_data_key)} is not allowed as key under {repr(key)} in the mapping"
+                        )
+                    # Validate that task_data values are valid types (basic check)
+                    if task_data_key not in ("description", "type") and not isinstance(
+                        task_data[task_data_key],
+                        (str, int, float, bool, dict, list, type(None)),
+                    ):
+                        logger.warning(
+                            "Unexpected value type in task_data",
+                            harness_name=harness_name,
+                            task_name=task_name,
+                            key=task_data_key,
+                            value_type=type(task_data[task_data_key]).__name__,
+                        )
 
-    try:
-        with open(cache_file, "rb") as f:
-            mapping = tomllib.load(f)
-        logger.debug("Loaded mapping from cache")
-        return mapping  # type: ignore[no-any-return]
-    except (OSError, tomllib.TOMLDecodeError) as e:
-        logger.warning("Failed to load mapping from cache", error=str(e))
-        return None
+                mapping[key].update(task_data)
+    return mapping
 
 
-def _save_mapping_to_cache(mapping_bytes: bytes) -> None:
-    """Save mapping to cache file.
+def _extract_tasks_from_framework_yml(
+    framework_yml_content: str, harness_name: str, container: str
+) -> dict[tuple[str, str], dict]:
+    """Extract tasks from framework.yml content and return as mapping entries.
 
     Args:
-        mapping_bytes: Mapping data to save.
+        framework_yml_content: YAML content from framework.yml file
+        harness_name: Name of the harness
+        container: Container image string
+
+    Returns:
+        Dictionary mapping (harness_name, task_name) to task configuration
     """
+    tasks = {}
     try:
-        _ensure_cache_dir()
-        cache_file = _get_cache_file()
+        framework_data = yaml.safe_load(framework_yml_content)
+        if not framework_data or "evaluations" not in framework_data:
+            logger.warning(
+                "No evaluations found in framework.yml",
+                harness=harness_name,
+                container=container,
+            )
+            return tasks
 
-        # Save the mapping data
-        with open(cache_file, "wb") as f:
-            f.write(mapping_bytes)
+        evaluations = framework_data.get("evaluations", [])
+        for eval_config in evaluations:
+            task_name = eval_config.get("name")
+            description = eval_config.get("description", "")
 
-    except OSError as e:
-        logger.warning("Failed to save mapping to cache", error=str(e))
+            if not task_name:
+                continue
 
+            # Extract endpoint types from the evaluation config
+            defaults = eval_config.get("defaults", {})
+            config = defaults.get("config", {})
+            supported_endpoint_types = config.get("supported_endpoint_types", ["chat"])
+            task_type = config.get("type", "")  # Extract type from defaults.config.type
 
-def _load_packaged_resource(
-    resource_name: str, pkg_name: str = "nemo_evaluator_launcher.resources"
-) -> dict[str, Any]:
-    """Load a resource from the packaged resources.
+            # Use first endpoint type (mapping key is (harness, task), so one entry per task)
+            endpoint_type = (
+                supported_endpoint_types[0] if supported_endpoint_types else "chat"
+            )
+
+            key = (harness_name, task_name)
+            # Only add if not already in mapping (don't override existing entries)
+            if key not in tasks:
+                tasks[key] = {
+                    "task": task_name,
+                    "harness": harness_name,
+                    "container": container,
+                    "endpoint_type": endpoint_type,
+                    "description": description,
+                    "type": task_type,  # Store type from defaults.config.type
+                }
+                # Merge any additional config from defaults
+                if defaults:
+                    tasks[key].update(defaults)
 
-    Args:
-        resource_name: The name of the resource to load.
-    """
-    try:
-        resource_toml: dict[str, Any] = {}
-        with resources.files(pkg_name).joinpath(resource_name).open("rb") as f:
-            resource_toml = tomllib.load(f)
         logger.info(
-            "Loaded resource from packaged file", resource=resource_name, pkg=pkg_name
+            "Extracted tasks from framework.yml",
+            harness=harness_name,
+            container=container,
+            num_tasks=len(tasks),
         )
-        return resource_toml
-    except (OSError, tomllib.TOMLDecodeError) as e:
-        logger.error(
-            "Failed to load from packaged file",
-            resource=resource_name,
-            pkg=pkg_name,
+    except yaml.YAMLError as e:
+        logger.warning(
+            "Failed to parse framework.yml",
+            harness=harness_name,
+            container=container,
+            error=str(e),
+        )
+    except Exception as e:
+        logger.warning(
+            "Error extracting tasks from framework.yml",
+            harness=harness_name,
+            container=container,
             error=str(e),
         )
-        raise RuntimeError(f"Failed to load {resource_name} from packaged file") from e
 
+    return tasks
 
-def _process_mapping(mapping_toml: dict) -> dict:
-    """Process the raw mapping TOML into the expected format.
+
+def _convert_irs_to_mapping_format(
+    tasks: list[TaskIntermediateRepresentation],
+) -> dict[tuple[str, str], dict]:
+    """Convert list of TaskIntermediateRepresentation objects to mapping dict format.
 
     Args:
-        mapping_toml: Raw mapping TOML data.
+        tasks: List of TaskIntermediateRepresentation objects.
+        harnesses_by_name: Optional mapping of harness name -> Harness IR. If provided,
+            adds harness-level metadata (e.g., arch) to each task mapping entry.
+
     Returns:
-        dict: Processed mapping in the expected format.
+        dict: Mapping of (harness_name, task_name) to dict holding their configuration.
     """
-    mapping = {}
-    for harness_name, harness_data in mapping_toml.items():
-        assert isinstance(harness_data["tasks"], dict)
-        for endpoint_type, harness_tasks in harness_data["tasks"].items():
-            assert isinstance(harness_tasks, dict)
-            for task_name, task_data in harness_tasks.items():
-                assert isinstance(task_data, dict)
-                key = (harness_name, task_name)
-                if key in mapping:
-                    raise KeyError(
-                        f"(harness,task)-tuple key {repr(key)} already exists in the mapping"
-                    )
-                mapping[key] = {
-                    "task": task_name,
-                    "harness": harness_name,
-                    "container": harness_data["container"],
-                    "endpoint_type": endpoint_type,
-                }
-                for task_data_key in task_data.keys():
-                    if task_data_key in mapping[key]:
-                        raise KeyError(
-                            f"{repr(task_data_key)} is not allowed as key under {repr(key)} in the mapping"
-                        )
-                mapping[key].update(task_data)
+    mapping: dict[tuple[str, str], dict] = {}
+
+    for task_ir in tasks:
+        harness_name = task_ir.harness
+        task_name = task_ir.name
+        key = (harness_name, task_name)
+
+        if key in mapping:
+            logger.warning(
+                "Duplicate task key found in IRs, keeping first occurrence",
+                harness=harness_name,
+                task=task_name,
+            )
+            continue
+
+        # Extract endpoint_type from defaults.config.supported_endpoint_types
+        defaults = task_ir.defaults or {}
+        config = defaults.get("config", {})
+        supported_endpoint_types = config.get("supported_endpoint_types", ["chat"])
+        endpoint_type = (
+            supported_endpoint_types[0] if supported_endpoint_types else "chat"
+        )
+
+        # Extract type from defaults.config.type
+        task_type = config.get("type", "")
+
+        # Build mapping entry
+        mapping[key] = {
+            "task": task_name,
+            "harness": harness_name,
+            "endpoint_type": endpoint_type,
+            "container": task_ir.container,
+        }
+
+        if task_ir.container_arch:
+            mapping[key]["arch"] = task_ir.container_arch
+
+        # Backwards-compatible enhancement: keep full IR defaults available.
+        # Existing code uses flattened defaults (excluding `config`) below; this adds a
+        # new field without changing any existing keys.
+        mapping[key]["defaults"] = defaults
+
+        # Backwards-compatible enhancement: surface command explicitly if present.
+        # Note: `command` is already included via flattened defaults merge, but
+        # keeping it explicit makes downstream usage simpler.
+        if "command" in defaults and "command" not in mapping[key]:
+            mapping[key]["command"] = defaults["command"]
+
+        # Add description if available
+        if task_ir.description:
+            mapping[key]["description"] = task_ir.description
+
+        # Add type if available
+        if task_type:
+            mapping[key]["type"] = task_type
+
+        # Add container_digest if available
+        if task_ir.container_digest:
+            mapping[key]["container_digest"] = task_ir.container_digest
+
+        # Merge defaults (flattened, excluding config which is already processed)
+        defaults_copy = {k: v for k, v in defaults.items() if k != "config"}
+        mapping[key].update(defaults_copy)
+
     return mapping
 
 
 def load_tasks_mapping(
-    latest: bool = False,
     mapping_toml: pathlib.Path | str | None = None,
+    *,
+    from_container: str | None = None,
 ) -> dict[tuple[str, str], dict]:
     """Load tasks mapping.
 
     The function obeys the following priority rules:
-    1. (Default) If latest==False and mapping_toml is None -> load packaged mapping.
-    2. If latest==True -> fetch MAPPING_URL, save to cache, load it.
-    3. If mapping_toml is not None -> load mapping from this path.
+    1. If from_container is not None -> extract framework.yml from that container and build mapping from the resulting IRs.
+    2. Otherwise -> load packaged IRs (all_tasks_irs.yaml) and build mapping from those IRs.
+
+    Args:
+        mapping_toml: Deprecated. mapping.toml is no longer supported (IR-only mode).
+        from_container: Optional container image identifier. If provided, tasks are loaded on-the-fly from that container.
 
     Returns:
         dict: Mapping of (harness_name, task_name) to dict holding their configuration.
 
     """
-    local_mapping: dict = {}
-    if latest:
-        mapping_bytes = _download_latest_mapping()
-        if mapping_bytes:
-            _save_mapping_to_cache(mapping_bytes)
-            local_mapping = _process_mapping(
-                tomllib.loads(mapping_bytes.decode("utf-8"))
+    if mapping_toml is not None:
+        raise ValueError(
+            "mapping_toml is no longer supported. This project has switched to packaged IRs (all_tasks_irs.yaml)."
+        )
+
+    # Explicit container path: extract tasks from container and return mapping built from IRs.
+    # This bypasses packaged IRs.
+    if from_container is not None:
+        try:
+            # Optional dependency path; importing may fail in "IR-only" environments.
+            from nemo_evaluator_launcher.common.container_metadata import (
+                load_tasks_from_container,
+            )
+        except ModuleNotFoundError as e:
+            raise RuntimeError(
+                "Loading tasks from a container requires optional dependencies. "
+                "Install nemo-evaluator-launcher with the full runtime dependencies."
+            ) from e
+
+        tasks = load_tasks_from_container(from_container)
+        if not tasks:
+            logger.warning(
+                "No tasks loaded from container via container-metadata",
+                container=from_container,
             )
         else:
-            # Fallback to cached mapping; raise only if cache is missing/invalid
-            cached = _load_cached_mapping()
-            if cached:
-                local_mapping = _process_mapping(cached)
-            else:
-                raise RuntimeError("could not download latest mapping")
-
-    elif mapping_toml is not None:
-        with open(mapping_toml, "rb") as f:
-            local_mapping = _process_mapping(tomllib.load(f))
-    else:
-        local_mapping = _process_mapping(_load_packaged_resource(CACHE_FILENAME))
+            logger.info(
+                "Loaded tasks from container via container-metadata",
+                container=from_container,
+                num_tasks=len(tasks),
+            )
 
-    # TODO: make more elegant. We consider it ok to avoid a fully-blown plugin system.
-    # Check if nemo_evaluator_launcher_internal is available and load its mapping.toml
-    # CAVEAT: lazy-loading here, not somewhere top level, is important, to ensure
-    # order of package initialization.
-    try:
-        importlib.import_module("nemo_evaluator_launcher_internal")
-        logger.debug("Internal package available, loading internal mapping")
-        internal_mapping = _process_mapping(
-            _load_packaged_resource(CACHE_FILENAME, INTERNAL_RESOURCES_PKG)
-        )
+        return _convert_irs_to_mapping_format(tasks)
 
-        # Merge internal mapping with local mapping (internal takes precedence)
-        local_mapping.update(internal_mapping)
-        logger.info(
-            "Successfully merged internal mapping", internal_tasks=len(internal_mapping)
-        )
-    except ImportError:
-        logger.debug("Internal package not available, using external mapping only")
+    try:
+        tasks, mapping_verified = load_tasks_from_tasks_file()
     except Exception as e:
-        logger.warning("Failed to load internal mapping", error=str(e))
+        raise RuntimeError("Failed to load tasks from packaged IRs") from e
+
+    if not tasks:
+        raise RuntimeError("No tasks available in packaged IRs (all_tasks_irs.yaml)")
 
-    return local_mapping
+    logger.info(
+        "Loaded tasks from packaged IRs",
+        num_tasks=len(tasks),
+        mapping_verified=mapping_verified,
+    )
+    return _convert_irs_to_mapping_format(tasks)
 
 
 def get_task_from_mapping(query: str, mapping: dict[Any, Any]) -> dict[Any, Any]:
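The net effect of the hunk above is that load_tasks_mapping no longer downloads or caches mapping.toml; the mapping is built either from the packaged all_tasks_irs.yaml or on the fly from a container image. A minimal usage sketch of the new call surface (only the function names and signature come from the diff; the container tag and the (harness, task) key below are hypothetical examples):

    from nemo_evaluator_launcher.common.mapping import load_tasks_mapping

    # Default path: build the mapping from the packaged IRs (all_tasks_irs.yaml).
    mapping = load_tasks_mapping()

    # Container path: extract framework.yml from the image and build IRs on the fly.
    container_mapping = load_tasks_mapping(
        from_container="nvcr.io/nvidia/eval-factory/simple-evals:latest"  # hypothetical tag
    )

    # Keys are (harness, task) tuples; values carry endpoint_type, container, defaults,
    # and optionally arch/container_digest, per _convert_irs_to_mapping_format above.
    entry = mapping[("simple-evals", "mmlu")]  # hypothetical key
    print(entry["endpoint_type"], entry["container"])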
@@ -293,3 +431,51 @@ def get_task_from_mapping(query: str, mapping: dict[Any, Any]) -> dict[Any, Any]
             f"invalid query={repr(query)} for task mapping,"
             " it must contain exactly zero or one occurrence of '.' character"
         )
+
+
+def _minimal_task_definition(task_query: str, *, container: str) -> dict[str, Any]:
+    """Create a minimal task definition when task is not known in any mapping."""
+    if task_query.count(".") == 1:
+        harness, task = task_query.split(".")
+    else:
+        harness, task = "", task_query
+
+    # Default to chat; most configs and endpoints use chat unless explicitly known.
+    return {
+        "task": task,
+        "harness": harness,
+        "endpoint_type": "chat",
+        "container": container,
+    }
+
+
+def get_task_definition_for_job(
+    *,
+    task_query: str,
+    base_mapping: dict[Any, Any],
+    container: str | None = None,
+) -> dict[str, Any]:
+    """Resolve task definition for a job.
+
+    If a container is provided, tasks are loaded from that container (using
+    container-metadata) and we attempt to resolve the task from that mapping.
+    If the task isn't found in the container, we warn and return a minimal
+    task definition so submission can proceed.
+    """
+    if not container:
+        return get_task_from_mapping(task_query, base_mapping)
+
+    # `load_tasks_mapping(from_container=...)` uses container-metadata extraction,
+    # which already has its own caching (e.g., caching extracted framework.yml).
+    container_mapping = load_tasks_mapping(from_container=container)
+
+    try:
+        return get_task_from_mapping(task_query, container_mapping)
+    except ValueError as e:
+        logger.warning(
+            "Task not found in provided container; proceeding with minimal task definition",
+            task=task_query,
+            container=container,
+            error=str(e),
+        )
+        return _minimal_task_definition(task_query, container=container)
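Taken together, the two new helpers give job submission a graceful degradation path: known tasks resolve through the container's own metadata, and unknown tasks still yield a usable definition. A sketch of the intended call pattern (the task query and container tag are hypothetical examples; the function names and keyword-only signature come from the diff):

    from nemo_evaluator_launcher.common.mapping import (
        get_task_definition_for_job,
        load_tasks_mapping,
    )

    base_mapping = load_tasks_mapping()

    # Without a container override, this behaves like get_task_from_mapping().
    task_def = get_task_definition_for_job(
        task_query="simple-evals.mmlu",  # hypothetical query
        base_mapping=base_mapping,
    )

    # With a container override, the task is resolved against that container's
    # framework.yml; if it is missing there, a minimal chat-endpoint definition
    # is returned so the job can still be submitted.
    task_def = get_task_definition_for_job(
        task_query="simple-evals.mmlu",
        base_mapping=base_mapping,
        container="nvcr.io/nvidia/eval-factory/simple-evals:latest",  # hypothetical
    )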
--- a/nemo_evaluator_launcher/common/printing_utils.py
+++ b/nemo_evaluator_launcher/common/printing_utils.py
@@ -28,6 +28,7 @@ USAGE:
 """
 
 import os
+import sys
 
 # If this env var is set, it will override a more standard "LOG_LEVEL". If
 # both are unset, default would be used.
@@ -35,11 +36,17 @@ _DISABLE_COLOR_ENV_VAR = "NEMO_EVALUATOR_DISABLE_COLOR"
 
 
 def _is_color_disabled():
+    # Check environment variable first
    env_var = os.environ.get(_DISABLE_COLOR_ENV_VAR, "0").lower()
 
     if "1" in env_var or "yes" in env_var or "y" in env_var or "true" in env_var:
         return True
 
+    # If not explicitly disabled, check if stdout is a TTY
+    # Colors are disabled if output is not a TTY
+    if not sys.stdout.isatty():
+        return True
+
     return False
 
 
@@ -55,39 +62,45 @@ _CODES: dict[str, str] = dict(
     reset="\033[0m",
 )
 
-# If the colors are disabled, we null-out all the codes.
-if _is_color_disabled():
-    for c in _CODES.keys():
-        _CODES[c] = ""
+
+def _apply(code_key: str, s: str) -> str:
+    """Apply an ANSI code if colors are enabled.
+
+    Note: Color enablement is evaluated at call time so tests that redirect
+    stdout (e.g. to StringIO) correctly disable ANSI sequences.
+    """
+    if _is_color_disabled():
+        return s
+    return _CODES[code_key] + s + _CODES["reset"]
 
 
 def green(s: str) -> str:
-    return _CODES["green"] + s + _CODES["reset"]
+    return _apply("green", s)
 
 
 def red(s: str) -> str:
-    return _CODES["red"] + s + _CODES["reset"]
+    return _apply("red", s)
 
 
 def red_bg(s: str) -> str:
-    return _CODES["red_bg"] + s + _CODES["reset"]
+    return _apply("red_bg", s)
 
 
 def cyan(s: str) -> str:
-    return _CODES["cyan"] + s + _CODES["reset"]
+    return _apply("cyan", s)
 
 
 def yellow(s: str) -> str:
-    return _CODES["yellow"] + s + _CODES["reset"]
+    return _apply("yellow", s)
 
 
 def magenta(s: str) -> str:
-    return _CODES["magenta"] + s + _CODES["reset"]
+    return _apply("magenta", s)
 
 
 def grey(s: str) -> str:
-    return _CODES["grey"] + s + _CODES["reset"]
+    return _apply("grey", s)
 
 
 def bold(s: str) -> str:
-    return _CODES["bold"] + s + _CODES["reset"]
+    return _apply("bold", s)
--- a/nemo_evaluator_launcher/configs/deployment/sglang.yaml
+++ b/nemo_evaluator_launcher/configs/deployment/sglang.yaml
@@ -19,6 +19,7 @@ checkpoint_path: ???
 served_model_name: ???
 port: 8000
 tensor_parallel_size: 8
+pipeline_parallel_size: 1
 data_parallel_size: 1
 extra_args: ""
 env_vars: {} # {name: value} dict
@@ -33,6 +34,7 @@ command: python3 -m sglang.launch_server
   --host 0.0.0.0
   --port ${deployment.port}
   --served-model-name ${deployment.served_model_name}
-  --tp ${deployment.tensor_parallel_size}
-  --dp ${deployment.data_parallel_size}
+  --tp-size ${deployment.tensor_parallel_size}
+  --dp-size ${deployment.data_parallel_size}
+  --pp-size ${deployment.pipeline_parallel_size}
   ${deployment.extra_args}
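This swaps the short --tp/--dp flags for SGLang's long-form --tp-size/--dp-size and surfaces the new pipeline_parallel_size knob as --pp-size. With the defaults above, the rendered command should look roughly like the line below ("..." stands for flags outside this hunk, and "my-model" is a hypothetical placeholder, since served_model_name is a required ??? field):

    python3 -m sglang.launch_server ... --host 0.0.0.0 --port 8000 \
      --served-model-name my-model --tp-size 8 --dp-size 1 --pp-size 1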