truss 0.10.13__py3-none-any.whl → 0.11.1rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

truss/cli/train/core.py CHANGED
@@ -1,4 +1,5 @@
  import json
+ import os
  import tarfile
  import tempfile
  from dataclasses import dataclass
@@ -16,6 +17,11 @@ from truss.cli.train.metrics_watcher import MetricsWatcher
  from truss.cli.train.types import PrepareCheckpointArgs, PrepareCheckpointResult
  from truss.cli.utils import common as cli_common
  from truss.cli.utils.output import console
+ from truss.remote.baseten.custom_types import (
+     FileSummary,
+     FileSummaryWithTotalSize,
+     GetCacheSummaryResponseV1,
+ )
  from truss.remote.baseten.remote import BasetenRemote
  from truss_train import loader
  from truss_train.definitions import DeployCheckpointsConfig
@@ -446,6 +452,44 @@ def fetch_project_by_name_or_id(
          raise click.ClickException(f"Error fetching project: {str(e)}")


+ def create_file_summary_with_directory_sizes(
+     files: list[FileSummary],
+ ) -> list[FileSummaryWithTotalSize]:
+     directory_sizes = calculate_directory_sizes(files)
+     return [
+         FileSummaryWithTotalSize(
+             file_summary=file_info,
+             total_size=directory_sizes.get(file_info.path, file_info.size_bytes),
+         )
+         for file_info in files
+     ]
+
+
+ def calculate_directory_sizes(
+     files: list[FileSummary], max_depth: int = 100
+ ) -> dict[str, int]:
+     directory_sizes = {}
+
+     for file_info in files:
+         if file_info.file_type == "directory":
+             directory_sizes[file_info.path] = 0
+
+     for file_info in files:
+         current_path = file_info.path
+         for i in range(max_depth):
+             if current_path is None:
+                 break
+             if current_path in directory_sizes:
+                 directory_sizes[current_path] += file_info.size_bytes
+             # Move to parent directory
+             parent = os.path.dirname(current_path)
+             if parent == current_path:  # Reached root
+                 break
+             current_path = parent
+
+     return directory_sizes
+
+
  def view_cache_summary(
      remote_provider: BasetenRemote,
      project_id: str,
@@ -454,12 +498,14 @@
  ):
      """View cache summary for a training project."""
      try:
-         cache_data = remote_provider.api.get_cache_summary(project_id)
+         raw_cache_data = remote_provider.api.get_cache_summary(project_id)

-         if not cache_data:
+         if not raw_cache_data:
              console.print("No cache summary found for this project.", style="yellow")
              return

+         cache_data = GetCacheSummaryResponseV1.model_validate(raw_cache_data)
+
          table = rich.table.Table(title=f"Cache summary for project: {project_id}")
          table.add_column("File Path", style="cyan")
          table.add_column("Size", style="green")
@@ -467,58 +513,48 @@
          table.add_column("Type")
          table.add_column("Permissions", style="magenta")

-         files = cache_data.get("file_summaries", [])
+         files = cache_data.file_summaries
          if not files:
              console.print("No files found in cache.", style="yellow")
              return

-         reverse = order == SORT_ORDER_DESC
+         files_with_total_sizes = create_file_summary_with_directory_sizes(files)

-         if sort_by == SORT_BY_FILEPATH:
-             files.sort(key=lambda x: x.get("path", ""), reverse=reverse)
-         elif sort_by == SORT_BY_SIZE:
-             files.sort(key=lambda x: x.get("size_bytes", 0), reverse=reverse)
-         elif sort_by == SORT_BY_MODIFIED:
-             files.sort(key=lambda x: x.get("modified", ""), reverse=reverse)
-         elif sort_by == SORT_BY_TYPE:
-             files.sort(key=lambda x: x.get("file_type", ""), reverse=reverse)
-         elif sort_by == SORT_BY_PERMISSIONS:
-             files.sort(key=lambda x: x.get("permissions", ""), reverse=reverse)
-
-         total_size = 0
-         for file_info in files:
-             total_size += file_info.get("size_bytes", 0)
+         reverse = order == SORT_ORDER_DESC
+         sort_key = _get_sort_key(sort_by)
+         files_with_total_sizes.sort(key=sort_key, reverse=reverse)

+         total_size = sum(
+             file_info.file_summary.size_bytes for file_info in files_with_total_sizes
+         )
          total_size_str = common.format_bytes_to_human_readable(total_size)

          console.print(
-             f"📅 Cache captured at: {cache_data.get('timestamp', 'Unknown')}",
-             style="bold blue",
+             f"📅 Cache captured at: {cache_data.timestamp}", style="bold blue"
          )
+         console.print(f"📁 Project ID: {cache_data.project_id}", style="bold blue")
+         console.print()
          console.print(
-             f"📁 Project ID: {cache_data.get('project_id', 'Unknown')}",
-             style="bold blue",
+             f"📊 Total files: {len(files_with_total_sizes)}", style="bold green"
          )
-         console.print()
-         console.print(f"📊 Total files: {len(files)}", style="bold green")
          console.print(f"💾 Total size: {total_size_str}", style="bold green")
          console.print()

-         for file_info in files:
-             size_bytes = file_info.get("size_bytes", 0)
+         for file_info in files_with_total_sizes:
+             total_size = file_info.total_size

-             size_str = cli_common.format_bytes_to_human_readable(int(size_bytes))
+             size_str = cli_common.format_bytes_to_human_readable(int(total_size))

              modified_str = cli_common.format_localized_time(
-                 file_info.get("modified", "Unknown")
+                 file_info.file_summary.modified
              )

              table.add_row(
-                 file_info.get("path", "Unknown"),
+                 file_info.file_summary.path,
                  size_str,
                  modified_str,
-                 file_info.get("file_type", "Unknown"),
-                 file_info.get("permissions", "Unknown"),
+                 file_info.file_summary.file_type or "Unknown",
+                 file_info.file_summary.permissions or "Unknown",
              )

          console.print(table)
@@ -528,6 +564,21 @@ def view_cache_summary(
          raise


+ def _get_sort_key(sort_by: str) -> Callable[[FileSummaryWithTotalSize], Any]:
+     if sort_by == SORT_BY_FILEPATH:
+         return lambda x: x.file_summary.path
+     elif sort_by == SORT_BY_SIZE:
+         return lambda x: x.total_size
+     elif sort_by == SORT_BY_MODIFIED:
+         return lambda x: x.file_summary.modified
+     elif sort_by == SORT_BY_TYPE:
+         return lambda x: x.file_summary.file_type or ""
+     elif sort_by == SORT_BY_PERMISSIONS:
+         return lambda x: x.file_summary.permissions or ""
+     else:
+         raise ValueError(f"Invalid --sort argument: {sort_by}")
+
+
  def view_cache_summary_by_project(
      remote_provider: BasetenRemote,
      project_identifier: str,
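Note: the cache summary table now reports directories with the combined size of everything beneath them: calculate_directory_sizes seeds a counter for every entry whose file_type is "directory", then walks each file's path upward and adds its size_bytes to every ancestor it finds (the real function also caps the walk with max_depth). A minimal, self-contained sketch of that roll-up, using plain dicts as stand-ins for the FileSummary objects in this diff:

import os

# Hypothetical stand-ins for FileSummary, reduced to the fields the roll-up reads.
files = [
    {"path": "/cache", "size_bytes": 0, "file_type": "directory"},
    {"path": "/cache/weights.bin", "size_bytes": 400, "file_type": "file"},
    {"path": "/cache/tokenizer.json", "size_bytes": 100, "file_type": "file"},
]

# Seed every directory with 0, then add each file's size to all known ancestors.
directory_sizes = {f["path"]: 0 for f in files if f["file_type"] == "directory"}
for f in files:
    current = f["path"]
    while True:
        if current in directory_sizes:
            directory_sizes[current] += f["size_bytes"]
        parent = os.path.dirname(current)
        if parent == current:  # reached the filesystem root
            break
        current = parent

print(directory_sizes)  # {'/cache': 500}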
@@ -2,6 +2,7 @@ from __future__ import annotations

  import json
  import logging
+ import os
  import re
  import shutil
  from abc import ABC, abstractmethod
@@ -783,6 +784,10 @@ class ServingImageBuilder(ImageBuilder):
              config
          )

+         non_root_user = os.getenv("BT_USE_NON_ROOT_USER", False)
+         enable_model_container_admin_commands = os.getenv(
+             "BT_ENABLE_MODEL_CONTAINER_ADMIN_CMDS"
+         )
          dockerfile_contents = dockerfile_template.render(
              should_install_server_requirements=should_install_server_requirements,
              base_image_name_and_tag=base_image_name_and_tag,
@@ -816,6 +821,8 @@ class ServingImageBuilder(ImageBuilder):
              build_commands=build_commands,
              use_local_src=config.use_local_src,
              passthrough_environment_variables=passthrough_environment_variables,
+             non_root_user=non_root_user,
+             enable_model_container_admin_commands=enable_model_container_admin_commands,
              **FILENAME_CONSTANTS_MAP,
          )
          # Consolidate repeated empty lines to single empty lines.
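Note: the image builder now reads two feature flags from the build environment and hands them to the Dockerfile template, where they gate the non-root-user and sudo blocks shown further below. A rough sketch of how such flags flow into a Jinja render, with a trimmed stand-in template rather than the real server Dockerfile template:

import os

import jinja2

# Trimmed stand-in for the Dockerfile template; only the flag handling is reproduced.
template = jinja2.Template(
    "{% if non_root_user and enable_model_container_admin_commands %}"
    "RUN apt update && apt install -y sudo\n"
    "{% endif %}"
    "{% if non_root_user %}USER app\n{% endif %}"
)

# Mirrors the builder change: the values come straight from the environment, so any
# non-empty string (rather than a parsed boolean) enables the corresponding branch.
rendered = template.render(
    non_root_user=os.getenv("BT_USE_NON_ROOT_USER", False),
    enable_model_container_admin_commands=os.getenv("BT_ENABLE_MODEL_CONTAINER_ADMIN_CMDS"),
)
print(rendered)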
@@ -1,3 +1,4 @@
+ import re
  from dataclasses import dataclass, field
  from pathlib import Path
  from typing import Dict, List
@@ -31,12 +32,32 @@ class DockerBuildEmulator:
          self._context_dir = context_dir

      def run(self, fs_root_dir: Path) -> DockerBuildEmulatorResult:
-         def _resolve_env(key: str) -> str:
-             if key.startswith("$"):
-                 key = key.replace("$", "", 1)
-                 v = result.env[key]
-                 return v
-             return key
+         def _resolve_env(in_value: str) -> str:
+             # Valid environment variable name pattern
+             var_name_pattern = r"[A-Za-z_][A-Za-z0-9_]*"
+
+             # Handle ${VAR} syntax
+             def replace_braced_var(match):
+                 var_name = match.group(1)
+                 return result.env.get(
+                     var_name, match.group(0)
+                 )  # Return original if not found
+
+             # Handle $VAR syntax (word boundary ensures we don't match parts of other vars)
+             def replace_simple_var(match):
+                 var_name = match.group(1)
+                 return result.env.get(
+                     var_name, match.group(0)
+                 )  # Return original if not found
+
+             # Replace ${VAR} patterns first, using % substitution to avoid additional braces noise with f-strings
+             value = re.sub(
+                 r"\$\{(%s)\}" % var_name_pattern, replace_braced_var, in_value
+             )
+             # Then replace remaining $VAR patterns (only at word boundaries)
+             value = re.sub(r"\$(%s)\b" % var_name_pattern, replace_simple_var, value)
+
+             return value

          def _resolve_values(keys: List[str]) -> List[str]:
              return list(map(_resolve_env, keys))
@@ -53,11 +74,14 @@ class DockerBuildEmulator:
          if cmd.instruction == DockerInstruction.ENTRYPOINT:
              result.entrypoint = list(values)
          if cmd.instruction == DockerInstruction.COPY:
+             # Filter out --chown flags
+             filtered_values = [v for v in values if not v.startswith("--chown")]
+
              # NB(nikhil): Skip COPY commands with --from flag (multi-stage builds)
-             if len(values) != 2:
+             if len(filtered_values) != 2:
                  continue

-             src, dst = values
+             src, dst = filtered_values
              src = src.replace("./", "", 1)
              dst = dst.replace("/", "", 1)
              copy_tree_or_file(self._context_dir / src, fs_root_dir / dst)
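Note: the rewritten _resolve_env no longer handles only values that are exactly "$NAME"; it substitutes every ${VAR} and $VAR occurrence inside a value and leaves unknown variables untouched, which is what lets the emulator follow COPY destinations such as ${APP_HOME}/data. A standalone sketch of that substitution, with a plain dict standing in for result.env:

import re

def resolve_env(value: str, env: dict) -> str:
    name = r"[A-Za-z_][A-Za-z0-9_]*"
    # Replace ${VAR} first, then bare $VAR; unknown variables are returned unchanged.
    value = re.sub(r"\$\{(%s)\}" % name, lambda m: env.get(m.group(1), m.group(0)), value)
    return re.sub(r"\$(%s)\b" % name, lambda m: env.get(m.group(1), m.group(0)), value)

env = {"APP_HOME": "/app", "PATH": "/usr/bin"}
print(resolve_env("${APP_HOME}/data", env))        # -> /app/data
print(resolve_env("$PATH:$HOME/.local/bin", env))  # -> /usr/bin:$HOME/.local/bin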
@@ -138,6 +138,13 @@ class FileSummary(pydantic.BaseModel):
      )


+ class FileSummaryWithTotalSize(pydantic.BaseModel):
+     file_summary: FileSummary
+     total_size: int = pydantic.Field(
+         description="Total size of the file and all its subdirectories"
+     )
+
+
  class GetCacheSummaryResponseV1(pydantic.BaseModel):
      """Response for getting cache summary."""

@@ -8,6 +8,35 @@ FROM {{ base_image_name_and_tag }} AS truss_server
  {%- set python_executable = config.base_image.python_executable_path or 'python3' %}
  ENV PYTHON_EXECUTABLE="{{ python_executable }}"

+ {%- set app_username = "app" %} {# needed later for USER directive#}
+ {% block user_setup %}
+ {%- set app_user_uid = 60000 %}
+ {%- set control_server_dir = "/control" %}
+ {%- set default_owner = "root:root" %}
+ {# The non-root user's home directory. #}
+ {# uv will use $HOME to install packages. #}
+ ENV HOME=/home/{{ app_username }}
+ {# Directory containing inference server code. #}
+ ENV APP_HOME=/{{ app_username }}
+ RUN mkdir -p ${APP_HOME} {{ control_server_dir }}
+ {# Create a non-root user to run model containers. #}
+ RUN useradd -u {{ app_user_uid }} -ms /bin/bash {{ app_username }}
+ {% endblock %} {#- endblock user_setup #}
+
+ {#- at the very beginning, set non-interactive mode for apt #}
+ ENV DEBIAN_FRONTEND=noninteractive
+
+ {# If non-root user is enabled and model container admin commands are enabled, install sudo #}
+ {# to allow the non-root user to install packages. #}
+ {%- if non_root_user and enable_model_container_admin_commands %}
+ RUN apt update && apt install -y sudo
+ {%- set allowed_admin_commands = ["/usr/bin/apt install *", "/usr/bin/apt update"] %}
+ RUN echo "Defaults:{{ app_username }} passwd_tries=0\n{{ app_username }} ALL=(root) NOPASSWD: {{ allowed_admin_commands | join(", ") }}" > /etc/sudoers.d/app-packages
+ RUN chmod 0440 /etc/sudoers.d/app-packages
+ {#- optional but good practice: check if the sudoers file is valid #}
+ RUN visudo -c
+ {%- endif %} {#- endif non_root_user and enable_model_container_admin_commands #}
+
  {%- set UV_VERSION = "0.7.19" %}
  {#
  NB(nikhil): We use a semi-complex uv installation command across the board:
@@ -39,7 +68,8 @@ RUN if ! command -v uv >/dev/null 2>&1; then \
  command -v curl >/dev/null 2>&1 || (apt update && apt install -y curl) && \
  curl -LsSf --retry 5 --retry-delay 5 https://astral.sh/uv/{{ UV_VERSION }}/install.sh | sh; \
  fi
- ENV PATH="/root/.local/bin:$PATH"
+ {# Add the user's local bin to the path, used by uv. #}
+ ENV PATH=${PATH}:${HOME}/.local/bin
  {% endblock %}

  {% block base_image_patch %}
@@ -57,7 +87,7 @@ RUN {{ sys_pip_install_command }} install mkl

  {% block install_system_requirements %}
  {%- if should_install_system_requirements %}
- COPY ./{{ system_packages_filename }} {{ system_packages_filename }}
+ COPY --chown={{ default_owner }} ./{{ system_packages_filename }} {{ system_packages_filename }}
  RUN apt-get update && apt-get install --yes --no-install-recommends $(cat {{ system_packages_filename }}) \
  && apt-get autoremove -y \
  && apt-get clean -y \
@@ -68,11 +98,11 @@ RUN apt-get update && apt-get install --yes --no-install-recommends $(cat {{ sys

  {% block install_requirements %}
  {%- if should_install_user_requirements_file %}
- COPY ./{{ user_supplied_requirements_filename }} {{ user_supplied_requirements_filename }}
+ COPY --chown={{ default_owner }} ./{{ user_supplied_requirements_filename }} {{ user_supplied_requirements_filename }}
  RUN {{ sys_pip_install_command }} -r {{ user_supplied_requirements_filename }} --no-cache-dir
  {%- endif %}
  {%- if should_install_requirements %}
- COPY ./{{ config_requirements_filename }} {{ config_requirements_filename }}
+ COPY --chown={{ default_owner }} ./{{ config_requirements_filename }} {{ config_requirements_filename }}
  RUN {{ sys_pip_install_command }} -r {{ config_requirements_filename }} --no-cache-dir
  {%- endif %}
  {% endblock %}
@@ -80,7 +110,6 @@ RUN {{ sys_pip_install_command }} -r {{ config_requirements_filename }} --no-cac


  {%- if not config.docker_server %}
- ENV APP_HOME="/app"
  WORKDIR $APP_HOME
  {%- endif %}

@@ -90,7 +119,7 @@ WORKDIR $APP_HOME

  {% block bundled_packages_copy %}
  {%- if bundled_packages_dir_exists %}
- COPY ./{{ config.bundled_packages_dir }} /packages
+ COPY --chown={{ default_owner }} ./{{ config.bundled_packages_dir }} /packages
  {%- endif %}
  {% endblock %}

@@ -1,20 +1,21 @@
  FROM python:3.11-slim AS cache_warmer

- RUN mkdir -p /app/model_cache
- WORKDIR /app
+ ENV APP_HOME=/app
+ RUN mkdir -p ${APP_HOME}/model_cache
+ WORKDIR ${APP_HOME}

  {% if hf_access_token %}
  ENV HUGGING_FACE_HUB_TOKEN="{{hf_access_token}}"
  {% endif %}

  RUN apt-get -y update; apt-get -y install curl; curl -s https://baseten-public.s3.us-west-2.amazonaws.com/bin/b10cp-5fe8dc7da-linux-amd64 -o /app/b10cp; chmod +x /app/b10cp
- ENV B10CP_PATH_TRUSS="/app/b10cp"
- COPY ./cache_requirements.txt /app/cache_requirements.txt
- RUN pip install -r /app/cache_requirements.txt --no-cache-dir && rm -rf /root/.cache/pip
- COPY ./cache_warmer.py /cache_warmer.py
+ ENV B10CP_PATH_TRUSS="${APP_HOME}/b10cp"
+ COPY --chown={{ default_owner }} ./cache_requirements.txt ${APP_HOME}/cache_requirements.txt
+ RUN pip install -r ${APP_HOME}/cache_requirements.txt --no-cache-dir && rm -rf /root/.cache/pip
+ COPY --chown={{ default_owner }} ./cache_warmer.py /cache_warmer.py

  {% for credential in credentials_to_cache %}
- COPY ./{{credential}} /app/{{credential}}
+ COPY ./{{credential}} ${APP_HOME}/{{credential}}
  {% endfor %}

  {% for repo, hf_dir in models.items() %}
@@ -1,3 +1,3 @@
  {% for file in cached_files %}
- COPY --from=cache_warmer {{file.source}} {{file.dst}}
+ COPY --chown={{ default_owner }} --from=cache_warmer {{file.source}} {{file.dst}}
  {% endfor %}
@@ -1,4 +1,5 @@
  [supervisord]
+ pidfile=/tmp/supervisord.pid ; Set PID file location to /tmp to be writable by the non-root user
  nodaemon=true ; Run supervisord in the foreground (useful for containers)
  logfile=/dev/null ; Disable logging to file (send logs to /dev/null)
  logfile_maxbytes=0 ; No size limit on logfile (since logging is disabled)
@@ -6,7 +6,6 @@ import pathlib
  import time
  from typing import Iterator, List, Optional, Sequence

- import opentelemetry.exporter.otlp.proto.http.trace_exporter as oltp_exporter
  import opentelemetry.sdk.resources as resources
  import opentelemetry.sdk.trace as sdk_trace
  import opentelemetry.sdk.trace.export as trace_export
@@ -16,7 +15,6 @@ from shared import secrets_resolver
  logger = logging.getLogger(__name__)

  ATTR_NAME_DURATION = "duration_sec"
- OTEL_EXPORTER_OTLP_ENDPOINT = "OTEL_EXPORTER_OTLP_ENDPOINT"
  # Writing trace data to a file is only intended for testing / debugging.
  OTEL_TRACING_NDJSON_FILE = "OTEL_TRACING_NDJSON_FILE"
  # Exporting trace data to a public honeycomb instance (not our cluster collector)
@@ -67,13 +65,6 @@ def get_truss_tracer(secrets: secrets_resolver.Secrets, config) -> trace.Tracer:
          return _truss_tracer

      span_processors: List[sdk_trace.SpanProcessor] = []
-     if otlp_endpoint := os.getenv(OTEL_EXPORTER_OTLP_ENDPOINT):
-         if enable_tracing_data:
-             logger.info(f"Exporting trace data to {OTEL_EXPORTER_OTLP_ENDPOINT}.")
-         otlp_exporter = oltp_exporter.OTLPSpanExporter(endpoint=otlp_endpoint)
-         otlp_processor = sdk_trace.export.BatchSpanProcessor(otlp_exporter)
-         span_processors.append(otlp_processor)
-
      if tracing_log_file := os.getenv(OTEL_TRACING_NDJSON_FILE):
          if enable_tracing_data:
              logger.info(f"Exporting trace data to file `{tracing_log_file}`.")
@@ -81,21 +72,6 @@ def get_truss_tracer(secrets: secrets_resolver.Secrets, config) -> trace.Tracer:
          file_processor = sdk_trace.export.SimpleSpanProcessor(json_file_exporter)
          span_processors.append(file_processor)

-     if (
-         honeycomb_dataset := os.getenv(HONEYCOMB_DATASET)
-     ) and HONEYCOMB_API_KEY in secrets:
-         honeycomb_api_key = secrets[HONEYCOMB_API_KEY]
-         logger.info("Exporting trace data to honeycomb.")
-         honeycomb_exporter = oltp_exporter.OTLPSpanExporter(
-             endpoint="https://api.honeycomb.io/v1/traces",
-             headers={
-                 "x-honeycomb-team": honeycomb_api_key,
-                 "x-honeycomb-dataset": honeycomb_dataset,
-             },
-         )
-         honeycomb_processor = sdk_trace.export.BatchSpanProcessor(honeycomb_exporter)
-         span_processors.append(honeycomb_processor)
-
      if span_processors and enable_tracing_data:
          logger.info("Instantiating truss tracer.")
          resource = resources.Resource.create({resources.SERVICE_NAME: "truss-server"})
@@ -11,7 +11,6 @@
  {% if config.base_image %}
  {%- if not config.docker_server %}
  ENV PYTHONUNBUFFERED="True"
- ENV DEBIAN_FRONTEND="noninteractive"

  {# Install common dependencies #}
  RUN apt update && \
@@ -20,7 +19,7 @@ RUN apt update && \
  && apt-get clean -y \
  && rm -rf /var/lib/apt/lists/*

- COPY ./{{ base_server_requirements_filename }} {{ base_server_requirements_filename }}
+ COPY --chown={{ default_owner }} ./{{ base_server_requirements_filename }} {{ base_server_requirements_filename }}
  RUN {{ sys_pip_install_command }} -r {{ base_server_requirements_filename }} --no-cache-dir
  {%- endif %} {#- endif not config.docker_server #}

@@ -38,7 +37,7 @@ RUN ln -sf {{ config.base_image.python_executable_path }} /usr/local/bin/python

  {% block install_requirements %}
  {%- if should_install_server_requirements %}
- COPY ./{{ server_requirements_filename }} {{ server_requirements_filename }}
+ COPY --chown={{ default_owner }} ./{{ server_requirements_filename }} {{ server_requirements_filename }}
  RUN {{ sys_pip_install_command }} -r {{ server_requirements_filename }} --no-cache-dir
  {%- endif %} {#- endif should_install_server_requirements #}
  {{ super() }}
@@ -47,7 +46,7 @@ RUN {{ sys_pip_install_command }} -r {{ server_requirements_filename }} --no-cac

  {% block app_copy %}
  {%- if model_cache_v1 %}
- # Copy data before code for better caching
+ {# Copy data before code for better caching #}
  {%- include "copy_cache_files.Dockerfile.jinja" -%}
  {%- endif %} {#- endif model_cache_v1 #}

@@ -65,47 +64,55 @@ RUN {% for secret,path in config.build.secret_to_path_mapping.items() %} --mount

  {# Copy data before code for better caching #}
  {%- if data_dir_exists %}
- COPY ./{{ config.data_dir }} /app/data
+ COPY --chown={{ default_owner }} ./{{ config.data_dir }} ${APP_HOME}/data
  {%- endif %} {#- endif data_dir_exists #}

  {%- if model_cache_v2 %}
- # v0.0.9, keep synced with server_requirements.txt
+ {# v0.0.9, keep synced with server_requirements.txt #}
  RUN curl -sSL --fail --retry 5 --retry-delay 2 -o /usr/local/bin/truss-transfer-cli https://github.com/basetenlabs/truss/releases/download/v0.10.11rc1/truss-transfer-cli-v0.10.11rc1-linux-x86_64-unknown-linux-musl
  RUN chmod +x /usr/local/bin/truss-transfer-cli
  RUN mkdir /static-bptr
  RUN echo "hash {{model_cache_hash}}"
- COPY ./bptr-manifest /static-bptr/static-bptr-manifest.json
+ COPY --chown={{ default_owner }} ./bptr-manifest /static-bptr/static-bptr-manifest.json
  {%- endif %} {#- endif model_cache_v2 #}

  {%- if not config.docker_server %}
- COPY ./server /app
+ COPY --chown={{ default_owner }} ./server ${APP_HOME}
  {%- endif %} {#- endif not config.docker_server #}

  {%- if use_local_src %}
  {# This path takes precedence over site-packages. #}
- COPY ./truss_chains /app/truss_chains
- COPY ./truss /app/truss
+ COPY --chown={{ default_owner }} ./truss_chains ${APP_HOME}/truss_chains
+ COPY --chown={{ default_owner }} ./truss ${APP_HOME}/truss
  {%- endif %} {#- endif use_local_src #}

- COPY ./config.yaml /app/config.yaml
+ COPY --chown={{ default_owner }} ./config.yaml ${APP_HOME}/config.yaml
  {%- if requires_live_reload %}
  RUN uv python install {{ control_python_version }}
  RUN uv venv /control/.env --python {{ control_python_version }}

- COPY ./control /control
+ COPY --chown={{ default_owner }} ./control /control
  RUN uv pip install -r /control/requirements.txt --python /control/.env/bin/python --no-cache-dir
  {%- endif %} {#- endif requires_live_reload #}

  {%- if model_dir_exists %}
- COPY ./{{ config.model_module_dir }} /app/model
+ COPY --chown={{ default_owner }} ./{{ config.model_module_dir }} ${APP_HOME}/model
  {%- endif %} {#- endif model_dir_exists #}
  {% endblock %} {#- endblock app_copy #}

  {% block run %}
+ {# Macro to change ownership of directories and switch to regular user #}
+ {%- macro chown_and_switch_to_regular_user_if_enabled(additional_chown_dirs=[]) -%}
+ {%- if non_root_user %}
+ RUN chown -R {{ app_username }}:{{ app_username }} {% for dir in additional_chown_dirs %}{{ dir }} {% endfor %}${HOME} ${APP_HOME}
+ USER {{ app_username }}
+ {%- endif %} {#- endif non_root_user #}
+ {%- endmacro -%}
+
  {%- if config.docker_server %}
- RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
+ RUN apt-get update -y && apt-get install -y --no-install-recommends \
  curl nginx && rm -rf /var/lib/apt/lists/*
- COPY ./docker_server_requirements.txt /app/docker_server_requirements.txt
+ COPY --chown={{ default_owner }} ./docker_server_requirements.txt ${APP_HOME}/docker_server_requirements.txt

  {# NB(nikhil): Use the same python version for custom server proxy as the control server, for consistency. #}
  RUN uv python install {{ control_python_version }}
@@ -113,23 +120,30 @@ RUN uv venv /docker_server/.venv --python {{ control_python_version }}
  RUN uv pip install --python /docker_server/.venv/bin/python -r /app/docker_server_requirements.txt --no-cache-dir
  {% set proxy_config_path = "/etc/nginx/conf.d/proxy.conf" %}
  {% set supervisor_config_path = "/etc/supervisor/supervisord.conf" %}
- {% set supervisor_log_dir = "/var/log/supervisor" %}
  {% set supervisor_server_url = "http://localhost:8080" %}
- COPY ./proxy.conf {{ proxy_config_path }}
- RUN mkdir -p {{ supervisor_log_dir }}
- COPY supervisord.conf {{ supervisor_config_path }}
+ COPY --chown={{ default_owner }} ./proxy.conf {{ proxy_config_path }}
+ COPY --chown={{ default_owner }} ./supervisord.conf {{ supervisor_config_path }}
  ENV SUPERVISOR_SERVER_URL="{{ supervisor_server_url }}"
  ENV SERVER_START_CMD="/docker_server/.venv/bin/supervisord -c {{ supervisor_config_path }}"
+ {#- default configuration uses port 80, which requires root privileges, so we remove it #}
+ RUN rm -f /etc/nginx/sites-enabled/default
+ {#- nginx writes to /var/lib/nginx, /var/log/nginx, and /run directories #}
+ {{ chown_and_switch_to_regular_user_if_enabled(["/var/lib/nginx", "/var/log/nginx", "/run"]) }}
  ENTRYPOINT ["/docker_server/.venv/bin/supervisord", "-c", "{{ supervisor_config_path }}"]
+
  {%- elif requires_live_reload %} {#- elif requires_live_reload #}
  ENV HASH_TRUSS="{{ truss_hash }}"
  ENV CONTROL_SERVER_PORT="8080"
  ENV INFERENCE_SERVER_PORT="8090"
  ENV SERVER_START_CMD="/control/.env/bin/python /control/control/server.py"
+ {{ chown_and_switch_to_regular_user_if_enabled() }}
  ENTRYPOINT ["/control/.env/bin/python", "/control/control/server.py"]
+
  {%- else %} {#- else (default inference server) #}
  ENV INFERENCE_SERVER_PORT="8080"
  ENV SERVER_START_CMD="{{ python_executable }} /app/main.py"
+ {{ chown_and_switch_to_regular_user_if_enabled() }}
  ENTRYPOINT ["{{ python_executable }}", "/app/main.py"]
  {%- endif %} {#- endif config.docker_server / live_reload #}
+
  {% endblock %} {#- endblock run #}
@@ -1,6 +1,11 @@
  from unittest.mock import Mock, patch

- from truss.cli.train.core import view_training_job_metrics
+ from truss.cli.train.core import (
+     calculate_directory_sizes,
+     create_file_summary_with_directory_sizes,
+     view_training_job_metrics,
+ )
+ from truss.remote.baseten.custom_types import FileSummary


  @patch("truss.cli.train.metrics_watcher.time.sleep")
@@ -189,3 +194,251 @@ def test_view_training_job_metrics(time_sleep, capfd):
      out, err = capfd.readouterr()
      assert "Training job completed successfully" in out
      assert "Error fetching metrics" not in out
+
+
+ def test_calculate_directory_sizes():
+     """Test calculate_directory_sizes function with various file structures."""
+     # Create test files with a nested directory structure
+     files = [
+         FileSummary(
+             path="/root",
+             size_bytes=0,
+             modified="2023-01-01T00:00:00Z",
+             file_type="directory",
+             permissions="drwxr-xr-x",
+         ),
+         FileSummary(
+             path="/root/file1.txt",
+             size_bytes=100,
+             modified="2023-01-01T00:00:00Z",
+             file_type="file",
+             permissions="-rw-r--r--",
+         ),
+         FileSummary(
+             path="/root/subdir",
+             size_bytes=0,
+             modified="2023-01-01T00:00:00Z",
+             file_type="directory",
+             permissions="drwxr-xr-x",
+         ),
+         FileSummary(
+             path="/root/subdir/file2.txt",
+             size_bytes=200,
+             modified="2023-01-01T00:00:00Z",
+             file_type="file",
+             permissions="-rw-r--r--",
+         ),
+         FileSummary(
+             path="/root/subdir/file3.txt",
+             size_bytes=300,
+             modified="2023-01-01T00:00:00Z",
+             file_type="file",
+             permissions="-rw-r--r--",
+         ),
+         FileSummary(
+             path="/root/other_file.txt",
+             size_bytes=50,
+             modified="2023-01-01T00:00:00Z",
+             file_type="file",
+             permissions="-rw-r--r--",
+         ),
+     ]
+
+     result = calculate_directory_sizes(files)
+
+     # Check that directory sizes are calculated correctly
+     assert result["/root/subdir"] == 500  # 200 + 300
+     assert result["/root"] == 650  # 100 + 200 + 300 + 50
+
+     # Check that files are not included in the result (only directories)
+     assert "/root/file1.txt" not in result
+     assert "/root/subdir/file2.txt" not in result
+     assert "/root/subdir/file3.txt" not in result
+     assert "/root/other_file.txt" not in result
+
+
+ def test_calculate_directory_sizes_empty_list():
+     """Test calculate_directory_sizes with empty file list."""
+     result = calculate_directory_sizes([])
+     assert result == {}
+
+
+ def test_calculate_directory_sizes_no_directories():
+     """Test calculate_directory_sizes with only files (no directories)."""
+     files = [
+         FileSummary(
+             path="/file1.txt",
+             size_bytes=100,
+             modified="2023-01-01T00:00:00Z",
+             file_type="file",
+             permissions="-rw-r--r--",
+         ),
+         FileSummary(
+             path="/file2.txt",
+             size_bytes=200,
+             modified="2023-01-01T00:00:00Z",
+             file_type="file",
+             permissions="-rw-r--r--",
+         ),
+     ]
+
+     result = calculate_directory_sizes(files)
+     assert result == {}
+
+
+ def test_create_file_summary_with_directory_sizes():
+     """Test create_file_summary_with_directory_sizes function."""
+     files = [
+         FileSummary(
+             path="/root",
+             size_bytes=0,
+             modified="2023-01-01T00:00:00Z",
+             file_type="directory",
+             permissions="drwxr-xr-x",
+         ),
+         FileSummary(
+             path="/root/file1.txt",
+             size_bytes=100,
+             modified="2023-01-01T00:00:00Z",
+             file_type="file",
+             permissions="-rw-r--r--",
+         ),
+         FileSummary(
+             path="/root/subdir",
+             size_bytes=0,
+             modified="2023-01-01T00:00:00Z",
+             file_type="directory",
+             permissions="drwxr-xr-x",
+         ),
+         FileSummary(
+             path="/root/subdir/file2.txt",
+             size_bytes=200,
+             modified="2023-01-01T00:00:00Z",
+             file_type="file",
+             permissions="-rw-r--r--",
+         ),
+     ]
+
+     result = create_file_summary_with_directory_sizes(files)
+
+     # Check that we get the correct number of FileSummaryWithTotalSize objects
+     assert len(result) == 4
+
+     # Check that files have their original size as total_size
+     file1_summary = next(f for f in result if f.file_summary.path == "/root/file1.txt")
+     assert file1_summary.total_size == 100
+
+     file2_summary = next(
+         f for f in result if f.file_summary.path == "/root/subdir/file2.txt"
+     )
+     assert file2_summary.total_size == 200
+
+     # Check that directories have calculated total sizes
+     subdir_summary = next(f for f in result if f.file_summary.path == "/root/subdir")
+     assert subdir_summary.total_size == 200  # Only file2.txt
+
+     root_summary = next(f for f in result if f.file_summary.path == "/root")
+     assert root_summary.total_size == 300  # file1.txt + file2.txt
+
+
+ def test_create_file_summary_with_directory_sizes_empty_list():
+     """Test create_file_summary_with_directory_sizes with empty file list."""
+     result = create_file_summary_with_directory_sizes([])
+     assert result == []
+
+
+ def test_calculate_directory_sizes_max_depth():
+     """Test that calculate_directory_sizes respects the max_depth parameter.
+
+     The max_depth parameter controls how many parent directories up from each file
+     the algorithm will traverse to add the file's size to parent directories.
+     """
+     # Create a deep directory structure: /root/level1/level2/level3/level4/level5/file.txt
+     files = [
+         # Root directory
+         FileSummary(
+             path="/root",
+             size_bytes=0,
+             modified="2023-01-01T00:00:00Z",
+             file_type="directory",
+             permissions="drwxr-xr-x",
+         ),
+         # Level 1 directory
+         FileSummary(
+             path="/root/level1",
+             size_bytes=0,
+             modified="2023-01-01T00:00:00Z",
+             file_type="directory",
+             permissions="drwxr-xr-x",
+         ),
+         # Level 2 directory
+         FileSummary(
+             path="/root/level1/level2",
+             size_bytes=0,
+             modified="2023-01-01T00:00:00Z",
+             file_type="directory",
+             permissions="drwxr-xr-x",
+         ),
+         # Level 3 directory
+         FileSummary(
+             path="/root/level1/level2/level3",
+             size_bytes=0,
+             modified="2023-01-01T00:00:00Z",
+             file_type="directory",
+             permissions="drwxr-xr-x",
+         ),
+         # Level 4 directory
+         FileSummary(
+             path="/root/level1/level2/level3/level4",
+             size_bytes=0,
+             modified="2023-01-01T00:00:00Z",
+             file_type="directory",
+             permissions="drwxr-xr-x",
+         ),
+         # Level 5 directory
+         FileSummary(
+             path="/root/level1/level2/level3/level4/level5",
+             size_bytes=0,
+             modified="2023-01-01T00:00:00Z",
+             file_type="directory",
+             permissions="drwxr-xr-x",
+         ),
+         # File at level 1
+         FileSummary(
+             path="/root/level1/file1.txt",
+             size_bytes=100,
+             modified="2023-01-01T00:00:00Z",
+             file_type="file",
+             permissions="-rw-r--r--",
+         ),
+         # File at level 2
+         FileSummary(
+             path="/root/level1/level2/file2.txt",
+             size_bytes=200,
+             modified="2023-01-01T00:00:00Z",
+             file_type="file",
+             permissions="-rw-r--r--",
+         ),
+         # File at level 3
+         FileSummary(
+             path="/root/level1/level2/level3/file3.txt",
+             size_bytes=300,
+             modified="2023-01-01T00:00:00Z",
+             file_type="file",
+             permissions="-rw-r--r--",
+         ),
+     ]
+
+     result_depth_0 = calculate_directory_sizes(files, max_depth=0)
+     assert result_depth_0["/root"] == 0
+     assert result_depth_0["/root/level1"] == 0
+     assert result_depth_0["/root/level1/level2"] == 0
+     assert result_depth_0["/root/level1/level2/level3"] == 0
+
+     # ensure that we stop early if the max depth is reached
+     result_depth_2 = calculate_directory_sizes(files, max_depth=2)
+
+     assert result_depth_2["/root"] == 0
+     assert result_depth_2["/root/level1"] == 100  # file1.txt only
+     assert result_depth_2["/root/level1/level2"] == 200  # file2.txt only
+     assert result_depth_2["/root/level1/level2/level3"] == 300  # file3.txt only
@@ -466,7 +466,7 @@ def test_model_cache_dockerfile_v2(test_data_path):
      print(gen_docker_file)
      assert "truss-transfer" in gen_docker_file
      assert (
-         "COPY ./bptr-manifest /static-bptr/static-bptr-manifest.json"
+         "COPY --chown= ./bptr-manifest /static-bptr/static-bptr-manifest.json"
          in gen_docker_file
      ), "bptr-manifest copy not found in Dockerfile"
      assert "cache_warmer.py" not in gen_docker_file
@@ -21,6 +21,54 @@ from prometheus_client.parser import text_string_to_metric_families
  PATCH_PING_MAX_DELAY_SECS = 3


+ def _start_truss_server(
+     stdout_capture_file_path: str,
+     truss_control_container_fs: Path,
+     with_patch_ping_flow: bool,
+     patch_ping_server_port: int,
+     ctrl_port: int,
+     inf_port: int,
+ ):
+     """Module-level function to avoid pickling issues with multiprocessing."""
+     if with_patch_ping_flow:
+         os.environ["PATCH_PING_URL_TRUSS"] = (
+             f"http://localhost:{patch_ping_server_port}"
+         )
+     sys.stdout = open(stdout_capture_file_path, "w")
+     app_path = truss_control_container_fs / "app"
+     sys.path.append(str(app_path))
+     control_path = truss_control_container_fs / "control" / "control"
+     sys.path.append(str(control_path))
+
+     from server import ControlServer
+
+     control_server = ControlServer(
+         python_executable_path=sys.executable,
+         inf_serv_home=str(app_path),
+         control_server_port=ctrl_port,
+         inference_server_port=inf_port,
+     )
+     control_server.run()
+
+
+ def _start_patch_ping_server(patch_ping_server_port: int):
+     """Module-level function to avoid pickling issues with multiprocessing."""
+     import json
+     import random
+     import time
+     from http.server import BaseHTTPRequestHandler, HTTPServer
+
+     class Handler(BaseHTTPRequestHandler):
+         def do_POST(self):
+             time.sleep(random.uniform(0, PATCH_PING_MAX_DELAY_SECS))
+             self.send_response(200)
+             self.end_headers()
+             self.wfile.write(bytes(json.dumps({"is_current": True}), encoding="utf-8"))
+
+     httpd = HTTPServer(("localhost", patch_ping_server_port), Handler)
+     httpd.serve_forever()
+
+
  @dataclass
  class ControlServerDetails:
      control_server_process: Process
@@ -270,51 +318,24 @@ def _configured_control_server(
      inf_port = ctrl_port + 1
      patch_ping_server_port = ctrl_port + 2

-     def start_truss_server(stdout_capture_file_path):
-         if with_patch_ping_flow:
-             os.environ["PATCH_PING_URL_TRUSS"] = (
-                 f"http://localhost:{patch_ping_server_port}"
-             )
-         sys.stdout = open(stdout_capture_file_path, "w")
-         app_path = truss_control_container_fs / "app"
-         sys.path.append(str(app_path))
-         control_path = truss_control_container_fs / "control" / "control"
-         sys.path.append(str(control_path))
-
-         from server import ControlServer
-
-         control_server = ControlServer(
-             python_executable_path=sys.executable,
-             inf_serv_home=str(app_path),
-             control_server_port=ctrl_port,
-             inference_server_port=inf_port,
-         )
-         control_server.run()
-
-     def start_patch_ping_server():
-         import json
-         import random
-         import time
-         from http.server import BaseHTTPRequestHandler, HTTPServer
-
-         class Handler(BaseHTTPRequestHandler):
-             def do_POST(self):
-                 time.sleep(random.uniform(0, PATCH_PING_MAX_DELAY_SECS))
-                 self.send_response(200)
-                 self.end_headers()
-                 self.wfile.write(
-                     bytes(json.dumps({"is_current": True}), encoding="utf-8")
-                 )
-
-         httpd = HTTPServer(("localhost", patch_ping_server_port), Handler)
-         httpd.serve_forever()
-
      stdout_capture_file = tempfile.NamedTemporaryFile()
-     subproc = Process(target=start_truss_server, args=(stdout_capture_file.name,))
+     subproc = Process(
+         target=_start_truss_server,
+         args=(
+             stdout_capture_file.name,
+             truss_control_container_fs,
+             with_patch_ping_flow,
+             patch_ping_server_port,
+             ctrl_port,
+             inf_port,
+         ),
+     )
      subproc.start()
      proc_id = subproc.pid
      if with_patch_ping_flow:
-         patch_ping_server_proc = Process(target=start_patch_ping_server)
+         patch_ping_server_proc = Process(
+             target=_start_patch_ping_server, args=(patch_ping_server_port,)
+         )
          patch_ping_server_proc.start()
      try:
          time.sleep(2.0)
@@ -10,23 +10,30 @@ from pathlib import Path
  import pytest


- @pytest.mark.integration
- def test_truss_server_termination(truss_container_fs):
-     port = 10123
+ def _start_truss_server(
+     stdout_capture_file_path: str, truss_container_fs: Path, port: int
+ ):
+     """Module-level function to avoid pickling issues with multiprocessing."""
+     sys.stdout = open(stdout_capture_file_path, "w")
+     app_path = truss_container_fs / "app"
+     sys.path.append(str(app_path))
+     os.chdir(app_path)
+
+     from truss_server import TrussServer

-     def start_truss_server(stdout_capture_file_path):
-         sys.stdout = open(stdout_capture_file_path, "w")
-         app_path = truss_container_fs / "app"
-         sys.path.append(str(app_path))
-         os.chdir(app_path)
+     server = TrussServer(http_port=port, config_or_path=app_path / "config.yaml")
+     server.start()

-         from truss_server import TrussServer

-         server = TrussServer(http_port=port, config_or_path=app_path / "config.yaml")
-         server.start()
+ @pytest.mark.integration
+ def test_truss_server_termination(truss_container_fs):
+     port = 10123

      stdout_capture_file = tempfile.NamedTemporaryFile()
-     subproc = Process(target=start_truss_server, args=(stdout_capture_file.name,))
+     subproc = Process(
+         target=_start_truss_server,
+         args=(stdout_capture_file.name, truss_container_fs, port),
+     )
      subproc.start()
      proc_id = subproc.pid
      time.sleep(2.0)
@@ -1,6 +1,11 @@
  ARG PYVERSION=py39
  FROM baseten/truss-server-base:3.9-v0.4.3 AS truss_server
  ENV PYTHON_EXECUTABLE="/usr/local/bin/python3"
+ ENV HOME=/home/app
+ ENV APP_HOME=/app
+ RUN mkdir -p ${APP_HOME} /control
+ RUN useradd -u 60000 -ms /bin/bash app
+ ENV DEBIAN_FRONTEND=noninteractive
  RUN grep -w 'ID=debian\|ID_LIKE=debian' /etc/os-release || { echo "ERROR: Supplied base image is not a debian image"; exit 1; }
  RUN /usr/local/bin/python3 -c "import sys; \
  sys.exit(0) \
@@ -13,25 +18,23 @@ RUN if ! command -v uv >/dev/null 2>&1; then \
  command -v curl >/dev/null 2>&1 || (apt update && apt install -y curl) && \
  curl -LsSf --retry 5 --retry-delay 5 https://astral.sh/uv/0.7.19/install.sh | sh; \
  fi
- ENV PATH="/root/.local/bin:$PATH"
+ ENV PATH=${PATH}:${HOME}/.local/bin
  ENV PYTHONUNBUFFERED="True"
- ENV DEBIAN_FRONTEND="noninteractive"
  RUN apt update && \
  apt install -y bash build-essential git curl ca-certificates \
  && apt-get autoremove -y \
  && apt-get clean -y \
  && rm -rf /var/lib/apt/lists/*
- COPY ./base_server_requirements.txt base_server_requirements.txt
+ COPY --chown= ./base_server_requirements.txt base_server_requirements.txt
  RUN UV_HTTP_TIMEOUT=${UV_HTTP_TIMEOUT:-300} uv pip install --index-strategy unsafe-best-match --python /usr/local/bin/python3 -r base_server_requirements.txt --no-cache-dir
- COPY ./requirements.txt requirements.txt
+ COPY --chown= ./requirements.txt requirements.txt
  RUN UV_HTTP_TIMEOUT=${UV_HTTP_TIMEOUT:-300} uv pip install --index-strategy unsafe-best-match --python /usr/local/bin/python3 -r requirements.txt --no-cache-dir
- ENV APP_HOME="/app"
  WORKDIR $APP_HOME
- COPY ./data /app/data
- COPY ./server /app
- COPY ./config.yaml /app/config.yaml
- COPY ./model /app/model
- COPY ./packages /packages
+ COPY --chown= ./data ${APP_HOME}/data
+ COPY --chown= ./server ${APP_HOME}
+ COPY --chown= ./config.yaml ${APP_HOME}/config.yaml
+ COPY --chown= ./model ${APP_HOME}/model
+ COPY --chown= ./packages /packages
  ENV INFERENCE_SERVER_PORT="8080"
  ENV SERVER_START_CMD="/usr/local/bin/python3 /app/main.py"
  ENTRYPOINT ["/usr/local/bin/python3", "/app/main.py"]
@@ -990,8 +990,10 @@ def test_is_healthy_returns_503_on_load_failure():
              # when the model goes down, this will throw an exception
              break
      diff = container.diff()
-     assert "/root/inference_server_crashed.txt" in diff
-     assert diff["/root/inference_server_crashed.txt"] == "A"
+     # the crash file is written to the app user's home directory
+     crash_file_path = "/home/app/inference_server_crashed.txt"
+     assert crash_file_path in diff
+     assert diff[crash_file_path] == "A"


  @pytest.mark.integration
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: truss
- Version: 0.10.13
+ Version: 0.11.1rc2
  Summary: A seamless bridge from model development to model delivery
  Project-URL: Repository, https://github.com/basetenlabs/truss
  Project-URL: Homepage, https://truss.baseten.co
@@ -17,7 +17,7 @@ truss/cli/logs/model_log_watcher.py,sha256=NACcP-wkcaroYa2Cb9BZC7Yr0554WZa_FSM2L
  truss/cli/logs/training_log_watcher.py,sha256=r6HRqrLnz-PiKTUXiDYYxg4ZnP8vYcXlEX1YmgHhzlo,1173
  truss/cli/logs/utils.py,sha256=z-U_FG4BUzdZLbE3BnXb4DZQ0zt3LSZ3PiQpLaDuc3o,1031
  truss/cli/train/common.py,sha256=xTR41U5FeSndXfNBBHF9wF5XwZH1sOIVFlv-XHjsKIU,1547
- truss/cli/train/core.py,sha256=gfFRqxCxGsanbMNgJBJ0WFqVxZm4SwOEM2iyLS3K8Ns,18687
+ truss/cli/train/core.py,sha256=dAmetxKqSc4bQPnVS_8WLfNsw1L7vLT2tU02BVwRPgc,20206
  truss/cli/train/deploy_from_checkpoint_config.yml,sha256=mktaVrfhN8Kjx1UveC4xr-gTW-kjwbHvq6bx_LpO-Wg,371
  truss/cli/train/deploy_from_checkpoint_config_whisper.yml,sha256=6GbOorYC8ml0UyOUvuBpFO_fuYtYE646JqsalR-D4oY,406
  truss/cli/train/metrics_watcher.py,sha256=smz-zrEsBj_-wJHI0pAZ-EAPrvfCWzq1eQjGiFNM-Mk,12755
@@ -36,9 +36,9 @@ truss/contexts/docker_build_setup.py,sha256=cF4ExZgtYvrWxvyCAaUZUvV_DB_7__MqVomU
  truss/contexts/truss_context.py,sha256=uS6L-ACHxNk0BsJwESOHh1lA0OGGw0pb33aFKGsASj4,436
  truss/contexts/image_builder/cache_warmer.py,sha256=TGMV1Mh87n2e_dSowH0sf0rZhZraDOR-LVapZL3a5r8,7377
  truss/contexts/image_builder/image_builder.py,sha256=IuRgDeeoHVLzIkJvKtX3807eeqEyaroCs_KWDcIHZUg,1461
- truss/contexts/image_builder/serving_image_builder.py,sha256=FH5HPnrr9_OomN5WplsyUrGGETe9ld6h3q9JCpvB6FY,33322
+ truss/contexts/image_builder/serving_image_builder.py,sha256=ywPV6qsItc7FgxAvgVq1ktQdtFAAQ6tyy5nzwO6pBaA,33647
  truss/contexts/image_builder/util.py,sha256=y2-CjUKv0XV-0w2sr1fUCflysDJLsoU4oPp6tvvoFnk,1203
- truss/contexts/local_loader/docker_build_emulator.py,sha256=rmf7I28zksSmHjwvJMx2rIa6xK4KeR5fBm5YFth_fQg,2464
+ truss/contexts/local_loader/docker_build_emulator.py,sha256=3n0eIlJblz_sldh4AN8AHQDyfjQGdYyld5FabBdd9wE,3563
  truss/contexts/local_loader/dockerfile_parser.py,sha256=GoRJ0Af_3ILyLhjovK5lrCGn1rMxz6W3l681ro17ZzI,1344
  truss/contexts/local_loader/load_model_local.py,sha256=7XoIjjDi9-PilI5VUgGbTn4ucxFbQF6eQ-SsIcchtcM,1867
  truss/contexts/local_loader/truss_module_loader.py,sha256=OtewX72XljkA0MPqqf9iPIqvUhG6HwP6q-IpQfm143o,5710
@@ -55,7 +55,7 @@ truss/remote/baseten/__init__.py,sha256=XNqJW1zyp143XQc6-7XVwsUA_Q_ZJv_ausn1_Oht
  truss/remote/baseten/api.py,sha256=lJOt2i3tu0ZeCh4B_-hpfpjcZKgTHVnkxraooK7TUHw,24699
  truss/remote/baseten/auth.py,sha256=tI7s6cI2EZgzpMIzrdbILHyGwiHDnmoKf_JBhJXT55E,776
  truss/remote/baseten/core.py,sha256=uxtmBI9RAVHu1glIEJb5Q4ccJYLeZM1Cp5Svb9W68Yw,21965
- truss/remote/baseten/custom_types.py,sha256=gUG7EkTeXzqcqznbKxz1SGTtHavNKGm1UoTFiln3LmQ,4309
+ truss/remote/baseten/custom_types.py,sha256=1OfbZwT-n7mrYQ4ygfWuvOnENedZ4L6zOSFMAhBAVqI,4509
  truss/remote/baseten/error.py,sha256=3TNTwwPqZnr4NRd9Sl6SfLUQR2fz9l6akDPpOntTpzA,578
  truss/remote/baseten/remote.py,sha256=Se8AES5mk8jxa8S9fN2DSG7wnsaV7ftRjJ4Uwc_w_S0,22544
  truss/remote/baseten/rest_client.py,sha256=_t3CWsWARt2u0C0fDsF4rtvkkHe-lH7KXoPxWXAkKd4,1185
@@ -66,12 +66,12 @@ truss/remote/baseten/utils/time.py,sha256=Ry9GMjYnbIGYVIGwtmv4V8ljWjvdcaCf5NOQzl
  truss/remote/baseten/utils/transfer.py,sha256=d3VptuQb6M1nyS6kz0BAfeOYDLkMKUjatJXpY-mp-As,1548
  truss/templates/README.md.jinja,sha256=N7CJdyldZuJamj5jLh47le0hFBdu9irVsTBqoxhPNPQ,2476
  truss/templates/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- truss/templates/base.Dockerfile.jinja,sha256=vFAJH1lC9jg90-076H2DCmkXUAlpseitIN6c4UwagxA,4020
- truss/templates/cache.Dockerfile.jinja,sha256=LhsVP9F3BATKQGkgya_YT4v6ABTUkpy-Jb3N36zsw10,1030
+ truss/templates/base.Dockerfile.jinja,sha256=irked6fWbiZ4tMkhR3zi3njpaaI9bANVqq7PTjp_Tmc,5610
+ truss/templates/cache.Dockerfile.jinja,sha256=1qZqDo1phrcqi-Vwol-VafYJkADsBbQWU6huQ-_1x00,1146
  truss/templates/cache_requirements.txt,sha256=xoPoJ-OVnf1z6oq_RVM3vCr3ionByyqMLj7wGs61nUs,87
- truss/templates/copy_cache_files.Dockerfile.jinja,sha256=arHldnuclt7vUFHyRz6vus5NGMDkIofm-1RU37A0xZM,98
+ truss/templates/copy_cache_files.Dockerfile.jinja,sha256=Os5zFdYLZ_AfCRGq4RcpVTObOTwL7zvmwYcvOzd_Zqo,126
  truss/templates/docker_server_requirements.txt,sha256=PyhOPKAmKW1N2vLvTfLMwsEtuGpoRrbWuNo7tT6v2Mc,18
- truss/templates/server.Dockerfile.jinja,sha256=TK4P5y8SPV7Mfy0dX8_u10SiGP5PdGcKj5fKrKk575A,5996
+ truss/templates/server.Dockerfile.jinja,sha256=CUYnF_hgxPGq2re7__0UPWlwzOHMoFkxp6NVKi3U16s,7071
  truss/templates/control/requirements.txt,sha256=Kk0tYID7trPk5gwX38Wrt2-YGWZAXFJCJRcqJ8ZzCjc,251
  truss/templates/control/control/application.py,sha256=jYeta6hWe1SkfLL3W4IDmdYjg3ZuKqI_UagWYs5RB_E,3793
  truss/templates/control/control/endpoints.py,sha256=FM-sgao7I3gMoUTasM3Xq_g2LDoJQe75JxIoaQxzeNo,10031
@@ -92,7 +92,7 @@ truss/templates/custom/model/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NM
  truss/templates/custom/model/model.py,sha256=J04rLxK09Pwt2F4GoKOLKL-H-CqZUdYIM-PL2CE9PoE,1079
  truss/templates/custom_python_dx/my_model.py,sha256=NG75mQ6wxzB1BYUemDFZvRLBET-UrzuUK4FuHjqI29U,910
  truss/templates/docker_server/proxy.conf.jinja,sha256=Lg-PcZzKflG85exZKHNgW_I6r0mATV8AtOIBaE40-RM,1669
- truss/templates/docker_server/supervisord.conf.jinja,sha256=CoaSLv0Lr8t1tS_q102IFufNX2lWrlbCHJLjMhYjOwM,1711
+ truss/templates/docker_server/supervisord.conf.jinja,sha256=dd37fwZE--cutrvOUCqEyJQQQhlp61H2IUs2huKWsSk,1808
  truss/templates/server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  truss/templates/server/main.py,sha256=kWXrdD8z8IpamyWxc8qcvd5ck9gM1Kz2QH5qHJCnmOQ,222
  truss/templates/server/model_wrapper.py,sha256=k75VVISwwlsx5EGb82UZsu8kCM_i6Yi3-Hd0-Kpm1yo,42055
@@ -103,7 +103,7 @@ truss/templates/server/common/errors.py,sha256=qWeZlmNI8ZGbZbOIp_mtS6IKvUFIzhj3Q
  truss/templates/server/common/patches.py,sha256=uEOzvDnXsHOkTSa8zygGYuR4GHhrFNVHNQc5peJcwvo,1393
  truss/templates/server/common/retry.py,sha256=dtz6yvwLoY0i55FnxECz57zEOKjAhGMYvvM-k9jiR9c,624
  truss/templates/server/common/schema.py,sha256=WLFtVyEKmk4whg5_gk6Gt1vOD6wM5fWKLb4zNuD0bkw,6042
- truss/templates/server/common/tracing.py,sha256=XSTXNoRtV8vXwveJoX3H32go0JKnLmznZ2TtrVzIe4M,5967
+ truss/templates/server/common/tracing.py,sha256=TDokphTO0O-b0xZLkkDMU6Z_JIsaZA0aimL6UIQB5eI,4808
  truss/templates/server/common/patches/whisper/patch.py,sha256=kDECQ-wmEpeAZFhUTQP457ofueeMsm7DgNy9tqinhJQ,2383
  truss/templates/shared/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  truss/templates/shared/dynamic_config_resolver.py,sha256=75s42NFhQI5jL7BqlJH_UkuQS7ptbtFh13f2nh6X5Wo,920
@@ -132,7 +132,7 @@ truss/tests/test_context_builder_image.py,sha256=fVZNJSzZNiWa7Dr1X_VhhMJtyJ5HzsL
  truss/tests/test_control_truss_patching.py,sha256=lbMuAjLbkeDRLxUxXHWr41BZyhZKHQYoMnbJSj3dqrc,15390
  truss/tests/test_custom_server.py,sha256=GP2qMgnqxJMPRtfEciqbhBcG0_JUK7gNL7nrXPGrSLg,1305
  truss/tests/test_docker.py,sha256=3RI6jEC9CVQsKj83s_gOBl3EkdOaov-KEX4IihfMJW4,523
- truss/tests/test_model_inference.py,sha256=gL_uIFbeMoJqSe0vwUEhRN9fB4p3Q2mT1YejFMSekaU,76119
+ truss/tests/test_model_inference.py,sha256=9QfPMa1kjxvKCWg5XKocjwcpfDkKB7pWd8bn4hIkshk,76213
  truss/tests/test_model_schema.py,sha256=Bw28CZ4D0JQOkYdBQJZvgryeW0TRn7Axketp5kvZ_t4,14219
  truss/tests/test_testing_utilities_for_other_tests.py,sha256=YqIKflnd_BUMYaDBSkX76RWiWGWM_UlC2IoT4NngMqE,3048
  truss/tests/test_truss_gatherer.py,sha256=bn288OEkC49YY0mhly4cAl410ktZPfElNdWwZy82WfA,1261
@@ -141,9 +141,9 @@ truss/tests/test_util.py,sha256=hs1bNMkXKEdoPRx4Nw-NAEdoibR92OubZuADGmbiYsQ,1344
  truss/tests/cli/test_cli.py,sha256=yfbVS5u1hnAmmA8mJ539vj3lhH-JVGUvC4Q_Mbort44,787
  truss/tests/cli/train/test_cache_view.py,sha256=aVRCh3atRpFbJqyYgq7N-vAW0DiKMftQ7ajUqO2ClOg,22606
  truss/tests/cli/train/test_deploy_checkpoints.py,sha256=wQZ3DPLPAyXE3iaQiyHJTBO15v_gXN44eDk1StYkKmM,44764
- truss/tests/cli/train/test_train_cli_core.py,sha256=T1Xa6-NRk2nTJGX6sXaA8x4qCwL3Ini72PBI2gW7rYM,7879
+ truss/tests/cli/train/test_train_cli_core.py,sha256=vzYfxKdwoa3NaFMrVZbSg5qOoLXivMvZXN1ClQirGTQ,16148
  truss/tests/cli/train/resources/test_deploy_from_checkpoint_config.yml,sha256=GF7r9l0KaeXiUYCPSBpeMPd2QG6PeWWyI12NdbqLOgc,1930
- truss/tests/contexts/image_builder/test_serving_image_builder.py,sha256=iJA7nxcLXhBmyjhLIKeN64ql0OI_R53l-qSt3SsENV8,22368
+ truss/tests/contexts/image_builder/test_serving_image_builder.py,sha256=16niCXZnuxFHXYQw2vPFZ8svSZafkH5DT0Gx3Z9Xdd8,22377
  truss/tests/contexts/local_loader/test_load_local.py,sha256=D1qMH2IpYA2j5009v50QMgUnKdeOsX15ndkwXe10a4E,801
  truss/tests/contexts/local_loader/test_truss_module_finder.py,sha256=oN1K2lg3ATHY5yOVUTfQIaSqusTF9I2wFaYaTSo5-O4,5342
  truss/tests/local/test_local_config_handler.py,sha256=aLvcOyfppskA2MziVLy_kMcagjxMpO4mjar9zxUN6g0,2245
@@ -163,7 +163,7 @@ truss/tests/remote/baseten/test_remote.py,sha256=y1qSPL1t7dBeYI3xMFn436fttG7wkYd
  truss/tests/remote/baseten/test_service.py,sha256=ufZbtQlBNIzFCxRt_iE-APLpWbVw_3ViUpSh6H9W5nU,1945
  truss/tests/templates/control/control/test_endpoints.py,sha256=tGU3w8zOKC8LfWGdhp-TlV7E603KXg2xGwpqDdf8Pnw,3385
  truss/tests/templates/control/control/test_server.py,sha256=r1O3VEK9eoIL2-cg8nYLXYct_H3jf5rGp1wLT1KBdeA,9488
- truss/tests/templates/control/control/test_server_integration.py,sha256=dWtYuieUT4wrV0FIn-9R6FHNs0qyqXy6TAxrFI_gLgw,11400
+ truss/tests/templates/control/control/test_server_integration.py,sha256=EdDY3nLzjrRCJ5LI5yZsNCEImSRkxTL7Rn9mGnK67zA,11837
  truss/tests/templates/control/control/helpers/test_context_managers.py,sha256=3LoonRaKu_UvhaWs1eNmEQCZq-iJ3aIjI0Mn4amC8Bw,283
  truss/tests/templates/control/control/helpers/test_model_container_patch_applier.py,sha256=jhPgExGFF42iuWPM9ry93dnpF765d-CGTCIhbswK0hk,5730
  truss/tests/templates/control/control/helpers/test_requirement_name_identifier.py,sha256=kPYrAb97BtN8Wm0Hofw7iJ412Tew2xHgiteKtXVoC5A,2980
@@ -172,7 +172,7 @@ truss/tests/templates/core/server/test_lazy_data_resolver_v2.py,sha256=xZNMhfhHa
  truss/tests/templates/core/server/test_secrets_resolver.py,sha256=qFHZEKs9t2Gj9JBJKB8xLbIjCgBpIUoNanQ3l5RBoRM,1550
  truss/tests/templates/server/test_model_wrapper.py,sha256=1has4G3wiHd1g4JzDunJCMkT2r6LFUSIt4HaP5PVx5A,9406
  truss/tests/templates/server/test_schema.py,sha256=HfaPGIm8u39qSvfzUwk4Ymtvq38JBczRUlmSYZS8X5I,9527
- truss/tests/templates/server/test_truss_server.py,sha256=AYDF6NjJNarT7h5Ok7M3URV8jVxoDl1LNJZYkeBTgrk,1522
+ truss/tests/templates/server/test_truss_server.py,sha256=iNc_JZ5Pobtj41d_uhlBUv3UdBI0dWpKtZYuG_X_5QU,1668
  truss/tests/templates/server/common/test_retry.py,sha256=-7yw0DvDXhCntNhFbTP1liWQQhlbQmZAA4AUW1VFsu0,2039
  truss/tests/test_data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  truss/tests/test_data/auto-mpg.data,sha256=SLgw4R_u5VclJfjxaR3bnTjT17cGPtzY_KZywqXhfY0,30286
@@ -181,7 +181,7 @@ truss/tests/test_data/pima-indians-diabetes.csv,sha256=BvW3ws17ymhv2k-S6rX2Hn_2Q
  truss/tests/test_data/readme_int_example.md,sha256=fuHvpLtdkJy1f4NAR_djotVBdzusHYNXc-Fwh588XAE,1586
  truss/tests/test_data/readme_no_example.md,sha256=T2CzFMRvICXeX3_5XbFoqhHchcHGot-xM7izx34B3aQ,1607
  truss/tests/test_data/readme_str_example.md,sha256=fP4pvMqgLdIapaOf_BgRiV0H7pw4so0RNxrlq5lbROE,1726
- truss/tests/test_data/server.Dockerfile,sha256=7DZ9RjgxQ7gWI9_BeDSdwZezjum4dT8NoCaiB_A5ZXI,1790
+ truss/tests/test_data/server.Dockerfile,sha256=auEgFqfrnTk6k3XZL3A1CqPd_Hnu_0TYtVuwrJv2eHU,1971
  truss/tests/test_data/annotated_types_truss/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  truss/tests/test_data/annotated_types_truss/config.yaml,sha256=B-ZyyjLLqtxGfXj2tkH68Hy7NOMB_coYvoWyWom61g0,147
  truss/tests/test_data/annotated_types_truss/model/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -364,8 +364,8 @@ truss_train/definitions.py,sha256=V985HhY4rdXL10DZxpFEpze9ScxzWErMht4WwaPknGU,67
  truss_train/deployment.py,sha256=lWWANSuzBWu2M4oK4qD7n-oVR1JKdmw2Pn5BJQHg-Ck,3074
  truss_train/loader.py,sha256=0o66EjBaHc2YY4syxxHVR4ordJWs13lNXnKjKq2wq0U,1630
  truss_train/public_api.py,sha256=9N_NstiUlmBuLUwH_fNG_1x7OhGCytZLNvqKXBlStrM,1220
- truss-0.10.13.dist-info/METADATA,sha256=47EEkDwEv7QkXOh2Po5d32bbzDQHnp0c7iTco3jbQFY,6670
- truss-0.10.13.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- truss-0.10.13.dist-info/entry_points.txt,sha256=-MwKfHHQHQ6j0HqIgvxrz3CehCmczDLTD-OsRHnjjuU,130
- truss-0.10.13.dist-info/licenses/LICENSE,sha256=FTqGzu85i-uw1Gi8E_o0oD60bH9yQ_XIGtZbA1QUYiw,1064
- truss-0.10.13.dist-info/RECORD,,
+ truss-0.11.1rc2.dist-info/METADATA,sha256=-QNAojZwEkUwM3B6Jo9KIbVMpBstsTCKx-qR1S_MFJM,6672
+ truss-0.11.1rc2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ truss-0.11.1rc2.dist-info/entry_points.txt,sha256=-MwKfHHQHQ6j0HqIgvxrz3CehCmczDLTD-OsRHnjjuU,130
+ truss-0.11.1rc2.dist-info/licenses/LICENSE,sha256=FTqGzu85i-uw1Gi8E_o0oD60bH9yQ_XIGtZbA1QUYiw,1064
+ truss-0.11.1rc2.dist-info/RECORD,,