sima-cli 0.0.21-py3-none-any.whl → 0.0.22-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
sima_cli/__version__.py CHANGED
@@ -1,2 +1,2 @@
1
1
  # sima_cli/__version__.py
2
- __version__ = "0.0.21"
2
+ __version__ = "0.0.22"
sima_cli/cli.py CHANGED
@@ -9,6 +9,7 @@ from sima_cli.__version__ import __version__
9
9
  from sima_cli.utils.config import CONFIG_PATH
10
10
  from sima_cli.install.optiview import install_optiview
11
11
  from sima_cli.install.hostdriver import install_hostdriver
12
+ from sima_cli.install.metadata_installer import install_from_metadata, metadata_resolver
12
13
  from sima_cli.serial.serial import connect_serial
13
14
  from sima_cli.nvme.nvme import nvme_format, nvme_remount
14
15
  from sima_cli.network.network import network_menu
@@ -118,6 +119,7 @@ def download(ctx, url, dest):
118
119
  click.echo(f"\n✅ File downloaded successfully to: {path}")
119
120
  return
120
121
  except Exception as e:
122
+ click.echo(f"❌ Failed to download as file {e}")
121
123
  pass
122
124
 
123
125
  # If that fails, try to treat as a folder and download all files
@@ -278,22 +280,44 @@ SDK_INDEPENDENT_COMPONENTS = {"optiview"}
278
280
  ALL_COMPONENTS = SDK_DEPENDENT_COMPONENTS | SDK_INDEPENDENT_COMPONENTS
279
281
 
280
282
  @main.command(name="install")
281
- @click.argument("component", type=click.Choice(ALL_COMPONENTS, case_sensitive=False))
282
- @click.option("-v", "--version", help="SDK version (required for SDK-dependent components)")
283
+ @click.argument("component", required=False)
284
+ @click.option("-v", "--version", help="SDK version (required for SDK-dependent components unless --metadata is provided)")
285
+ @click.option("-m", "--mirror", help="URL to a metadata.json file for generic installation")
286
+ @click.option("-t", "--tag", help="Tag of the package (optional)")
283
287
  @click.pass_context
284
- def install_cmd(ctx, component, version):
288
+ def install_cmd(ctx, component, version, mirror, tag):
285
289
  """
286
- Install supported components such as SDKs or tools.
290
+ Install supported components such as SDKs, tools, or generic packages via metadata.
287
291
 
288
292
  Examples:
289
293
 
290
294
  sima-cli install hostdriver -v 1.6.0
291
295
 
292
296
  sima-cli install optiview
297
+
298
+ sima-cli install -m https://example.com/packages/foo/metadata.json
299
+
300
+ sima-cli install examples.llima -v 1.7.0
293
301
  """
294
- component = component.lower()
295
302
  internal = ctx.obj.get("internal", False)
296
303
 
304
+ # Metadata-based installation path
305
+ if mirror:
306
+ if component:
307
+ click.echo(f"⚠️ Component '{component}' is ignored when using --metadata. Proceeding with metadata-based installation.")
308
+ click.echo(f"🔧 Installing generic component from metadata URL: {mirror}")
309
+ if install_from_metadata(metadata_url=mirror, internal=internal):
310
+ click.echo("✅ Installation complete.")
311
+ return
312
+
313
+ # No component and no metadata: error
314
+ if not component:
315
+ click.echo("❌ You must specify either a component name or provide --metadata.")
316
+ ctx.exit(1)
317
+
318
+ component = component.lower()
319
+
320
+ # Validate version requirement
297
321
  if component in SDK_DEPENDENT_COMPONENTS and not version:
298
322
  click.echo(f"❌ The component '{component}' requires a specific SDK version. Please provide one using -v.")
299
323
  ctx.exit(1)
@@ -301,7 +325,7 @@ def install_cmd(ctx, component, version):
301
325
  if component in SDK_INDEPENDENT_COMPONENTS and version:
302
326
  click.echo(f"ℹ️ The component '{component}' does not require an SDK version. Ignoring -v {version}.")
303
327
 
304
- # Perform the installation logic
328
+ # Hardcoded component installation
305
329
  if component == "palette":
306
330
  click.echo(f"🔧 Installing SDK component 'palette' for version {version} is not implemented yet...")
307
331
  elif component == "hostdriver":
@@ -310,9 +334,20 @@ def install_cmd(ctx, component, version):
310
334
  elif component == "optiview":
311
335
  click.echo("🔧 Installing tool 'optiview'...")
312
336
  install_optiview()
337
+ else:
338
+ # Case 4: Try to resolve metadata URL from version + tag
339
+ try:
340
+ metadata_url = metadata_resolver(component, version, tag)
341
+ click.echo(f"🔧 Installing '{component}' from resolved metadata: {metadata_url}")
342
+ if install_from_metadata(metadata_url=metadata_url, internal=internal):
343
+ click.echo("✅ Installation complete.")
344
+ except Exception as e:
345
+ click.echo(f"❌ Failed to resolve metadata for component '{component}': {e}")
346
+ ctx.exit(1)
313
347
 
314
348
  click.echo("✅ Installation complete.")
315
349
 
350
+
316
351
  # ----------------------
317
352
  # Serial Subcommands
318
353
  # ----------------------
@@ -377,9 +412,9 @@ def nvme_cmd(ctx, operation):
377
412
  elif operation == "remount":
378
413
  try:
379
414
  nvme_remount()
380
- click.echo("✅ NVMe drive successfully remounted at /mnt/nvme.")
381
415
  except Exception as e:
382
416
  click.echo(f"❌ Failed to remount NVMe drive: {e}")
417
+ ctx.exit(1)
383
418
 
384
419
  else:
385
420
  click.echo(f"❌ Unsupported NVMe operation: {operation}")
sima_cli/download/downloader.py CHANGED
@@ -80,10 +80,14 @@ def download_file_from_url(url: str, dest_folder: str = ".", internal: bool = Fa
80
80
  headers["Authorization"] = f"Bearer {auth_token}"
81
81
  request_fn = requests.get
82
82
  head_fn = requests.head
83
- else:
83
+ elif 'https://docs.sima.ai' in url:
84
84
  session = login_external()
85
85
  request_fn = session.get
86
86
  head_fn = session.head
87
+ else:
88
+ session = requests.Session()
89
+ request_fn = session.get
90
+ head_fn = session.head
87
91
 
88
92
  # HEAD request to get total file size
89
93
  head = head_fn(url, headers=headers, timeout=10)
@@ -95,7 +99,7 @@ def download_file_from_url(url: str, dest_folder: str = ".", internal: bool = Fa
95
99
  existing_size = os.path.getsize(dest_path)
96
100
 
97
101
  if existing_size == total_size:
98
- print(f"✔ File already exists and is complete: {file_name}")
102
+ print(f"✔ File already exists and is complete: {file_name}")
99
103
  return dest_path
100
104
  elif existing_size < total_size:
101
105
  resume_header['Range'] = f'bytes={existing_size}-'
@@ -113,7 +117,7 @@ def download_file_from_url(url: str, dest_folder: str = ".", internal: bool = Fa
113
117
  final_total = existing_size + content_length
114
118
 
115
119
  with open(dest_path, mode) as f, tqdm(
116
- desc=f"Downloading {file_name}",
120
+ desc=f"⬇️ Downloading {file_name}",
117
121
  total=final_total,
118
122
  initial=existing_size,
119
123
  unit='B',
sima_cli/install/metadata_info.py ADDED
@@ -0,0 +1,57 @@
1
+ from rich import print
2
+ from rich.table import Table
3
+ from rich.panel import Panel
4
+
5
+ def print_metadata_summary(metadata: dict):
6
+ table = Table(show_header=False, box=None, padding=(0, 1))
7
+
8
+ table.add_row("[bold]Name[/bold]", metadata.get("name", "N/A"))
9
+ table.add_row("[bold]Version[/bold]", metadata.get("version", "N/A"))
10
+ table.add_row("[bold]Release[/bold]", metadata.get("release", "N/A"))
11
+ table.add_row("[bold]Description[/bold]", metadata.get("description", "N/A"))
12
+
13
+ # Platform info
14
+ platform_info = []
15
+ for p in metadata.get("platforms", []):
16
+ platform_type = p.get("type", "unknown")
17
+ if platform_type == "board":
18
+ compat = ", ".join(p.get("compatible_with", []))
19
+ platform_info.append(f"{platform_type} ({compat})")
20
+ elif platform_type in ("host", "generic"):
21
+ os_list = ", ".join(p.get("os", []))
22
+ platform_info.append(f"{platform_type} ({os_list})")
23
+ else:
24
+ platform_info.append(platform_type)
25
+
26
+ table.add_row("[bold]Platforms[/bold]", "; ".join(platform_info) or "N/A")
27
+
28
+ # Resources
29
+ resource_count = len(metadata.get("resources", []))
30
+ table.add_row("[bold]Resources[/bold]", f"{resource_count} file(s)")
31
+
32
+ # Size
33
+ size = metadata.get("size", {})
34
+ table.add_row("[bold]Download Size[/bold]", size.get("download", "N/A"))
35
+ table.add_row("[bold]Install Size[/bold]", size.get("install", "N/A"))
36
+
37
+ print()
38
+ print(Panel(table, title="📦 Package Summary", expand=False))
39
+ print()
40
+
41
+
42
+ def parse_size_string_to_bytes(size_str: str) -> int:
43
+ """
44
+ Convert a size string like '40GB' or '512MB' to bytes.
45
+ """
46
+ size_str = size_str.strip().upper()
47
+ units = {"KB": 10**3, "MB": 10**6, "GB": 10**9}
48
+
49
+ for unit, multiplier in units.items():
50
+ if size_str.endswith(unit):
51
+ try:
52
+ value = float(size_str[:-len(unit)].strip())
53
+ return int(value * multiplier)
54
+ except ValueError:
55
+ raise ValueError(f"Invalid numeric value in size string: '{size_str}'")
56
+
57
+ raise ValueError(f"Unrecognized size unit in '{size_str}'. Must be KB, MB, or GB.")
sima_cli/install/metadata_installer.py ADDED
@@ -0,0 +1,447 @@
1
+ import os
2
+ import re
3
+ import tempfile
4
+ import click
5
+ import json
6
+ import sys
7
+ import shutil
8
+ import tarfile
9
+ import zipfile
10
+ from urllib.parse import urlparse
11
+
12
+ from typing import Dict
13
+ from tqdm import tqdm
14
+ from urllib.parse import urljoin
15
+ from pathlib import Path
16
+ import subprocess
17
+
18
+ from rich.console import Console
19
+ from rich.panel import Panel
20
+
21
+ from huggingface_hub import snapshot_download
22
+
23
+ from sima_cli.utils.disk import check_disk_space
24
+ from sima_cli.utils.env import get_environment_type, get_exact_devkit_type
25
+ from sima_cli.download.downloader import download_file_from_url
26
+ from sima_cli.install.metadata_validator import validate_metadata, MetadataValidationError
27
+ from sima_cli.install.metadata_info import print_metadata_summary, parse_size_string_to_bytes
28
+
29
+ console = Console()
30
+
31
+ def _download_assets(metadata: dict, base_url: str, dest_folder: str, internal: bool = False, skip_models: bool = False) -> list:
32
+ """
33
+ Downloads resources defined in metadata to a local destination folder.
34
+
35
+ Args:
36
+ metadata (dict): Parsed and validated metadata
37
+ base_url (str): Base URL of the metadata file (used to resolve relative resource paths)
38
+ dest_folder (str): Local path to download resources into
39
+ internal (bool): Whether to use internal download routing (if applicable)
40
+ skip_models (bool): If True, skips downloading any file path starting with 'models/'
41
+
42
+ Returns:
43
+ list: Paths to the downloaded local files
44
+ """
45
+ resources = metadata.get("resources", [])
46
+ if not resources:
47
+ raise click.ClickException("❌ No 'resources' defined in metadata.")
48
+
49
+ os.makedirs(dest_folder, exist_ok=True)
50
+ local_paths = []
51
+
52
+ filtered_resources = []
53
+ for r in resources:
54
+ if skip_models and r.strip().lower().startswith("models/"):
55
+ click.echo(f"⏭️ Skipping model file: {r}")
56
+ continue
57
+ filtered_resources.append(r)
58
+
59
+ if not filtered_resources:
60
+ click.echo("ℹ️ No non-model resources to download.")
61
+ return []
62
+
63
+ click.echo(f"📥 Downloading {len(filtered_resources)} resource(s) to: {dest_folder}\n")
64
+
65
+ for resource in filtered_resources:
66
+ try:
67
+ # Handle Hugging Face snapshot-style URL: "hf:<repo_id>@version"
68
+ if resource.startswith("hf:"):
69
+ # Strip prefix and split by @
70
+ resource_spec = resource[3:]
71
+ if "@" in resource_spec:
72
+ repo_id, revision = resource_spec.split("@", 1)
73
+ else:
74
+ repo_id, revision = resource_spec, None
75
+
76
+ if "/" not in repo_id:
77
+ raise click.ClickException(f"❌ Invalid Hugging Face repo spec: {resource}")
78
+
79
+ org, name = repo_id.split("/", 1)
80
+ target_dir = os.path.join(dest_folder, name)
81
+
82
+ click.echo(f"🤗 Downloading Hugging Face repo: {repo_id}" + (f"@{revision}" if revision else ""))
83
+ model_path = snapshot_download(
84
+ repo_id=repo_id,
85
+ local_dir=target_dir,
86
+ revision=revision # None if not specified
87
+ )
88
+ local_paths.append(model_path)
89
+ continue
90
+
91
+ # Handle normal relative or absolute URLs
92
+ resource_url = urljoin(base_url, resource)
93
+ local_path = download_file_from_url(
94
+ url=resource_url,
95
+ dest_folder=dest_folder,
96
+ internal=internal
97
+ )
98
+ click.echo(f"✅ Downloaded: {resource}")
99
+ local_paths.append(local_path)
100
+
101
+ except Exception as e:
102
+ raise click.ClickException(f"❌ Failed to download resource '{resource}': {e}")
103
+
104
+ return local_paths
105
+
106
+ def _download_and_validate_metadata(metadata_url, internal=False):
107
+ """
108
+ Downloads (if remote), validates, and parses metadata from a given URL or local file path.
109
+
110
+ Args:
111
+ metadata_url (str): URL or local path to a metadata.json file
112
+ internal (bool): Whether to use internal mirrors or logic in downloader
113
+
114
+ Returns:
115
+ tuple: (parsed metadata dict, folder containing the metadata file)
116
+ """
117
+ try:
118
+ parsed = urlparse(metadata_url)
119
+
120
+ # Case 1: Local file (e.g., /path/to/file or ./file)
121
+ if parsed.scheme == "" or parsed.scheme == "file":
122
+ metadata_path = parsed.path
123
+ if not os.path.isfile(metadata_path):
124
+ raise FileNotFoundError(f"File not found: {metadata_path}")
125
+ click.echo(f"📄 Using local metadata file: {metadata_path}")
126
+
127
+ # Case 2: Remote URL
128
+ else:
129
+ with tempfile.TemporaryDirectory() as tmpdir:
130
+ metadata_path = download_file_from_url(
131
+ url=metadata_url,
132
+ dest_folder=tmpdir,
133
+ internal=internal
134
+ )
135
+ click.echo(f"⬇️ Downloaded metadata to: {metadata_path}")
136
+
137
+ # Must copy to outside tmpdir since tmpdir will be deleted
138
+ # But since we're returning contents only, no need to keep file
139
+ with open(metadata_path, "r", encoding="utf-8") as f:
140
+ metadata = json.load(f)
141
+ validate_metadata(metadata)
142
+ click.echo("✅ Metadata validated successfully.")
143
+ return metadata, os.path.dirname(metadata_path)
144
+
145
+ # Common validation logic for local file
146
+ with open(metadata_path, "r", encoding="utf-8") as f:
147
+ metadata = json.load(f)
148
+
149
+ validate_metadata(metadata)
150
+ click.echo("✅ Metadata validated successfully.")
151
+ return metadata, os.path.dirname(os.path.abspath(metadata_path))
152
+
153
+ except MetadataValidationError as e:
154
+ click.echo(f"❌ Metadata validation failed: {e}")
155
+ raise click.Abort()
156
+
157
+ except Exception as e:
158
+ click.echo(f"❌ Failed to retrieve or parse metadata from {metadata_url}: {e}")
159
+ raise click.Abort()
160
+
161
+ def _check_whether_disk_is_big_enough(metadata: dict):
162
+ # Step 3: Disk space check
163
+ try:
164
+ install_size_str = metadata.get("size", {}).get("install")
165
+ if install_size_str:
166
+ required_bytes = parse_size_string_to_bytes(install_size_str)
167
+ if not check_disk_space(required_bytes, folder="."):
168
+ required_gb = required_bytes / 1e9
169
+ raise click.ClickException(
170
+ f"Not enough disk space. At least {required_gb:.2f} GB required the in current directory."
171
+ )
172
+
173
+ available_bytes = shutil.disk_usage(".").free
174
+ available_gb = available_bytes / 1e9
175
+ required_gb = required_bytes / 1e9
176
+ click.echo(f"🗄️ Available disk space: {available_gb:.2f} GB")
177
+ click.echo(f"✅ Enough disk space for installation: requires {required_gb:.2f} GB")
178
+ return True
179
+ except Exception as e:
180
+ click.echo(f"❌ Failed to validate disk space: {e}")
181
+ raise click.Abort()
182
+
183
+ return False
184
+
185
+ def _extract_tar_streaming(tar_path: Path, extract_dir: Path):
186
+ """
187
+ Extract tar while preserving full folder structure.
188
+ """
189
+ extracted_files = 0
190
+ with tarfile.open(tar_path, "r:*") as tar:
191
+ with tqdm(desc=f"📦 Extracting {tar_path.name}", unit=" file") as pbar:
192
+ while True:
193
+ member = tar.next()
194
+ if member is None:
195
+ break
196
+
197
+ # Don't strip anything — preserve full path
198
+ if not member.name.strip():
199
+ print(f"⚠️ Skipping empty member in archive: {member}")
200
+ continue
201
+
202
+ tar.extract(member, path=extract_dir)
203
+ extracted_files += 1
204
+ pbar.update(1)
205
+
206
+ print(f"✅ Extracted {extracted_files} files to {extract_dir}/")
207
+
208
+ def _extract_zip_streaming(zip_path: Path, extract_dir: Path):
209
+ """
210
+ Extract a .zip file using streaming to avoid NFS slowness from metadata calls,
211
+ and flatten one top-level directory if present.
212
+ """
213
+ with zipfile.ZipFile(zip_path, "r") as zipf:
214
+ members = zipf.infolist()
215
+ with tqdm(total=len(members), desc=f"📦 Extracting {zip_path.name}", unit="file") as pbar:
216
+ for member in members:
217
+ # Strip one top-level directory if it exists
218
+ parts = Path(member.filename).parts
219
+ if len(parts) > 1:
220
+ stripped_path = Path(*parts[1:])
221
+ else:
222
+ stripped_path = Path(parts[-1])
223
+
224
+ target_path = extract_dir / stripped_path
225
+ target_path.parent.mkdir(parents=True, exist_ok=True)
226
+
227
+ with zipf.open(member) as src, open(target_path, "wb") as dst:
228
+ shutil.copyfileobj(src, dst)
229
+
230
+ pbar.update(1)
231
+
232
+ print(f"✅ Extracted {len(members)} files to {extract_dir}/")
233
+
234
+ def _combine_multipart_files(folder: str):
235
+ """
236
+ Scan a folder for multipart files like name-split-aa, -ab, etc.,
237
+ combine them into a single file, and remove the split parts.
238
+ Then auto-extract .tar files with progress.
239
+ """
240
+ folder = Path(folder)
241
+ parts_by_base = {}
242
+
243
+ # Step 1: Group parts by base name
244
+ for file in folder.iterdir():
245
+ if not file.is_file():
246
+ continue
247
+
248
+ match = re.match(r"(.+)-split-([a-z]{2})$", file.name)
249
+ if match:
250
+ base, part = match.groups()
251
+ parts_by_base.setdefault(base, []).append((part, file))
252
+
253
+ # Step 2: Process each group
254
+ for base, parts in parts_by_base.items():
255
+ parts.sort(key=lambda x: x[0])
256
+ output_file = folder / f"{base}.tar"
257
+ total_size = sum(part_file.stat().st_size for _, part_file in parts)
258
+
259
+ print(f"\n🧩 Reassembling: {output_file.name} from {len(parts)} parts")
260
+
261
+ if not output_file.exists():
262
+ with open(output_file, "wb") as outfile, tqdm(
263
+ total=total_size,
264
+ unit="B",
265
+ unit_scale=True,
266
+ unit_divisor=1024,
267
+ desc=f"Combining {output_file.name}",
268
+ ) as pbar:
269
+ for _, part_file in parts:
270
+ with open(part_file, "rb") as infile:
271
+ while True:
272
+ chunk = infile.read(1024 * 1024) # 1MB
273
+ if not chunk:
274
+ break
275
+ outfile.write(chunk)
276
+ pbar.update(len(chunk))
277
+
278
+ # Step 3: Remove original parts
279
+ # for _, part_file in parts:
280
+ # part_file.unlink()
281
+
282
+ print(f"✅ Created: {output_file.name} ({output_file.stat().st_size / 1e6:.2f} MB)")
283
+
284
+ # Step 4: Auto-extract .tar
285
+ extract_dir = folder / base
286
+ print(f"📦 Extracting {output_file.name} to {extract_dir}/")
287
+ _extract_tar_streaming(output_file, extract_dir)
288
+
289
+ print(f"✅ Extracted to: {extract_dir}/")
290
+
291
+ def _extract_archives_in_folder(folder: str):
292
+ """
293
+ Extract all .tar.gz and .zip files in the given folder into subdirectories.
294
+ Uses streaming to avoid NFS performance issues.
295
+ """
296
+ folder = Path(folder)
297
+ for file in folder.iterdir():
298
+ if not file.is_file():
299
+ continue
300
+
301
+ # TAR.GZ
302
+ if file.suffixes == [".tar", ".gz"] or file.name.endswith(".tar.gz"):
303
+ extract_dir = folder / file.stem.replace(".tar", "")
304
+ print(f"📦 Extracting TAR.GZ: {file.name} to {extract_dir}/")
305
+ _extract_tar_streaming(file, extract_dir)
306
+
307
+ # ZIP
308
+ elif file.suffix == ".zip":
309
+ extract_dir = folder / file.stem
310
+ print(f"📦 Extracting ZIP: {file.name} to {extract_dir}/")
311
+ _extract_zip_streaming(file, extract_dir)
312
+
313
+ def _is_platform_compatible(metadata: dict) -> bool:
314
+ """
315
+ Determines if the current environment is compatible with the package metadata.
316
+
317
+ Args:
318
+ metadata (dict): Metadata that includes a 'platforms' section
319
+
320
+ Returns:
321
+ bool: True if compatible, False otherwise
322
+ """
323
+ env_type, env_subtype = get_environment_type()
324
+ exact_devkit_type = get_exact_devkit_type()
325
+ platforms = metadata.get("platforms", [])
326
+
327
+ for i, platform_entry in enumerate(platforms):
328
+ platform_type = platform_entry.get("type")
329
+ if platform_type != env_type:
330
+ continue
331
+
332
+ # For board/devkit: check compatible_with list
333
+ if env_type == "board":
334
+ compat = platform_entry.get("compatible_with", [])
335
+ if env_subtype not in compat and exact_devkit_type not in compat:
336
+ continue
337
+
338
+ # For host/sdk/generic: optionally check OS match
339
+ if "os" in platform_entry:
340
+ supported_oses = [os_name.lower() for os_name in platform_entry["os"]]
341
+ if env_subtype.lower() not in supported_oses:
342
+ continue
343
+
344
+ # Passed all checks
345
+ return True
346
+
347
+ click.echo(f"❌ Current environment [{env_type}:{env_subtype}] is not compatible with the package")
348
+ return False
349
+
350
+
351
+ def _print_post_install_message(metadata: Dict):
352
+ """
353
+ Print post-installation instructions from the metadata in a compact box.
354
+
355
+ Args:
356
+ metadata (Dict): The package metadata dictionary.
357
+ """
358
+ msg = metadata.get("installation", {}).get("post-message", "").strip()
359
+
360
+ if msg:
361
+ panel = Panel.fit(
362
+ msg,
363
+ title="[bold green]Post-Installation Instructions[/bold green]",
364
+ title_align="left",
365
+ border_style="green",
366
+ padding=(1, 2)
367
+ )
368
+ console.print(panel)
369
+
370
+ def _run_installation_script(metadata: Dict, extract_path: str = "."):
371
+ """
372
+ Run the installation script specified in the metadata.
373
+
374
+ Args:
375
+ metadata (dict): Metadata dictionary with an 'installation' key.
376
+ extract_path (str): Path where the files were extracted.
377
+ """
378
+ script = metadata.get("installation", {}).get("script", "").strip()
379
+ if not script:
380
+ print("⚠️ No installation script provided. Follow package documentation to install the package.")
381
+ return
382
+
383
+ print(f"🚀 Running installation script in: {os.path.abspath(extract_path)}")
384
+ print(f"📜 Script: {script}")
385
+
386
+ # Determine shell type based on platform
387
+ shell_executable = os.environ.get("COMSPEC") if os.name == "nt" else None
388
+
389
+ try:
390
+ subprocess.run(
391
+ script,
392
+ shell=True,
393
+ executable=shell_executable,
394
+ cwd=extract_path,
395
+ check=True
396
+ )
397
+ _print_post_install_message(metadata=metadata)
398
+ except subprocess.CalledProcessError as e:
399
+ print("❌ Installation failed with return code:", e.returncode)
400
+ sys.exit(e.returncode)
401
+
402
+ print("✅ Installation completed successfully.")
403
+
404
+ def install_from_metadata(metadata_url: str, internal: bool, install_dir: str = '.'):
405
+ try:
406
+ metadata, _ = _download_and_validate_metadata(metadata_url, internal)
407
+ print_metadata_summary(metadata=metadata)
408
+
409
+ if _check_whether_disk_is_big_enough(metadata):
410
+ if _is_platform_compatible(metadata):
411
+ local_paths = _download_assets(metadata, metadata_url, install_dir, internal)
412
+ if len(local_paths) > 0:
413
+ _combine_multipart_files(install_dir)
414
+ _extract_archives_in_folder(install_dir)
415
+ _run_installation_script(metadata=metadata, extract_path=install_dir)
416
+
417
+ except Exception as e:
418
+ click.echo(f"❌ Failed to install from metadata URL {metadata_url}: {e}")
419
+ exit(1)
420
+
421
+ return False
422
+
423
+ def metadata_resolver(component: str, version: str = None, tag: str = None) -> str:
424
+ """
425
+ Resolve the metadata.json URL for a given component and version/tag.
426
+
427
+ Args:
428
+ component (str): Component name (e.g., "examples.llima")
429
+ version (str): Optional SDK version string (e.g., "1.7.0")
430
+ tag (str): Optional tag to use (e.g., "dev")
431
+
432
+ Returns:
433
+ str: Fully qualified metadata URL
434
+ """
435
+ if not version:
436
+ raise ValueError("Version (-v) is required for non-hardcoded components.")
437
+
438
+ # Normalize version for URL path
439
+ sdk_path = f"SDK{version}"
440
+ base = f"https://docs.sima.ai/pkg_downloads/{sdk_path}/{component}"
441
+
442
+ if tag:
443
+ metadata_name = f"metadata-{tag}.json"
444
+ else:
445
+ metadata_name = "metadata.json"
446
+
447
+ return f"{base}/{metadata_name}"
sima_cli/install/metadata_validator.py ADDED
@@ -0,0 +1,138 @@
1
+ import re
2
+ import sys
3
+ import json
4
+ from pathlib import Path
5
+
6
+ class MetadataValidationError(Exception):
7
+ pass
8
+
9
+ VALID_TYPES = {"board", "palette", "host"}
10
+ VALID_OS = {"linux", "windows", "mac"}
11
+
12
+ def validate_metadata(data: dict):
13
+ # Top-level required fields
14
+ required_fields = ["name", "version", "release", "platforms", "resources"]
15
+ for field in required_fields:
16
+ if field not in data:
17
+ raise MetadataValidationError(f"Missing required field: '{field}'")
18
+
19
+ # Validate platforms
20
+ if not isinstance(data["platforms"], list):
21
+ raise MetadataValidationError("'platforms' must be a list")
22
+
23
+ for i, platform in enumerate(data["platforms"]):
24
+ if "type" not in platform:
25
+ raise MetadataValidationError(f"Missing 'type' in platform entry {i}")
26
+ if platform["type"] not in VALID_TYPES:
27
+ raise MetadataValidationError(
28
+ f"Invalid platform type '{platform['type']}' in entry {i}. Must be one of {VALID_TYPES}"
29
+ )
30
+
31
+ if platform["type"] == "board":
32
+ if "compatible_with" not in platform:
33
+ raise MetadataValidationError(f"'compatible_with' is required for board in entry {i}")
34
+ if not isinstance(platform["compatible_with"], list):
35
+ raise MetadataValidationError(f"'compatible_with' must be a list in entry {i}")
36
+
37
+ if "os" in platform:
38
+ if not isinstance(platform["os"], list):
39
+ raise MetadataValidationError(f"'os' must be a list in entry {i}")
40
+ for os_value in platform["os"]:
41
+ if os_value.lower() not in VALID_OS:
42
+ raise MetadataValidationError(
43
+ f"Invalid OS '{os_value}' in platform entry {i}. Supported: {VALID_OS}"
44
+ )
45
+
46
+ # Validate resources
47
+ if not isinstance(data["resources"], list) or not data["resources"]:
48
+ raise MetadataValidationError("'resources' must be a non-empty list")
49
+
50
+ # Validate prerequisite (optional)
51
+ if "prerequisite" in data:
52
+ prereq = data["prerequisite"]
53
+ if "wheel_url" not in prereq or "entry_point" not in prereq:
54
+ raise MetadataValidationError("Both 'wheel_url' and 'entry_point' are required in 'prerequisite'")
55
+ _validate_entry_point_format(prereq["entry_point"], field="prerequisite.entry_point")
56
+
57
+ # Validate installation (optional)
58
+ if "installation" in data:
59
+ install = data["installation"]
60
+ if "script" not in install:
61
+ raise MetadataValidationError("Missing 'script' in 'installation'")
62
+ if not isinstance(install["script"], str):
63
+ raise MetadataValidationError("'installation.script' must be a string")
64
+
65
+ # Validate size (optional)
66
+ if "size" in data:
67
+ size = data["size"]
68
+ if not isinstance(size, dict):
69
+ raise MetadataValidationError("'size' must be a dictionary with 'download' and 'install' fields")
70
+
71
+ for key in ["download", "install"]:
72
+ if key not in size:
73
+ raise MetadataValidationError(f"Missing '{key}' in 'size'")
74
+ if not isinstance(size[key], str):
75
+ raise MetadataValidationError(f"'size.{key}' must be a string")
76
+
77
+ size_str = size[key].strip().upper()
78
+ if not any(size_str.endswith(unit) for unit in ["KB", "MB", "GB"]):
79
+ raise MetadataValidationError(
80
+ f"'size.{key}' must end with one of: KB, MB, GB (e.g., '30GB')"
81
+ )
82
+
83
+ try:
84
+ # Extract number (e.g., "30GB" → 30.0)
85
+ float(size_str[:-2].strip())
86
+ except ValueError:
87
+ raise MetadataValidationError(
88
+ f"'size.{key}' must start with a numeric value (e.g., '30GB')"
89
+ )
90
+
91
+ return True
92
+
93
+
94
+ def _validate_entry_point_format(entry_point: str, field: str):
95
+ if not re.match(r"^[a-zA-Z0-9_.\-]+:[a-zA-Z0-9_]+$", entry_point):
96
+ raise MetadataValidationError(
97
+ f"Invalid format for {field}. Must be in the form 'module:function'"
98
+ )
99
+
100
+
101
+ def validate_file(filepath):
102
+ try:
103
+ with open(filepath, "r") as f:
104
+ metadata = json.load(f)
105
+ validate_metadata(metadata)
106
+ print(f"✅ {filepath} is valid.")
107
+ except FileNotFoundError:
108
+ print(f"❌ File not found: {filepath}")
109
+ except json.JSONDecodeError as e:
110
+ print(f"❌ JSON parse error in {filepath}: {e}")
111
+ except MetadataValidationError as e:
112
+ print(f"❌ Validation failed in {filepath}: {e}")
113
+
114
+ def main():
115
+ if len(sys.argv) != 2:
116
+ print("Usage: python validate_metadata.py <file-or-folder>")
117
+ sys.exit(1)
118
+
119
+ path = Path(sys.argv[1])
120
+
121
+ if not path.exists():
122
+ print(f"❌ Path does not exist: {path}")
123
+ sys.exit(1)
124
+
125
+ if path.is_file():
126
+ validate_file(path)
127
+ elif path.is_dir():
128
+ json_files = list(path.rglob("*.json"))
129
+ if not json_files:
130
+ print(f"⚠️ No JSON files found in directory: {path}")
131
+ for file in json_files:
132
+ validate_file(file)
133
+ else:
134
+ print(f"❌ Unsupported path type: {path}")
135
+ sys.exit(1)
136
+
137
+ if __name__ == "__main__":
138
+ main()
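
For reference, a minimal metadata document that passes validate_metadata above (field values are illustrative, not taken from a real package):

    # Sketch only: smallest dict satisfying the validator's required fields plus the optional 'size' checks.
    from sima_cli.install.metadata_validator import validate_metadata

    example = {
        "name": "example-package",
        "version": "1.0.0",
        "release": "1",
        "platforms": [
            {"type": "board", "compatible_with": ["modalix"]},
            {"type": "host", "os": ["linux"]},
        ],
        "resources": ["example-package.tar.gz"],
        "size": {"download": "1GB", "install": "2GB"},
    }

    validate_metadata(example)  # returns True; raises MetadataValidationError on bad input
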
sima_cli/network/network.py CHANGED
@@ -112,6 +112,20 @@ def get_gateway_for_interface(ip):
112
112
  parts[-1] = "1"
113
113
  return ".".join(parts)
114
114
 
115
+ def populate_resolv_conf(dns_server="8.8.8.8"):
116
+ """
117
+ Use sudo to write a DNS entry into /etc/resolv.conf even if not running as root.
118
+ """
119
+ content = f"nameserver {dns_server}\n"
120
+
121
+ try:
122
+ # Write using echo and sudo tee
123
+ cmd = f"echo '{content.strip()}' | sudo tee /etc/resolv.conf > /dev/null"
124
+ result = subprocess.run(cmd, shell=True, check=True)
125
+ print(f"✅ /etc/resolv.conf updated with nameserver {dns_server}")
126
+ except subprocess.CalledProcessError as e:
127
+ print(f"❌ Failed to update /etc/resolv.conf: {e}")
128
+
115
129
  def set_default_route(iface, ip):
116
130
  gateway = get_gateway_for_interface(ip)
117
131
  if not gateway:
@@ -182,6 +196,7 @@ def network_menu():
182
196
 
183
197
  if second == "Set to DHCP":
184
198
  move_network_file(selected_iface["name"], "dhcp")
199
+ populate_resolv_conf()
185
200
  elif second == "Set to Default Static IP":
186
201
  move_network_file(selected_iface["name"], "static")
187
202
  elif second == "Set as Default Route":
sima_cli/nvme/nvme.py CHANGED
@@ -56,8 +56,27 @@ def add_nvme_to_fstab():
56
56
  click.echo(f"❌ Failed to update /etc/fstab: {e}")
57
57
 
58
58
  def mount_nvme():
59
- subprocess.run("sudo mkdir -p /mnt/nvme", shell=True, check=True)
60
- subprocess.run("sudo mount /dev/nvme0n1p1 /mnt/nvme", shell=True, check=True)
59
+ try:
60
+ # Create mount point
61
+ subprocess.run("sudo mkdir -p /mnt/nvme", shell=True, check=True)
62
+
63
+ # Mount the NVMe partition
64
+ subprocess.run("sudo mount /dev/nvme0n1p1 /mnt/nvme", shell=True, check=True)
65
+
66
+ add_nvme_to_fstab()
67
+
68
+ subprocess.run("sudo mount -a", shell=True, check=True)
69
+
70
+ # Change ownership to user 'sima'
71
+ subprocess.run("sudo chown sima:sima /mnt/nvme", shell=True, check=True)
72
+
73
+ subprocess.run("sudo chmod 755 /mnt/nvme", shell=True, check=True)
74
+
75
+
76
+ print("✅ NVMe mounted and write permission granted to user 'sima'.")
77
+
78
+ except subprocess.CalledProcessError as e:
79
+ print(f"❌ Error during NVMe mount: {e}")
61
80
 
62
81
  def nvme_format():
63
82
  if not is_modalix_devkit():
@@ -87,8 +106,6 @@ def nvme_format():
87
106
  # Format and mount
88
107
  format_nvme(lbaf_index)
89
108
  mount_nvme()
90
- add_nvme_to_fstab()
91
-
92
109
  click.echo("✅ NVMe drive formatted and mounted at /mnt/nvme.")
93
110
  except subprocess.CalledProcessError:
94
111
  click.echo("❌ Formatting process failed.")
@@ -100,13 +117,7 @@ def nvme_remount():
100
117
  return
101
118
 
102
119
  try:
103
- # Ensure mount point exists
104
- subprocess.run("sudo mkdir -p /mnt/nvme", shell=True, check=True)
105
- # Mount the partition
106
- subprocess.run("sudo mount /dev/nvme0n1p1 /mnt/nvme", shell=True, check=True)
107
-
108
- # Add NVME to fstab
109
- add_nvme_to_fstab()
120
+ mount_nvme()
110
121
 
111
122
  except subprocess.CalledProcessError as e:
112
123
  raise RuntimeError(f"Failed to remount NVMe: {e}")
sima_cli/update/updater.py CHANGED
@@ -448,7 +448,7 @@ def perform_update(version_or_url: str, ip: str = None, internal: bool = False,
448
448
  board, version, fdt_name = get_remote_board_info(ip, passwd)
449
449
 
450
450
  if board in ['davinci', 'modalix']:
451
- click.echo(f"🔧 Target board: {board}, board currently running: {version}")
451
+ click.echo(f"🔧 Target board: {board} {fdt_name}, board currently running: {version}")
452
452
 
453
453
  if flavor == 'full' and fdt_name != 'modalix-som.dtb':
454
454
  click.echo(f"❌ You've requested updating {fdt_name} to full image, this is only supported for the Modalix DevKit")
sima_cli/utils/disk.py ADDED
@@ -0,0 +1,15 @@
1
+ import shutil
2
+
3
+ def check_disk_space(required_bytes: int, folder: str = ".") -> bool:
4
+ """
5
+ Check if the given folder has enough free disk space.
6
+
7
+ Args:
8
+ required_bytes (int): Space required in bytes
9
+ folder (str): Path to check (default: current dir)
10
+
11
+ Returns:
12
+ bool: True if enough space is available, False otherwise
13
+ """
14
+ total, used, free = shutil.disk_usage(folder)
15
+ return free >= required_bytes
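
A short sketch of how the new check_disk_space helper pairs with the size parser from metadata_info.py, mirroring _check_whether_disk_is_big_enough in the installer (illustrative only):

    # Sketch only: require 40 GB free in the current directory before installing.
    from sima_cli.utils.disk import check_disk_space
    from sima_cli.install.metadata_info import parse_size_string_to_bytes

    required = parse_size_string_to_bytes("40GB")
    if check_disk_space(required, folder="."):
        print(f"Enough space for {required / 1e9:.2f} GB")
    else:
        print("Not enough disk space in the current directory")
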
sima_cli/utils/env.py CHANGED
@@ -92,6 +92,30 @@ def is_modalix_devkit() -> bool:
92
92
 
93
93
  return False
94
94
 
95
+ def get_exact_devkit_type() -> str:
96
+ """
97
+ Extracts the exact devkit type from 'fdt_name' in fw_printenv output.
98
+
99
+ Returns:
100
+ str: The value of fdt_name (e.g., "modalix-som"), or an empty string if not found or unavailable.
101
+ """
102
+ if not shutil.which("fw_printenv"):
103
+ return ""
104
+
105
+ try:
106
+ output = subprocess.check_output(["fw_printenv"], text=True)
107
+ for line in output.splitlines():
108
+ line = line.strip()
109
+ if line.startswith("fdt_name="):
110
+ _, value = line.split("=", 1)
111
+ return value.strip().replace('.dtb','')
112
+ except subprocess.CalledProcessError:
113
+ return ""
114
+ except Exception:
115
+ return ""
116
+
117
+ return ""
118
+
95
119
  def is_palette_sdk() -> bool:
96
120
  """
97
121
  Check if the environment is running inside the Palette SDK container.
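
An illustration of the fdt_name parsing performed by the new get_exact_devkit_type, run against a hand-written sample of fw_printenv output (not captured from a real device):

    # Sketch only: strip the .dtb suffix from fdt_name, as get_exact_devkit_type does.
    sample_output = "bootdelay=2\nfdt_name=modalix-som.dtb\n"

    devkit = ""
    for line in sample_output.splitlines():
        line = line.strip()
        if line.startswith("fdt_name="):
            _, value = line.split("=", 1)
            devkit = value.strip().replace(".dtb", "")

    print(devkit)  # modalix-som
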
sima_cli-0.0.21.dist-info/METADATA → sima_cli-0.0.22.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: sima-cli
3
- Version: 0.0.21
3
+ Version: 0.0.22
4
4
  Summary: CLI tool for SiMa Developer Portal to download models, firmware, and apps.
5
5
  Home-page: https://developer.sima.ai/
6
6
  Author: SiMa.ai
@@ -26,6 +26,7 @@ Requires-Dist: rich
26
26
  Requires-Dist: InquirerPy
27
27
  Requires-Dist: tftpy
28
28
  Requires-Dist: psutil
29
+ Requires-Dist: huggingface_hub
29
30
  Dynamic: author
30
31
  Dynamic: license-file
31
32
  Dynamic: requires-python
sima_cli-0.0.21.dist-info/RECORD → sima_cli-0.0.22.dist-info/RECORD RENAMED
@@ -1,7 +1,7 @@
1
1
  sima_cli/__init__.py,sha256=Nb2jSg9-CX1XvSc1c21U9qQ3atINxphuNkNfmR-9P3o,332
2
2
  sima_cli/__main__.py,sha256=ehzD6AZ7zGytC2gLSvaJatxeD0jJdaEvNJvwYeGsWOg,69
3
- sima_cli/__version__.py,sha256=5C0RJVUsHkVkCFWA58oGCy10CpKRe_uE6H3nKgYCdRk,49
4
- sima_cli/cli.py,sha256=OwoMg0UFl4Qy4FTCj5fXSNg-BjhC91dPYeZRSr15gBs,14447
3
+ sima_cli/__version__.py,sha256=lZAZ5KAyL1sCs70SHAhw90CQC4944919Ga30NNcR4_g,49
4
+ sima_cli/cli.py,sha256=dO6MG8sBX7LKJ-FhPQf7bw8oqSlosDXKcBaVbCIDZWQ,16056
5
5
  sima_cli/app_zoo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
6
  sima_cli/app_zoo/app.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
7
7
  sima_cli/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -10,16 +10,19 @@ sima_cli/auth/login.py,sha256=yCYXWgrfbP4jSTZ3hITfxlgHkdVQVzsd8hQKpqaqCKs,3780
10
10
  sima_cli/data/resources_internal.yaml,sha256=zlQD4cSnZK86bLtTWuvEudZTARKiuIKmB--Jv4ajL8o,200
11
11
  sima_cli/data/resources_public.yaml,sha256=U7hmUomGeQ2ULdo1BU2OQHr0PyKBamIdK9qrutDlX8o,201
12
12
  sima_cli/download/__init__.py,sha256=6y4O2FOCYFR2jdnQoVi3hRtEoZ0Gw6rydlTy1SGJ5FE,218
13
- sima_cli/download/downloader.py,sha256=pHfqcg_ujBQjds_EkcRV85M2mRYGrysoZaiR-FIrpf4,5161
13
+ sima_cli/download/downloader.py,sha256=nCBrr_0WdnKTIyecwKpg1sCdfm_4PSQTRPwEbiezy8M,5339
14
14
  sima_cli/install/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
15
15
  sima_cli/install/hostdriver.py,sha256=kAWDLebs60mbWIyTbUxmNrChcKW1uD5r7FtWNSUVUE4,5852
16
+ sima_cli/install/metadata_info.py,sha256=wmMqwzGfXbuilkqaxRVrFOzOtTOiONkmPCyA2oDAQpA,2168
17
+ sima_cli/install/metadata_installer.py,sha256=mtwtBEMmLh6-Hqtv6CTTAQa76sRJXZCqE9ORPQutfcc,16547
18
+ sima_cli/install/metadata_validator.py,sha256=7954rp9vFRNnqmIMvCVTjq40kUIEbGXzfc8HmQmChe0,5221
16
19
  sima_cli/install/optiview.py,sha256=i5eWVor-9MScEfrQm3Ty9OP4VpSsCgWvNh7AvYdZu7s,3365
17
20
  sima_cli/install/palette.py,sha256=uRznoHa4Mv9ZXHp6AoqknfC3RxpYNKi9Ins756Cyifk,3930
18
21
  sima_cli/mla/meminfo.py,sha256=ndc8kQJmWGEIdvNh6iIhATGdrkqM2pbddr_eHxaPNfg,1466
19
22
  sima_cli/model_zoo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
20
23
  sima_cli/model_zoo/model.py,sha256=q91Nrg62j1TqwPO8HiX4nlEFCCmzNEFcyFTBVMbJm8w,9836
21
- sima_cli/network/network.py,sha256=C4vCBPT-n1SNdBDAhzFCawBhny0Y9g9yB2u2LlXVSG4,7012
22
- sima_cli/nvme/nvme.py,sha256=ABLdRm83y2x-UX-rk8W7Uh2nvOnHYc6xECwLitRHcUc,3856
24
+ sima_cli/network/network.py,sha256=ToDCQBfX0bUFEWWtfS8srImK5T11MX6R4MBQFM80faY,7617
25
+ sima_cli/nvme/nvme.py,sha256=ECdd25fvFbs5T5_PlIwnxm3NiPNmqnFGXrgLZhLENRY,4129
23
26
  sima_cli/sdk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
24
27
  sima_cli/sdk/syscheck.py,sha256=h9zCULW67y4i2hqiGc-hc1ucBDShA5FAe9NxwBGq-fM,4575
25
28
  sima_cli/serial/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -31,15 +34,16 @@ sima_cli/update/local.py,sha256=Blje7O2pcBopBLXwuVI826lnjPMTJ3lPU85dTUWUV48,3445
31
34
  sima_cli/update/netboot.py,sha256=RqFgBhixcjPEwdVGvKhR0TeztoFnmGigmXlA71WVksA,18647
32
35
  sima_cli/update/query.py,sha256=9yCW1ZQl42DAWV_7sbNsqEKeS9FzHdvgXpY5eS2GpDs,3540
33
36
  sima_cli/update/remote.py,sha256=uv0cezLeG4tsJvalgm_VDOo3EUCU7LB3nXl8mNFFtds,10934
34
- sima_cli/update/updater.py,sha256=gW6kIX0Xn16FWmaRryfu0BmM25bIaphCQ6tWu9N4tVY,20868
37
+ sima_cli/update/updater.py,sha256=vBdT0Im0a0iKwB-LzVDZasnXk2Rq-kNlBGr7bTG0-14,20879
35
38
  sima_cli/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
36
39
  sima_cli/utils/artifactory.py,sha256=6YyVpzVm8ATy7NEwT9nkWx-wptkXrvG7Wl_zDT6jmLs,2390
37
40
  sima_cli/utils/config.py,sha256=wE-cPQqY_gOqaP8t01xsRHD9tBUGk9MgBUm2GYYxI3E,1616
38
41
  sima_cli/utils/config_loader.py,sha256=7I5we1yiCai18j9R9jvhfUzAmT3OjAqVK35XSLuUw8c,2005
39
- sima_cli/utils/env.py,sha256=m6yRnNuajYWTfysPJLk6vJY9Z1kYGFIAaArnpHXhino,6411
42
+ sima_cli/utils/disk.py,sha256=66Kr631yhc_ny19up2aijfycWfD35AeLQOJgUsuH2hY,446
43
+ sima_cli/utils/env.py,sha256=bNushG2BD243fNlqCpuUJxLF76inRxTFeSDkl_KCHy0,7130
40
44
  sima_cli/utils/net.py,sha256=WVntA4CqipkNrrkA4tBVRadJft_pMcGYh4Re5xk3rqo,971
41
45
  sima_cli/utils/network.py,sha256=UvqxbqbWUczGFyO-t1SybG7Q-x9kjUVRNIn_D6APzy8,1252
42
- sima_cli-0.0.21.dist-info/licenses/LICENSE,sha256=a260OFuV4SsMZ6sQCkoYbtws_4o2deFtbnT9kg7Rfd4,1082
46
+ sima_cli-0.0.22.dist-info/licenses/LICENSE,sha256=a260OFuV4SsMZ6sQCkoYbtws_4o2deFtbnT9kg7Rfd4,1082
43
47
  tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
44
48
  tests/test_app_zoo.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
45
49
  tests/test_auth.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -48,8 +52,8 @@ tests/test_download.py,sha256=t87DwxlHs26_ws9rpcHGwr_OrcRPd3hz6Zmm0vRee2U,4465
48
52
  tests/test_firmware.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
49
53
  tests/test_model_zoo.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
50
54
  tests/test_utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
51
- sima_cli-0.0.21.dist-info/METADATA,sha256=vX4CMPMiH5QqByXV6CphuCM2rh1vwefsLRCctFcmP9o,3674
52
- sima_cli-0.0.21.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
53
- sima_cli-0.0.21.dist-info/entry_points.txt,sha256=xRYrDq1nCs6R8wEdB3c1kKuimxEjWJkHuCzArQPT0Xk,47
54
- sima_cli-0.0.21.dist-info/top_level.txt,sha256=FtrbAUdHNohtEPteOblArxQNwoX9_t8qJQd59fagDlc,15
55
- sima_cli-0.0.21.dist-info/RECORD,,
55
+ sima_cli-0.0.22.dist-info/METADATA,sha256=3LetoBbyq_AopItmkPCqz3BBWL2_osKmaPHd_Vh8038,3705
56
+ sima_cli-0.0.22.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
57
+ sima_cli-0.0.22.dist-info/entry_points.txt,sha256=xRYrDq1nCs6R8wEdB3c1kKuimxEjWJkHuCzArQPT0Xk,47
58
+ sima_cli-0.0.22.dist-info/top_level.txt,sha256=FtrbAUdHNohtEPteOblArxQNwoX9_t8qJQd59fagDlc,15
59
+ sima_cli-0.0.22.dist-info/RECORD,,