figpack 0.1.0__py3-none-any.whl → 0.1.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of figpack might be problematic. Click here for more details.

figpack/__init__.py CHANGED
@@ -1,5 +1,5 @@
1
1
  """
2
- figpack - A Python package for creating interactive visualizations
2
+ figpack - A Python package for creating shareable, interactive visualizations in the browser
3
3
  """
4
4
 
5
5
  __version__ = "0.1.0"
figpack/_bundle_utils.py ADDED
@@ -0,0 +1,56 @@
1
+ import os
2
+ import pathlib
3
+ import zarr
4
+ from .figpack_view import FigpackView
5
+
6
+ thisdir = pathlib.Path(__file__).parent.resolve()
7
+
8
+
9
def prepare_figure_bundle(
    view: FigpackView, tmpdir: str, *, title: str = None, description: str = None
) -> None:
    """
    Prepare a figure bundle in the specified temporary directory.

    This function:
    1. Copies all files from the figpack-gui-dist directory to tmpdir
    2. Writes the view data to a zarr group (data.zarr)
    3. Stores the optional title/description as zarr attributes
    4. Consolidates zarr metadata

    Args:
        view: The figpack view to prepare
        tmpdir: The temporary directory to prepare the bundle in
        title: Optional title for the figure
        description: Optional description for the figure (markdown supported)

    Raises:
        SystemExit: If the bundled GUI distribution directory is missing.
    """
    import shutil  # local import: only needed for the copy step

    html_dir = thisdir / ".." / "figpack-gui-dist"
    if not os.path.exists(html_dir):
        raise SystemExit(f"Error: directory not found: {html_dir}")

    # Copy the GUI distribution into tmpdir.
    # BUG FIX: the previous hand-rolled copy only handled one level of
    # nesting and would crash (IsADirectoryError) on subitems that are
    # themselves directories; shutil handles arbitrary depth.
    for item in html_dir.iterdir():
        target = pathlib.Path(tmpdir) / item.name
        if item.is_dir():
            shutil.copytree(item, target, dirs_exist_ok=True)
        else:
            shutil.copy2(item, target)

    # Write the view data to the Zarr group
    zarr_group = zarr.open_group(
        pathlib.Path(tmpdir) / "data.zarr",
        mode="w",
        synchronizer=zarr.ThreadSynchronizer(),
    )
    view._write_to_zarr_group(zarr_group)

    # Add title and description as attributes on the top-level zarr group
    if title is not None:
        zarr_group.attrs["title"] = title
    if description is not None:
        zarr_group.attrs["description"] = description

    # Consolidate metadata so clients can fetch all zarr metadata in one request
    zarr.consolidate_metadata(zarr_group.store)
@@ -1,7 +1,6 @@
1
1
  import os
2
2
 
3
3
  from typing import Union
4
- import zarr
5
4
  import tempfile
6
5
 
7
6
  import webbrowser
@@ -11,50 +10,54 @@ import pathlib
11
10
  import threading
12
11
  from http.server import SimpleHTTPRequestHandler, ThreadingHTTPServer
13
12
 
14
- from .views import TimeseriesGraph
13
+ from .figpack_view import FigpackView
14
+ from ._bundle_utils import prepare_figure_bundle
15
+ from ._upload_bundle import _upload_bundle
15
16
 
16
17
  thisdir = pathlib.Path(__file__).parent.resolve()
17
18
 
18
19
 
19
20
def _show_view(
    view: FigpackView,
    *,
    open_in_browser: bool = False,
    port: Union[int, None] = None,
    allow_origin: Union[str, None] = None,
    upload: bool = False,
    title: Union[str, None] = None,
    description: Union[str, None] = None,
):
    """
    Prepare a figure bundle in a temporary directory, then either serve it
    locally over HTTP or upload it to the cloud.

    Args:
        view: The figpack view to display
        open_in_browser: Whether to open the figure in a browser automatically
        port: Port for the local server (local mode only)
        allow_origin: CORS allow-origin header value (local mode only)
        upload: If True, upload the bundle instead of serving it locally
        title: Optional figure title
        description: Optional figure description (markdown supported)

    Returns:
        The figure URL when uploading; otherwise None.

    Raises:
        EnvironmentError: If uploading without FIGPACK_UPLOAD_PASSCODE set.
    """
    with tempfile.TemporaryDirectory(prefix="figpack_") as tmpdir:
        prepare_figure_bundle(view, tmpdir, title=title, description=description)

        if not upload:
            # Local mode: serve the bundle from the temporary directory
            serve_files(
                tmpdir,
                port=port,
                open_in_browser=open_in_browser,
                allow_origin=allow_origin,
            )
            return

        # Upload mode requires the passcode environment variable
        passcode = os.environ.get("FIGPACK_UPLOAD_PASSCODE")
        if not passcode:
            raise EnvironmentError(
                "FIGPACK_UPLOAD_PASSCODE environment variable must be set to upload views."
            )

        print("Starting upload...")
        figure_url = _upload_bundle(tmpdir, passcode)

        if open_in_browser:
            webbrowser.open(figure_url)
            print(f"Opening {figure_url} in browser.")
            # Keep the process alive until the user is done viewing
            input("Press Enter to continue...")
        else:
            print(f"View the figure at: {figure_url}")

        return figure_url
58
61
 
59
62
 
60
63
  class CORSRequestHandler(SimpleHTTPRequestHandler):
@@ -0,0 +1,451 @@
1
+ import time
2
+ import json
3
+ import uuid
4
+ import pathlib
5
+ import requests
6
+ import threading
7
+ import hashlib
8
+ from .. import __version__
9
+ from concurrent.futures import ThreadPoolExecutor, as_completed
10
+ from datetime import datetime, timedelta, timezone
11
+
12
+ thisdir = pathlib.Path(__file__).parent.resolve()
13
+
14
+ FIGPACK_API_BASE_URL = "https://figpack-api.vercel.app"
15
+ TEMPORY_BASE_URL = "https://tempory.net/figpack/default/figures"
16
+
17
+
18
def _upload_single_file(
    figure_id: str, relative_path: str, file_path: pathlib.Path, passcode: str
) -> str:
    """
    Worker function to upload a single file.

    Small files are sent inline as UTF-8 text; everything else goes through
    the signed-URL large-file path.

    Returns:
        str: The relative path of the uploaded file
    """
    if _determine_file_type(relative_path) == "small":
        with open(file_path, "r", encoding="utf-8") as f:
            _upload_small_file(figure_id, relative_path, f.read(), passcode)
    else:
        # Large files are streamed via a signed URL
        _upload_large_file(figure_id, relative_path, file_path, passcode)

    return relative_path
37
+
38
+
39
+ MAX_WORKERS_FOR_UPLOAD = 16
40
+
41
+
42
+ def _compute_deterministic_figure_id(tmpdir_path: pathlib.Path) -> str:
43
+ """
44
+ Compute a deterministic figure ID based on SHA1 hashes of all files
45
+
46
+ Returns:
47
+ str: 40-character SHA1 hash representing the content of all files
48
+ """
49
+ file_hashes = []
50
+
51
+ # Collect all files and their hashes
52
+ for file_path in sorted(tmpdir_path.rglob("*")):
53
+ if file_path.is_file():
54
+ relative_path = file_path.relative_to(tmpdir_path)
55
+
56
+ # Compute SHA1 hash of file content
57
+ sha1_hash = hashlib.sha1()
58
+ with open(file_path, "rb") as f:
59
+ for chunk in iter(lambda: f.read(4096), b""):
60
+ sha1_hash.update(chunk)
61
+
62
+ # Include both the relative path and content hash to ensure uniqueness
63
+ file_info = f"{relative_path}:{sha1_hash.hexdigest()}"
64
+ file_hashes.append(file_info)
65
+
66
+ # Create final hash from all file hashes
67
+ combined_hash = hashlib.sha1()
68
+ for file_hash in file_hashes:
69
+ combined_hash.update(file_hash.encode("utf-8"))
70
+
71
+ return combined_hash.hexdigest()
72
+
73
+
74
def _check_existing_figure(figure_id: str) -> dict:
    """
    Check whether a figure with the given ID has already been uploaded.

    Returns:
        dict: {'exists': bool}, plus 'status' (str) when the figure exists
    """
    status_url = f"{TEMPORY_BASE_URL}/{figure_id}/figpack.json"

    # Best-effort check: any network/parse failure is treated as "not found"
    try:
        response = requests.get(status_url, timeout=10)
        if not response.ok:
            return {"exists": False}
        payload = response.json()
    except Exception:
        return {"exists": False}

    return {"exists": True, "status": payload.get("status", "unknown")}
92
+
93
+
94
def _find_available_figure_id(base_figure_id: str) -> tuple:
    """
    Find an available figure ID by checking base_figure_id, then
    base_figure_id-1, base_figure_id-2, ... up to 100 variants.

    Returns:
        tuple: (figure_id_to_use, completed_figure_id) where:
            - figure_id_to_use is None if upload should be skipped
            - completed_figure_id is the ID of the completed figure if one exists
    """
    # Candidate order: the base ID first, then numeric suffixes 1..100
    candidates = [base_figure_id] + [f"{base_figure_id}-{i}" for i in range(1, 101)]

    for idx, candidate in enumerate(candidates):
        result = _check_existing_figure(candidate)

        if not result["exists"]:
            # The base ID is returned silently; suffixed IDs are announced
            if idx > 0:
                print(f"Using figure ID: {candidate}")
            return (candidate, None)

        if result["status"] == "completed":
            print(
                f"Figure {candidate} already exists and is completed. Skipping upload."
            )
            # Signal to skip upload, return completed ID
            return (None, candidate)

    # Safety limit reached without finding a usable ID
    raise Exception(
        "Too many existing figure variants, unable to find available ID"
    )
134
+
135
def _upload_bundle(tmpdir: str, passcode: str) -> str:
    """
    Upload the prepared bundle to the cloud using parallel uploads.

    Args:
        tmpdir: Directory containing the prepared figure bundle
        passcode: Upload passcode for the figpack API

    Returns:
        str: URL of the uploaded (or already existing, identical) figure

    Raises:
        Exception: If any file upload fails or no figure ID is available.
    """
    # BUG FIX: return annotation was `-> None` although the function always
    # returns the figure URL string; callers (e.g. _show_view) rely on it.
    tmpdir_path = pathlib.Path(tmpdir)

    # Compute deterministic figure ID based on file contents
    print("Computing deterministic figure ID...")
    base_figure_id = _compute_deterministic_figure_id(tmpdir_path)
    print(f"Base figure ID: {base_figure_id}")

    # Find available figure ID (check for existing uploads)
    figure_id, completed_figure_id = _find_available_figure_id(base_figure_id)

    # If figure_id is None, a completed upload with identical content exists
    if figure_id is None:
        figure_url = f"{TEMPORY_BASE_URL}/{completed_figure_id}/index.html"
        print(f"Figure already exists at: {figure_url}")
        return figure_url

    print(f"Using figure ID: {figure_id}")

    # First, upload initial figpack.json with "uploading" status
    print("Uploading initial status...")
    figpack_json = {
        "status": "uploading",
        "upload_started": datetime.now(timezone.utc).isoformat(),
        "upload_updated": datetime.now(timezone.utc).isoformat(),
        "figure_id": figure_id,
        "figpack_version": __version__,
    }
    _upload_small_file(
        figure_id, "figpack.json", json.dumps(figpack_json, indent=2), passcode
    )

    # Collect all files to upload
    all_files = []
    for file_path in tmpdir_path.rglob("*"):
        if file_path.is_file():
            relative_path = file_path.relative_to(tmpdir_path)
            all_files.append((str(relative_path), file_path))

    print(f"Found {len(all_files)} files to upload")

    # Filter out figpack.json since we already uploaded the initial version
    files_to_upload = [
        (rel_path, file_path)
        for rel_path, file_path in all_files
        if rel_path != "figpack.json"
    ]
    total_files_to_upload = len(files_to_upload)

    if total_files_to_upload == 0:
        print("No additional files to upload")
    else:
        # BUG FIX: the message previously hardcoded "8 concurrent uploads"
        # while the pool actually uses MAX_WORKERS_FOR_UPLOAD (16);
        # report the real value.
        print(
            f"Uploading {total_files_to_upload} files with up to "
            f"{MAX_WORKERS_FOR_UPLOAD} concurrent uploads..."
        )

        # Thread-safe progress tracking
        uploaded_count = 0
        count_lock = threading.Lock()
        timer = time.time()

        # Upload files in parallel with concurrent uploads
        with ThreadPoolExecutor(max_workers=MAX_WORKERS_FOR_UPLOAD) as executor:
            # Submit all upload tasks
            future_to_file = {
                executor.submit(
                    _upload_single_file, figure_id, rel_path, file_path, passcode
                ): rel_path
                for rel_path, file_path in files_to_upload
            }

            # Process completed uploads
            for future in as_completed(future_to_file):
                relative_path = future_to_file[future]
                try:
                    future.result()  # re-raises any exception from the worker

                    # Thread-safe progress update
                    with count_lock:
                        uploaded_count += 1
                        print(
                            f"Uploaded {uploaded_count}/{total_files_to_upload}: {relative_path}"
                        )

                        # Refresh the remote status at most once per minute
                        elapsed_time = time.time() - timer
                        if elapsed_time > 60:
                            figpack_json = {
                                **figpack_json,
                                "status": "uploading",
                                "upload_progress": f"{uploaded_count}/{total_files_to_upload}",
                                "upload_updated": datetime.now(
                                    timezone.utc
                                ).isoformat(),
                            }
                            _upload_small_file(
                                figure_id,
                                "figpack.json",
                                json.dumps(figpack_json, indent=2),
                                passcode,
                            )
                            print(
                                f"Updated figpack.json with progress: {uploaded_count}/{total_files_to_upload}"
                            )
                            timer = time.time()

                except Exception as e:
                    print(f"Failed to upload {relative_path}: {e}")
                    raise  # Re-raise the exception to stop the upload process

    # Create and upload manifest.json
    print("Creating manifest.json...")
    manifest = {
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "files": [],
        "total_size": 0,
        "total_files": len(files_to_upload),
    }

    for rel_path, file_path in files_to_upload:
        file_size = file_path.stat().st_size
        manifest["files"].append({"path": rel_path, "size": file_size})
        manifest["total_size"] += file_size

    _upload_small_file(
        figure_id, "manifest.json", json.dumps(manifest, indent=2), passcode
    )
    print("Uploaded manifest.json")
    print(f"Total size: {manifest['total_size'] / (1024 * 1024):.2f} MB")

    # Finally, upload completion status
    figpack_json = {
        **figpack_json,
        "status": "completed",
        "upload_completed": datetime.now(timezone.utc).isoformat(),
        "expiration": (datetime.now(timezone.utc) + timedelta(days=1)).isoformat(),
        "figure_id": figure_id,
        "total_files": len(all_files),
        "total_size": manifest["total_size"],
        "figpack_version": __version__,
    }
    _upload_small_file(
        figure_id, "figpack.json", json.dumps(figpack_json, indent=2), passcode
    )
    print("Upload completed successfully")

    figure_url = f"{TEMPORY_BASE_URL}/{figure_id}/index.html"
    return figure_url
286
+
287
+
288
+ def _determine_file_type(file_path: str) -> str:
289
+ """
290
+ Determine if a file should be uploaded as small or large
291
+ Based on the validation logic in the API
292
+ """
293
+ # Check exact matches first
294
+ if file_path == "figpack.json" or file_path == "index.html":
295
+ return "small"
296
+
297
+ # Check zarr metadata files
298
+ if (
299
+ file_path.endswith(".zattrs")
300
+ or file_path.endswith(".zgroup")
301
+ or file_path.endswith(".zarray")
302
+ or file_path.endswith(".zmetadata")
303
+ ):
304
+ return "small"
305
+
306
+ # Check HTML files
307
+ if file_path.endswith(".html"):
308
+ return "small"
309
+
310
+ # Check data.zarr directory
311
+ if file_path.startswith("data.zarr/"):
312
+ file_name = file_path[len("data.zarr/") :]
313
+ # Check if it's a zarr chunk (numeric like 0.0.1)
314
+ if _is_zarr_chunk(file_name):
315
+ return "large"
316
+ # Check for zarr metadata files in subdirectories
317
+ if (
318
+ file_name.endswith(".zattrs")
319
+ or file_name.endswith(".zgroup")
320
+ or file_name.endswith(".zarray")
321
+ or file_name.endswith(".zmetadata")
322
+ ):
323
+ return "small"
324
+
325
+ # Check assets directory
326
+ if file_path.startswith("assets/"):
327
+ file_name = file_path[len("assets/") :]
328
+ if file_name.endswith(".js") or file_name.endswith(".css"):
329
+ return "large"
330
+
331
+ # Default to large file
332
+ return "large"
333
+
334
+
335
+ def _is_zarr_chunk(file_name: str) -> bool:
336
+ """
337
+ Check if filename consists only of numbers and dots (zarr chunk pattern)
338
+ """
339
+ for char in file_name:
340
+ if char != "." and not char.isdigit():
341
+ return False
342
+ return (
343
+ len(file_name) > 0
344
+ and not file_name.startswith(".")
345
+ and not file_name.endswith(".")
346
+ )
347
+
348
+
349
def _upload_small_file(
    figure_id: str, file_path: str, content: str, passcode: str
) -> None:
    """
    Upload a small file by sending its content directly to the figpack API.

    Args:
        figure_id: ID of the figure being uploaded
        file_path: Destination path relative to the figure root
        content: UTF-8 text content of the file
        passcode: Upload passcode

    Raises:
        Exception: If the content is not UTF-8 encodable, the payload is not
            JSON serializable, or the API rejects the upload.
    """
    destination_url = f"{TEMPORY_BASE_URL}/{figure_id}/{file_path}"

    # Validate that the content can be transmitted as UTF-8 text.
    # BUG FIX: chain the original error (`from e`) so the underlying
    # encode failure is preserved in the traceback.
    try:
        content.encode("utf-8")
    except Exception as e:
        raise Exception(f"Content for {file_path} is not UTF-8 encodable: {e}") from e
    payload = {
        "destinationUrl": destination_url,
        "passcode": passcode,
        "content": content,
    }
    # check that payload is json serializable
    try:
        json.dumps(payload)
    except Exception as e:
        raise Exception(f"Payload for {file_path} is not JSON serializable: {e}") from e

    response = requests.post(f"{FIGPACK_API_BASE_URL}/api/upload", json=payload)

    if not response.ok:
        # BUG FIX: was a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit; catch Exception instead.
        try:
            error_data = response.json()
            error_msg = error_data.get("message", "Unknown error")
        except Exception:
            error_msg = f"HTTP {response.status_code}"
        raise Exception(f"Failed to upload {file_path}: {error_msg}")
381
+
382
+
383
def _upload_large_file(
    figure_id: str, file_path: str, local_file_path: pathlib.Path, passcode: str
) -> None:
    """
    Upload a large file using a signed URL obtained from the figpack API.

    Args:
        figure_id: ID of the figure being uploaded
        file_path: Destination path relative to the figure root
        local_file_path: Local path of the file to upload
        passcode: Upload passcode

    Raises:
        Exception: If the signed URL cannot be obtained or the upload fails.
    """
    destination_url = f"{TEMPORY_BASE_URL}/{figure_id}/{file_path}"
    file_size = local_file_path.stat().st_size

    # Get signed URL
    payload = {
        "destinationUrl": destination_url,
        "passcode": passcode,
        "size": file_size,
    }

    response = requests.post(f"{FIGPACK_API_BASE_URL}/api/upload", json=payload)

    if not response.ok:
        # BUG FIX: was a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit; catch Exception instead.
        try:
            error_data = response.json()
            error_msg = error_data.get("message", "Unknown error")
        except Exception:
            error_msg = f"HTTP {response.status_code}"
        raise Exception(f"Failed to get signed URL for {file_path}: {error_msg}")

    response_data = response.json()
    if not response_data.get("success"):
        raise Exception(
            f"Failed to get signed URL for {file_path}: {response_data.get('message', 'Unknown error')}"
        )

    signed_url = response_data.get("signedUrl")
    if not signed_url:
        raise Exception(f"No signed URL returned for {file_path}")

    # Stream the file to the signed URL (file object avoids loading into memory)
    content_type = _determine_content_type(file_path)
    with open(local_file_path, "rb") as f:
        upload_response = requests.put(
            signed_url, data=f, headers={"Content-Type": content_type}
        )

    if not upload_response.ok:
        raise Exception(
            f"Failed to upload {file_path} to signed URL: HTTP {upload_response.status_code}"
        )
430
+
431
+
432
+ def _determine_content_type(file_path: str) -> str:
433
+ """
434
+ Determine content type for upload based on file extension
435
+ """
436
+ file_name = file_path.split("/")[-1]
437
+ extension = file_name.split(".")[-1] if "." in file_name else ""
438
+
439
+ content_type_map = {
440
+ "json": "application/json",
441
+ "html": "text/html",
442
+ "css": "text/css",
443
+ "js": "application/javascript",
444
+ "png": "image/png",
445
+ "zattrs": "application/json",
446
+ "zgroup": "application/json",
447
+ "zarray": "application/json",
448
+ "zmetadata": "application/json",
449
+ }
450
+
451
+ return content_type_map.get(extension, "application/octet-stream")
@@ -0,0 +1,70 @@
1
+ """
2
+ Base view class for figpack visualization components
3
+ """
4
+
5
+ from typing import Union
6
+
7
+ import zarr
8
+
9
+
10
class FigpackView:
    """
    Base class for all figpack visualization components.

    Subclasses implement _write_to_zarr_group to serialize their data; this
    base class provides the common show() entry point.
    """

    def show(
        self,
        *,
        port: Union[int, None] = None,
        open_in_browser: bool = False,
        allow_origin: Union[str, None] = None,
        upload: bool = False,
        _dev: bool = False,
        title: Union[str, None] = None,
        description: Union[str, None] = None,
    ):
        """
        Display the visualization component.

        Args:
            port: Port number for local server
            open_in_browser: Whether to open in browser automatically
            allow_origin: CORS allow origin header
            upload: Whether to upload the figure
            _dev: Development mode flag
            title: Title for the browser tab and figure
            description: Description text (markdown supported) for the figure
        """
        # Imported lazily to avoid a circular import at module load time
        from ._show_view import _show_view

        if _dev:
            # Development mode serves the data for a locally running figpack-gui
            port = 3004 if port is None else port
            if allow_origin is not None:
                raise ValueError("Cannot set allow_origin when _dev is True.")
            allow_origin = "http://localhost:5173"
            if upload:
                raise ValueError("Cannot upload when _dev is True.")

            print(
                f"For development, run figpack-gui in dev mode and use http://localhost:5173?data=http://localhost:{port}/data.zarr"
            )

        _show_view(
            self,
            port=port,
            open_in_browser=open_in_browser,
            allow_origin=allow_origin,
            upload=upload,
            title=title,
            description=description,
        )

    def _write_to_zarr_group(self, group: zarr.Group) -> None:
        """
        Write the view data to a Zarr group. Must be implemented by subclasses.

        Args:
            group: Zarr group to write data into
        """
        raise NotImplementedError("Subclasses must implement _write_to_zarr_group")