prefab 1.5.0.tar.gz → 1.6.0.tar.gz
This diff shows the changes between publicly released package versions as they appear in their public registry. It is provided for informational purposes only.
- {prefab-1.5.0 → prefab-1.6.0}/PKG-INFO +1 -1
- {prefab-1.5.0 → prefab-1.6.0}/prefab/__init__.py +1 -1
- {prefab-1.5.0 → prefab-1.6.0}/prefab/predict.py +72 -2
- {prefab-1.5.0 → prefab-1.6.0}/pyproject.toml +1 -1
- {prefab-1.5.0 → prefab-1.6.0}/.gitignore +0 -0
- {prefab-1.5.0 → prefab-1.6.0}/LICENSE +0 -0
- {prefab-1.5.0 → prefab-1.6.0}/README.md +0 -0
- {prefab-1.5.0 → prefab-1.6.0}/prefab/__main__.py +0 -0
- {prefab-1.5.0 → prefab-1.6.0}/prefab/compare.py +0 -0
- {prefab-1.5.0 → prefab-1.6.0}/prefab/device.py +0 -0
- {prefab-1.5.0 → prefab-1.6.0}/prefab/geometry.py +0 -0
- {prefab-1.5.0 → prefab-1.6.0}/prefab/models/__init__.py +0 -0
- {prefab-1.5.0 → prefab-1.6.0}/prefab/models/base.py +0 -0
- {prefab-1.5.0 → prefab-1.6.0}/prefab/models/evaluation.py +0 -0
- {prefab-1.5.0 → prefab-1.6.0}/prefab/py.typed +0 -0
- {prefab-1.5.0 → prefab-1.6.0}/prefab/read.py +0 -0
- {prefab-1.5.0 → prefab-1.6.0}/prefab/shapes.py +0 -0
{prefab-1.5.0 → prefab-1.6.0}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: prefab
-Version: 1.5.0
+Version: 1.6.0
 Summary: Artificial nanofabrication of integrated photonic circuits using deep learning
 Project-URL: Homepage, https://prefabphotonics.com
 Project-URL: Repository, https://github.com/PreFab-Photonics/PreFab
{prefab-1.5.0 → prefab-1.6.0}/prefab/predict.py

@@ -9,6 +9,7 @@ differentiation.
 """

 import base64
+import gzip
 import io
 import json
 import os
@@ -384,9 +385,13 @@ def _compute_vjp(
 ) -> npt.NDArray[Any]:
     """Compute J.T @ upstream_gradient via the server-side VJP endpoint."""
     headers = _prepare_headers()
+    upstream_arr = np.squeeze(upstream_gradient).astype(np.float32)
     vjp_data = {
         "device_array": _encode_array(np.squeeze(device_array)),
-        "upstream_gradient":
+        "upstream_gradient": base64.b64encode(
+            gzip.compress(upstream_arr.tobytes(), compresslevel=1)
+        ).decode("utf-8"),
+        "upstream_gradient_shape": list(upstream_arr.shape),
         "model": model.to_json(),
         "model_type": "p",
     }
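With this change the upstream gradient travels as raw float32 bytes that are gzip-compressed (level 1 favours speed over ratio) and base64-encoded, with the array shape sent alongside so the receiver can rebuild it. As a hedged illustration only, a decoder for that payload could look like the sketch below; `decode_upstream_gradient` is a hypothetical helper, not part of PreFab or its server API.

```python
import base64
import gzip

import numpy as np


def decode_upstream_gradient(vjp_data: dict) -> np.ndarray:
    """Hypothetical inverse of the encoding above: base64 -> gzip -> float32 bytes."""
    raw = gzip.decompress(base64.b64decode(vjp_data["upstream_gradient"]))
    flat = np.frombuffer(raw, dtype=np.float32)
    # The shape is transmitted next to the bytes, so the flat buffer can be restored.
    return flat.reshape(vjp_data["upstream_gradient_shape"])
```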
@@ -473,6 +478,8 @@ def _predict_array_diff_vjp(
     )
     # Clean up cache
     _diff_cache.pop(cache_key, None)
+    # Ensure gradient shape matches input shape
+    vjp_result = vjp_result.reshape(cached_device_array.shape)
     # Return gradient for device_array, None for model (not differentiable)
     return (vjp_result, None)

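The new reshape makes the returned gradient match the cached input's shape before it is handed back to autograd, which expects gradients shaped like the corresponding argument. A toy illustration of the mismatch being guarded against (the shapes here are invented for demonstration):

```python
import numpy as np

cached_device_array = np.zeros((1, 256, 256))  # input as the user passed it
vjp_result = np.zeros((256, 256))              # gradient decoded without the leading axis

# Without the reshape, a (256, 256) gradient would be returned for a
# (1, 256, 256) input; restoring the cached shape avoids that mismatch.
vjp_result = vjp_result.reshape(cached_device_array.shape)
assert vjp_result.shape == (1, 256, 256)
```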
@@ -487,6 +494,69 @@ predict_array_with_grad = predict_array_diff
 """Alias for predict_array_diff. Deprecated, use predict_array_diff directly."""


+def differentiable(model: Model):
+    """
+    Create a model-bound differentiable predictor for clean autograd integration.
+
+    Returns a function that takes only `device_array` as input, enabling seamless
+    composition with other differentiable functions. The VJP returns a single
+    gradient array (not a tuple), making it compatible with standard autograd workflows.
+
+    Parameters
+    ----------
+    model : Model
+        The model to use for prediction.
+
+    Returns
+    -------
+    callable
+        A differentiable prediction function that takes `device_array` and returns
+        the predicted fabrication outcome.
+
+    Examples
+    --------
+    >>> predictor = pf.predict.differentiable(model)
+    >>> def loss_fn(x):
+    ...     pred = predictor(x)
+    ...     return np.mean((pred - target) ** 2)
+    >>> gradient = grad(loss_fn)(device_array)  # Returns array, not tuple
+    """
+
+    @primitive
+    def predict(device_array: npt.NDArray[Any]) -> npt.NDArray[Any]:
+        prediction = predict_array(
+            device_array=device_array,
+            model=model,
+            model_type="p",
+            binarize=False,
+        )
+        _diff_cache[id(prediction)] = (device_array.copy(), model)
+        return prediction
+
+    def predict_vjp(
+        ans: npt.NDArray[Any], device_array: npt.NDArray[Any]
+    ) -> Any:
+        cache_key = id(ans)
+        cached_device_array, cached_model = _diff_cache.get(
+            cache_key, (device_array, model)
+        )
+
+        def vjp(g: npt.NDArray[Any]) -> npt.NDArray[Any]:
+            vjp_result = _compute_vjp(
+                device_array=cached_device_array,
+                upstream_gradient=g,
+                model=cached_model,
+            )
+            _diff_cache.pop(cache_key, None)
+            # Ensure gradient shape matches input shape
+            return vjp_result.reshape(device_array.shape)
+
+        return vjp
+
+    defvjp(predict, predict_vjp)
+    return predict
+
+
 def _encode_array(array: npt.NDArray[Any]) -> str:
     """Encode an ndarray as a base64 encoded image for transmission."""
     image = Image.fromarray(np.uint8(array * 255))
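As a usage illustration, the model-bound predictor returned by `differentiable` composes directly with autograd, so a device geometry can be nudged toward a target layout with plain gradient descent. The following is a minimal sketch, assuming `model` is an existing PreFab `Model` instance and `target` / `device_array` are arrays the user already has; the step size and iteration count are arbitrary placeholders.

```python
import autograd.numpy as anp
from autograd import grad

import prefab as pf

predictor = pf.predict.differentiable(model)  # model: an existing pf Model instance


def loss_fn(x):
    pred = predictor(x)                       # differentiable fabrication prediction
    return anp.mean((pred - target) ** 2)     # target: desired fabrication outcome


loss_grad = grad(loss_fn)                     # gradient is a single array, not a tuple

for _ in range(10):                           # plain gradient-descent loop (illustrative)
    device_array = device_array - 0.1 * loss_grad(device_array)
```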
@@ -500,7 +570,7 @@ def _decode_array(encoded_png: str) -> npt.NDArray[Any]:
     """Decode a base64 encoded image and return an ndarray."""
     binary_data = base64.b64decode(encoded_png)
     image = Image.open(io.BytesIO(binary_data))
-    return np.array(image) / 255
+    return np.array(image) / 255


 def _prepare_headers() -> dict[str, str]:
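For context, `_encode_array` and `_decode_array` move arrays over the wire as base64-encoded PNGs, which quantizes values to uint8 (a resolution of 1/255). A self-contained sketch of that round trip is below; the intermediate PNG-save step is an assumption, since only the first line of `_encode_array` appears in this diff.

```python
import base64
import io

import numpy as np
from PIL import Image


def encode_array(array: np.ndarray) -> str:
    """Sketch: scale to uint8, save as PNG, and base64-encode the bytes."""
    image = Image.fromarray(np.uint8(array * 255))
    buffer = io.BytesIO()
    image.save(buffer, format="PNG")          # assumed intermediate step
    return base64.b64encode(buffer.getvalue()).decode("utf-8")


def decode_array(encoded_png: str) -> np.ndarray:
    """Mirror of _decode_array above: base64 -> PNG -> array scaled back to [0, 1]."""
    image = Image.open(io.BytesIO(base64.b64decode(encoded_png)))
    return np.array(image) / 255


# Round trip: values survive only up to the uint8 quantization step.
recovered = decode_array(encode_array(np.linspace(0, 1, 16).reshape(4, 4)))
```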
{prefab-1.5.0 → prefab-1.6.0}/pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "hatchling.build"

 [project]
 name = "prefab"
-version = "1.5.0"
+version = "1.6.0"
 description = "Artificial nanofabrication of integrated photonic circuits using deep learning"
 authors = [{ name = "PreFab Photonics Inc.", email = "hi@prefabphotonics.com" }]
 keywords = [