comfy-env 0.0.49__py3-none-any.whl → 0.0.51__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
comfy_env/workers/venv.py CHANGED
@@ -572,8 +572,13 @@ import json
  import socket
  import struct
  import traceback
+ import faulthandler
  from types import SimpleNamespace

+ # Enable faulthandler to dump traceback on SIGSEGV/SIGABRT/etc
+ faulthandler.enable(file=sys.stderr, all_threads=True)
+ print("[worker] Faulthandler enabled", flush=True)
+
  # On Windows, add host Python's DLL directories so packages like opencv can find VC++ runtime
  if sys.platform == "win32":
  _host_python_dir = os.environ.get("COMFYUI_HOST_PYTHON_DIR")
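The `faulthandler.enable()` call added above is the standard-library way to get a Python traceback when the interpreter dies on a fatal signal (SIGSEGV, SIGFPE, SIGABRT, SIGBUS). As a side note, the same tracebacks can be turned on for any worker started by hand; a minimal sketch of the equivalent standard Python mechanisms (not specific to comfy-env; the worker file name and argument follow the usage string shown later in this diff):

```python
# Same effect as the faulthandler.enable() call added above, shown standalone.
# Either of these also works when launching a worker manually:
#   PYTHONFAULTHANDLER=1 python persistent_worker.py <socket_addr>
#   python -X faulthandler persistent_worker.py <socket_addr>
import faulthandler
import sys

faulthandler.enable(file=sys.stderr, all_threads=True)  # dump every thread's stack on a fatal signal
```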
@@ -587,6 +592,92 @@ if sys.platform == "win32":
  except Exception:
  pass

+ # =============================================================================
+ # Object Reference System - keep complex objects in worker, pass refs to host
+ # =============================================================================
+
+ _object_cache = {} # Maps ref_id -> object
+ _object_ids = {} # Maps id(obj) -> ref_id (for deduplication)
+ _ref_counter = 0
+
+ def _cache_object(obj):
+ """Store object in cache, return reference ID. Deduplicates by object id."""
+ global _ref_counter
+ obj_id = id(obj)
+
+ # Return existing ref if we've seen this object
+ if obj_id in _object_ids:
+ return _object_ids[obj_id]
+
+ ref_id = f"ref_{_ref_counter:08x}"
+ _ref_counter += 1
+ _object_cache[ref_id] = obj
+ _object_ids[obj_id] = ref_id
+ return ref_id
+
+ def _resolve_ref(ref_id):
+ """Get object from cache by reference ID."""
+ return _object_cache.get(ref_id)
+
+ def _should_use_reference(obj):
+ """Check if object should be passed by reference instead of value."""
+ if obj is None:
+ return False
+ # Primitives - pass by value
+ if isinstance(obj, (bool, int, float, str, bytes)):
+ return False
+ # NumPy arrays and torch tensors - pass by value (they serialize well)
+ obj_type = type(obj).__name__
+ if obj_type in ('ndarray', 'Tensor'):
+ return False
+ # Dicts, lists, tuples - recurse into contents (don't ref the container)
+ if isinstance(obj, (dict, list, tuple)):
+ return False
+ # Everything else (trimesh, custom classes) - pass by reference
+ return True
+
+ def _serialize_result(obj, visited=None):
+ """Convert result for IPC - complex objects become references."""
+ if visited is None:
+ visited = set()
+
+ obj_id = id(obj)
+ if obj_id in visited:
+ # Circular reference - use existing ref or create one
+ if obj_id in _object_ids:
+ return {"__comfy_ref__": _object_ids[obj_id], "__class__": type(obj).__name__}
+ return None # Skip circular refs to primitives
+
+ if _should_use_reference(obj):
+ ref_id = _cache_object(obj)
+ return {"__comfy_ref__": ref_id, "__class__": type(obj).__name__}
+
+ visited.add(obj_id)
+
+ if isinstance(obj, dict):
+ return {k: _serialize_result(v, visited) for k, v in obj.items()}
+ if isinstance(obj, list):
+ return [_serialize_result(v, visited) for v in obj]
+ if isinstance(obj, tuple):
+ return tuple(_serialize_result(v, visited) for v in obj)
+ return obj
+
+ def _deserialize_input(obj):
+ """Convert input from IPC - references become real objects."""
+ if isinstance(obj, dict):
+ if "__comfy_ref__" in obj:
+ ref_id = obj["__comfy_ref__"]
+ real_obj = _resolve_ref(ref_id)
+ if real_obj is None:
+ raise ValueError(f"Object reference not found: {ref_id}")
+ return real_obj
+ return {k: _deserialize_input(v) for k, v in obj.items()}
+ if isinstance(obj, list):
+ return [_deserialize_input(v) for v in obj]
+ if isinstance(obj, tuple):
+ return tuple(_deserialize_input(v) for v in obj)
+ return obj
+

  class SocketTransport:
  """Length-prefixed JSON transport."""
@@ -655,20 +746,24 @@ def _deserialize_isolated_objects(obj):


  def main():
+ print("[worker] Starting...", flush=True)
  # Get socket address from command line
  if len(sys.argv) < 2:
  print("Usage: worker.py <socket_addr>", file=sys.stderr)
  sys.exit(1)
  socket_addr = sys.argv[1]
+ print(f"[worker] Connecting to {socket_addr}...", flush=True)

  # Connect to host process
  sock = _connect(socket_addr)
  transport = SocketTransport(sock)
+ print("[worker] Connected, waiting for config...", flush=True)

  # Read config as first message
  config = transport.recv()
  if not config:
  return
+ print("[worker] Got config, setting up paths...", flush=True)

  # Setup sys.path
  for p in config.get("sys_paths", []):
@@ -676,10 +771,13 @@ def main():
  sys.path.insert(0, p)

  # Import torch after path setup
+ print("[worker] Importing torch...", flush=True)
  import torch
+ print(f"[worker] Torch imported: {torch.__version__}", flush=True)

  # Signal ready
  transport.send({"status": "ready"})
+ print("[worker] Ready, entering request loop...", flush=True)

  # Process requests
  while True:
@@ -698,36 +796,50 @@ def main():
  module_name = request["module"]
  inputs_path = request.get("inputs_path")
  outputs_path = request.get("outputs_path")
+ print(f"[worker] Request: {request_type} {module_name}", flush=True)

  # Load inputs
  if inputs_path:
+ print(f"[worker] Loading inputs from {inputs_path}...", flush=True)
  inputs = torch.load(inputs_path, weights_only=False)
+ print(f"[worker] Deserializing isolated objects...", flush=True)
  inputs = _deserialize_isolated_objects(inputs)
+ # Resolve any object references from previous node calls
+ print(f"[worker] Resolving object references...", flush=True)
+ inputs = _deserialize_input(inputs)
+ print(f"[worker] Inputs ready: {list(inputs.keys())}", flush=True)
  else:
  inputs = {}

  # Import module
+ print(f"[worker] Importing module {module_name}...", flush=True)
  module = __import__(module_name, fromlist=[""])
+ print(f"[worker] Module imported", flush=True)

  if request_type == "call_method":
  class_name = request["class_name"]
  method_name = request["method_name"]
  self_state = request.get("self_state")
+ print(f"[worker] Getting class {class_name}...", flush=True)

  cls = getattr(module, class_name)
+ print(f"[worker] Creating instance...", flush=True)
  instance = object.__new__(cls)
  if self_state:
  instance.__dict__.update(self_state)
+ print(f"[worker] Calling {method_name}...", flush=True)
  method = getattr(instance, method_name)
  result = method(**inputs)
+ print(f"[worker] Method returned", flush=True)
  else:
  func_name = request["func"]
  func = getattr(module, func_name)
  result = func(**inputs)

- # Save result
+ # Save result - convert complex objects to references
  if outputs_path:
- torch.save(result, outputs_path)
+ serialized_result = _serialize_result(result)
+ torch.save(serialized_result, outputs_path)

  transport.send({"status": "ok"})

@@ -803,6 +915,10 @@ class PersistentVenvWorker(Worker):
  self._socket_addr: Optional[str] = None
  self._transport: Optional[SocketTransport] = None

+ # Stderr buffer for crash diagnostics
+ self._stderr_buffer: List[str] = []
+ self._stderr_lock = threading.Lock()
+
  # Write worker script to temp file
  self._worker_script = self._temp_dir / "persistent_worker.py"
  self._worker_script.write_text(_PERSISTENT_WORKER_SCRIPT)
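The two attributes added here back the crash diagnostics in the hunks below: a capture thread appends worker stderr lines under the lock and trims the list to the last 50 entries. An equivalent bounded buffer can be sketched with a deque, which evicts old entries automatically (illustration only, not the package's code):

```python
import threading
from collections import deque

stderr_buffer = deque(maxlen=50)   # keeps only the most recent 50 lines
stderr_lock = threading.Lock()

def record(line: str) -> None:
    # Appending past maxlen silently drops the oldest entry,
    # standing in for the explicit len() check + pop(0) used in venv.py.
    with stderr_lock:
        stderr_buffer.append(line.rstrip())
```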
@@ -875,13 +991,17 @@ class PersistentVenvWorker(Worker):
  [str(self.python), str(self._worker_script), self._socket_addr],
  stdin=subprocess.DEVNULL,
  stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT, # Merge stdout/stderr for forwarding
+ stderr=subprocess.PIPE, # Capture stderr separately for crash diagnostics
  cwd=str(self.working_dir),
  env=env,
  )

- # Start output forwarding thread
- def forward_output():
+ # Clear stderr buffer for new process
+ with self._stderr_lock:
+ self._stderr_buffer.clear()
+
+ # Start stdout forwarding thread
+ def forward_stdout():
  try:
  for line in self._process.stdout:
  if isinstance(line, bytes):
@@ -890,22 +1010,43 @@ class PersistentVenvWorker(Worker):
  sys.stderr.flush()
  except:
  pass
- self._output_thread = threading.Thread(target=forward_output, daemon=True)
- self._output_thread.start()
+ self._stdout_thread = threading.Thread(target=forward_stdout, daemon=True)
+ self._stdout_thread.start()
+
+ # Start stderr capture thread (buffer for crash diagnostics)
+ def capture_stderr():
+ try:
+ for line in self._process.stderr:
+ if isinstance(line, bytes):
+ line = line.decode('utf-8', errors='replace')
+ # Print to terminal AND buffer for crash reporting
+ sys.stderr.write(f" [stderr] {line}")
+ sys.stderr.flush()
+ with self._stderr_lock:
+ self._stderr_buffer.append(line.rstrip())
+ # Keep last 50 lines
+ if len(self._stderr_buffer) > 50:
+ self._stderr_buffer.pop(0)
+ except:
+ pass
+ self._stderr_thread = threading.Thread(target=capture_stderr, daemon=True)
+ self._stderr_thread.start()

  # Accept connection from worker with timeout
  self._server_socket.settimeout(60)
  try:
  client_sock, _ = self._server_socket.accept()
  except socket.timeout:
- stderr = ""
+ # Collect stderr from buffer
+ time.sleep(0.2) # Give stderr thread time to capture
+ with self._stderr_lock:
+ stderr = "\n".join(self._stderr_buffer) if self._stderr_buffer else "(no stderr captured)"
  try:
  self._process.kill()
- stdout, _ = self._process.communicate(timeout=5)
- stderr = stdout.decode('utf-8', errors='replace') if stdout else ""
+ self._process.wait(timeout=5)
  except:
  pass
- raise RuntimeError(f"{self.name}: Worker failed to connect (timeout). output: {stderr}")
+ raise RuntimeError(f"{self.name}: Worker failed to connect (timeout).\nStderr:\n{stderr}")
  finally:
  self._server_socket.settimeout(None)

@@ -946,7 +1087,33 @@ class PersistentVenvWorker(Worker):
  self._transport.send(request)

  # Read response with timeout
- response = self._transport.recv(timeout=timeout)
+ try:
+ response = self._transport.recv(timeout=timeout)
+ except ConnectionError as e:
+ # Socket closed - check if worker process died
+ self._shutdown = True
+ time.sleep(0.2) # Give process time to fully exit and stderr to flush
+ exit_code = None
+ if self._process:
+ exit_code = self._process.poll()
+
+ # Get captured stderr
+ with self._stderr_lock:
+ stderr_output = "\n".join(self._stderr_buffer) if self._stderr_buffer else "(no stderr captured)"
+
+ if exit_code is not None:
+ raise RuntimeError(
+ f"{self.name}: Worker process died with exit code {exit_code}. "
+ f"This usually indicates a crash in native code (CGAL, pymeshlab, etc.).\n"
+ f"Stderr:\n{stderr_output}"
+ ) from e
+ else:
+ # Process still alive but socket closed - something weird
+ raise RuntimeError(
+ f"{self.name}: Socket closed but worker process still running. "
+ f"This may indicate a protocol error or worker bug.\n"
+ f"Stderr:\n{stderr_output}"
+ ) from e

  if response is None:
  # Timeout - kill process
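Taken together, the worker loop and the host-side send/recv changes above describe a simple request/response exchange over the length-prefixed JSON transport. A sketch of the message shapes implied by the visible code (field names are taken from this diff; the request-type key is not visible in the shown hunks and is assumed to be "type" here, and the temp-file paths are placeholders):

```python
# Host -> worker: run a node method inside the isolated environment.
request = {
    "type": "call_method",                 # key name assumed; value "call_method" appears in the worker loop
    "module": "my_pack.nodes",             # imported via __import__(module_name, fromlist=[""])
    "class_name": "MyNode",
    "method_name": "process",
    "self_state": {"some_attr": 1},        # copied into instance.__dict__ before the call
    "inputs_path": "/tmp/inputs.pt",       # loaded with torch.load(..., weights_only=False)
    "outputs_path": "/tmp/outputs.pt",     # written with torch.save(_serialize_result(result), ...)
}

# Worker -> host on success:
response = {"status": "ok"}
```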
comfy_env-0.0.49.dist-info/METADATA → comfy_env-0.0.51.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: comfy-env
- Version: 0.0.49
+ Version: 0.0.51
  Summary: Environment management for ComfyUI custom nodes - CUDA wheel resolution and process isolation
  Project-URL: Homepage, https://github.com/PozzettiAndrea/comfy-env
  Project-URL: Repository, https://github.com/PozzettiAndrea/comfy-env
@@ -84,9 +84,47 @@ from comfy_env import install
  install()
  ```

- ### Process Isolation (Type 1 - Separate Venv)
+ ### Process Isolation (Type 1 - Separate Environment)

- For nodes that need completely separate dependencies:
+ For nodes that need completely separate dependencies (different Python version, conda packages, conflicting libraries).
+
+ #### Recommended: Pack-Wide Isolation
+
+ For node packs where ALL nodes run in the same isolated environment:
+
+ **Step 1: Configure comfy-env.toml**
+
+ ```toml
+ [mypack]
+ python = "3.11"
+ isolated = true # All nodes run in this env
+
+ [mypack.conda]
+ packages = ["cgal"] # Conda packages (uses pixi)
+
+ [mypack.packages]
+ requirements = ["trimesh[easy]>=4.0", "bpy>=4.2"]
+ ```
+
+ **Step 2: Enable in __init__.py**
+
+ ```python
+ from comfy_env import setup_isolated_imports, enable_isolation
+
+ # Setup import stubs BEFORE importing nodes
+ setup_isolated_imports(__file__)
+
+ from .nodes import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS
+
+ # Enable isolation for all nodes
+ enable_isolation(NODE_CLASS_MAPPINGS)
+ ```
+
+ **That's it!** All nodes run in an isolated Python 3.11 environment with their own dependencies.
+
+ #### Alternative: Per-Node Isolation
+
+ For cases where different nodes need different environments:

  ```python
  from comfy_env import isolated
@@ -266,7 +304,27 @@ vars_dict = env.as_dict()
  # {'cuda_version': '12.8', 'cuda_short': '128', 'torch_mm': '28', ...}
  ```

- ### Workers (for isolation)
+ ### enable_isolation()
+
+ ```python
+ from comfy_env import enable_isolation
+
+ enable_isolation(NODE_CLASS_MAPPINGS)
+ ```
+
+ Wraps all node classes so their FUNCTION methods run in the isolated environment specified in comfy-env.toml. Requires `isolated = true` in the environment config.
+
+ ### setup_isolated_imports()
+
+ ```python
+ from comfy_env import setup_isolated_imports
+
+ setup_isolated_imports(__file__)
+ ```
+
+ Sets up import stubs for packages that exist only in the isolated pixi environment. Call this BEFORE importing your nodes module. Packages available in both host and isolated environment are not stubbed.
+
+ ### Workers (for custom isolation)

  ```python
  from comfy_env import TorchMPWorker
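The README additions above describe enable_isolation() as wrapping each node class so that the method named by its FUNCTION attribute runs in the isolated environment. A conceptual sketch of that wrapping idea (not the code in comfy_env/isolation.py; `run_in_worker` is a hypothetical dispatch helper standing in for the package's worker machinery):

```python
# Conceptual sketch only - illustrates the wrapping described in the README.
def enable_isolation_sketch(node_class_mappings, run_in_worker):
    for cls in node_class_mappings.values():
        method_name = getattr(cls, "FUNCTION", None)   # ComfyUI nodes name their entry point here
        if not method_name or not hasattr(cls, method_name):
            continue

        def make_proxy(klass, name):
            def proxy(self, **inputs):
                # Ship module/class/method plus inputs and instance state to the worker
                # instead of calling the original method in the host interpreter.
                return run_in_worker(klass.__module__, klass.__name__, name, inputs, self.__dict__)
            return proxy

        setattr(cls, method_name, make_proxy(cls, method_name))
```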
comfy_env-0.0.49.dist-info/RECORD → comfy_env-0.0.51.dist-info/RECORD RENAMED
@@ -1,15 +1,17 @@
- comfy_env/__init__.py,sha256=OQJFNjmArjLcgrfHAFxgDJQFH_IhxibqMXbU5bu_j9Q,3822
+ comfy_env/__init__.py,sha256=DvOCFCq-EuXZ_e6vsPCObgTPYSRLZqiG3w35SOD-e4E,4101
  comfy_env/cli.py,sha256=Pjzb-jsoH67lyHxmBFOWvasWX01eHFE_BEjUk1l-uEo,14509
- comfy_env/decorator.py,sha256=sCb3DQIYYdt8fuMiJnmNfU8oLawbfjMMR2Hf0YMT4B0,16738
+ comfy_env/decorator.py,sha256=bL0eUjXf5UAGbsT2zilZbO0e8ZbhtQAR3vDJLbMO3ZI,27018
  comfy_env/errors.py,sha256=8hN8NDlo8oBUdapc-eT3ZluigI5VBzfqsSBvQdfWlz4,9943
- comfy_env/install.py,sha256=m4NKlfCcQGI5xzVRjHw3ep-lWbqx5kE1e21sUUZ2Leo,17528
+ comfy_env/install.py,sha256=b2eXFqQ58pUBWpwc1AhymbTh7dGRMnEuq5FoZot0CO0,15661
+ comfy_env/isolation.py,sha256=wuze8TmkRbdMfrg2cj9quoGYJWsxKcc5iiDvmLWX16g,8680
  comfy_env/nodes.py,sha256=CWUe35jU5SKk4ur-SddZePdqWgxJDlxGhpcJiu5pAK4,4354
- comfy_env/pixi.py,sha256=VyPYL6hV7rxMUJcUl-8UobwTpOTzaSEmvRKT8v5a0vo,15250
+ comfy_env/pixi.py,sha256=p3qP-4l95-toOI3-N2luJQ7kB-KLbalDB6yEJjmfzoc,19574
  comfy_env/registry.py,sha256=w-QwvAPFlCrBYRAv4cXkp2zujQPZn8Fk5DUxKCtox8o,3430
  comfy_env/resolver.py,sha256=WoNIo2IfTR2RlEf_HQl66eAeMa2R2pmLof_UdK-0RNE,6714
+ comfy_env/stub_imports.py,sha256=s84q8x5156ZkHNJhbuDqMhU_c-0ux51Rhe1aoVcGrj4,8920
  comfy_env/env/__init__.py,sha256=imQdoQEQvrRT-QDtyNpFlkVbm2fBzgACdpQwRPd09fI,1157
- comfy_env/env/config.py,sha256=Ila-5Yal3bj6jENbBeYJlZtkbgdwnzJzImVZK3ZF1lg,7645
- comfy_env/env/config_file.py,sha256=HzFKeQh9zQ--K1V-XuvgE6DiE_bYrXrChL1ZT8Tzlq4,24684
+ comfy_env/env/config.py,sha256=cRSuWiVZGSK9xpvyNIyMirlHyfTm6_AB_eXzeKyvfnc,7749
+ comfy_env/env/config_file.py,sha256=g6fZq6XbTUXx97meWW_7xfKcuC0sN002c7fBvR0eCOg,24819
  comfy_env/env/cuda_gpu_detection.py,sha256=YLuXUdWg6FeKdNyLlQAHPlveg4rTenXJ2VbeAaEi9QE,9755
  comfy_env/env/manager.py,sha256=-qdbZDsbNfs70onVbC7mhKCzNsxYx3WmG7ttlBinhGI,23659
  comfy_env/env/security.py,sha256=dNSitAnfBNVdvxgBBntYw33AJaCs_S1MHb7KJhAVYzM,8171
@@ -30,17 +32,17 @@ comfy_env/stubs/folder_paths.py,sha256=K90J34EG6LD4eZP8YG-xMeBmqwpp_wA8E92DKMXd1
  comfy_env/stubs/comfy/__init__.py,sha256=-y4L6gX21vrI2V8MvNaMeHOcAn5kUNK3jUyLvtXRmJQ,173
  comfy_env/stubs/comfy/model_management.py,sha256=Khx8Qa3NutKPLTn9oSM3VLeATUOg1fe4QCjxdxXd6eE,1462
  comfy_env/stubs/comfy/utils.py,sha256=s3t_KLj_-w1Uj3A3iAy69wIk4Ggklojw5hsDNb69Pcc,776
- comfy_env/templates/comfy-env-instructions.txt,sha256=pXxlyOxWKq8NYtxIsa9wD8Y3qzC8JUWguTPiV9dB0Mw,2481
- comfy_env/templates/comfy-env.toml,sha256=1CyphZu9DVc-EKzkLiDCnNzY9AWH5s_Kp9o4s2Fnw9g,7441
+ comfy_env/templates/comfy-env-instructions.txt,sha256=Q38Hb_YdN0a8rTxn7l5ON4JDPba7XgVftDqfEy-8I2I,3004
+ comfy_env/templates/comfy-env.toml,sha256=v6nxvCWWKSlIpn4m-WqCkzJt4ObJsklbr9KBJt0r5fU,6729
  comfy_env/workers/__init__.py,sha256=IKZwOvrWOGqBLDUIFAalg4CdqzJ_YnAdxo2Ha7gZTJ0,1467
  comfy_env/workers/base.py,sha256=ZILYXlvGCWuCZXmjKqfG8VeD19ihdYaASdlbasl2BMo,2312
  comfy_env/workers/pool.py,sha256=MtjeOWfvHSCockq8j1gfnxIl-t01GSB79T5N4YB82Lg,6956
  comfy_env/workers/tensor_utils.py,sha256=TCuOAjJymrSbkgfyvcKtQ_KbVWTqSwP9VH_bCaFLLq8,6409
  comfy_env/workers/torch_mp.py,sha256=TnsCoBHEJBXEoBkx7WiCd9tBAlzFtMOw1dk_7_zGJZY,22288
- comfy_env/workers/venv.py,sha256=kAaADoUJpKItZoG73YZhn0ufqzHmDgU1xeSHNCiZ77I,38941
+ comfy_env/workers/venv.py,sha256=MXN0KRlIPREEgJKM-4_sXHQw4Dv8hLFGohmway6y3UU,46393
  comfy_env/wheel_sources.yml,sha256=uU0YJmWaiLAicQNN9VYS8PZevlP2NOH6mBUE294dvAo,8156
- comfy_env-0.0.49.dist-info/METADATA,sha256=Ve0XcboM1LLVeXVFYTbBDMxIcefaYiYnC8Nf84inrhQ,7138
- comfy_env-0.0.49.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- comfy_env-0.0.49.dist-info/entry_points.txt,sha256=J4fXeqgxU_YenuW_Zxn_pEL7J-3R0--b6MS5t0QmAr0,49
- comfy_env-0.0.49.dist-info/licenses/LICENSE,sha256=E68QZMMpW4P2YKstTZ3QU54HRQO8ecew09XZ4_Vn870,1093
- comfy_env-0.0.49.dist-info/RECORD,,
+ comfy_env-0.0.51.dist-info/METADATA,sha256=6DlmPszS1Q0lDXK1I0YXfkbaKPQyZKiZN7PsXQuVSHk,8735
+ comfy_env-0.0.51.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ comfy_env-0.0.51.dist-info/entry_points.txt,sha256=J4fXeqgxU_YenuW_Zxn_pEL7J-3R0--b6MS5t0QmAr0,49
+ comfy_env-0.0.51.dist-info/licenses/LICENSE,sha256=E68QZMMpW4P2YKstTZ3QU54HRQO8ecew09XZ4_Vn870,1093
+ comfy_env-0.0.51.dist-info/RECORD,,