torchmonarch-nightly 2025.6.12__cp310-cp310-manylinux2014_x86_64.whl → 2025.6.14__cp310-cp310-manylinux2014_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
tests/test_controller.py CHANGED
@@ -96,7 +96,7 @@ remote_sleep = remote("time.sleep", propagate="inspect")
     torch.cuda.device_count() < 2,
     reason="Not enough GPUs, this test requires at least 2 GPUs",
 )
-@pytest.mark.parametrize("backend_type", [BackendType.PY, BackendType.RS])
+@pytest.mark.parametrize("backend_type", [BackendType.PY, BackendType.RS, "mesh"])
 # Set global timeout--sandcastle's timeout is 600s. A test that sandcastle times
 # out is not counted as a failure, so we set a more restrictive timeout to
 # ensure we see a hard failure in CI.
@@ -114,7 +114,7 @@ class TestController:
             N,
             gpu_per_host,
             activate,
-            rust=backend_type == BackendType.RS,
+            backend=str(backend_type),
         )
 
     def test_errors(self, backend_type):
@@ -512,6 +512,7 @@ class TestController:
         monarch.random.make_deterministic()
         for device in ("cpu", "cuda"):
             a = monarch.random.get_state()
+            monarch.inspect(a)
             first = torch.rand(1, device=device)
             monarch.random.set_state(a)
             second = torch.rand(1, device=device)
@@ -601,6 +602,15 @@ class TestController:
         assert torch.equal(moved_tensor_a, torch.tensor([1.0]))
         assert torch.equal(moved_tensor_b, torch.tensor([2.0]))
 
+    def test_hanging_error(self, backend_type):
+        if backend_type != "mesh":
+            pytest.skip("only relevant for mesh backend")
+        with self.local_device_mesh(2, 2, backend_type) as device_mesh:
+            remote(lambda: torch.rand(3) + torch.rand(4), propagate=lambda: None)()
+
+        with pytest.raises(Exception, match="The size of tensor"):
+            device_mesh.client.shutdown()
+
     def test_slice_mesh_pytree(self, backend_type):
         with self.local_device_mesh(2, 2, backend_type) as device_mesh:
             a = device_mesh.rank(("host")) + torch.zeros((1,), device="cuda")
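
Note: the test_controller.py changes above parametrize the suite over a third backend string, "mesh", and pass backend=str(backend_type) instead of a rust= boolean. The new test_hanging_error also depends on the mesh backend surfacing an invalid remote op (mismatched tensor shapes) only when the client shuts down. Below is a minimal, self-contained sketch of that deferred-error control flow; DeferredClient is a hypothetical stand-in for illustration, not monarch's actual client.

# Sketch of the deferred-error pattern test_hanging_error exercises: a bad
# operation is recorded at submission time and only raised at shutdown.
# DeferredClient is hypothetical, not part of torchmonarch.
class DeferredClient:
    def __init__(self):
        self._pending_error = None

    def submit(self, fn):
        # Work is accepted without raising; the first failure is remembered.
        try:
            fn()
        except Exception as exc:
            if self._pending_error is None:
                self._pending_error = exc

    def shutdown(self):
        # The remembered failure surfaces here, analogous to the
        # "The size of tensor" error expected at device_mesh.client.shutdown().
        if self._pending_error is not None:
            raise self._pending_error


client = DeferredClient()
client.submit(lambda: [1, 2, 3][7])  # fails, but nothing is raised yet
try:
    client.shutdown()
except IndexError as exc:
    print(f"error surfaced at shutdown: {exc}")
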
tests/test_python_actors.py CHANGED
@@ -4,8 +4,13 @@
 # This source code is licensed under the BSD-style license found in the
 # LICENSE file in the root directory of this source tree.
 
+import asyncio
 import operator
+import os
+import re
+import threading
 from types import ModuleType
+from unittest.mock import AsyncMock, patch
 
 import monarch
 
@@ -20,7 +25,9 @@ from monarch.actor_mesh import (
     current_rank,
     current_size,
     endpoint,
+    MonarchContext,
 )
+from monarch.debugger import init_debugging
 
 from monarch.mesh_controller import spawn_tensor_engine
 
@@ -384,6 +391,10 @@ def test_rust_binding_modules_correct() -> None:
     check(bindings, "monarch._rust_bindings")
 
 
+@pytest.mark.skipif(
+    torch.cuda.device_count() < 2,
+    reason="Not enough GPUs, this test requires at least 2 GPUs",
+)
 def test_tensor_engine() -> None:
     pm = proc_mesh(gpus=2).get()
 
@@ -399,3 +410,184 @@ def test_tensor_engine() -> None:
     assert torch.allclose(torch.zeros(3, 4), f)
 
     dm.exit()
+
+
+def _debugee_actor_internal(rank):
+    if rank == 0:
+        breakpoint()  # noqa
+        rank += 1
+        return rank
+    elif rank == 1:
+        breakpoint()  # noqa
+        rank += 2
+        return rank
+    elif rank == 2:
+        breakpoint()  # noqa
+        rank += 3
+        raise ValueError("bad rank")
+    elif rank == 3:
+        breakpoint()  # noqa
+        rank += 4
+        return rank
+
+
+class DebugeeActor(Actor):
+    @endpoint
+    async def to_debug(self):
+        rank = MonarchContext.get().point.rank
+        return _debugee_actor_internal(rank)
+
+
+async def test_debug() -> None:
+    input_mock = AsyncMock()
+    input_mock.side_effect = [
+        "attach 1",
+        "n",
+        "n",
+        "n",
+        "n",
+        "detach",
+        "attach 1",
+        "detach",
+        "quit",
+        "cast 0,3 n",
+        "cast 0,3 n",
+        # Attaching to 0 and 3 ensures that when we call "list"
+        # the next time, their function/lineno info will be
+        # up-to-date.
+        "attach 0",
+        "detach",
+        "attach 3",
+        "detach",
+        "quit",
+        "attach 2",
+        "c",
+        "quit",
+        "continue",
+    ]
+
+    outputs = []
+
+    def _patch_output(msg):
+        nonlocal outputs
+        outputs.append(msg)
+
+    with patch("monarch.debugger._debugger_input", side_effect=input_mock), patch(
+        "monarch.debugger._debugger_output", new=_patch_output
+    ):
+        proc = await proc_mesh(hosts=2, gpus=2)
+        debugee = await proc.spawn("debugee", DebugeeActor)
+        debug_client = await init_debugging(debugee)
+
+        fut = debugee.to_debug.call()
+        await debug_client.wait_pending_session.call_one()
+        breakpoints = []
+        for i in range(10):
+            breakpoints = await debug_client.list.call_one()
+            if len(breakpoints) == 4:
+                break
+            await asyncio.sleep(1)
+            if i == 9:
+                raise RuntimeError("timed out waiting for breakpoints")
+
+        initial_linenos = {}
+        for i in range(len(breakpoints)):
+            rank, coords, _, _, function, lineno = breakpoints[i]
+            initial_linenos[rank] = lineno
+            assert rank == i
+            assert coords == {"hosts": rank % 2, "gpus": rank // 2}
+            assert function == "test_python_actors._debugee_actor_internal"
+            assert lineno == breakpoints[0][5] + 4 * rank
+
+        await debug_client.enter.call_one()
+
+        # Check that when detaching and re-attaching to a session, the last portion of the output is repeated
+        expected_last_output = [
+            r"--Return--",
+            r"\n",
+            r"> (/.*/)+test_python_actors.py\(\d+\)to_debug\(\)->3\n-> return _debugee_actor_internal\(rank\)",
+            r"\n",
+            r"\(Pdb\) ",
+        ]
+        output_len = len(expected_last_output)
+        assert outputs[-2 * output_len : -output_len] == outputs[-output_len:]
+        for real_output, expected_output in zip(
+            outputs[-output_len:], expected_last_output
+        ):
+            assert re.match(expected_output, real_output) is not None
+
+        breakpoints = await debug_client.list.call_one()
+        for i in range(len(breakpoints)):
+            if i == 1:
+                assert breakpoints[i][4] == "test_python_actors.to_debug"
+            else:
+                assert breakpoints[i][4] == "test_python_actors._debugee_actor_internal"
+                assert breakpoints[i][5] == initial_linenos[i]
+
+        await debug_client.enter.call_one()
+
+        breakpoints = await debug_client.list.call_one()
+        for i in range(len(breakpoints)):
+            if i == 1:
+                assert breakpoints[i][4] == "test_python_actors.to_debug"
+            elif i in (0, 3):
+                assert breakpoints[i][4] == "test_python_actors._debugee_actor_internal"
+                assert breakpoints[i][5] == initial_linenos[i] + 2
+            else:
+                assert breakpoints[i][4] == "test_python_actors._debugee_actor_internal"
+                assert breakpoints[i][5] == initial_linenos[i]
+
+        await debug_client.enter.call_one()
+
+        breakpoints = await debug_client.list.call_one()
+        assert len(breakpoints) == 3
+        for i, rank in enumerate((0, 1, 3)):
+            assert breakpoints[i][0] == rank
+
+        await debug_client.enter.call_one()
+        breakpoints = await debug_client.list.call_one()
+        assert len(breakpoints) == 0
+
+        with pytest.raises(monarch.actor_mesh.ActorError, match="ValueError: bad rank"):
+            await fut
+
+
+class TLSActor(Actor):
+    """An actor that manages thread-local state."""
+
+    def __init__(self):
+        self.local = threading.local()
+        self.local.value = 0
+
+    @endpoint
+    def increment(self):
+        self.local.value += 1
+
+    @endpoint
+    async def increment_async(self):
+        self.local.value += 1
+
+    @endpoint
+    def get(self):
+        return self.local.value
+
+    @endpoint
+    async def get_async(self):
+        return self.local.value
+
+
+async def test_actor_tls() -> None:
+    """Test that thread-local state is respected."""
+    pm = await proc_mesh(gpus=1)
+    am = await pm.spawn("tls", TLSActor)
+    await am.increment.call_one()
+    # TODO(suo): TLS is NOT preserved across async/sync endpoints, because currently
+    # we run async endpoints on a different thread than sync ones.
+    # Will fix this in a followup diff.
+
+    # await am.increment_async.call_one()
+    await am.increment.call_one()
+    # await am.increment_async.call_one()
+
+    assert 2 == await am.get.call_one()
+    # assert 4 == await am.get_async.call_one()
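
Note: the new test_debug drives monarch.debugger by scripting "user input" through AsyncMock(side_effect=[...]) and capturing debugger output with a patched sink, then asserting on the recorded transcript. Below is a minimal, self-contained sketch of that mocking technique against a stand-in DebugLoop; the class and the _debugger_input/_debugger_output helpers here are illustrative stand-ins defined locally, not monarch.debugger itself. The test_actor_tls addition simply documents that thread-local state currently survives only across sync endpoints.

# Sketch of the scripted-input / recorded-output mocking pattern used by
# test_debug. Everything here is defined locally for illustration; only the
# patching technique mirrors the real test.
import asyncio
from unittest.mock import AsyncMock, patch


async def _debugger_input():
    raise NotImplementedError  # replaced by the AsyncMock below


def _debugger_output(msg):
    print(msg, end="")  # replaced by the recording sink below


class DebugLoop:
    async def run(self):
        while True:
            cmd = await _debugger_input()    # scripted by the mock
            _debugger_output(f"got {cmd}\n")  # captured by the recorder
            if cmd == "quit":
                return


async def main():
    input_mock = AsyncMock(side_effect=["attach 1", "n", "quit"])
    outputs = []
    with patch(f"{__name__}._debugger_input", new=input_mock), patch(
        f"{__name__}._debugger_output", new=outputs.append
    ):
        await DebugLoop().run()
    assert outputs == ["got attach 1\n", "got n\n", "got quit\n"]


asyncio.run(main())
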
tests/test_remote_functions.py CHANGED
@@ -169,7 +169,7 @@ class RemoteFunctionsTestBase:
             num_hosts,
             gpu_per_host,
             activate,
-            rust=backend_type == BackendType.RS,
+            backend=str(backend_type),
         )
 
 
torchmonarch_nightly-2025.6.12.dist-info/METADATA → torchmonarch_nightly-2025.6.14.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: torchmonarch-nightly
-Version: 2025.6.12
+Version: 2025.6.14
 Summary: Monarch: Single controller library
 Author: Meta
 Author-email: oncall+monarch@xmail.facebook.com
torchmonarch_nightly-2025.6.12.dist-info/RECORD → torchmonarch_nightly-2025.6.14.dist-info/RECORD
@@ -1,19 +1,21 @@
 monarch/__init__.py,sha256=iUvWHc0-7Q2tovRoRxOIiA3TsefMXCbWl-jEfQ2djew,6897
-monarch/_rust_bindings.so,sha256=VPU8MhCnz10umRwSqv99QvwFkr2q0N0DiOTpZ37Ecl0,40645344
-monarch/_testing.py,sha256=MN8DK1e-wzV0-R_nFW1b_7-O5oKfWvZ12BMGD4Z7PQk,6755
-monarch/actor_mesh.py,sha256=ovi5RBxobGEcg7zKkzhRc83n82KOD6ermhuloHKbuFs,24420
+monarch/_rust_bindings.so,sha256=iHKENTGdgZf4uRXWLL61j9Z7z-MOAIKabPo5JD0b6dw,40611888
+monarch/_testing.py,sha256=jOIOG6jcZBzvEvG_DwSnwCkaMVXvSun6sJAG6nXemww,7859
+monarch/actor_mesh.py,sha256=nAW65WFEWMJWCv8zuH9GSOyTNXwFN8QNqZxMZTuSYxw,25537
 monarch/allocator.py,sha256=ylvYTf31o-PT385cYJPhi17uNbC4yl_RAraqD0fVe4g,4112
-monarch/bootstrap_main.py,sha256=EYaTMA1lxy2213L_04drTKlJvZQjzNdD3jeUHiqSBJc,2578
+monarch/bootstrap_main.py,sha256=RCUQhJk07yMFiKp6HzQuqZFUpkgsT9kVEyimiwjn6_E,1827
 monarch/cached_remote_function.py,sha256=kYdB6r4OHx_T_uX4q3tCNcp1t2DJwF8tPTIahUiT2pU,8785
+monarch/debugger.py,sha256=AdlvOG3X-9Pw9c1DLQYEy4vjEfh0ZtwtsNJEFLFzN8o,13312
 monarch/fetch.py,sha256=61jxo7sx4QNUTkc0_rF5NaJROen4tKbAaiIjrXWLOvg,1705
 monarch/future.py,sha256=lcdFEe7m1shYPPuvZ1RkS6JUIChEKGBWe3v7x_nu4Hg,731
 monarch/gradient_generator.py,sha256=Rl3dmXGceTdCc1mYBg2JciR88ywGPnW7TVkL86KwqEA,6366
 monarch/memory.py,sha256=ol86dBhFAJqg78iF25-BuK0wuwj1onR8FIioZ_B0gjw,1377
-monarch/mesh_controller.py,sha256=Rr4VNUNN0pJdThbPmbCoaPWid4QpTNHya9xYpmjTkW0,8575
-monarch/monarch_controller,sha256=MECcriPRnSdI_NpAG6y-GiK2-DqnDsLBfyOHVdqewRU,20397992
+monarch/mesh_controller.py,sha256=Xft2edk7rz8_PPe-iIUZ09P-j4JDPGADBGHBiuiZ7YY,10363
+monarch/monarch_controller,sha256=P6ijudU0UZcrZW34d1lafCnJ1Hf90yo764rnkEg45dI,20399544
 monarch/notebook.py,sha256=zu9MKDFKf1-rCM2TqFSRJjMBeiWuKcJSyUFLvoZRQzs,25949
 monarch/opaque_module.py,sha256=oajOu_WD1hD4hxE8HDdO-tvWY7KDHWd7VaAhJEa5L2I,10446
 monarch/opaque_object.py,sha256=IVpll4pyuKZMo_EnPh4s0qnx8RlAcJrJ1yoLX6E75wQ,2782
+monarch/pdb_wrapper.py,sha256=gm46AZnfR4amH1vYFWnWivEv5MaU3Nb6KIWjSM8KjWM,4052
 monarch/proc_mesh.py,sha256=xoaReM9Ab9TWkesxedWSyyk4TMD0HLV88dQ8CQcbqTI,6892
 monarch/profiler.py,sha256=TQ9fnVM8H7smBWtYdB_6Irtzz8DBOmcp7U1T3wlUmco,4911
 monarch/python_local_mesh.py,sha256=YsureIzR9uGlNVrKd4vRghxOXBeYabkt9lICRErfRAI,3536
@@ -23,6 +25,7 @@ monarch/remote_class.py,sha256=-OAowzU1aDP6i4ik_SjXntVUC9h4dqAzgqwohkQ6Grc,4167
 monarch/rust_backend_mesh.py,sha256=1htC62of4MgFtkezWGlsxSFtKJdc0CIeqeSuOx7yu3M,9944
 monarch/rust_local_mesh.py,sha256=7ASptybn3wy4J7eoBc7LhGW4j4AA6bigl5Kuhyflw8s,47405
 monarch/sim_mesh.py,sha256=9wkS99L0EpG2Gldi-nzA-3ww7z__DQ7Qp2uReMfn188,12183
+monarch/telemetry.py,sha256=7JUZWaoD2Yn5Ae_7kNhkLFRBLYaSGfH071_m_qfVehI,525
 monarch/tensor_worker_main.py,sha256=Nbarl2sJKIddLeaRFsaUnqOerLHjzggUr9SqCr2_GYI,8300
 monarch/tensorboard.py,sha256=MnLgH5lbqeUJauEuirEgR6L_qYl2NGdtwZOWIAuOZao,2587
 monarch/world_mesh.py,sha256=GqZpFoVNJPxYa70rLYgv0vu8Vg1nXqx_GYERRb1E9Pc,975
@@ -34,7 +37,7 @@ monarch/_monarch/worker/debugger.py,sha256=JJZwRPTgQO2emz-hrMelkOSxJFIR3dV4ZA6e7
 monarch/_monarch/worker/logging.py,sha256=nJUkIuKhPqRZaNDOT7MVbFFjcITZQf_CiFRLFKJJqsw,3591
 monarch/builtins/__init__.py,sha256=QcfnHZGbc2qktBg7DyZt2ruE6VahnIt4S8lEZLHdJqU,443
 monarch/builtins/log.py,sha256=H1QkuVzwxyi36Zyv-XR0VN0QsNimBWwxE1__fjs0_2o,554
-monarch/builtins/random.py,sha256=xVt0cJBRBhCOH1Eioy8O511rp7HKFSCVXRwjBy02K5I,1798
+monarch/builtins/random.py,sha256=wPbvscg7u53EXpMFo885fO2XOlsyjrNAJ4rBxLzfxdg,1839
 monarch/common/_C.pyi,sha256=kHY2G3ksMAjQJ6IcPb4F1bBh5knzw5RVVNhhBlEmwFU,314
 monarch/common/_C.so,sha256=gVDCDUQSKiPHwLPIpyxcRgiv8uF_quH1LpgI5Lhle9Y,715600
 monarch/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -43,7 +46,7 @@ monarch/common/_device_utils.py,sha256=gBpl23wMjppVAEzzj8U9HyX-B7Bs2_3ftiMAkzUS4
 monarch/common/_tensor_to_table.py,sha256=yRjCNwvtl188Z1Dwkx3ZU-Bh2mwYnQ0Lnue2RAztwvc,5753
 monarch/common/base_tensor.py,sha256=ujRzR6lWaeCdPv2JX0vCR-VsCWn-3SHaJIkZH1Sw9FQ,1159
 monarch/common/borrows.py,sha256=7KR62xoUat1T6FyADsdHsxVAVIJDvfJWUnPO-xx277U,5307
-monarch/common/client.py,sha256=BaBhOzQaNsqTa-BGy7_IknQxpnpK0j4C5QsNyFHZHW4,24343
+monarch/common/client.py,sha256=axo37s_z17nYQGOZG5fi_0zUEJ_8qw7INjs-Kw2vaVo,24937
 monarch/common/constants.py,sha256=ohvsVYMpfeWopv3KXDAeHWDFLukwc-OY37VRxpKNBE8,300
 monarch/common/context_manager.py,sha256=GOeyaFbyCqvQmkJ0oI7q6IxRd8_0mVyYKZRccI8iaug,1067
 monarch/common/controller_api.py,sha256=djGkK5aSd-V6pBkr3uBCXbfJv3OKf2o2VbBXJgFF2WI,3202
@@ -133,16 +136,16 @@ tests/sleep_binary.py,sha256=XfLYaAfwm9xgzM-svs8fhAeFhwYIg6SyVEnx4e6wbUw,1009
 tests/test_actor_error.py,sha256=z3Sf4lteUggTryPLOhRKJ55v0MwVK3a7QN7-U2U9iJg,7484
 tests/test_alloc.py,sha256=D6DdQbtOZEvvnnc7LV-WyWFMk0Xb77eblH6Oz90zJTA,745
 tests/test_allocator.py,sha256=P11sQ95ADjzC_-CfPs3CEP80nP8sn7wW8vVPsmpSVoM,8164
-tests/test_coalescing.py,sha256=-KtAWzTaeXbyzltplfojavx0iFeeZnvej-tFTlu2p5k,15616
-tests/test_controller.py,sha256=yxuVp2DG3TDKJlwuE3cFm9dbWMlbrYtG1uHfvVWRYbw,30935
+tests/test_coalescing.py,sha256=JZ4YgQNlWWs7N-Z8KCCXQPANcuyyXEKjeHIXYbPnQhk,15606
+tests/test_controller.py,sha256=Rp_kW20zYT8ocsK5LX0Ha3LB9azS2LSKpp8n_dBlzVU,31384
 tests/test_device_mesh.py,sha256=DrbezYOM0thfP9MgLXb5-F0VoLOmSz5GR0GwjR_3bE4,5290
 tests/test_fault_tolerance.py,sha256=u4wmG1z5MZ6PY6us5zUZHJh2pUC3L7i0wsUfRDNHmxA,14144
 tests/test_future.py,sha256=cXzaNi2YDwVyjR541ScXmgktX1YFsKzbl8wep0DMVbk,3032
 tests/test_grad_generator.py,sha256=p4Pm4kMEeGldt2jUVAkGKCB0mLccKI28pltH6OTGbQA,3412
 tests/test_mock_cuda.py,sha256=5hisElxeLJ5MHw3KM9gwxBiXiMaG-Rm382u3AsQcDOI,3068
 tests/test_pdb_actor.py,sha256=5KJhuhcZDPWMdjC6eAtDdwnz1W7jNFXvIrMSFaCWaPw,3858
-tests/test_python_actors.py,sha256=gP6MDN2BL282qInUGP9untlpsqqB2uy1Iq5gUXnXcUo,11387
-tests/test_remote_functions.py,sha256=ExqYlRQWRabpGBuKvNIOa8Hwj-iXuP87Jfb9i5RhaGs,50066
+tests/test_python_actors.py,sha256=MzGeuhGVICzwiNaQt8SFCKyfwhNzdRzZ4s2rJxYbeoo,17283
+tests/test_remote_functions.py,sha256=5nxYB8dfA9NT9f9Od9O3htgQtPbiRNiXZ1Kgtn75sOQ,50056
 tests/test_rust_backend.py,sha256=94S3R995ZkyIhEiBsM5flcjf5X7bscEAHBtInbTRFe8,7776
 tests/test_signal_safe_block_on.py,sha256=bmal0XgzJowZXJV6T1Blow5a-vZluYWusCThLMGxyTE,3336
 tests/test_sim_backend.py,sha256=RckCkHO3DxKsAGdZMcIzRnd6YJXwDim1D5-xbBbgKio,1473
@@ -151,9 +154,9 @@ tests/simulator/test_profiling.py,sha256=TGYCfzTLdkpIwnOuO6KApprmrgPIRQe60KRX3wk
 tests/simulator/test_simulator.py,sha256=LO8lA0ssY-OGEBL5ipEu74f97Y765TEwfUOv-DtIptM,14568
 tests/simulator/test_task.py,sha256=ipqBDuDAysuo1xOB9S5psaFvwe6VATD43IovCTSs0t4,2327
 tests/simulator/test_worker.py,sha256=QrWWIJ3HDgDLkBPRc2mwYPlOQoXQcj1qRfc0WUfKkFY,3507
-torchmonarch_nightly-2025.6.12.dist-info/licenses/LICENSE,sha256=e0Eotbf_rHOYPuEUlppIbvwy4SN98CZnl_hqwvbDA4Q,1530
-torchmonarch_nightly-2025.6.12.dist-info/METADATA,sha256=mBsDu66W3vkM2SdaxX7hw8_B6kl_XgQZT7nQKZhVkMk,2772
-torchmonarch_nightly-2025.6.12.dist-info/WHEEL,sha256=_wZSFk0d90K9wOBp8Q-UGxshyiJ987JoPiyUBNC6VLk,104
-torchmonarch_nightly-2025.6.12.dist-info/entry_points.txt,sha256=sqfQ16oZqjEvttUI-uj9BBXIIE6jt05bYFSmy-2hyXI,106
-torchmonarch_nightly-2025.6.12.dist-info/top_level.txt,sha256=E-ZssZzyM17glpVrh-S9--qJ-w9p2EjuYOuNw9tQ4Eg,33
-torchmonarch_nightly-2025.6.12.dist-info/RECORD,,
+torchmonarch_nightly-2025.6.14.dist-info/licenses/LICENSE,sha256=e0Eotbf_rHOYPuEUlppIbvwy4SN98CZnl_hqwvbDA4Q,1530
+torchmonarch_nightly-2025.6.14.dist-info/METADATA,sha256=NZRNanfPhkjOMapMX_WCs34wphY1w-zccgncjQWmylk,2772
+torchmonarch_nightly-2025.6.14.dist-info/WHEEL,sha256=_wZSFk0d90K9wOBp8Q-UGxshyiJ987JoPiyUBNC6VLk,104
+torchmonarch_nightly-2025.6.14.dist-info/entry_points.txt,sha256=sqfQ16oZqjEvttUI-uj9BBXIIE6jt05bYFSmy-2hyXI,106
+torchmonarch_nightly-2025.6.14.dist-info/top_level.txt,sha256=E-ZssZzyM17glpVrh-S9--qJ-w9p2EjuYOuNw9tQ4Eg,33
+torchmonarch_nightly-2025.6.14.dist-info/RECORD,,