sonolus.py 0.1.8-py3-none-any.whl → 0.2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of sonolus.py might be problematic.

Files changed (48)
  1. sonolus/backend/finalize.py +2 -1
  2. sonolus/backend/optimize/constant_evaluation.py +2 -2
  3. sonolus/backend/optimize/copy_coalesce.py +16 -7
  4. sonolus/backend/optimize/optimize.py +12 -4
  5. sonolus/backend/optimize/passes.py +2 -1
  6. sonolus/backend/place.py +95 -14
  7. sonolus/backend/visitor.py +83 -12
  8. sonolus/build/cli.py +60 -9
  9. sonolus/build/collection.py +68 -26
  10. sonolus/build/compile.py +87 -30
  11. sonolus/build/engine.py +166 -40
  12. sonolus/build/level.py +2 -1
  13. sonolus/build/node.py +8 -1
  14. sonolus/build/project.py +30 -11
  15. sonolus/script/archetype.py +169 -51
  16. sonolus/script/array.py +12 -1
  17. sonolus/script/bucket.py +26 -8
  18. sonolus/script/debug.py +2 -2
  19. sonolus/script/effect.py +2 -2
  20. sonolus/script/engine.py +123 -15
  21. sonolus/script/internal/builtin_impls.py +21 -2
  22. sonolus/script/internal/constant.py +6 -2
  23. sonolus/script/internal/context.py +30 -25
  24. sonolus/script/internal/introspection.py +8 -1
  25. sonolus/script/internal/math_impls.py +2 -1
  26. sonolus/script/internal/transient.py +5 -1
  27. sonolus/script/internal/value.py +10 -2
  28. sonolus/script/interval.py +16 -0
  29. sonolus/script/iterator.py +17 -0
  30. sonolus/script/level.py +130 -8
  31. sonolus/script/metadata.py +32 -0
  32. sonolus/script/num.py +11 -2
  33. sonolus/script/options.py +5 -3
  34. sonolus/script/pointer.py +2 -0
  35. sonolus/script/project.py +41 -5
  36. sonolus/script/record.py +8 -3
  37. sonolus/script/runtime.py +61 -10
  38. sonolus/script/sprite.py +18 -1
  39. sonolus/script/ui.py +7 -3
  40. sonolus/script/values.py +8 -5
  41. sonolus/script/vec.py +28 -0
  42. {sonolus_py-0.1.8.dist-info → sonolus_py-0.2.0.dist-info}/METADATA +3 -2
  43. sonolus_py-0.2.0.dist-info/RECORD +90 -0
  44. {sonolus_py-0.1.8.dist-info → sonolus_py-0.2.0.dist-info}/WHEEL +1 -1
  45. sonolus_py-0.1.8.dist-info/RECORD +0 -89
  46. /sonolus/script/{print.py → printing.py} +0 -0
  47. {sonolus_py-0.1.8.dist-info → sonolus_py-0.2.0.dist-info}/entry_points.txt +0 -0
  48. {sonolus_py-0.1.8.dist-info → sonolus_py-0.2.0.dist-info}/licenses/LICENSE +0 -0
sonolus/build/project.py CHANGED
@@ -5,7 +5,7 @@ from sonolus.build.engine import package_engine
  from sonolus.build.level import package_level_data
  from sonolus.script.engine import Engine
  from sonolus.script.level import Level
- from sonolus.script.project import Project, ProjectSchema
+ from sonolus.script.project import BuildConfig, Project, ProjectSchema
 
  BLANK_PNG = (
  b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01\x00\x00\x00\x01\x01\x00\x00\x00\x007n\xf9$"
@@ -17,24 +17,24 @@ BLANK_AUDIO = (
  )
 
 
- def build_project_to_collection(project: Project):
+ def build_project_to_collection(project: Project, config: BuildConfig):
  collection = load_resources_files_to_collection(project.resources)
- add_engine_to_collection(collection, project, project.engine)
+ add_engine_to_collection(collection, project, project.engine, config)
  for level in project.levels:
  add_level_to_collection(collection, project, level)
  collection.name = f"{project.engine.name}"
  return collection
 
 
- def add_engine_to_collection(collection: Collection, project: Project, engine: Engine):
- packaged_engine = package_engine(engine.data)
+ def add_engine_to_collection(collection: Collection, project: Project, engine: Engine, config: BuildConfig):
+ packaged_engine = package_engine(engine.data, config)
  item = {
  "name": engine.name,
  "version": engine.version,
  "title": engine.title,
  "subtitle": engine.subtitle,
  "author": engine.author,
- "tags": [],
+ "tags": [tag.as_dict() for tag in engine.tags],
  "skin": collection.get_item("skins", engine.skin) if engine.skin else collection.get_default_item("skins"),
  "background": collection.get_item("backgrounds", engine.background)
  if engine.background
@@ -53,6 +53,8 @@ def add_engine_to_collection(collection: Collection, project: Project, engine: E
  "rom": collection.add_asset(packaged_engine.rom),
  "configuration": collection.add_asset(packaged_engine.configuration),
  }
+ if engine.description is not None:
+ item["description"] = engine.description
  collection.add_item("engines", engine.name, item)
 
 
@@ -65,16 +67,28 @@ def add_level_to_collection(collection: Collection, project: Project, level: Lev
  "title": level.title,
  "artists": level.artists,
  "author": level.author,
- "tags": [],
+ "tags": [tag.as_dict() for tag in level.tags],
  "engine": collection.get_item("engines", project.engine.name),
- "useSkin": {"useDefault": True},
- "useBackground": {"useDefault": True},
- "useEffect": {"useDefault": True},
- "useParticle": {"useDefault": True},
+ "useSkin": {"useDefault": True}
+ if level.use_skin is None
+ else {"useDefault": False, "item": collection.get_item("skins", level.use_skin)},
+ "useBackground": {"useDefault": True}
+ if level.use_background is None
+ else {"useDefault": False, "item": collection.get_item("backgrounds", level.use_background)},
+ "useEffect": {"useDefault": True}
+ if level.use_effect is None
+ else {"useDefault": False, "item": collection.get_item("effects", level.use_effect)},
+ "useParticle": {"useDefault": True}
+ if level.use_particle is None
+ else {"useDefault": False, "item": collection.get_item("particles", level.use_particle)},
  "cover": load_resource(collection, level.cover, project.resources, BLANK_PNG),
  "bgm": load_resource(collection, level.bgm, project.resources, BLANK_AUDIO),
  "data": collection.add_asset(packaged_level_data),
  }
+ if level.description is not None:
+ item["description"] = level.description
+ if level.preview is not None:
+ item["preview"] = load_resource(collection, level.preview, project.resources, BLANK_AUDIO)
  collection.add_item("levels", level.name, item)
 
 
@@ -97,6 +111,7 @@ def load_resources_files_to_collection(base_path: Path) -> Collection:
  def get_project_schema(project: Project) -> ProjectSchema:
  by_archetype: dict[str, dict[str, bool]] = {}
  for archetype in project.engine.data.play.archetypes:
+ archetype._init_fields()
  fields = by_archetype.setdefault(archetype.name, {})
  # If a field is exported, we should exclude it if it's imported in watch mode
  for field in archetype._exported_keys_:
@@ -104,11 +119,15 @@ def get_project_schema(project: Project) -> ProjectSchema:
  for field in archetype._imported_keys_:
  fields[field] = True
  for archetype in project.engine.data.watch.archetypes:
+ archetype._init_fields()
  fields = by_archetype.setdefault(archetype.name, {})
  for field in archetype._imported_keys_:
+ if field in {"#ACCURACY", "#JUDGMENT"}:
+ continue
  if field not in fields:
  fields[field] = True
  for archetype in project.engine.data.preview.archetypes:
+ archetype._init_fields()
  fields = by_archetype.setdefault(archetype.name, {})
  for field in archetype._imported_keys_:
  fields[field] = True
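In the level item above, each use* entry is now resolved from the level's own use_skin, use_background, use_effect, and use_particle attributes instead of always falling back to the defaults. A minimal sketch of that resolution rule (the resolve_use helper name is illustrative, not part of the package):

```
# Sketch of the rule applied for "useSkin", "useBackground", "useEffect" and
# "useParticle": None keeps the engine default, a name pins a specific item
# from the collection. The helper name is illustrative only.
def resolve_use(collection, category: str, name: str | None) -> dict:
    if name is None:
        return {"useDefault": True}
    return {"useDefault": False, "item": collection.get_item(category, name)}
```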
sonolus/script/archetype.py CHANGED
@@ -1,6 +1,7 @@
  from __future__ import annotations
 
  import inspect
+ from abc import abstractmethod
  from collections.abc import Callable
  from dataclasses import dataclass
  from enum import Enum, StrEnum
@@ -62,7 +63,9 @@ class _ArchetypeField(SonolusDescriptor):
  result = _deref(ctx().blocks.EntityData, self.offset, self.type)
  case _ArchetypeReferenceData(index=index):
  result = _deref(
- ctx().blocks.EntityDataArray, self.offset + index * _ENTITY_DATA_SIZE, self.type
+ ctx().blocks.EntityDataArray,
+ Num._accept_(self.offset) + index * _ENTITY_DATA_SIZE,
+ self.type,
  )
  case _ArchetypeLevelData(values=values):
  result = values[self.name]
@@ -108,7 +111,9 @@ class _ArchetypeField(SonolusDescriptor):
  target = _deref(ctx().blocks.EntityData, self.offset, self.type)
  case _ArchetypeReferenceData(index=index):
  target = _deref(
- ctx().blocks.EntityDataArray, self.offset + index * _ENTITY_DATA_SIZE, self.type
+ ctx().blocks.EntityDataArray,
+ Num._accept_(self.offset) + index * _ENTITY_DATA_SIZE,
+ self.type,
  )
  case _ArchetypeLevelData(values=values):
  target = values[self.name]
@@ -119,7 +124,7 @@ class _ArchetypeField(SonolusDescriptor):
  raise TypeError(f"Expected {self.type}, got {type(value)}")
  for k, v in value._to_flat_dict_(self.data_name).items():
  index = instance._exported_keys_[k]
- ctx().add_statements(IRInstr(Op.ExportValue, [IRConst(index), Num._accept_(v).ir()]))
+ ctx().add_statements(IRInstr(Op.ExportValue, [IRConst(index), Num(v).ir()]))
  return
  case _ArchetypeReferenceData():
  raise RuntimeError("Exported fields of other entities are not accessible")
@@ -173,6 +178,23 @@ def imported(*, name: str | None = None) -> Any:
  return _ArchetypeFieldInfo(name, _StorageType.IMPORTED)
 
 
+ def entity_data() -> Any:
+ """Declare a field as entity data.
+
+ Entity data is accessible from other entities, but may only be updated in the `preprocess` callback
+ and is read-only in other callbacks.
+
+ It functions like `imported`, except that it is not loaded from the level data.
+
+ Usage:
+ ```
+ class MyArchetype(PlayArchetype):
+ field: int = entity_data()
+ ```
+ """
+ return _ArchetypeFieldInfo(None, _StorageType.IMPORTED)
+
+
  def exported(*, name: str | None = None) -> Any:
  """Declare a field as exported.
 
@@ -342,6 +364,7 @@ class _BaseArchetype:
  is_scored: ClassVar[bool] = False
 
  def __init__(self, *args, **kwargs):
+ self._init_fields()
  if ctx():
  raise RuntimeError("The Archetype constructor is only for defining level data")
  bound = self._data_constructor_signature_.bind_partial(*args, **kwargs)
@@ -354,10 +377,12 @@
 
  @classmethod
  def _new(cls):
+ cls._init_fields()
  return object.__new__(cls)
 
  @classmethod
  def _for_compilation(cls):
+ cls._init_fields()
  result = cls._new()
  result._data_ = _ArchetypeSelfData()
  return result
@@ -369,6 +394,13 @@
  result._data_ = _ArchetypeReferenceData(index=Num._accept_(index))
  return result
 
+ @classmethod
+ @meta_fn
+ def is_at(cls, index: Num) -> bool:
+ if not ctx():
+ raise RuntimeError("is_at is only available during compilation")
+ return entity_info_at(index).archetype_id == cls.id()
+
  @classmethod
  @meta_fn
  def id(cls):
@@ -396,6 +428,7 @@
  Args:
  **kwargs: Entity memory values to inject by field name as defined in the Archetype.
  """
+ cls._init_fields()
  if not ctx():
  raise RuntimeError("Spawn is only allowed within a callback")
  archetype_id = cls.id()
@@ -403,21 +436,30 @@
  bound.apply_defaults()
  data = []
  for field in cls._memory_fields_.values():
- data.extend(field.type._accept_(bound.arguments[field.name] or zeros(field.type))._to_list_())
+ data.extend(
+ field.type._accept_(
+ bound.arguments[field.name] if field.name in bound.arguments else zeros(field.type)
+ )._to_list_()
+ )
  native_call(Op.Spawn, archetype_id, *(Num(x) for x in data))
 
  @classmethod
  def schema(cls) -> ArchetypeSchema:
+ cls._init_fields()
  return {"name": cls.name or "unnamed", "fields": list(cls._imported_fields_)}
 
- def _level_data_entries(self, level_refs: dict[Any, int] | None = None):
+ def _level_data_entries(self, level_refs: dict[Any, str] | None = None):
+ self._init_fields()
  if not isinstance(self._data_, _ArchetypeLevelData):
  raise RuntimeError("Entity is not level data")
  entries = []
  for name, value in self._data_.values.items():
  field_info = self._imported_fields_.get(name)
  for k, v in value._to_flat_dict_(field_info.data_name, level_refs).items():
- entries.append({"name": k, "value": v})
+ if isinstance(v, str):
+ entries.append({"name": k, "ref": v})
+ else:
+ entries.append({"name": k, "value": v})
  return entries
 
  def __init_subclass__(cls, **kwargs):
@@ -426,19 +468,47 @@
  raise TypeError("Cannot directly subclass Archetype, use the Archetype subclass for your mode")
  cls._default_callbacks_ = {getattr(cls, cb_info.py_name) for cb_info in cls._supported_callbacks_.values()}
  return
- if getattr(cls, "_callbacks_", None) is not None:
- raise TypeError("Cannot subclass Archetypes")
- if cls.name is None:
+ if cls.name is None or cls.name in {getattr(mro_entry, "name", None) for mro_entry in cls.mro()[1:]}:
  cls.name = cls.__name__
- field_specifiers = get_field_specifiers(cls, skip={"name", "is_scored"}).items()
- cls._imported_fields_ = {}
- cls._exported_fields_ = {}
- cls._memory_fields_ = {}
- cls._shared_memory_fields_ = {}
- imported_offset = 0
- exported_offset = 0
- memory_offset = 0
- shared_memory_offset = 0
+ cls._callbacks_ = []
+ for name in cls._supported_callbacks_:
+ cb = getattr(cls, name)
+ if cb in cls._default_callbacks_:
+ continue
+ cls._callbacks_.append(cb)
+ cls._field_init_done = False
+
+ @classmethod
+ def _init_fields(cls):
+ if cls._field_init_done:
+ return
+ cls._field_init_done = True
+ for mro_entry in cls.mro()[1:]:
+ if hasattr(mro_entry, "_field_init_done"):
+ mro_entry._init_fields()
+ field_specifiers = get_field_specifiers(
+ cls, skip={"name", "is_scored", "_callbacks_", "_field_init_done"}
+ ).items()
+ if not hasattr(cls, "_imported_fields_"):
+ cls._imported_fields_ = {}
+ else:
+ cls._imported_fields_ = {**cls._imported_fields_}
+ if not hasattr(cls, "_exported_fields_"):
+ cls._exported_fields_ = {}
+ else:
+ cls._exported_fields_ = {**cls._exported_fields_}
+ if not hasattr(cls, "_memory_fields_"):
+ cls._memory_fields_ = {}
+ else:
+ cls._memory_fields_ = {**cls._memory_fields_}
+ if not hasattr(cls, "_shared_memory_fields_"):
+ cls._shared_memory_fields_ = {}
+ else:
+ cls._shared_memory_fields_ = {**cls._shared_memory_fields_}
+ imported_offset = sum(field.type._size_() for field in cls._imported_fields_.values())
+ exported_offset = sum(field.type._size_() for field in cls._exported_fields_.values())
+ memory_offset = sum(field.type._size_() for field in cls._memory_fields_.values())
+ shared_memory_offset = sum(field.type._size_() for field in cls._shared_memory_fields_.values())
  for name, value in field_specifiers:
  if value is ClassVar or get_origin(value) is ClassVar:
  continue
@@ -469,24 +539,32 @@
  name, field_info.name or name, field_info.storage, imported_offset, field_type
  )
  imported_offset += field_type._size_()
+ if imported_offset > _ENTITY_DATA_SIZE:
+ raise ValueError("Imported fields exceed entity data size")
  setattr(cls, name, cls._imported_fields_[name])
  case _StorageType.EXPORTED:
  cls._exported_fields_[name] = _ArchetypeField(
  name, field_info.name or name, field_info.storage, exported_offset, field_type
  )
  exported_offset += field_type._size_()
+ if exported_offset > _ENTITY_DATA_SIZE:
+ raise ValueError("Exported fields exceed entity data size")
  setattr(cls, name, cls._exported_fields_[name])
  case _StorageType.MEMORY:
  cls._memory_fields_[name] = _ArchetypeField(
  name, field_info.name or name, field_info.storage, memory_offset, field_type
  )
  memory_offset += field_type._size_()
+ if memory_offset > _ENTITY_MEMORY_SIZE:
+ raise ValueError("Memory fields exceed entity memory size")
  setattr(cls, name, cls._memory_fields_[name])
  case _StorageType.SHARED:
  cls._shared_memory_fields_[name] = _ArchetypeField(
  name, field_info.name or name, field_info.storage, shared_memory_offset, field_type
  )
  shared_memory_offset += field_type._size_()
+ if shared_memory_offset > _ENTITY_SHARED_MEMORY_SIZE:
+ raise ValueError("Shared memory fields exceed entity shared memory size")
  setattr(cls, name, cls._shared_memory_fields_[name])
  cls._imported_keys_ = {
  name: i
@@ -506,12 +584,30 @@
  cls._spawn_signature_ = inspect.Signature(
  [inspect.Parameter(name, inspect.Parameter.POSITIONAL_OR_KEYWORD) for name in cls._memory_fields_]
  )
- cls._callbacks_ = []
- for name in cls._supported_callbacks_:
- cb = getattr(cls, name)
- if cb in cls._default_callbacks_:
- continue
- cls._callbacks_.append(cb)
+
+ @property
+ @abstractmethod
+ def index(self) -> int:
+ """The index of this entity."""
+ raise NotImplementedError
+
+ @meta_fn
+ def ref(self) -> EntityRef[Self]:
+ """Get a reference to this entity.
+
+ Valid both in level data and in callbacks.
+ """
+ match self._data_:
+ case _ArchetypeSelfData():
+ return EntityRef[type(self)](index=self.index)
+ case _ArchetypeReferenceData(index=index):
+ return EntityRef[type(self)](index=index)
+ case _ArchetypeLevelData():
+ result = EntityRef[type(self)](index=-1)
+ result._ref_ = self
+ return result
+ case _:
+ raise RuntimeError("Invalid entity data")
 
 
  class PlayArchetype(_BaseArchetype):
@@ -653,6 +749,7 @@ class PlayArchetype(_BaseArchetype):
  return self._info.state == 2
 
  @property
+ @meta_fn
  def life(self) -> ArchetypeLife:
  """How this entity contributes to life."""
  if not ctx():
@@ -664,6 +761,7 @@ class PlayArchetype(_BaseArchetype):
  raise RuntimeError("Life is not available in level data")
 
  @property
+ @meta_fn
  def result(self) -> PlayEntityInput:
  """The result of this entity.
 
@@ -677,17 +775,6 @@
  case _:
  raise RuntimeError("Result is only accessible from the entity itself")
 
- def ref(self):
- """Get a reference to this entity for creating level data.
-
- Not valid elsewhere.
- """
- if not isinstance(self._data_, _ArchetypeLevelData):
- raise RuntimeError("Entity is not level data")
- result = EntityRef[type(self)](index=-1)
- result._ref_ = self
- return result
-
 
  class WatchArchetype(_BaseArchetype):
  """Base class for watch mode archetypes.
@@ -772,6 +859,7 @@ class WatchArchetype(_BaseArchetype):
  return self._info.state == 1
 
  @property
+ @meta_fn
  def life(self) -> ArchetypeLife:
  """How this entity contributes to life."""
  if not ctx():
@@ -783,6 +871,7 @@ class WatchArchetype(_BaseArchetype):
  raise RuntimeError("Life is not available in level data")
 
  @property
+ @meta_fn
  def result(self) -> WatchEntityInput:
  """The result of this entity.
 
@@ -796,20 +885,6 @@ class WatchArchetype(_BaseArchetype):
  case _:
  raise RuntimeError("Result is only accessible from the entity itself")
 
- @property
- def target_time(self) -> float:
- """The target time of this entity.
-
- Only meaningful for scored entities. Determines when combo and score are updated.
-
- Alias of `result.target_time`.
- """
- return self.result.target_time
-
- @target_time.setter
- def target_time(self, value: float):
- self.result.target_time = value
-
 
  class PreviewArchetype(_BaseArchetype):
  """Base class for preview mode archetypes.
@@ -842,6 +917,7 @@ class PreviewArchetype(_BaseArchetype):
  """
 
  @property
+ @meta_fn
  def _info(self) -> PreviewEntityInfo:
  if not ctx():
  raise RuntimeError("Calling info is only allowed within a callback")
@@ -959,18 +1035,38 @@ class WatchEntityInput(Record):
 
 
  class EntityRef[A: _BaseArchetype](Record):
- """Reference to another entity."""
+ """Reference to another entity.
+
+ May be used with `Any` to reference an unknown archetype.
+
+ Usage:
+ ```
+ class MyArchetype(PlayArchetype):
+ ref_1: EntityRef[OtherArchetype] = imported()
+ ref_2: EntityRef[Any] = imported()
+ ```
+ """
 
  index: int
 
  @classmethod
  def archetype(cls) -> type[A]:
+ """Get the archetype type."""
  return cls.type_var_value(A)
 
+ def with_archetype(self, archetype: type[A]) -> EntityRef[A]:
+ """Return a new reference with the given archetype type."""
+ return EntityRef[archetype](index=self.index)
+
  def get(self) -> A:
+ """Get the entity."""
  return self.archetype().at(self.index)
 
- def _to_list_(self, level_refs: dict[Any, int] | None = None) -> list[float | BlockPlace]:
+ def archetype_matches(self) -> bool:
+ """Check if entity at the index is precisely of the archetype."""
+ return self.index >= 0 and self.archetype().is_at(self.index)
+
+ def _to_list_(self, level_refs: dict[Any, str] | None = None) -> list[float | str | BlockPlace]:
  ref = getattr(self, "_ref_", None)
  if ref is None:
  return [self.index]
@@ -979,6 +1075,28 @@ class EntityRef[A: _BaseArchetype](Record):
  raise KeyError("Reference to entity not in level data")
  return [level_refs[ref]]
 
+ def _copy_from_(self, value: Self):
+ super()._copy_from_(value)
+ if hasattr(value, "_ref_"):
+ self._ref_ = value._ref_
+
+ @classmethod
+ def _accepts_(cls, value: Any) -> bool:
+ return (
+ super()._accepts_(value)
+ or (cls._type_args_ and cls.archetype() is Any and isinstance(value, EntityRef))
+ or (issubclass(type(value), EntityRef) and issubclass(value.archetype(), cls.archetype()))
+ )
+
+ @classmethod
+ def _accept_(cls, value: Any) -> Self:
+ if not cls._accepts_(value):
+ raise TypeError(f"Expected {cls}, got {type(value)}")
+ result = value.with_archetype(cls.archetype())
+ if hasattr(value, "_ref_"):
+ result._ref_ = value._ref_
+ return result
+
 
  class StandardArchetypeName(StrEnum):
  """Standard archetype names."""
sonolus/script/array.py CHANGED
@@ -4,6 +4,7 @@ from __future__ import annotations
  from collections.abc import Iterable
  from typing import Any, Self, final
 
+ from sonolus.backend.ir import IRConst, IRSet
  from sonolus.backend.place import BlockPlace
  from sonolus.script.array_like import ArrayLike
  from sonolus.script.debug import assert_unreachable
@@ -111,7 +112,7 @@ class Array[T, Size](GenericValue, ArrayLike[T]):
  iterator = iter(values)
  return cls(*(cls.element_type()._from_list_(iterator) for _ in range(cls.size())))
 
- def _to_list_(self, level_refs: dict[Any, int] | None = None) -> list[float | BlockPlace]:
+ def _to_list_(self, level_refs: dict[Any, str] | None = None) -> list[float | str | BlockPlace]:
  match self._value:
  case list():
  return [entry for value in self._value for entry in value._to_list_(level_refs)]
@@ -159,6 +160,16 @@ class Array[T, Size](GenericValue, ArrayLike[T]):
  else:
  return cls._with_value([cls.element_type()._alloc_() for _ in range(cls.size())])
 
+ @classmethod
+ def _zero_(cls) -> Self:
+ if ctx():
+ place = ctx().alloc(size=cls._size_())
+ result: Self = cls._from_place_(place)
+ ctx().add_statements(*[IRSet(place.add_offset(i), IRConst(0)) for i in range(cls._size_())])
+ return result
+ else:
+ return cls._with_value([cls.element_type()._zero_() for _ in range(cls.size())])
+
  def __len__(self):
  return self.size()
 
sonolus/script/bucket.py CHANGED
@@ -80,6 +80,24 @@ class JudgmentWindow(Record):
  self.good * other,
  )
 
+ def __add__(self, other: float | int) -> JudgmentWindow:
+ """Add a scalar to the intervals."""
+ return JudgmentWindow(
+ self.perfect + other,
+ self.great + other,
+ self.good + other,
+ )
+
+ @property
+ def start(self) -> float:
+ """The start time of the good interval."""
+ return self.good.start
+
+ @property
+ def end(self) -> float:
+ """The end time of the good interval."""
+ return self.good.end
+
 
  class Judgment(IntEnum):
  """The judgment of a hit."""
@@ -147,10 +165,10 @@ class Bucket(Record):
  class _BucketSprite:
  id: int
  fallback_id: int | None
- x: int
- y: int
- w: int
- h: int
+ x: float
+ y: float
+ w: float
+ h: float
  rotation: float
 
  def to_dict(self):
@@ -185,10 +203,10 @@ def bucket_sprite(
  *,
  sprite: Sprite,
  fallback_sprite: Sprite | None = None,
- x: int,
- y: int,
- w: int,
- h: int,
+ x: float,
+ y: float,
+ w: float,
+ h: float,
  rotation: float = 0,
  ) -> _BucketSprite:
  """Define a sprite for a bucket."""
sonolus/script/debug.py CHANGED
@@ -1,4 +1,4 @@
- from collections.abc import Callable
+ from collections.abc import Callable, Sequence
  from contextvars import ContextVar
  from typing import Any, Never
 
@@ -75,7 +75,7 @@ def terminate():
  raise RuntimeError("Terminated")
 
 
- def visualize_cfg(fn: Callable[[], Any], passes: list[CompilerPass] | None = None) -> str:
+ def visualize_cfg(fn: Callable[[], Any], passes: Sequence[CompilerPass] | None = None) -> str:
  from sonolus.build.compile import callback_to_cfg
 
  if passes is None:
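visualize_cfg now accepts any Sequence of compiler passes rather than only a list. A hedged example (the no-op callback is illustrative; an empty tuple means no extra passes, while omitting the argument keeps the default pass list):

```
from sonolus.script.debug import visualize_cfg


def my_callback() -> None:
    pass


# Any Sequence is accepted now, e.g. a tuple instead of a list.
dot = visualize_cfg(my_callback, passes=())
```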
sonolus/script/effect.py CHANGED
@@ -25,7 +25,7 @@ class Effect(Record):
  """Return whether the effect clip is available."""
  return _has_effect_clip(self.id)
 
- def play(self, distance: float) -> None:
+ def play(self, distance: float = 0) -> None:
  """Play the effect clip.
 
  If the clip was already played within the specified distance, it will be skipped.
@@ -35,7 +35,7 @@ class Effect(Record):
  """
  _play(self.id, distance)
 
- def schedule(self, time: float, distance: float) -> None:
+ def schedule(self, time: float, distance: float = 0) -> None:
  """Schedule the effect clip to play at a specific time.
 
  This is not suitable for real-time effects such as responses to user input. Use `play` instead.
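With distance now defaulting to 0, the common case of playing or scheduling an effect clip needs no extra argument. A minimal sketch; the helper function and timing are illustrative:

```
from sonolus.script.effect import Effect


def on_hit(clip: Effect, now: float) -> None:
    clip.play()               # same as clip.play(distance=0)
    clip.schedule(now + 0.5)  # same as clip.schedule(now + 0.5, distance=0)
```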