sonolus.py 0.1.0 (sonolus_py-0.1.0-py3-none-any.whl)

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of sonolus.py might be problematic.

Files changed (75)
  1. sonolus/__init__.py +0 -0
  2. sonolus/backend/__init__.py +0 -0
  3. sonolus/backend/allocate.py +51 -0
  4. sonolus/backend/blocks.py +756 -0
  5. sonolus/backend/excepthook.py +37 -0
  6. sonolus/backend/finalize.py +69 -0
  7. sonolus/backend/flow.py +92 -0
  8. sonolus/backend/interpret.py +333 -0
  9. sonolus/backend/ir.py +89 -0
  10. sonolus/backend/mode.py +24 -0
  11. sonolus/backend/node.py +40 -0
  12. sonolus/backend/ops.py +197 -0
  13. sonolus/backend/optimize.py +9 -0
  14. sonolus/backend/passes.py +6 -0
  15. sonolus/backend/place.py +90 -0
  16. sonolus/backend/simplify.py +30 -0
  17. sonolus/backend/utils.py +48 -0
  18. sonolus/backend/visitor.py +880 -0
  19. sonolus/build/__init__.py +0 -0
  20. sonolus/build/cli.py +170 -0
  21. sonolus/build/collection.py +293 -0
  22. sonolus/build/compile.py +90 -0
  23. sonolus/build/defaults.py +32 -0
  24. sonolus/build/engine.py +149 -0
  25. sonolus/build/level.py +23 -0
  26. sonolus/build/node.py +43 -0
  27. sonolus/build/project.py +94 -0
  28. sonolus/py.typed +0 -0
  29. sonolus/script/__init__.py +0 -0
  30. sonolus/script/archetype.py +651 -0
  31. sonolus/script/array.py +241 -0
  32. sonolus/script/bucket.py +192 -0
  33. sonolus/script/callbacks.py +105 -0
  34. sonolus/script/comptime.py +146 -0
  35. sonolus/script/containers.py +247 -0
  36. sonolus/script/debug.py +70 -0
  37. sonolus/script/effect.py +132 -0
  38. sonolus/script/engine.py +101 -0
  39. sonolus/script/globals.py +234 -0
  40. sonolus/script/graphics.py +141 -0
  41. sonolus/script/icon.py +73 -0
  42. sonolus/script/internal/__init__.py +5 -0
  43. sonolus/script/internal/builtin_impls.py +144 -0
  44. sonolus/script/internal/context.py +365 -0
  45. sonolus/script/internal/descriptor.py +17 -0
  46. sonolus/script/internal/error.py +15 -0
  47. sonolus/script/internal/generic.py +197 -0
  48. sonolus/script/internal/impl.py +69 -0
  49. sonolus/script/internal/introspection.py +14 -0
  50. sonolus/script/internal/native.py +38 -0
  51. sonolus/script/internal/value.py +144 -0
  52. sonolus/script/interval.py +98 -0
  53. sonolus/script/iterator.py +211 -0
  54. sonolus/script/level.py +52 -0
  55. sonolus/script/math.py +92 -0
  56. sonolus/script/num.py +382 -0
  57. sonolus/script/options.py +194 -0
  58. sonolus/script/particle.py +158 -0
  59. sonolus/script/pointer.py +30 -0
  60. sonolus/script/project.py +17 -0
  61. sonolus/script/range.py +58 -0
  62. sonolus/script/record.py +293 -0
  63. sonolus/script/runtime.py +526 -0
  64. sonolus/script/sprite.py +332 -0
  65. sonolus/script/text.py +404 -0
  66. sonolus/script/timing.py +42 -0
  67. sonolus/script/transform.py +118 -0
  68. sonolus/script/ui.py +160 -0
  69. sonolus/script/values.py +43 -0
  70. sonolus/script/vec.py +48 -0
  71. sonolus_py-0.1.0.dist-info/METADATA +10 -0
  72. sonolus_py-0.1.0.dist-info/RECORD +75 -0
  73. sonolus_py-0.1.0.dist-info/WHEEL +4 -0
  74. sonolus_py-0.1.0.dist-info/entry_points.txt +2 -0
  75. sonolus_py-0.1.0.dist-info/licenses/LICENSE +21 -0
sonolus/build/cli.py ADDED
@@ -0,0 +1,170 @@
+ import argparse
+ import contextlib
+ import http.server
+ import importlib
+ import shutil
+ import socket
+ import socketserver
+ import sys
+ from pathlib import Path
+
+ from sonolus.build.engine import package_engine
+ from sonolus.build.level import package_level_data
+ from sonolus.build.project import build_project_to_collection
+ from sonolus.script.project import Project
+
+
+ def find_default_module() -> str | None:
+     current_dir = Path.cwd()
+
+     potential_modules = []
+
+     project_files = list(current_dir.glob("*/project.py"))
+     potential_modules.extend(str(f.parent.relative_to(current_dir)).replace("/", ".") for f in project_files)
+
+     init_files = list(current_dir.glob("*/__init__.py"))
+     potential_modules.extend(str(f.parent.relative_to(current_dir)).replace("/", ".") for f in init_files)
+
+     potential_modules = [m for m in set(potential_modules) if m]
+
+     return potential_modules[0] if len(potential_modules) == 1 else None
+
+
+ def import_project(module_path: str) -> Project | None:
+     try:
+         current_dir = Path.cwd()
+         if current_dir not in sys.path:
+             sys.path.insert(0, str(current_dir))
+
+         project = None
+
+         try:
+             module = importlib.import_module(module_path)
+             project = getattr(module, "project", None)
+         except ImportError as e:
+             if not str(e).endswith(f"'{module_path}'"):
+                 # It's an error from the module itself
+                 raise
+
+         if project is None:
+             try:
+                 project_module = importlib.import_module(f"{module_path}.project")
+                 project = getattr(project_module, "project", None)
+             except ImportError as e:
+                 if not str(e).endswith(f"'{module_path}.project'"):
+                     raise
+
+         if project is None:
+             print(f"Error: No Project instance found in module {module_path} or {module_path}.project")
+             return None
+
+         return project
+     except Exception as e:
+         print(f"Error: Failed to import project: {e}")
+         return None
+
+
+ def build_project(project: Project, build_dir: Path):
+     dist_dir = build_dir / "dist"
+     levels_dir = dist_dir / "levels"
+     shutil.rmtree(dist_dir, ignore_errors=True)
+     dist_dir.mkdir(parents=True, exist_ok=True)
+     levels_dir.mkdir(parents=True, exist_ok=True)
+
+     package_engine(project.engine.data).write(dist_dir / "engine")
+
+     for level in project.levels:
+         level_path = levels_dir / level.name
+         level_path.write_bytes(package_level_data(level.data))
+
+
+ def build_collection(project: Project, build_dir: Path):
+     site_dir = build_dir / "site"
+     shutil.rmtree(site_dir, ignore_errors=True)
+     site_dir.mkdir(parents=True, exist_ok=True)
+
+     collection = build_project_to_collection(project)
+     collection.write(site_dir)
+
+
+ def get_local_ips():
+     hostname = socket.gethostname()
+     local_ips = []
+
+     with contextlib.suppress(socket.gaierror):
+         local_ips.append(socket.gethostbyname(socket.getfqdn()))
+
+     try:
+         for info in socket.getaddrinfo(hostname, None):
+             ip = info[4][0]
+             if not ip.startswith("127.") and ":" not in ip:
+                 local_ips.append(ip)
+     except socket.gaierror:
+         pass
+
+     return sorted(set(local_ips))
+
+
+ def run_server(base_dir: Path, port: int = 8000):
+     class DirectoryHandler(http.server.SimpleHTTPRequestHandler):
+         def __init__(self, *args, **kwargs):
+             super().__init__(*args, directory=str(base_dir), **kwargs)
+
+     with socketserver.TCPServer(("", port), DirectoryHandler) as httpd:
+         local_ips = get_local_ips()
+         print(f"Server started on port {port}")
+         print("Available on:")
+         for ip in local_ips:
+             print(f" http://{ip}:{port}")
+         try:
+             httpd.serve_forever()
+         except KeyboardInterrupt:
+             print("\nStopping server...")
+             httpd.shutdown()
+
+
+ def main():
+     parser = argparse.ArgumentParser(description="Sonolus project build and development tools")
+     subparsers = parser.add_subparsers(dest="command", required=True)
+
+     build_parser = subparsers.add_parser("build")
+     build_parser.add_argument(
+         "module",
+         type=str,
+         nargs="?",
+         help="Module path (e.g., 'module.name'). If omitted, will auto-detect if only one module exists.",
+     )
+     build_parser.add_argument("--build-dir", type=str, default="./build")
+
+     dev_parser = subparsers.add_parser("dev")
+     dev_parser.add_argument(
+         "module",
+         type=str,
+         nargs="?",
+         help="Module path (e.g., 'module.name'). If omitted, will auto-detect if only one module exists.",
+     )
+     dev_parser.add_argument("--build-dir", type=str, default="./build")
+     dev_parser.add_argument("--port", type=int, default=8000)
+
+     args = parser.parse_args()
+
+     if not args.module:
+         default_module = find_default_module()
+         if default_module:
+             print(f"Using auto-detected module: {default_module}")
+             args.module = default_module
+         else:
+             parser.error("Module argument is required when multiple or no modules are found")
+
+     project = import_project(args.module)
+     if project is None:
+         sys.exit(1)
+
+     build_dir = Path(args.build_dir)
+
+     if args.command == "build":
+         build_project(project, build_dir)
+         print(f"Project built successfully to '{build_dir.resolve()}'")
+     elif args.command == "dev":
+         build_collection(project, build_dir)
+         run_server(build_dir / "site", port=args.port)
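
As an aside (not part of the diff): a minimal sketch of driving this CLI programmatically, assuming a hypothetical project package named my_engine in the working directory that exposes a project attribute (see import_project above); the console-script name defined in entry_points.txt is not expanded here, so argv[0] below is a placeholder.

    # Hypothetical usage of sonolus/build/cli.py (names are illustrative, not from the package)
    import sys
    from sonolus.build.cli import main

    sys.argv = ["sonolus-cli", "build", "my_engine", "--build-dir", "./build"]  # placeholder argv[0]
    main()  # writes ./build/dist/engine and ./build/dist/levels/<level name>
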
sonolus/build/collection.py ADDED
@@ -0,0 +1,293 @@
+ from __future__ import annotations
+
+ import gzip
+ import hashlib
+ import json
+ import urllib.request
+ import zipfile
+ from io import BytesIO
+ from os import PathLike
+ from pathlib import Path
+ from typing import Any, Literal, TypedDict, TypeGuard
+
+ type Category = Literal[
+     "posts",
+     "playlists",
+     "levels",
+     "replays",
+     "skins",
+     "backgrounds",
+     "effects",
+     "particles",
+     "engines",
+ ]
+ type Asset = bytes | PathLike | str
+ CATEGORY_NAMES = {"posts", "playlists", "levels", "replays", "skins", "backgrounds", "effects", "particles", "engines"}
+ SINGULAR_CATEGORY_NAMES: dict[Category, str] = {
+     "posts": "post",
+     "playlists": "playlist",
+     "levels": "level",
+     "replays": "replay",
+     "skins": "skin",
+     "backgrounds": "background",
+     "effects": "effect",
+     "particles": "particle",
+     "engines": "engine",
+ }
+ BASE_PATH = "/sonolus/"
+ RESERVED_FILENAMES = {"info", "list"}
+ LOCALIZED_KEYS = {"title", "subtitle", "author", "description"}
+ CATEGORY_SORT_ORDER = {
+     "levels": 0,
+     "engines": 1,
+     "skins": 2,
+     "effects": 3,
+     "particles": 4,
+     "backgrounds": 5,
+     "posts": 6,
+     "playlists": 7,
+     "replays": 8,
+ }
+
+
+ class Collection:
+     def __init__(self) -> None:
+         self.name = "Unnamed"
+         self.categories: dict[Category, dict[str, Any]] = {}
+         self.repository: dict[str, bytes] = {}
+
+     def get_item(self, category: Category, name: str) -> Any:
+         if name not in self.categories.get(category, {}):
+             raise KeyError(f"Item '{name}' not found in category '{category}'")
+         return self.categories[category][name]["item"]
+
+     def get_default_item(self, category: Category) -> Any:
+         if not self.categories.get(category):
+             raise KeyError(f"No items found in category '{category}'")
+         return next(iter(self.categories[category].values()))["item"]
+
+     def add_item(self, category: Category, name: str, item: Any) -> None:
+         self.categories.setdefault(category, {})[name] = self._make_item_details(item)
+
+     @staticmethod
+     def _make_item_details(item: dict[str, Any]) -> dict[str, Any]:
+         return {
+             "item": item,
+             "actions": [],
+             "hasCommunity": False,
+             "leaderboards": [],
+             "sections": [],
+         }
+
+     @staticmethod
+     def _load_data(value: Asset) -> bytes:
+         match value:
+             case str() if value.startswith(("http://", "https://")):
+                 with urllib.request.urlopen(value) as response:
+                     return response.read()
+             case PathLike():
+                 return Path(value).read_bytes()
+             case bytes():
+                 return value
+             case _:
+                 raise TypeError("value must be a URL, a path, or bytes")
+
+     def add_asset(self, value: Asset, /) -> Srl:
+         data = self._load_data(value)
+         key = hashlib.sha1(data).hexdigest()
+         self.repository[key] = data
+         return Srl(hash=key, url=f"{BASE_PATH}repository/{key}")
+
+     def load_from_scp(self, zip_data: Asset) -> None:
+         with zipfile.ZipFile(BytesIO(self._load_data(zip_data))) as zf:
+             files_by_dir = self._group_zip_entries_by_directory(zf.filelist)
+             self._process_zip_directories(zf, files_by_dir)
+
+     def load_from_source(self, path: PathLike | str) -> None:
+         root_path = Path(path)
+
+         for category_dir in root_path.iterdir():
+             if not category_dir.is_dir():
+                 continue
+
+             category_name = category_dir.name
+             if not self._is_valid_category(category_name):
+                 continue
+
+             for item_dir in category_dir.iterdir():
+                 if not item_dir.is_dir():
+                     continue
+
+                 item_json_path = item_dir / "item.json"
+                 if not item_json_path.exists():
+                     continue
+
+                 try:
+                     item_data = json.loads(item_json_path.read_text())
+                 except json.JSONDecodeError:
+                     continue
+
+                 item_data = self._localize_item(item_data)
+                 item_data["name"] = item_dir.name
+
+                 for resource_path in item_dir.iterdir():
+                     if resource_path.name == "item.json":
+                         continue
+
+                     try:
+                         resource_data = resource_path.read_bytes()
+
+                         if resource_path.suffix.lower() in {".json", ".bin"}:
+                             resource_data = gzip.compress(resource_data)
+
+                         srl = self.add_asset(resource_data)
+                         item_data[resource_path.stem] = srl
+
+                     except Exception as e:
+                         print(f"Error processing resource {resource_path}: {e}")
+                         continue
+
+                 self.add_item(category_name, item_dir.name, item_data)
+
+     @staticmethod
+     def _localize_item(item: dict[str, Any]) -> dict[str, Any]:
+         localized_item = item.copy()
+         for key in LOCALIZED_KEYS:
+             match localized_item.get(key):
+                 case {"en": localized_value}:
+                     localized_item[key] = localized_value
+                 case {**other_languages} if other_languages:
+                     localized_item[key] = localized_item[key][min(other_languages)]
+                 case _:
+                     localized_item[key] = ""
+         return localized_item
+
+     def _group_zip_entries_by_directory(self, file_list: list[zipfile.ZipInfo]) -> dict[str, list[zipfile.ZipInfo]]:
+         files_by_dir: dict[str, list[zipfile.ZipInfo]] = {}
+
+         for zip_entry in file_list:
+             if self._should_skip_zip_entry(zip_entry):
+                 continue
+
+             path_parts = Path(zip_entry.filename).parts
+             if path_parts[0] == "sonolus":
+                 path_parts = path_parts[1:]
+
+             if not path_parts:
+                 continue
+
+             dir_name = path_parts[0]
+             files_by_dir.setdefault(dir_name, []).append(zip_entry)
+
+         return files_by_dir
+
+     def _should_skip_zip_entry(self, zip_entry: zipfile.ZipInfo) -> bool:
+         path = Path(zip_entry.filename)
+         if path.parts[0] == "sonolus":
+             path = Path(*path.parts[1:])
+         return zip_entry.filename.endswith("/") or len(path.parts) < 2 or path.name.lower() in RESERVED_FILENAMES
+
+     def _process_zip_directories(self, zf: zipfile.ZipFile, files_by_dir: dict[str, list[zipfile.ZipInfo]]) -> None:
+         for dir_name, zip_entries in files_by_dir.items():
+             if dir_name == "repository":
+                 self._add_repository_items(zf, zip_entries)
+             elif self._is_valid_category(dir_name):
+                 self.categories.setdefault(dir_name, {})
+                 self._extract_category_items(zf, dir_name, zip_entries)
+
+     def _add_repository_items(self, zf: zipfile.ZipFile, zip_entries: list[zipfile.ZipInfo]) -> None:
+         for zip_entry in zip_entries:
+             self.repository[Path(zip_entry.filename).name] = zf.read(zip_entry)
+
+     def _is_valid_category(self, category: str) -> TypeGuard[Category]:
+         return category in CATEGORY_NAMES
+
+     def _extract_category_items(
+         self, zf: zipfile.ZipFile, dir_name: Category, zip_entries: list[zipfile.ZipInfo]
+     ) -> None:
+         for zip_entry in zip_entries:
+             try:
+                 item_details = json.loads(zf.read(zip_entry))
+             except json.JSONDecodeError:
+                 continue
+
+             path = Path(zip_entry.filename)
+             if path.parts[0] == "sonolus":
+                 path = Path(*path.parts[1:])
+             item_name = path.stem
+
+             if self._is_valid_category(dir_name):
+                 self.categories[dir_name][item_name] = item_details
+
+     def write(self, path: Asset) -> None:
+         base_dir = self._create_base_directory(path)
+         self._write_main_info(base_dir)
+         self._write_category_items(base_dir)
+         self._write_repository_items(base_dir)
+
+     def _create_base_directory(self, path: Asset) -> Path:
+         base_dir = Path(path) / BASE_PATH.strip("/")
+         base_dir.mkdir(parents=True, exist_ok=True)
+         return base_dir
+
+     def _write_main_info(self, base_dir: Path) -> None:
+         sorted_categories = sorted(self.categories.keys(), key=lambda c: CATEGORY_SORT_ORDER.get(c, 100))
+         info = {
+             "title": self.name,
+             "buttons": [{"type": SINGULAR_CATEGORY_NAMES[category]} for category in sorted_categories],
+             "configuration": {"options": []},
+         }
+         self._write_json(base_dir / "info", info)
+
+     def _write_category_items(self, base_dir: Path) -> None:
+         for category, items in self.categories.items():
+             if not items:
+                 continue
+             category_dir = self._create_category_directory(base_dir, category)
+             self._write_category_structure(category_dir, category, items)
+
+     def _create_category_directory(self, base_dir: Path, category: Category) -> Path:
+         category_dir = base_dir / category
+         category_dir.mkdir(exist_ok=True)
+         return category_dir
+
+     def _write_category_structure(self, category_dir: Path, category: Category, items: dict[str, Any]) -> None:
+         self._write_json(
+             category_dir / "info",
+             {
+                 "sections": [
+                     {
+                         "itemType": SINGULAR_CATEGORY_NAMES[category],
+                         "title": "Items",
+                         "items": [item_details["item"] for item_details in items.values()],
+                     }
+                 ]
+             },
+         )
+
+         category_list = {"pageCount": 1, "items": [item_details["item"] for item_details in items.values()]}
+         self._write_json(category_dir / "list", category_list)
+
+         for item_name, item_details in items.items():
+             self._write_json(category_dir / item_name, item_details)
+
+     def _write_repository_items(self, base_dir: Path) -> None:
+         repo_dir = base_dir / "repository"
+         repo_dir.mkdir(exist_ok=True)
+
+         for key, data in self.repository.items():
+             (repo_dir / key).write_bytes(data)
+
+     @staticmethod
+     def _write_json(path: Path, content: Any) -> None:
+         path.write_text(json.dumps(content))
+
+     def update(self, other: Collection) -> None:
+         self.repository.update(other.repository)
+         for category, items in other.categories.items():
+             self.categories.setdefault(category, {}).update(items)
+
+
+ class Srl(TypedDict):
+     hash: str
+     url: str
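
As an aside (not part of the diff): a minimal sketch of how Collection appears intended to be used, assuming a local ./source tree laid out as <category>/<item>/item.json plus resource files; the paths and server name are hypothetical.

    # Hypothetical usage of sonolus/build/collection.py (paths are illustrative, not from the package)
    from pathlib import Path
    from sonolus.build.collection import Collection

    collection = Collection()
    collection.name = "My Server"                  # becomes the title in the written info file
    collection.load_from_source("./source")        # reads <category>/<item>/item.json trees
    srl = collection.add_asset(Path("cover.png"))  # stores the bytes under their SHA-1 hash
    collection.write("site")                       # writes site/sonolus/{info, <category>/..., repository/...}
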
sonolus/build/compile.py ADDED
@@ -0,0 +1,90 @@
+ from collections.abc import Callable
+
+ from sonolus.backend.finalize import cfg_to_engine_node
+ from sonolus.backend.flow import BasicBlock
+ from sonolus.backend.ir import IRConst, IRInstr
+ from sonolus.backend.mode import Mode
+ from sonolus.backend.ops import Op
+ from sonolus.backend.optimize import optimize_and_allocate
+ from sonolus.backend.visitor import compile_and_call
+ from sonolus.build.node import OutputNodeGenerator
+ from sonolus.script.archetype import BaseArchetype
+ from sonolus.script.callbacks import CallbackInfo
+ from sonolus.script.internal.context import (
+     CallbackContextState,
+     Context,
+     GlobalContextState,
+     ReadOnlyMemory,
+     context_to_cfg,
+     ctx,
+     using_ctx,
+ )
+ from sonolus.script.num import Num
+
+
+ def compile_mode(
+     mode: Mode,
+     rom: ReadOnlyMemory,
+     archetypes: list[type[BaseArchetype]] | None,
+     global_callbacks: list[tuple[CallbackInfo, Callable]] | None,
+ ) -> dict:
+     global_state = GlobalContextState(
+         mode, {a: i for i, a in enumerate(archetypes)} if archetypes is not None else None, rom
+     )
+     nodes = OutputNodeGenerator()
+     results = {}
+     if archetypes is not None:
+         archetype_entries = []
+         for archetype in archetypes:
+             archetype_data = {
+                 "name": archetype.name,
+                 "hasInput": archetype.is_scored,
+             }
+             archetype_data["imports"] = [
+                 {"name": name, "index": index} for name, index in archetype._imported_keys_.items()
+             ]
+             if mode == Mode.Play:
+                 archetype_data["exports"] = [
+                     {"name": name, "index": index} for name, index in archetype._exported_keys_.items()
+                 ]
+             for cb_name, cb_info in archetype._supported_callbacks_.items():
+                 cb = getattr(archetype, cb_name)
+                 if cb in archetype._default_callbacks_:
+                     continue
+                 cb_order = getattr(cb, "_callback_order_", 0)
+                 if not cb_info.supports_order and cb_order != 0:
+                     raise ValueError(f"Callback '{cb_name}' does not support a non-zero order")
+                 cfg = callback_to_cfg(global_state, cb, cb_info.name, archetype)
+                 cfg = optimize_and_allocate(cfg)
+                 node = cfg_to_engine_node(cfg)
+                 node_index = nodes.add(node)
+                 archetype_data[cb_info.name] = {
+                     "index": node_index,
+                     "order": cb_order,
+                 }
+             archetype_entries.append(archetype_data)
+         results["archetypes"] = archetype_entries
+     if global_callbacks is not None:
+         for cb_info, cb in global_callbacks:
+             cfg = callback_to_cfg(global_state, cb, cb_info.name)
+             cfg = optimize_and_allocate(cfg)
+             node = cfg_to_engine_node(cfg)
+             node_index = nodes.add(node)
+             results[cb_info.name] = node_index
+     results["nodes"] = nodes.get()
+     return results
+
+
+ def callback_to_cfg(
+     global_state: GlobalContextState, callback: Callable, name: str, archetype: type[BaseArchetype] | None = None
+ ) -> BasicBlock:
+     callback_state = CallbackContextState(name)
+     context = Context(global_state, callback_state)
+     with using_ctx(context):
+         if archetype is not None:
+             result = compile_and_call(callback, archetype._for_compilation())
+         else:
+             result = compile_and_call(callback)
+         if isinstance(result, Num):
+             ctx().add_statements(IRInstr(Op.Break, [IRConst(1), result.ir()]))
+     return context_to_cfg(context)
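
For reference (inferred from the code above, not from package documentation), the dict returned by compile_mode has roughly the following shape; the archetype, callback, and import names below are illustrative placeholders.

    # Illustrative shape of compile_mode's return value (values are made up)
    example_result = {
        "archetypes": [
            {
                "name": "Note",                                  # archetype.name
                "hasInput": True,                                # archetype.is_scored
                "imports": [{"name": "#BEAT", "index": 0}],
                "exports": [{"name": "accuracy", "index": 0}],   # play mode only
                "updateSequential": {"index": 3, "order": 0},    # one entry per non-default callback
            }
        ],
        "updateSpawn": 7,                                        # global callbacks map to node indices
        "nodes": [{"value": 0}],                                 # flattened nodes from OutputNodeGenerator
    }
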
sonolus/build/defaults.py ADDED
@@ -0,0 +1,32 @@
+ EMPTY_ENGINE_PLAY_DATA = {
+     "skin": {"sprites": []},
+     "effect": {"clips": []},
+     "particle": {"effects": []},
+     "buckets": [],
+     "archetypes": [],
+     "nodes": [{"value": 0}],
+ }
+
+ EMPTY_ENGINE_WATCH_DATA = {
+     "skin": {"sprites": []},
+     "effect": {"clips": []},
+     "particle": {"effects": []},
+     "buckets": [],
+     "archetypes": [],
+     "updateSpawn": 0,
+     "nodes": [{"value": 0}],
+ }
+
+ EMPTY_ENGINE_PREVIEW_DATA = {
+     "skin": {"sprites": []},
+     "archetypes": [],
+     "nodes": [{"value": 0}],
+ }
+
+ EMPTY_ENGINE_TUTORIAL_DATA = {
+     "skin": {"sprites": []},
+     "effect": {"clips": []},
+     "particle": {"effects": []},
+     "instruction": {"texts": [], "icons": []},
+     "nodes": [{"value": 0}],
+ }