lua-annotations 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lua_annotations/__init__.py +0 -0
- lua_annotations/api/__init__.py +0 -0
- lua_annotations/api/annotations.py +123 -0
- lua_annotations/api/arguments.py +26 -0
- lua_annotations/api/lua_dict.py +192 -0
- lua_annotations/build_process.py +99 -0
- lua_annotations/exceptions.py +14 -0
- lua_annotations/extensions/__init__.py +0 -0
- lua_annotations/extensions/default.py +76 -0
- lua_annotations/extensions/game_framework/__init__.py +0 -0
- lua_annotations/extensions/game_framework/index.py +84 -0
- lua_annotations/extensions/game_framework/lifecycle.py +60 -0
- lua_annotations/extensions/game_framework/main.py +8 -0
- lua_annotations/extensions/game_framework/networking.py +65 -0
- lua_annotations/init_project.py +197 -0
- lua_annotations/main.py +59 -0
- lua_annotations/parser.py +277 -0
- lua_annotations/parser_schemas.py +139 -0
- lua_annotations/templates/AnnotationInit.lua +24 -0
- lua_annotations/templates/LazyLoadIndex.lua +23 -0
- lua_annotations/templates/annotations.config.json +13 -0
- lua_annotations-0.1.0.dist-info/METADATA +74 -0
- lua_annotations-0.1.0.dist-info/RECORD +27 -0
- lua_annotations-0.1.0.dist-info/WHEEL +5 -0
- lua_annotations-0.1.0.dist-info/entry_points.txt +2 -0
- lua_annotations-0.1.0.dist-info/licenses/LICENSE +201 -0
- lua_annotations-0.1.0.dist-info/top_level.txt +1 -0
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
|
+
from graphlib import TopologicalSorter
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import TYPE_CHECKING, Any, Callable, Literal, Optional
|
|
5
|
+
|
|
6
|
+
ENVIRONMENTS = ('client', 'server', 'shared')
|
|
7
|
+
|
|
8
|
+
# Imports used only by type annotations; guarded to avoid runtime import cycles.
if TYPE_CHECKING:
    from lua_annotations.build_process import BuildProcessCtx, PostProcessCtx
    from lua_annotations.parser_schemas import Annotation
    # Was `from parser import FileParser`, which points at an unrelated
    # top-level module; use the package-qualified path like the other imports.
    from lua_annotations.parser import FileParser
|
|
12
|
+
|
|
13
|
+
ARG_SEP = ', '
|
|
14
|
+
|
|
15
|
+
type retention = Literal['build', 'init', 'runtime']
|
|
16
|
+
type scope = Literal['module', 'method', 'type', 'returned_value']
|
|
17
|
+
type argProcessor = Callable[[str], Any]
|
|
18
|
+
|
|
19
|
+
@dataclass
class AnnotationBuildCtx():
    """Context handed to an AnnotationDef's `on_build` hook for one parsed
    annotation occurrence."""
    annotation: 'Annotation'      # the parsed annotation being built
    parser: 'FileParser'          # parser of the file the annotation came from
    build_ctx: 'BuildProcessCtx'  # per-environment build context
|
|
24
|
+
|
|
25
|
+
# Callback invoked once per occurrence of the annotation at build time.
type OnBuild = Callable[[AnnotationBuildCtx], None]

#for extensions to define annotations
@dataclass
class AnnotationDef():
    """An annotation definition; all Annotation classes are attached to one of these."""
    name: str                                                     # annotation name as written in Lua source
    args: list[argProcessor]=field(default_factory=list)         # processors for positional arguments, in order
    kwargs: dict[str, argProcessor]=field(default_factory=dict)  # processors for keyword arguments, by name
    retention: retention='build'                                 # how long the annotation's data must survive
    scope: scope='module'                                        # what kind of construct it may adorn
    mutual_include: list['AnnotationDef']=field(default_factory=list)  # annotations that must co-occur
    mutual_exclude: list['AnnotationDef']=field(default_factory=list)  # annotations that must not co-occur
    on_build: Optional[OnBuild]=None                              # build-time callback (may be None)
    extends: list['AnnotationDef']=field(default_factory=list)    # definitions this one inherits behaviour from
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
@dataclass
|
|
43
|
+
class FileBuildCtx():
|
|
44
|
+
build_ctx: 'BuildProcessCtx'
|
|
45
|
+
parser: 'FileParser'
|
|
46
|
+
filepath: Path
|
|
47
|
+
|
|
48
|
+
type FileBuildHook = Callable[[FileBuildCtx], None]
|
|
49
|
+
type PostBuildHook = Callable[[PostProcessCtx], None]
|
|
50
|
+
|
|
51
|
+
class Extension():
|
|
52
|
+
def on_post_process(self, ctx: PostProcessCtx):
|
|
53
|
+
...
|
|
54
|
+
def on_file_process(self, ctx: FileBuildCtx):
|
|
55
|
+
...
|
|
56
|
+
def load(self, ctx: ExtensionRegistry):
|
|
57
|
+
...
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
@dataclass
class SortedRegistry():
    """Topologically sorted file_build_hooks and post_build_hooks"""
    file_build_hooks: list[FileBuildHook]    # run after each file is parsed, in hook order
    post_build_hooks: list[PostBuildHook]    # run once after the whole build, in hook order
    anot_registry: dict[str, AnnotationDef]  # annotation name -> definition
|
|
66
|
+
|
|
67
|
+
class ExtensionRegistry():
    """Provides an API to register and get extensions"""

    def __init__(self):
        self.anot_registry: dict[str, AnnotationDef] = {}
        self.extensions: dict[str, Extension] = {}
        # extension class name -> names of extensions it depends on
        self.ext_graph: dict[str, list[str]] = {}
        self.ext_load_order: list[str] = []


    def register_extension(self, extension: Extension, deps: Optional[list[str]] = None, hook_order: Literal['before', 'after'] = 'after'):
        """Register *extension* under its class name.

        `deps` lists class names of extensions that must load before this one.
        `hook_order` controls whether its hooks run before or after those of
        its topological layer (consumed by `shift_exts`).

        The previous signature used a mutable default (`deps=[]`), which is
        shared between calls: mutating one registration's dependency list
        would silently leak into every later default registration.
        """
        name = type(extension).__name__
        # stored on the instance; read later by shift_exts()
        extension.hook_order = hook_order
        self.extensions[name] = extension
        self.ext_graph[name] = list(deps) if deps is not None else []


    def register_anot(self, anot: AnnotationDef):
        """Register an annotation definition, keyed by its name."""
        self.anot_registry[anot.name] = anot


    def sort_extensions(self):
        """Load extensions in dependency order and return the sorted registry.

        Extensions load layer by layer (topological order); the hook order
        additionally honours each extension's `hook_order` flag.
        """
        ext_layers = [[self.extensions[ext] for ext in layer] for layer in topo_layers(self.ext_graph)]
        load_exts = [x for y in ext_layers for x in y]
        hook_exts = [x for y in shift_exts(ext_layers) for x in y]

        for ext in load_exts:
            ext.load(self)

        return SortedRegistry(
            [ext.on_file_process for ext in hook_exts],
            [ext.on_post_process for ext in hook_exts],
            self.anot_registry
        )
|
|
101
|
+
|
|
102
|
+
def shift_exts(layers: list[list[Extension]], flag: str='before'):
    """Pull every extension whose `hook_order` equals *flag* out of its layer
    and prepend them (in encounter order) to the first layer.

    The layer lists are mutated in place; the same `layers` object is returned.
    """
    pulled = []
    for group in layers:
        kept = []
        for ext in group:
            target = pulled if ext.hook_order == flag else kept
            target.append(ext)
        group[:] = kept

    if pulled:
        layers[0][:0] = pulled

    return layers
|
|
112
|
+
|
|
113
|
+
def topo_layers(graph: dict[str, list[str]]):
    """Split *graph* into dependency layers.

    Each layer holds the nodes whose prerequisites were all emitted in
    earlier layers; within a layer, node names are sorted alphabetically.
    """
    sorter = TopologicalSorter(graph)
    sorter.prepare()

    result: list[list[str]] = []
    while sorter.is_active():
        batch = sorted(sorter.get_ready())
        result.append(batch)
        sorter.done(*batch)
    return result
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
from lua_annotations.exceptions import ParseError
|
|
2
|
+
|
|
3
|
+
#[val1, val2, val3]
def list_arg(string: str) -> list[str]:
    """Convert a string wrapped in `[` `]` and separated by commas into a list.

    Each item has surrounding whitespace stripped; an empty (or
    whitespace-only) bracket pair yields [].

    Raises ParseError when the wrapping brackets are missing.
    """
    if not (string.startswith('[') and string.endswith(']')):
        raise ParseError('list argument must be wrapped in `[` and `]` characters')
    inner = string[1:-1]
    if inner.strip() == '':
        return []

    return [item.strip() for item in inner.split(',')]

def default_list(string: str) -> list[str]:
    """Try list_arg; if the argument is not a bracketed list, wrap the raw
    string in a one-item list instead.

    The parameter was previously named `str`, shadowing the builtin; renamed
    (it is only ever called positionally as an argument processor).
    """
    try:
        return list_arg(string)
    except ParseError:
        return [string]
|
|
20
|
+
|
|
21
|
+
def literal_builder(options: list[str]):
    """Build an argument processor that only accepts strings from *options*.

    The returned callable echoes a valid value back and raises ParseError
    for anything not in the allowed set.
    """
    def check(s: str):
        if s in options:
            return s
        raise ParseError(f'{s!r} not in literal options: {options}')
    return check
|
|
@@ -0,0 +1,192 @@
|
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
|
+
from pathlib import PurePath
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
from .annotations import ENVIRONMENTS
|
|
6
|
+
from lua_annotations.build_process import AUTOGENERATED_HEADER, Environment, ProcessCtx, Workspace
|
|
7
|
+
|
|
8
|
+
# Lua-comment form of the autogeneration marker, prepended to generated files.
HEADER = '-- ' + AUTOGENERATED_HEADER

# Expression used to *fetch* each environment's root instance
# (game:GetService-style lookups — Roblox API).
ENV_TO_IMPORT: dict[Environment, str] = {
    'server': 'game:GetService("ServerScriptService")',
    'client': 'game:GetService("Players").LocalPlayer:FindFirstChildOfClass("PlayerScripts")',
    'shared': 'game:GetService("ReplicatedStorage")'
}
# Local variable name each environment root is bound to in generated code.
ENV_TO_VAR: dict[Environment, str] = {
    'server': 'ServerScriptService',
    'client': 'PlayerScripts',
    'shared': 'ReplicatedStorage'
}
# Indentation unit for generated Lua tables.
INDENT = ' ' * 4
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def convert_dict(resolver: LuaPathResolver, data: Any, indent: int = 0, sep: str = ' = ', prefix: str = 'return'):
    """Serialize *data* into Lua source: optional env import lines, then
    `<prefix> <table expression>`.

    Supported values: LuaExpr (emitted verbatim), LuaPath (resolved via
    *resolver*), objects with an `asdict()` method, dicts, lists/tuples,
    strings (escaped and quoted), bools, None (-> nil); anything else falls
    back to `str(value)`.
    """
    def table_block(entry_lines: list[str], level: int) -> str:
        # Render a multi-line `{ ... }` block at the given indent level.
        if len(entry_lines) == 0:
            return '{}'
        inner = INDENT * (level + 1)
        outer = INDENT * level
        return '\n'.join(
            ['{', *(f'{inner}{line}' for line in entry_lines), f'{outer}}}']
        )

    def process_dict(value: dict[Any,Any], level: int):
        entries = [
            f'{key_to_lua(k, level)}{sep}{to_lua(v, level + 1)},'
            for k, v in value.items()
        ]
        return table_block(entries, level)


    def to_lua(value: Any, level: int):
        # NOTE: the previous version wrapped this whole body in
        # `if isinstance(value, object):`, which is always true; removed.
        if isinstance(value, LuaExpr):
            return value.string

        if isinstance(value, LuaPath):
            return value.to_lua(resolver)

        if hasattr(value, 'asdict') and callable(value.asdict):
            return process_dict(value.asdict(), level)

        if isinstance(value, dict):
            return process_dict(value, level)

        if isinstance(value, (list, tuple)):
            entries = [f'{to_lua(item, level + 1)},' for item in value]
            return table_block(entries, level)

        if isinstance(value, str):
            escaped = value.replace('\\', '\\\\').replace('"', '\\"')
            return f'"{escaped}"'

        if isinstance(value, bool):
            return 'true' if value else 'false'

        if value is None:
            return 'nil'

        return str(value)

    def key_to_lua(key: Any, level: int):
        # Identifier-like string keys are emitted bare; everything else bracketed.
        if isinstance(key, str) and key.isidentifier():
            return key
        return f'[{to_lua(key, level)}]'

    body = to_lua(data, indent)
    import_lines = resolver.get_import_lines()

    parts: list[str] = []
    if import_lines:
        parts += ['\n'.join(import_lines), '']
    parts.append(f'{prefix} {body}')

    return '\n'.join(parts) + '\n'
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
class LuaPathResolver:
    """Resolves lua paths and stores required imports; one should be created per output file."""
    def __init__(self, workspace: Workspace):
        self.workspace = workspace
        # environments whose root import line must be emitted in this file
        self.used_imports: set[Environment] = set()

    def _resolve_expr(self, env: Environment, raw_expr: str, inline_require: bool = False):
        """Substitute the first `:` workspace-root marker with the env's root expression."""
        map = ENV_TO_IMPORT if inline_require else ENV_TO_VAR
        #require(:Packages.default_extension): -> require(ServerScriptService.Packages.default_extension).example.path)
        return raw_expr.replace(':', f'{map[env]}.', 1)

    def normalize(self, path: PurePath, inline_require: bool = False) -> tuple[Environment, PurePath, str]:
        """Map *path* to (environment, path relative to its workdir, lua root expression).

        First tries every configured workdir; failing that, treats the path's
        first component as an environment name. Raises ValueError otherwise.
        """
        # path is relative an env workdir
        for env, path_map in self.workspace.items():
            for workdir, lua_expr in path_map.items():
                try:
                    relative = path.relative_to(workdir)
                    return env, relative, self._resolve_expr(env, lua_expr, inline_require)
                except ValueError:
                    # not under this workdir; keep searching
                    continue

        # path is prefixed with an env name
        parts = [part for part in path.parts if part not in ('', '.', '/')]
        if parts and parts[0] in self.workspace:
            env = parts[0]  # type: ignore[assignment]
            return env, PurePath(*parts[1:]), ENV_TO_VAR[env]

        raise ValueError(f'Unknown lua path: {path}')

    def mark_used(self, env: Environment) -> None:
        """Record that *env*'s root import must appear in this output file."""
        self.used_imports.add(env)

    def get_import_lines(self):
        """Return `local <Var> = <expr>` lines for each used env, in ENVIRONMENTS order."""
        import_lines: list[str] = []

        for env in ENVIRONMENTS:
            if env in self.used_imports:
                import_lines.append(f'local {ENV_TO_VAR[env]} = {ENV_TO_IMPORT[env]}')

        return import_lines
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
@dataclass
class LuaExpr:
    """Tiny wrapper class to represent a lua expression literal.
    When used with the `convert_dict` api, the value is not wrapped with quotes in the lua output."""
    string: str  # raw Lua source text, emitted verbatim
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
@dataclass
class LuaPath:
    """A class which allows for easy conversion of a pathlib path into a lua expression. Note that any non-relative paths require a LuaPathResolver."""
    path: PurePath            # filesystem path to the module/instance
    relative: bool=False      # resolve against `script.Parent` instead of a workspace root
    require: bool=False       # wrap the expression in require(...)
    properties: list[str]=field(default_factory=list)  # properties to access on the required module
    function: bool=False      # wrap the final expression in `function() return ... end`

    def _parts_no_ext(self, p: PurePath):
        # Path components with the file extension dropped; skips empty/'.' parts.
        return [x for x in p.with_suffix('').parts if x not in ('', '.')]

    def _post_process(self, string: str):
        # Accessing properties implies the module must be required first.
        if self.require or len(self.properties) > 0:
            string = f'require({string})'

        for prop in self.properties:
            string += '.' + prop

        # NOTE(review): collapses accidental '..' produced by empty path
        # segments; this is a blunt textual fix — confirm it can never
        # corrupt a legitimately doubled dot in an expression.
        string = string.replace('..', '.')

        if self.function or (len(self.properties) > 0 and not self.require):
            #if require is false, wrap the require path in a function
            return f'function() return {string} end'

        return string

    def to_lua_relative(self):
        """Render as a `script.Parent.*` expression (path relative to the emitting script)."""
        assert self.relative

        #relative path (script.Parent.Example.Path)
        parts = self._parts_no_ext(self.path)
        expr = '.'.join(['script', 'Parent', *parts])
        return self._post_process(expr)

    def to_lua(self, resolver: LuaPathResolver, inline_require: bool = False):
        """Render as a workspace-rooted expression; records the env import unless inlined."""
        if self.relative:
            return self.to_lua_relative()

        #env path (ReplicatedStorage.Example.Path)
        env, rel, expr_root = resolver.normalize(self.path, inline_require)
        if not inline_require:
            resolver.mark_used(env)

        parts = self._parts_no_ext(rel)
        expr = '.'.join([expr_root, *parts])
        return self._post_process(expr)
|
|
183
|
+
|
|
184
|
+
|
|
185
|
+
#public api
def convert_dict_module(ctx: ProcessCtx, data: Any, indent: int = 0):
    """Render *data* as a complete Lua module: autogen header plus `return <table>`."""
    resolver = LuaPathResolver(ctx.workspace)
    rendered = convert_dict(resolver, data, indent)
    return HEADER + '\n' + rendered
|
|
189
|
+
|
|
190
|
+
def convert_dict_type(ctx: ProcessCtx, data: dict[str, Any], type_name: str, indent: int = 0):
    """Render *data* as a Luau `export type <name> = { ... }` declaration with header."""
    resolver = LuaPathResolver(ctx.workspace)
    rendered = convert_dict(resolver, data, indent, sep=': ', prefix=f'export type {type_name} = ')
    return HEADER + '\n' + rendered
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
from dataclasses import dataclass
|
|
2
|
+
import json
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Any, Literal
|
|
5
|
+
from importlib.resources import files
|
|
6
|
+
|
|
7
|
+
from .api.annotations import FileBuildCtx, SortedRegistry
|
|
8
|
+
from .exceptions import BuildError
|
|
9
|
+
from .parser_schemas import ANNOTATION_PREFIX
|
|
10
|
+
|
|
11
|
+
# The three deployment targets a source file can belong to.
type Environment = Literal['server', 'client', 'shared']
# Glob patterns matched when scanning source directories.
FILENAMES = ['*.lua', '*.luau']

# Marker written into every generated file (as a Lua comment or JSON `_comment`).
AUTOGENERATED_HEADER = 'Generated using lua-anot; do not edit manually.'
|
|
15
|
+
|
|
16
|
+
def get_template(name: str):
    """Read the bundled template *name* from the lua_annotations/templates package data."""
    template = files('lua_annotations') / 'templates' / name
    return template.read_text()
|
|
20
|
+
|
|
21
|
+
@dataclass
class FileBuildError(BuildError):
    """BuildError enriched with the offending file and the workdir it belongs to."""
    message: str
    file: Path
    workdir: Path

    def __post_init__(self):
        # Run Exception.__init__ so str(err) / err.args behave normally.
        super().__init__(self.message)
|
29
|
+
|
|
30
|
+
@dataclass
class ProcessCtx():
    """Shared build context: sorted registry, project root, workspace mapping."""
    reg: SortedRegistry
    root_dir: Path
    # Quoted forward reference: the `Workspace` alias is declared near the
    # bottom of this module, after this class. Class-level annotations are
    # evaluated eagerly on Python < 3.14, so an unquoted name would raise
    # NameError at import time.
    workspace: 'Workspace'

    def error(self, message: str, file: Path):
        """Raise a FileBuildError for *file*, rooted at this context's root dir."""
        raise FileBuildError(message, file, self.root_dir)
|
|
38
|
+
|
|
39
|
+
# One BuildProcessCtx per environment, keyed by environment name.
type BuildCtxList = dict[Environment, BuildProcessCtx]

@dataclass
class PostProcessCtx(ProcessCtx):
    """Context handed to post-build hooks once every environment was processed."""
    build_ctxs: BuildCtxList

    def create_file(self, env: Environment, name: str, text: str):
        """Write a generated file into *env*'s output tree."""
        self.build_ctxs[env].create_file(name, text)

    def dump_json(self, env: Environment, name: str, data: dict[Any, Any]):
        """Write *data* as indented JSON prefixed with the autogeneration marker."""
        out = {'_comment': AUTOGENERATED_HEADER} | data
        self.create_file(env, name, json.dumps(out, indent=4))
|
|
51
|
+
|
|
52
|
+
@dataclass
class BuildProcessCtx(ProcessCtx):
    """Per-environment build context; walks source dirs and emits output files."""
    workdirs: dict[Path, str]  # source workdir -> lua root expression
    output_root: Path          # directory generated files are written under
    env: Environment           # environment this context builds for

    def create_file(self, name: str, text: str):
        """Write *text* to `output_root / name`, creating parent dirs as needed."""
        file = self.output_root / name
        file.parent.mkdir(parents=True, exist_ok=True)
        file.write_text(text)

        return file

    def process_file(self, file: Path):
        """Parse *file* if it contains annotations and run the file hooks.

        Returns the FileParser for annotated files; files without the
        annotation prefix are skipped entirely (hooks only run when a parser
        exists, otherwise `parser` would be unbound).
        """
        # local import to break the build_process <-> parser import cycle
        from .parser import FileParser

        with file.open('r') as f:
            text = f.read()
        if ANNOTATION_PREFIX in text:
            parser = FileParser(self.reg, file, self)
            parser.parse(text)

            #post-file
            for hook in self.reg.file_build_hooks:
                hook(FileBuildCtx(self, parser, file))

            return parser

    def process_dir(self, dir: Path):
        """Recursively process every .lua/.luau file under *dir*."""
        for filename in FILENAMES:
            matched_files = dir.rglob(filename)
            for file in matched_files:
                self.process_file(file)
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
type RawWorkspace = dict[Environment, dict[str, str]]
|
|
88
|
+
type Workspace = dict[Environment, dict[Path, str]]
|
|
89
|
+
|
|
90
|
+
type Extension = tuple[Literal['library', 'path'], str]
|
|
91
|
+
|
|
92
|
+
class Config():
|
|
93
|
+
def __init__(self, data: dict[Any, Any]):
|
|
94
|
+
self.out_dir_name = data.get('outDirName', 'Generated')
|
|
95
|
+
|
|
96
|
+
self.workspaces: list[RawWorkspace] = data.get('workspaces', [])
|
|
97
|
+
|
|
98
|
+
#extensions
|
|
99
|
+
self.extensions: list[Extension] = data.get('extensions', [])
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
# Exception hierarchy: everything raised by this package derives from
# LuaAnnotationsError so callers can catch the whole family at once.
class LuaAnnotationsError(Exception):
    """Base class for general Lua Annotations errors."""


class BuildError(LuaAnnotationsError):
    """Raised for general build-time errors."""


class ConfigError(LuaAnnotationsError):
    """Raised for invalid project configuration."""


class ParseError(LuaAnnotationsError):
    """Raised for invalid user-provided text."""
|
|
File without changes
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
from typing import Any
|
|
2
|
+
|
|
3
|
+
from lua_annotations.api.annotations import ENVIRONMENTS, AnnotationBuildCtx, AnnotationDef, ExtensionRegistry, Extension, FileBuildCtx
|
|
4
|
+
from lua_annotations.build_process import Environment, PostProcessCtx, get_template
|
|
5
|
+
from lua_annotations.parser_schemas import LuaMethod
|
|
6
|
+
from lua_annotations.api.lua_dict import LuaPathResolver, convert_dict
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class ManifestExtension(Extension):
    """Collects init/post-init/annotation hooks per environment and emits the
    AnnotationInit.<env>.lua bootstrap file from the bundled template."""

    def __init__(self):
        # Per-environment manifest; 'shared' entries are merged into
        # 'server'/'client' during on_post_process.
        self.manifest: dict[Environment, dict[Any, Any]] = {env: {} for env in ENVIRONMENTS}
        for env in ENVIRONMENTS:
            self.manifest[env]['anot_hooks'] = {}
            self.manifest[env]['init_hooks'] = []
            self.manifest[env]['post_init_hooks'] = []
            self.manifest[env]['annotations'] = []

    def on_build_post_init(self, ctx: AnnotationBuildCtx, key: str):
        """Append the adorned method's require path to manifest list *key*
        ('init_hooks' or 'post_init_hooks')."""
        adornee = ctx.annotation.adornee
        assert isinstance(adornee, LuaMethod)

        self.manifest[ctx.build_ctx.env][key].append(adornee.get_path(require=True))

    def on_build_annotation_init(self, ctx: AnnotationBuildCtx):
        """Record an @annotationInit method, keyed by the adorned method's name."""
        adornee = ctx.annotation.adornee
        assert isinstance(adornee, LuaMethod)

        self.manifest[ctx.build_ctx.env]['anot_hooks'][ctx.annotation.adornee.name] = adornee.get_path(require=True)

    def load(self, ctx: ExtensionRegistry):
        """Register the built-in lifecycle/marker annotations."""
        ctx.register_anot(AnnotationDef(
            name='onInit',
            scope='method',
            on_build=lambda ctx: self.on_build_post_init(ctx, 'init_hooks')
        ))
        ctx.register_anot(AnnotationDef(
            'onPostInit',
            scope='method',
            on_build=lambda ctx: self.on_build_post_init(ctx, 'post_init_hooks')
        ))
        ctx.register_anot(AnnotationDef(
            name='annotationInit',
            scope='method',
            on_build=self.on_build_annotation_init
        ))

        #annotation to literally just mark a module to be parsed.
        ctx.register_anot(AnnotationDef(
            name='module',
            scope='module'
        ))

    def on_file_process(self, ctx: FileBuildCtx):
        """Retain annotations whose data must survive past build time."""
        for anot in ctx.parser.annotations:
            if anot.adef.retention != 'build':
                self.manifest[ctx.build_ctx.env]['annotations'].append(anot)

    def on_post_process(self, ctx: PostProcessCtx):
        """Merge 'shared' data into each runtime env and write the init files."""
        for env in ('server', 'client'):
            template = get_template('AnnotationInit.lua')

            # dict-valued entries merge by key; list-valued ones concatenate
            for key in ('annotations', 'anot_hooks', 'init_hooks', 'post_init_hooks'):
                if isinstance(self.manifest[env][key], dict):
                    self.manifest[env][key] |= self.manifest['shared'][key]
                else:
                    self.manifest[env][key] += self.manifest['shared'][key]
            data = self.manifest[env]

            converted = convert_dict(LuaPathResolver(ctx.workspace), data, prefix = 'local manifest =')
            # the template contains a `--manifest` placeholder line
            out = template.replace('--manifest', converted)

            ctx.create_file(env, f'AnnotationInit.{env}.lua', out)
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def load(ctx: ExtensionRegistry):
    """Extension entry point: register the built-in manifest extension."""
    extension = ManifestExtension()
    ctx.register_extension(extension)
|
|
File without changes
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
from lua_annotations.api.annotations import ENVIRONMENTS, AnnotationBuildCtx, AnnotationDef, Extension, ExtensionRegistry
|
|
2
|
+
from lua_annotations.api.lua_dict import HEADER, LuaPath, LuaPathResolver, convert_dict_module
|
|
3
|
+
from lua_annotations.build_process import Environment, PostProcessCtx
|
|
4
|
+
from lua_annotations.parser_schemas import LuaType, ReturnedValue
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def _env(ctx: AnnotationBuildCtx):
    """Environment the current annotation is being built for."""
    build_ctx = ctx.build_ctx
    return build_ctx.env


def _name(ctx: AnnotationBuildCtx):
    """Value of the optional `name` kwarg, or None when it was not given."""
    kwargs = ctx.annotation.kwargs_val
    return kwargs.get('name')
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class IndexExtension(Extension):
    """Builds the generated Index.lua (module index) and Types/Index.lua
    (type re-exports) for each environment."""

    def __init__(self) -> None:
        # module name (optionally nested under a group key) -> require path
        self.indexes: dict[Environment, dict[str, LuaPath]] = {env: {} for env in ENVIRONMENTS}
        # (module path, exported type name)
        self.exported_types: dict[Environment, list[tuple[LuaPath, str]]] = {env: [] for env in ENVIRONMENTS}
        # (module path, type name, module variable name)
        self.indexed_types: dict[Environment, list[tuple[LuaPath, str, str]]] = {env: [] for env in ENVIRONMENTS}


    def on_post_process(self, ctx: PostProcessCtx):
        """Emit Index.lua and Types/Index.lua for every environment."""
        for env in ENVIRONMENTS:
            # module index
            ctx.create_file(env, 'Index.lua', convert_dict_module(ctx, self.indexes[env]))

            # type index
            resolver = LuaPathResolver(ctx.workspace)
            imports: dict[str, str] = {}
            type_lines: list[str] = []

            for path, type_name in self.exported_types[env]:
                type_lines.append(f'export type {type_name} = typeof({path.to_lua(resolver)})')

            for path, type_name, module_name in self.indexed_types[env]:
                # one `local <module> = <path>` line per module; first path wins
                imports.setdefault(module_name, f'local {module_name} = {path.to_lua(resolver)}')
                type_lines.append(f'export type {type_name} = {module_name}.{type_name}')

            # build final file
            out = (
                [HEADER]
                + resolver.get_import_lines()
                + ['']
                + list(imports.values())
                + type_lines
                + ['', 'return nil']
            )
            ctx.create_file(env, 'Types/Index.lua', '\n'.join(out))


    def on_build_indexed(self, ctx: AnnotationBuildCtx):
        """Record an @indexed module, optionally grouped under a positional key."""
        module = ctx.annotation.adornee
        assert isinstance(module, ReturnedValue)

        # renamed from `dict`, which shadowed the builtin
        index = self.indexes[_env(ctx)]
        key = _name(ctx) or module.returned_name
        value = module.get_path(require=True)

        argval = ctx.annotation.args_val
        if argval:
            index.setdefault(argval[0], {})
            index[argval[0]][key] = value
        else:
            index[key] = value


    def on_build_export_type(self, ctx: AnnotationBuildCtx):
        """Record an @exportType return value for typeof()-based re-export."""
        module = ctx.annotation.adornee
        assert isinstance(module, ReturnedValue)

        self.exported_types[_env(ctx)].append((module.get_path(require=True), _name(ctx) or module.returned_name))


    def on_build_indexed_type(self, ctx: AnnotationBuildCtx):
        """Record an exported @indexedType for re-export from the type index."""
        lua_type = ctx.annotation.adornee
        assert isinstance(lua_type, LuaType)
        assert lua_type.exported

        path = LuaPath(ctx.parser.file, require=True)
        self.indexed_types[_env(ctx)].append((path, _name(ctx) or lua_type.name, ctx.parser.file_name))


    def load(self, ctx: ExtensionRegistry) -> None:
        """Register the index-related annotations."""
        ctx.register_anot(AnnotationDef('indexedType', scope='type', kwargs={'name': str}, on_build=self.on_build_indexed_type))
        ctx.register_anot(AnnotationDef('exportType', scope='returned_value', kwargs={'name': str}, on_build=self.on_build_export_type))
        ctx.register_anot(AnnotationDef('indexed', scope='returned_value', kwargs={'name': str}, args=[str], on_build=self.on_build_indexed))
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
from graphlib import CycleError, TopologicalSorter
|
|
2
|
+
from typing import TYPE_CHECKING
|
|
3
|
+
|
|
4
|
+
from lua_annotations.api.annotations import ENVIRONMENTS, AnnotationBuildCtx, AnnotationDef, ExtensionRegistry, Extension, scope
|
|
5
|
+
from lua_annotations.api.arguments import default_list
|
|
6
|
+
from lua_annotations.build_process import Environment, PostProcessCtx
|
|
7
|
+
from lua_annotations.exceptions import BuildError
|
|
8
|
+
from lua_annotations.parser_schemas import Annotation
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING:
|
|
11
|
+
from lua_annotations.extensions.default import ManifestExtension
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def filter_deps(deps: list[str]):
    """Keep only plain service dependencies; remote deps (containing ':') are dropped."""
    kept = []
    for dep in deps:
        if ':' in dep:
            continue
        kept.append(dep)
    return kept
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def proc_deps(deps: list[str]):
    """Split deps into plain service names and remote names (the token after the first ':')."""
    services: list[str] = []
    remotes: list[str] = []
    for dep in deps:
        if ':' in dep:
            remotes.append(dep.split(':')[1])
        else:
            services.append(dep)
    return {'services': services, 'remotes': remotes}
|
|
26
|
+
|
|
27
|
+
class LifecycleExtension(Extension):
    """Registers lifecycle annotations (service/component/bindTag) and writes
    the per-environment service manifest with a topological load order."""

    def __init__(self):
        # service/component annotations collected per environment
        self.services: dict[Environment, list[Annotation]] = {env: [] for env in ENVIRONMENTS}

    def add_service(self, ctx: AnnotationBuildCtx):
        """on_build hook: remember the service/component annotation."""
        self.services[ctx.build_ctx.env].append(ctx.annotation)

    def on_post_process(self, ctx: PostProcessCtx):
        """Populate the manifest's `services` table and `load_order` list."""
        for env in ('server', 'client'):
            # shared services participate in both runtime environments
            services = self.services[env] + self.services['shared']

            self.manifestExt.manifest[env]['services'] = {svc.adornee.returned_name: (
                {
                    'depends': proc_deps(svc.kwargs_val.get('depends', [])),
                    'getAdornee': svc.adornee.get_path(function=True, require=True),
                    'kind': svc.name
                }
                # components additionally carry their tag list (first positional arg)
                | ({'tags': svc.args_val[0]} if svc.name == 'component' else {})
            )
            for svc in services}

            # Only service-to-service edges constrain load order; remote deps
            # are filtered out. The default here was previously `{}`, which
            # was inconsistent with the `[]` used above (both iterate empty,
            # but the list is the correct type for filter_deps).
            sorter = TopologicalSorter({svc.adornee.returned_name: filter_deps(svc.kwargs_val.get('depends', [])) for svc in services})

            try:
                self.manifestExt.manifest[env]['load_order'] = list(sorter.static_order())
            except CycleError as e:
                raise BuildError(f"Cycle detected for service graph: {e.args}") from e

    def load(self, ctx: ExtensionRegistry):
        # Quoted annotation: ManifestExtension is imported only under
        # TYPE_CHECKING, and annotations on attribute targets are evaluated
        # at runtime, so an unquoted name would raise NameError here.
        # Relies on ManifestExtension having been registered as a dependency.
        self.manifestExt: 'ManifestExtension' = ctx.extensions['ManifestExtension']

        ctx.register_anot(AnnotationDef('service', retention='build', kwargs={'depends': default_list}, on_build=self.add_service))
        ctx.register_anot(AnnotationDef('component', retention='build', args=[default_list], kwargs={'depends': default_list}, on_build=self.add_service))
        ctx.register_anot(AnnotationDef('bindTag', retention='init', args=[default_list], scope='method'))
|