auto-editor 28.1.0__py3-none-any.whl → 29.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. {auto_editor-28.1.0.dist-info → auto_editor-29.0.0.dist-info}/METADATA +4 -3
  2. auto_editor-29.0.0.dist-info/RECORD +5 -0
  3. auto_editor-29.0.0.dist-info/top_level.txt +1 -0
  4. auto_editor/__init__.py +0 -1
  5. auto_editor/__main__.py +0 -504
  6. auto_editor/analyze.py +0 -393
  7. auto_editor/cmds/__init__.py +0 -0
  8. auto_editor/cmds/cache.py +0 -69
  9. auto_editor/cmds/desc.py +0 -32
  10. auto_editor/cmds/info.py +0 -213
  11. auto_editor/cmds/levels.py +0 -199
  12. auto_editor/cmds/palet.py +0 -29
  13. auto_editor/cmds/repl.py +0 -113
  14. auto_editor/cmds/subdump.py +0 -72
  15. auto_editor/cmds/test.py +0 -816
  16. auto_editor/edit.py +0 -560
  17. auto_editor/exports/__init__.py +0 -0
  18. auto_editor/exports/fcp11.py +0 -195
  19. auto_editor/exports/fcp7.py +0 -313
  20. auto_editor/exports/json.py +0 -63
  21. auto_editor/exports/kdenlive.py +0 -322
  22. auto_editor/exports/shotcut.py +0 -147
  23. auto_editor/ffwrapper.py +0 -187
  24. auto_editor/help.py +0 -224
  25. auto_editor/imports/__init__.py +0 -0
  26. auto_editor/imports/fcp7.py +0 -275
  27. auto_editor/imports/json.py +0 -234
  28. auto_editor/json.py +0 -297
  29. auto_editor/lang/__init__.py +0 -0
  30. auto_editor/lang/libintrospection.py +0 -10
  31. auto_editor/lang/libmath.py +0 -23
  32. auto_editor/lang/palet.py +0 -724
  33. auto_editor/lang/stdenv.py +0 -1179
  34. auto_editor/lib/__init__.py +0 -0
  35. auto_editor/lib/contracts.py +0 -235
  36. auto_editor/lib/data_structs.py +0 -278
  37. auto_editor/lib/err.py +0 -2
  38. auto_editor/make_layers.py +0 -315
  39. auto_editor/preview.py +0 -93
  40. auto_editor/render/__init__.py +0 -0
  41. auto_editor/render/audio.py +0 -517
  42. auto_editor/render/subtitle.py +0 -205
  43. auto_editor/render/video.py +0 -307
  44. auto_editor/timeline.py +0 -331
  45. auto_editor/utils/__init__.py +0 -0
  46. auto_editor/utils/bar.py +0 -142
  47. auto_editor/utils/chunks.py +0 -2
  48. auto_editor/utils/cmdkw.py +0 -206
  49. auto_editor/utils/container.py +0 -101
  50. auto_editor/utils/func.py +0 -128
  51. auto_editor/utils/log.py +0 -126
  52. auto_editor/utils/types.py +0 -277
  53. auto_editor/vanparse.py +0 -313
  54. auto_editor-28.1.0.dist-info/RECORD +0 -57
  55. auto_editor-28.1.0.dist-info/entry_points.txt +0 -6
  56. auto_editor-28.1.0.dist-info/top_level.txt +0 -2
  57. docs/build.py +0 -70
  58. {auto_editor-28.1.0.dist-info → auto_editor-29.0.0.dist-info}/WHEEL +0 -0
  59. {auto_editor-28.1.0.dist-info → auto_editor-29.0.0.dist-info}/licenses/LICENSE +0 -0
auto_editor/imports/json.py DELETED
@@ -1,234 +0,0 @@
- from __future__ import annotations
-
- import os
- from difflib import get_close_matches
- from fractions import Fraction
- from typing import TYPE_CHECKING, Any
-
- from auto_editor.ffwrapper import FileInfo
- from auto_editor.json import load
- from auto_editor.lib.err import MyError
- from auto_editor.timeline import (
-     Clip,
-     Template,
-     audio_builder,
-     v1,
-     v3,
-     visual_objects,
- )
- from auto_editor.utils.cmdkw import ParserError, Required, pAttrs
- from auto_editor.utils.types import CoerceError
-
- if TYPE_CHECKING:
-     from auto_editor.timeline import ASpace, VSpace
-     from auto_editor.utils.log import Log
-
-
- def check_attrs(data: object, log: Log, *attrs: str) -> None:
-     if not isinstance(data, dict):
-         log.error("Data is in wrong shape!")
-
-     for attr in attrs:
-         if attr not in data:
-             log.error(f"'{attr}' attribute not found!")
-
-
- def check_file(path: str, log: Log) -> None:
-     if not os.path.isfile(path):
-         log.error(f"Could not locate media file: '{path}'")
-
-
- def read_v3(tl: Any, log: Log) -> v3:
-     check_attrs(
-         tl,
-         log,
-         "background",
-         "v",
-         "a",
-         "timebase",
-         "resolution",
-         "samplerate",
-     )
-
-     srcs: dict[str, FileInfo] = {}
-
-     def make_src(v: str) -> FileInfo:
-         if v in srcs:
-             return srcs[v]
-         temp = FileInfo.init(v, log)
-         srcs[v] = temp
-         return temp
-
-     def parse_obj(obj: dict[str, Any], build: pAttrs) -> dict[str, Any]:
-         kwargs: dict[str, Any] = {}
-         del obj["name"]
-
-         for attr in build.attrs:
-             assert attr.coerce is not None
-             if attr.default is Required:
-                 kwargs[attr.n] = Required
-             else:
-                 assert attr.coerce != "source"
-                 kwargs[attr.n] = attr.coerce(attr.default)
-
-         for key, val in obj.items():
-             found = False
-             for attr in build.attrs:
-                 if key == attr.n:
-                     try:
-                         if attr.coerce == "source":
-                             kwargs[key] = make_src(val)
-                         else:
-                             assert attr.coerce is not None
-                             kwargs[key] = attr.coerce(val)
-                     except CoerceError as e:
-                         raise ParserError(e)
-                     found = True
-                     break
-
-             if not found:
-                 all_names = {attr.n for attr in build.attrs}
-                 if matches := get_close_matches(key, all_names):
-                     more = f"\n Did you mean:\n {', '.join(matches)}"
-                 else:
-                     more = (
-                         f"\n attributes available:\n {', '.join(all_names)}"
-                     )
-
-                 raise ParserError(
-                     f"{build.name} got an unexpected keyword '{key}'\n{more}"
-                 )
-
-         for k, v in kwargs.items():
-             if v is Required:
-                 raise ParserError(f"'{k}' must be specified.")
-
-         return kwargs
-
-     bg = tl["background"]
-     sr = tl["samplerate"]
-     res = (tl["resolution"][0], tl["resolution"][1])
-     tb = Fraction(tl["timebase"])
-
-     v: Any = []
-     a: list[list[Clip]] = []
-
-     for vlayers in tl["v"]:
-         if vlayers:
-             v_out: VSpace = []
-             for vdict in vlayers:
-                 if "name" not in vdict:
-                     log.error("Invalid video object: name not specified")
-                 if vdict["name"] not in visual_objects:
-                     log.error(f"Unknown video object: {vdict['name']}")
-                 my_vobj, my_build = visual_objects[vdict["name"]]
-
-                 try:
-                     v_out.append(my_vobj(**parse_obj(vdict, my_build)))
-                 except ParserError as e:
-                     log.error(e)
-
-             v.append(v_out)
-
-     for alayers in tl["a"]:
-         if alayers:
-             a_out = []
-             for adict in alayers:
-                 if "name" not in adict:
-                     log.error("Invalid audio object: name not specified")
-                 if adict["name"] != "audio":
-                     log.error(f"Unknown audio object: {adict['name']}")
-
-                 try:
-                     a_out.append(Clip(**parse_obj(adict, audio_builder)))
-                 except ParserError as e:
-                     log.error(e)
-
-             a.append(a_out)
-
-     try:
-         T = Template.init(srcs[next(iter(srcs))])
-     except StopIteration:
-         T = Template(sr, "stereo", res, [], [])
-
-     return v3(tb, bg, T, v, a, v1=None)
-
-
- def read_v1(tl: Any, log: Log) -> v3:
-     from auto_editor.make_layers import clipify
-
-     check_attrs(tl, log, "source", "chunks")
-
-     chunks = tl["chunks"]
-     path = tl["source"]
-
-     check_file(path, log)
-
-     src = FileInfo.init(path, log)
-
-     vtl: VSpace = []
-     atl: ASpace = [[] for _ in range(len(src.audios))]
-
-     # Verify chunks
-     last_end: int | None = None
-     if type(chunks) is not list:
-         log.error("chunks key must be an array")
-
-     for i, chunk in enumerate(chunks):
-         if type(chunk) is not list or len(chunk) != 3:
-             log.error(f"Invalid chunk at chunk {i}")
-         if type(chunk[0]) not in (int, float) or chunk[0] < 0:
-             log.error(f"Invalid start at chunk {i}")
-         if type(chunk[1]) not in (int, float) or chunk[1] <= chunk[0]:
-             log.error(f"Invalid end at chunk {i}")
-         if type(chunk[2]) not in (int, float) or chunk[2] < 0.0 or chunk[2] > 99999.0:
-             log.error(f"Invalid speed at chunk {i}")
-
-         if i == 0 and chunk[0] != 0:
-             log.error("First chunk must start with 0")
-         if i != 0 and chunk[0] != last_end:
-             log.error(f"Invalid start at chunk {i}")
-         last_end = chunk[1]
-
-         if type(chunk[0]) is float or type(chunk[1]) is float or type(chunk[2]) is int:
-             chunks[i] = (int(chunk[0]), int(chunk[1]), float(chunk[2]))
-
-     for c in clipify(chunks, src):
-         if src.videos:
-             if len(vtl) == 0:
-                 vtl.append([])
-             vtl[0].append(Clip(c.start, c.dur, c.src, c.offset, 0, c.speed))
-
-         for a in range(len(src.audios)):
-             atl[a].append(Clip(c.start, c.dur, c.src, c.offset, a, c.speed))
-
-     return v3(
-         src.get_fps(),
-         "#000",
-         Template.init(src),
-         vtl,
-         atl,
-         v1(src, chunks),
-     )
-
-
- def read_json(path: str, log: Log) -> v3:
-     try:
-         with open(path, encoding="utf-8", errors="ignore") as f:
-             tl = load(path, f)
-     except FileNotFoundError:
-         log.error(f"File not found: {path}")
-     except MyError as e:
-         log.error(e)
-
-     check_attrs(tl, log, "version")
-
-     ver = tl["version"]
-
-     if ver == "3":
-         return read_v3(tl, log)
-     if ver == "1":
-         return read_v1(tl, log)
-     if type(ver) is not str:
-         log.error("version needs to be a string")
-     log.error(f"Importing version {ver} timelines is not supported.")
auto_editor/json.py DELETED
@@ -1,297 +0,0 @@
- from __future__ import annotations
-
- from io import StringIO, TextIOWrapper
- from typing import TYPE_CHECKING
-
- from auto_editor.ffwrapper import FileInfo
- from auto_editor.lib.err import MyError
-
- if TYPE_CHECKING:
-     from typing import Any, NoReturn
-
-     from _typeshed import SupportsWrite
-
-     Token = tuple[int, object]
-
-
- EOF, LCUR, RCUR, LBRAC, RBRAC, COL, COMMA, STR, VAL = range(9)
- str_escape = {
-     "\\": "\\",
-     "/": "/",
-     '"': '"',
-     "b": "\b",
-     "f": "\f",
-     "n": "\n",
-     "r": "\r",
-     "t": "\t",
- }
-
-
- def normalize_string(v: str) -> str:
-     for to, replace in str_escape.items():
-         if to == "/":
-             continue
-         v = v.replace(replace, f"\\{to}")
-     return v
-
-
- class Lexer:
-     __slots__ = ("filename", "text", "pos", "char", "lineno", "column")
-
-     def __init__(self, filename: str, text: str | bytes | TextIOWrapper):
-         self.filename = filename
-         self.pos: int = 0
-         self.lineno: int = 1
-         self.column: int = 1
-
-         if isinstance(text, bytes):
-             self.text: str = text.decode("utf-8", "replace")
-         elif isinstance(text, str):
-             self.text = text
-         else:
-             self.text = text.read()
-
-         self.char: str | None = self.text[self.pos] if text else None
-
-     def error(self, msg: str) -> NoReturn:
-         raise MyError(f"{msg}\n at {self.filename}:{self.lineno}:{self.column}")
-
-     def advance(self) -> None:
-         if self.char == "\n":
-             self.lineno += 1
-             self.column = 0
-
-         self.pos += 1
-
-         if self.pos > len(self.text) - 1:
-             self.char = None
-         else:
-             self.char = self.text[self.pos]
-             self.column += 1
-
-     def rewind(self) -> None:
-         self.pos = 0
-         self.lineno = 1
-         self.column = 1
-         self.char = self.text[self.pos] if self.text else None
-
-     def peek(self) -> str | None:
-         peek_pos = self.pos + 1
-         return None if peek_pos > len(self.text) - 1 else self.text[peek_pos]
-
-     def string(self) -> str:
-         result = StringIO()
-         while self.char is not None and self.char != '"':
-             if self.char == "\\":
-                 self.advance()
-                 if self.char is None:
-                     break
-
-                 if self.char == "u":
-                     buf = ""
-                     for _ in range(4):
-                         self.advance()
-                         if self.char is None:
-                             self.error("\\u escape sequence needs 4 hexs")
-                         buf += self.char
-                     try:
-                         result.write(chr(int(buf, 16)))
-                     except ValueError:
-                         self.error(f"Invalid \\u escape sequence: `{buf}`")
-                 if self.char in str_escape:
-                     result.write(str_escape[self.char])
-                     self.advance()
-                     continue
-
-                 self.error(f"Unknown escape sequence `\\{self.char}` in string")
-             else:
-                 result.write(self.char)
-                 self.advance()
-
-         if self.char is None:
-             self.error('Expected a closing `"`')
-
-         self.advance()
-         return result.getvalue()
-
-     def number(self) -> Token:
-         buf = StringIO()
-
-         has_dot = False
-         while self.char is not None and self.char in "-0123456789.":
-             if self.char == ".":
-                 if has_dot:
-                     self.error("Number has multiple `.`s")
-                 has_dot = True
-             buf.write(self.char)
-             self.advance()
-
-         result = buf.getvalue()
-
-         try:
-             return (VAL, float(result) if has_dot else int(result))
-         except ValueError:
-             self.error(f"`{result}` is not a valid JSON Number")
-
-     def is_whitespace(self) -> bool:
-         return self.char is None or self.char in " \t\n\r\x0b\x0c"
-
-     def get_next_token(self) -> Token:
-         while self.char is not None:
-             while self.char is not None and self.is_whitespace():
-                 self.advance()
-             if self.char is None:
-                 break
-
-             if self.char == '"':
-                 self.advance()
-                 return (STR, self.string())
-
-             if self.char == "-":
-                 _peek = self.peek()
-                 if _peek is not None and _peek in "0123456789.":
-                     return self.number()
-
-             if self.char in "0123456789.":
-                 return self.number()
-
-             table = {"{": LCUR, "}": RCUR, "[": LBRAC, "]": RBRAC, ":": COL, ",": COMMA}
-             if self.char in table:
-                 key = table[self.char]
-                 self.advance()
-                 return (key, None)
-
-             keyword = ""
-             for _ in range(5):  # Longest valid keyword length
-                 if self.char is None or self.char in " \t\n\r\x0b\x0c[]}{,":
-                     break
-                 keyword += self.char
-                 self.advance()
-
-             if keyword == "true":
-                 return (VAL, True)
-             if keyword == "false":
-                 return (VAL, False)
-             if keyword == "null":
-                 return (VAL, None)
-
-             self.error(f"Invalid keyword: `{keyword}`")
-         return (EOF, None)
-
-
- class Parser:
-     __slots__ = ("lexer", "current_token")
-
-     def __init__(self, lexer: Lexer):
-         self.lexer = lexer
-         self.current_token = self.lexer.get_next_token()
-
-     def eat(self) -> None:
-         self.current_token = self.lexer.get_next_token()
-
-     def expr(self) -> Any:
-         self.current_token
-
-         if self.current_token[0] in {STR, VAL}:
-             val = self.current_token[1]
-             self.eat()
-             return val
-
-         if self.current_token[0] == LCUR:
-             self.eat()
-
-             my_dic = {}
-             while self.current_token[0] != RCUR:
-                 if self.current_token[0] != STR:
-                     if self.current_token[0] in {LBRAC, VAL}:
-                         self.lexer.error("JSON Objects only allow strings as keys")
-                     self.lexer.error("Expected closing `}`")
-                 key = self.current_token[1]
-                 if key in my_dic:
-                     self.lexer.error(f"Object has repeated key `{key}`")
-                 self.eat()
-                 if self.current_token[0] != COL:
-                     self.lexer.error("Expected `:`")
-                 self.eat()
-
-                 my_dic[key] = self.expr()
-                 if self.current_token[0] != RCUR:
-                     if self.current_token[0] != COMMA:
-                         self.lexer.error("Expected `,` between Object entries")
-                     self.eat()
-                     if self.current_token[0] == RCUR:
-                         self.lexer.error("Trailing `,` in Object")
-
-             self.eat()
-             return my_dic
-
-         if self.current_token[0] == LBRAC:
-             self.eat()
-             my_arr = []
-             while self.current_token[0] != RBRAC:
-                 my_arr.append(self.expr())
-                 if self.current_token[0] != RBRAC:
-                     if self.current_token[0] != COMMA:
-                         self.lexer.error("Expected `,` between array entries")
-                     self.eat()
-                     if self.current_token[0] == RBRAC:
-                         self.lexer.error("Trailing `,` in array")
-             self.eat()
-             return my_arr
-
-         raise MyError(f"Unknown token: {self.current_token}")
-
-
- def load(path: str, f: str | bytes | TextIOWrapper) -> dict[str, object]:
-     lexer = Lexer(path, f)
-     if lexer.get_next_token()[0] != LCUR:
-         raise MyError("Expected JSON Object")
-     lexer.rewind()
-     return Parser(lexer).expr()
-
-
- def dump(
-     data: object, file: SupportsWrite[str], indent: int | None = None, level: int = 0
- ) -> None:
-     if data is True:
-         file.write("true")
-     elif data is False:
-         file.write("false")
-     elif data is None:
-         file.write("null")
-     elif isinstance(data, str):
-         file.write(f'"{normalize_string(data)}"')
-     elif isinstance(data, FileInfo):
-         file.write(f'"{normalize_string(f"{data.path}")}"')
-     elif isinstance(data, int | float):
-         file.write(f"{data}")
-     elif isinstance(data, list | tuple):
-         file.write("[")
-         if indent is not None:
-             level += indent
-             file.write("\n" + (" " * level))
-
-         for item in data[:-1]:
-             dump(item, file, indent, level)
-             file.write(", " if indent is None else f",\n{' ' * level}")
-         if data:
-             dump(data[-1], file, indent, level)
-         file.write("]" if indent is None else f"\n{' ' * (level - indent)}]")
-     else:
-         my_dic = data if isinstance(data, dict) else data.__dict__
-         file.write("{")
-         if indent is not None:
-             level += indent
-             file.write("\n" + (" " * level))
-         not_first = False
-         for key, item in my_dic.items():
-             if not_first:
-                 file.write(", " if indent is None else f",\n{' ' * level}")
-             dump(key, file, indent, level)
-             file.write(": ")
-             dump(item, file, indent, level)
-             not_first = True
-
-         if indent is not None:
-             file.write(f"\n{' ' * (level - indent)}")
-         file.write("}")
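Note (not part of the diff): the module above was auto-editor's hand-rolled JSON layer in 28.1.0; load() only accepts a top-level object, and dump() also serializes FileInfo values as their path string. A rough round-trip sketch against the 28.1.0 API, which this diff removes in 29.0.0:

from io import StringIO

from auto_editor.json import dump, load  # removed in 29.0.0 per this diff

buf = StringIO()
dump({"version": "1", "chunks": [[0, 10, 1.0]]}, buf, indent=4)

# load() takes a filename (used only for error messages) plus a str, bytes,
# or text stream, and requires the top-level value to be a JSON object.
data = load("<buffer>", buf.getvalue())
assert data["version"] == "1"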
auto_editor/lang/__init__.py (file without changes)
auto_editor/lang/libintrospection.py DELETED
@@ -1,10 +0,0 @@
- from .palet import Syntax
-
-
- def all() -> dict[str, object]:
-     return {
-         "get-current-env": Syntax(lambda env, node: env.data.copy()),
-         "proc-name": Syntax(
-             lambda env, node: [proc := node[1].val, env[proc].name][-1]
-         ),
-     }
auto_editor/lang/libmath.py DELETED
@@ -1,23 +0,0 @@
- from __future__ import annotations
-
- import math
-
- from auto_editor.lib.contracts import Proc, andc, between_c, gt_c, is_real
-
-
- def all() -> dict[str, object]:
-     return {
-         "exp": Proc("exp", math.exp, (1, 1), is_real),
-         "ceil": Proc("ceil", math.ceil, (1, 1), is_real),
-         "floor": Proc("floor", math.floor, (1, 1), is_real),
-         "sin": Proc("sin", math.sin, (1, 1), is_real),
-         "cos": Proc("cos", math.cos, (1, 1), is_real),
-         "tan": Proc("tan", math.tan, (1, 1), is_real),
-         "asin": Proc("asin", math.asin, (1, 1), between_c(-1, 1)),
-         "acos": Proc("acos", math.acos, (1, 1), between_c(-1, 1)),
-         "atan": Proc("atan", math.atan, (1, 1), is_real),
-         "log": Proc("log", math.log, (1, 2), andc(is_real, gt_c(0))),
-         "pi": math.pi,
-         "e": math.e,
-         "tau": math.tau,
-     }
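Note (not part of the diff): each entry above wraps a Python callable in a Proc with a display name, an arity range, and an argument contract, which is how the Palet scripting environment exposed math builtins in 28.1.0. A sketch extending the same pattern against the 28.1.0 modules; the extra procedures are invented for illustration:

import math

from auto_editor.lang.libmath import all as libmath_all  # removed in 29.0.0
from auto_editor.lib.contracts import Proc, andc, gt_c, is_real

# Hypothetical additions in the same Proc(name, fn, (min_args, max_args), contract) shape.
extra = {
    "sinh": Proc("sinh", math.sinh, (1, 1), is_real),
    "log2": Proc("log2", math.log2, (1, 1), andc(is_real, gt_c(0))),
}

env = libmath_all() | extra  # merge into one name -> procedure mapping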