auto-editor 25.1.0__py3-none-any.whl → 25.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
auto_editor/__init__.py CHANGED
@@ -1 +1 @@
-__version__ = "25.1.0"
+__version__ = "25.3.0"
auto_editor/__main__.py CHANGED
@@ -254,6 +254,11 @@ def main_options(parser: ArgumentParser) -> ArgumentParser:
         flag=True,
         help="Disable the inclusion of subtitle streams in the output file",
     )
+    parser.add_argument(
+        "-dn",
+        flag=True,
+        help="Disable the inclusion of data streams in the output file",
+    )
     parser.add_argument(
         "--extras",
         metavar="CMD",
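
The new -dn flag rounds out the stream-disabling options: it keeps data streams out of the output just as the flag above does for subtitle streams. A typical invocation (the file name is illustrative) would be:

    auto-editor input.mp4 -dn
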
auto_editor/analyze.py CHANGED
@@ -19,6 +19,7 @@ from auto_editor.utils.subtitle_tools import convert_ass_to_text
 if TYPE_CHECKING:
     from collections.abc import Iterator
     from fractions import Fraction
+    from pathlib import Path
     from typing import Any
 
     from numpy.typing import NDArray
@@ -70,15 +71,6 @@ def mut_remove_large(
             active = False
 
 
-def obj_tag(tag: str, tb: Fraction, obj: dict[str, Any]) -> str:
-    key = f"{tag}:{tb}:"
-    for k, v in obj.items():
-        key += f"{k}={v},"
-
-    key = key[:-1]  # remove unnecessary char
-    return key
-
-
 def iter_audio(src, tb: Fraction, stream: int = 0) -> Iterator[np.float32]:
     fifo = AudioFifo()
     try:
@@ -122,7 +114,7 @@ def iter_motion(src, tb, stream: int, blur: int, width: int) -> Iterator[np.floa
 
     prev_frame = None
     current_frame = None
-    total_pixels = src.videos[0].width * src.videos[0].height
+    total_pixels = None
     index = 0
     prev_index = -1
 
@@ -140,10 +132,13 @@ def iter_motion(src, tb, stream: int, blur: int, width: int) -> Iterator[np.floa
            continue
 
        graph.push(unframe)
-        frame = graph.pull()
+        frame = graph.vpull()
        assert frame.time is not None
        index = round(frame.time * tb)
 
+        if total_pixels is None:
+            total_pixels = frame.width * frame.height
+
        current_frame = frame.to_ndarray()
        if prev_frame is None:
            value = np.float32(0.0)
@@ -161,6 +156,12 @@ def iter_motion(src, tb, stream: int, blur: int, width: int) -> Iterator[np.floa
     container.close()
 
 
+def obj_tag(path: Path, kind: str, tb: Fraction, obj: dict[str, Any]) -> str:
+    mod_time = int(path.stat().st_mtime)
+    key = f"{path.name}:{mod_time:x}:{kind}:{tb}:"
+    return key + ",".join(f"{v}" for v in obj.values())
+
+
 @dataclass(slots=True)
 class Levels:
     src: FileInfo
@@ -201,7 +202,7 @@ class Levels:
     def all(self) -> NDArray[np.bool_]:
         return np.zeros(self.media_length, dtype=np.bool_)
 
-    def read_cache(self, tag: str, obj: dict[str, Any]) -> None | np.ndarray:
+    def read_cache(self, kind: str, obj: dict[str, Any]) -> None | np.ndarray:
         if self.no_cache:
             return None
 
@@ -213,14 +214,14 @@ class Levels:
             self.log.debug(e)
             return None
 
-        key = f"{self.src.path}:{obj_tag(tag, self.tb, obj)}"
+        key = obj_tag(self.src.path, kind, self.tb, obj)
         if key not in npzfile.files:
             return None
 
         self.log.debug("Using cache")
         return npzfile[key]
 
-    def cache(self, arr: np.ndarray, tag: str, obj: dict[str, Any]) -> np.ndarray:
+    def cache(self, arr: np.ndarray, kind: str, obj: dict[str, Any]) -> np.ndarray:
         if self.no_cache:
             return arr
 
@@ -228,8 +229,8 @@ class Levels:
         if not os.path.exists(workdur):
             os.mkdir(workdur)
 
-        tag = obj_tag(tag, self.tb, obj)
-        np.savez(os.path.join(workdur, "cache.npz"), **{f"{self.src.path}:{tag}": arr})
+        key = obj_tag(self.src.path, kind, self.tb, obj)
+        np.savez(os.path.join(workdur, "cache.npz"), **{key: arr})
 
         return arr
 
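
The net effect of the obj_tag rewrite above is that the cache key now embeds the source file's name and modification time (as hex), so cached analysis stops matching once the underlying media file changes. A minimal sketch of the key format, reusing the function above with illustrative arguments:

    from fractions import Fraction
    from pathlib import Path

    def obj_tag(path: Path, kind: str, tb: Fraction, obj: dict) -> str:
        # Same logic as the new analyze.obj_tag: file name, hex mtime, kind, timebase, values.
        mod_time = int(path.stat().st_mtime)
        key = f"{path.name}:{mod_time:x}:{kind}:{tb}:"
        return key + ",".join(f"{v}" for v in obj.values())

    # obj_tag(Path("input.mp4"), "audio", Fraction(30), {"stream": 0})
    # -> something like "input.mp4:663d9f2c:audio:30:0" (the hex mtime will vary)
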
auto_editor/edit.py CHANGED
@@ -11,7 +11,7 @@ from auto_editor.render.audio import make_new_audio
 from auto_editor.render.subtitle import make_new_subtitles
 from auto_editor.render.video import render_av
 from auto_editor.timeline import v1, v3
-from auto_editor.utils.bar import Bar
+from auto_editor.utils.bar import initBar
 from auto_editor.utils.chunks import Chunk, Chunks
 from auto_editor.utils.cmdkw import ParserError, parse_with_palet, pAttr, pAttrs
 from auto_editor.utils.container import Container, container_constructor
@@ -125,7 +125,9 @@ def parse_export(export: str, log: Log) -> dict[str, Any]:
         "default": pAttrs("default"),
         "premiere": pAttrs("premiere", name_attr),
         "resolve-fcp7": pAttrs("resolve-fcp7", name_attr),
-        "final-cut-pro": pAttrs("final-cut-pro", name_attr),
+        "final-cut-pro": pAttrs(
+            "final-cut-pro", name_attr, pAttr("version", 11, is_int)
+        ),
         "resolve": pAttrs("resolve", name_attr),
         "shotcut": pAttrs("shotcut"),
         "json": pAttrs("json", pAttr("api", 3, is_int)),
@@ -146,7 +148,7 @@ def parse_export(export: str, log: Log) -> dict[str, Any]:
 
 
 def edit_media(paths: list[str], ffmpeg: FFmpeg, args: Args, log: Log) -> None:
-    bar = Bar(args.progress)
+    bar = initBar(args.progress)
     tl = None
 
     if paths:
@@ -232,11 +234,19 @@ def edit_media(paths: list[str], ffmpeg: FFmpeg, args: Args, log: Log) -> None:
         fcp7_write_xml(export_ops["name"], output, is_resolve, tl, log)
         return
 
-    if export in ("final-cut-pro", "resolve"):
+    if export == "final-cut-pro":
         from auto_editor.formats.fcp11 import fcp11_write_xml
 
-        is_resolve = export.startswith("resolve")
-        fcp11_write_xml(export_ops["name"], ffmpeg, output, is_resolve, tl, log)
+        ver = export_ops["version"]
+        fcp11_write_xml(export_ops["name"], ver, output, False, tl, log)
+        return
+
+    if export == "resolve":
+        from auto_editor.formats.fcp11 import fcp11_write_xml
+        from auto_editor.timeline import set_stream_to_0
+
+        set_stream_to_0(tl, log)
+        fcp11_write_xml(export_ops["name"], 10, output, True, tl, log)
         return
 
     if export == "shotcut":
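
On the export side, final-cut-pro now accepts an integer version attribute that defaults to 11, while resolve pins version 10 and runs set_stream_to_0 on the timeline before writing. Assuming the same colon-separated attribute syntax the other export kinds use, selecting the older FCPXML schema would presumably look like:

    auto-editor input.mp4 --export final-cut-pro:version=10
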
auto_editor/ffwrapper.py CHANGED
@@ -127,6 +127,31 @@ class FFmpeg:
         return output
 
 
+def mux(input: Path, output: Path, stream: int, codec: str | None = None) -> None:
+    input_container = av.open(input, "r")
+    output_container = av.open(output, "w")
+
+    input_audio_stream = input_container.streams.audio[stream]
+
+    if codec is None:
+        codec = "pcm_s16le"
+
+    output_audio_stream = output_container.add_stream(codec)
+    assert isinstance(output_audio_stream, av.audio.AudioStream)
+
+    for frame in input_container.decode(input_audio_stream):
+        packet = output_audio_stream.encode(frame)
+        if packet:
+            output_container.mux(packet)
+
+    packet = output_audio_stream.encode(None)
+    if packet:
+        output_container.mux(packet)
+
+    output_container.close()
+    input_container.close()
+
+
 @dataclass(slots=True, frozen=True)
 class VideoStream:
     width: int
@@ -269,7 +294,7 @@ def initFileInfo(path: str, log: Log) -> FileInfo:
 
     desc = cont.metadata.get("description", None)
     bitrate = 0 if cont.bit_rate is None else cont.bit_rate
-    dur = 0 if cont.duration is None else cont.duration / 1_000_000
+    dur = 0 if cont.duration is None else cont.duration / av.time_base
 
     cont.close()
 
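
ffwrapper.py also gains a module-level mux helper that decodes one audio stream with PyAV and re-encodes it into a new container, falling back to pcm_s16le when no codec is given. A sketch of a call (paths and stream index are hypothetical):

    from pathlib import Path
    from auto_editor.ffwrapper import mux

    # Re-encode the second audio track of an input file as 16-bit PCM.
    mux(Path("input.mkv"), Path("track1.wav"), stream=1)
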
auto_editor/formats/fcp11.py CHANGED
@@ -3,17 +3,15 @@ from __future__ import annotations
 from typing import TYPE_CHECKING, Any, cast
 from xml.etree.ElementTree import Element, ElementTree, SubElement, indent
 
-from auto_editor.ffwrapper import FFmpeg, FileInfo, initFileInfo
-
-from .utils import make_tracks_dir
-
 if TYPE_CHECKING:
     from collections.abc import Sequence
     from fractions import Fraction
 
+    from auto_editor.ffwrapper import FileInfo
     from auto_editor.timeline import TlAudio, TlVideo, v3
     from auto_editor.utils.log import Log
 
+
 """
 Export a FCPXML 11 file readable with Final Cut Pro 10.6.8 or later.
 
@@ -54,7 +52,7 @@ def make_name(src: FileInfo, tb: Fraction) -> str:
 
 
 def fcp11_write_xml(
-    group_name: str, ffmpeg: FFmpeg, output: str, resolve: bool, tl: v3, log: Log
+    group_name: str, version: int, output: str, resolve: bool, tl: v3, log: Log
 ) -> None:
     def fraction(val: int) -> str:
         if val == 0:
@@ -68,23 +66,17 @@ fcp11_write_xml(
     src_dur = int(src.duration * tl.tb)
     tl_dur = src_dur if resolve else tl.out_len()
 
-    all_srcs: list[FileInfo] = [src]
-    all_refs: list[str] = ["r2"]
-    if resolve and len(src.audios) > 1:
-        fold = make_tracks_dir(src)
-
-        for i in range(1, len(src.audios)):
-            newtrack = fold / f"{i}.wav"
-            ffmpeg.run(
-                ["-i", f"{src.path.resolve()}", "-map", f"0:a:{i}", f"{newtrack}"]
-            )
-            all_srcs.append(initFileInfo(f"{newtrack}", log))
-            all_refs.append(f"r{(i + 1) * 2}")
+    if version == 11:
+        ver_str = "1.11"
+    elif version == 10:
+        ver_str = "1.10"
+    else:
+        log.error(f"Unknown final cut pro version: {version}")
 
-    fcpxml = Element("fcpxml", version="1.10" if resolve else "1.11")
+    fcpxml = Element("fcpxml", version=ver_str)
     resources = SubElement(fcpxml, "resources")
 
-    for i, one_src in enumerate(all_srcs):
+    for i, one_src in enumerate(tl.unique_sources()):
         SubElement(
             resources,
             "format",
@@ -126,13 +118,6 @@ fcp11_write_xml(
     )
     spine = SubElement(sequence, "spine")
 
-    if tl.v and tl.v[0]:
-        clips: Sequence[TlVideo | TlAudio] = cast(Any, tl.v[0])
-    elif tl.a and tl.a[0]:
-        clips = tl.a[0]
-    else:
-        clips = []
-
     def make_clip(ref: str, clip: TlVideo | TlAudio) -> None:
         clip_properties = {
             "name": proj_name,
@@ -157,7 +142,19 @@ fcp11_write_xml(
            interp="smooth2",
        )
 
-    for my_ref in all_refs:
+    if tl.v and tl.v[0]:
+        clips: Sequence[TlVideo | TlAudio] = cast(Any, tl.v[0])
+    elif tl.a and tl.a[0]:
+        clips = tl.a[0]
+    else:
+        clips = []
+
+    all_refs: list[str] = ["r2"]
+    if resolve:
+        for i in range(1, len(tl.a)):
+            all_refs.append(f"r{(i + 1) * 2}")
+
+    for my_ref in reversed(all_refs):
         for clip in clips:
             make_clip(my_ref, clip)
 
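
With the ffmpeg track-splitting gone, fcp11_write_xml builds its resources from tl.unique_sources() and, for Resolve, simply numbers one ref per audio track, emitting clips in reverse ref order. The ref numbering the new loop produces, shown for an illustrative three-track timeline:

    # all_refs as built when resolve is True and len(tl.a) == 3
    all_refs = ["r2"]
    for i in range(1, 3):
        all_refs.append(f"r{(i + 1) * 2}")
    assert all_refs == ["r2", "r4", "r6"]
    assert list(reversed(all_refs)) == ["r6", "r4", "r2"]
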
auto_editor/formats/utils.py CHANGED
@@ -4,9 +4,6 @@ from typing import TYPE_CHECKING
 from xml.etree.ElementTree import Element
 
 if TYPE_CHECKING:
-    from pathlib import Path
-
-    from auto_editor.ffwrapper import FileInfo
     from auto_editor.utils.log import Log
 
 
@@ -19,21 +16,6 @@ def show(ele: Element, limit: int, depth: int = 0) -> None:
         show(child, limit, depth + 1)
 
 
-def make_tracks_dir(src: FileInfo) -> Path:
-    from os import mkdir
-    from shutil import rmtree
-
-    fold = src.path.parent / f"{src.path.stem}_tracks"
-
-    try:
-        mkdir(fold)
-    except OSError:
-        rmtree(fold)
-        mkdir(fold)
-
-    return fold
-
-
 class Validator:
     def __init__(self, log: Log):
         self.log = log
auto_editor/lang/palet.py CHANGED
@@ -60,6 +60,8 @@ str_escape = {
 class Token:
     type: str
     value: Any
+    lineno: int
+    column: int
 
 
 class Lexer:
@@ -156,21 +158,31 @@ class Lexer:
         elif unit == "dB":
             token = DB
         elif unit != "i" and unit != "%":
-            return Token(VAL, Sym(result + unit))
+            return Token(
+                VAL,
+                Sym(result + unit, self.lineno, self.column),
+                self.lineno,
+                self.column,
+            )
 
         try:
             if unit == "i":
-                return Token(VAL, complex(result + "j"))
+                return Token(VAL, complex(result + "j"), self.lineno, self.column)
             elif unit == "%":
-                return Token(VAL, float(result) / 100)
+                return Token(VAL, float(result) / 100, self.lineno, self.column)
             elif "/" in result:
-                return Token(token, Fraction(result))
+                return Token(token, Fraction(result), self.lineno, self.column)
             elif "." in result:
-                return Token(token, float(result))
+                return Token(token, float(result), self.lineno, self.column)
             else:
-                return Token(token, int(result))
+                return Token(token, int(result), self.lineno, self.column)
         except ValueError:
-            return Token(VAL, Sym(result + unit))
+            return Token(
+                VAL,
+                Sym(result + unit, self.lineno, self.column),
+                self.lineno,
+                self.column,
+            )
 
     def hash_literal(self) -> Token:
         if self.char == "\\":
@@ -180,7 +192,7 @@
 
             char = self.char
             self.advance()
-            return Token(VAL, Char(char))
+            return Token(VAL, Char(char), self.lineno, self.column)
 
         if self.char == ":":
             self.advance()
@@ -190,14 +202,14 @@
                 buf.write(self.char)
                 self.advance()
 
-            return Token(VAL, Keyword(buf.getvalue()))
+            return Token(VAL, Keyword(buf.getvalue()), self.lineno, self.column)
 
         if self.char is not None and self.char in "([{":
             brac_type = self.char
             self.advance()
             if self.char is None:
                 self.close_err(f"Expected a character after #{brac_type}")
-            return Token(VLIT, brac_pairs[brac_type])
+            return Token(VLIT, brac_pairs[brac_type], self.lineno, self.column)
 
         buf = StringIO()
         while self.char_is_norm():
@@ -207,10 +219,10 @@
 
         result = buf.getvalue()
         if result in ("t", "T", "true"):
-            return Token(VAL, True)
+            return Token(VAL, True, self.lineno, self.column)
 
         if result in ("f", "F", "false"):
-            return Token(VAL, False)
+            return Token(VAL, False, self.lineno, self.column)
 
         self.error(f"Unknown hash literal `#{result}`")
 
@@ -231,17 +243,19 @@
                 my_str = self.string()
                 if self.char == ".":  # handle `object.method` syntax
                     self.advance()
-                    return Token(DOT, (my_str, self.get_next_token()))
-                return Token(VAL, my_str)
+                    return Token(
+                        DOT, (my_str, self.get_next_token()), self.lineno, self.column
+                    )
+                return Token(VAL, my_str, self.lineno, self.column)
 
             if self.char == "'":
                 self.advance()
-                return Token(QUOTE, "'")
+                return Token(QUOTE, "'", self.lineno, self.column)
 
             if self.char in "(){}[]":
                 _par = self.char
                 self.advance()
-                return Token(_par, _par)
+                return Token(_par, _par, self.lineno, self.column)
 
             if self.char in "+-":
                 _peek = self.peek()
@@ -339,18 +353,27 @@
             if is_method:
                 from auto_editor.utils.cmdkw import parse_method
 
-                return Token(M, parse_method(name, result, env))
+                return Token(
+                    M, parse_method(name, result, env), self.lineno, self.column
+                )
 
             if self.char == ".":  # handle `object.method` syntax
                 self.advance()
-                return Token(DOT, (Sym(result), self.get_next_token()))
+                return Token(
+                    DOT,
+                    (Sym(result, self.lineno, self.column), self.get_next_token()),
+                    self.lineno,
+                    self.column,
+                )
 
             if has_illegal:
                 self.error(f"Symbol has illegal character(s): {result}")
 
-            return Token(VAL, Sym(result))
+            return Token(
+                VAL, Sym(result, self.lineno, self.column), self.lineno, self.column
+            )
 
-        return Token(EOF, "EOF")
+        return Token(EOF, "EOF", self.lineno, self.column)
 
 
 ###############################################################################
@@ -370,6 +393,7 @@
 
     def expr(self) -> Any:
         token = self.current_token
+        lineno, column = token.lineno, token.column
 
         if token.type == VAL:
             self.eat()
@@ -397,7 +421,7 @@
         if token.type == M:
             self.eat()
             name, args, kwargs = token.value
-            _result = [Sym(name)] + args
+            _result = [Sym(name, lineno, column)] + args
             for key, val in kwargs.items():
                 _result.append(Keyword(key))
                 _result.append(val)
@@ -413,7 +437,7 @@
 
         if token.type == QUOTE:
             self.eat()
-            return (Sym("quote"), self.expr())
+            return (Sym("quote", lineno, column), self.expr())
 
         if token.type in brac_pairs:
             self.eat()
@@ -610,16 +634,41 @@ def edit_subtitle(pattern, stream=0, **kwargs):
         return raise_(e) if levels.strict else levels.all()
 
 
+class StackTraceManager:
+    def __init__(self) -> None:
+        self.stack: list[Sym] = []
+
+    def push(self, sym: Sym) -> None:
+        self.stack.append(sym)
+
+    def pop(self) -> None:
+        if self.stack:
+            self.stack.pop()
+
+    def get_stacktrace(self) -> str:
+        return "\n".join(
+            f" at {sym.val} ({sym.lineno}:{sym.column})"
+            for sym in reversed(self.stack)
+        )
+
+
+stack_trace_manager = StackTraceManager()
+
+
 def my_eval(env: Env, node: object) -> Any:
+    def make_trace(sym: Sym) -> str:
+        return f" at {sym.val} ({sym.lineno}:{sym.column})"
+
     if type(node) is Sym:
         val = env.get(node.val)
         if type(val) is NotFound:
+            stacktrace = make_trace(node)
             if mat := get_close_matches(node.val, env.data):
                 raise MyError(
-                    f"variable `{node.val}` not found. Did you mean: {mat[0]}"
+                    f"variable `{node.val}` not found. Did you mean: {mat[0]}\n{stacktrace}"
                 )
             raise MyError(
-                f"variable `{node.val}` not found. Did you mean a string literal."
+                f"variable `{node.val}` not found. Did you mean a string literal.\n{stacktrace}"
             )
         return val
 
@@ -631,44 +680,56 @@ def my_eval(env: Env, node: object) -> Any:
             raise MyError("Illegal () expression")
 
         oper = my_eval(env, node[0])
-        if not callable(oper):
-            """
-            ...No one wants to write (aref a x y) when they could write a[x,y].
-            In this particular case there is a way to finesse our way out of the
-            problem. If we treat data structures as if they were functions on indexes,
-            we could write (a x y) instead, which is even shorter than the Perl form.
-            """
-            if is_iterable(oper):
-                length = len(node[1:])
-                if length > 3:
-                    raise MyError(f"{print_str(node[0])}: slice expects 1 argument")
-                if length in (2, 3):
-                    return p_slice(oper, *(my_eval(env, c) for c in node[1:]))
-                if length == 1:
-                    return ref(oper, my_eval(env, node[1]))
+        if isinstance(node[0], Sym):
+            stack_trace_manager.push(node[0])
 
-            raise MyError(
-                f"{print_str(oper)} is not a function. Tried to run with args: {print_str(node[1:])}"
-            )
+        try:
+            if not callable(oper):
+                """
+                ...No one wants to write (aref a x y) when they could write a[x,y].
+                In this particular case there is a way to finesse our way out of the
+                problem. If we treat data structures as if they were functions on indexes,
+                we could write (a x y) instead, which is even shorter than the Perl form.
+                """
+                if is_iterable(oper):
+                    length = len(node[1:])
+                    if length > 3:
+                        raise MyError(f"{print_str(node[0])}: slice expects 1 argument")
+                    if length in (2, 3):
+                        return p_slice(oper, *(my_eval(env, c) for c in node[1:]))
+                    if length == 1:
+                        return ref(oper, my_eval(env, node[1]))
 
-        if type(oper) is Syntax:
-            return oper(env, node)
+                raise MyError(
+                    f"{print_str(oper)} is not a function. Tried to run with args: {print_str(node[1:])}"
+                )
 
-        i = 1
-        args: list[Any] = []
-        kwargs: dict[str, Any] = {}
-        while i < len(node):
-            result = my_eval(env, node[i])
-            if type(result) is Keyword:
+            if type(oper) is Syntax:
+                return oper(env, node)
+
+            i = 1
+            args: list[Any] = []
+            kwargs: dict[str, Any] = {}
+            while i < len(node):
+                result = my_eval(env, node[i])
+                if type(result) is Keyword:
+                    i += 1
+                    if i >= len(node):
+                        raise MyError("Keyword need argument")
+                    kwargs[result.val] = my_eval(env, node[i])
+                else:
+                    args.append(result)
                 i += 1
-            if i >= len(node):
-                raise MyError("Keyword need argument")
-            kwargs[result.val] = my_eval(env, node[i])
-        else:
-            args.append(result)
-        i += 1
 
-    return oper(*args, **kwargs)
+            return oper(*args, **kwargs)
+        except MyError as e:
+            error_msg = str(e)
+            if not error_msg.endswith(make_trace(node[0])):
+                error_msg += f"\n{make_trace(node[0])}"
+            raise MyError(error_msg)
+        finally:
+            if isinstance(node[0], Sym):
+                stack_trace_manager.pop()
 
     return node
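
Taken together, the palet.py changes thread source positions through the whole pipeline: every Token and Sym now carries lineno and column, Parser.expr passes them into the Syms it builds, and my_eval keeps a StackTraceManager so a MyError gains an "at name (line:column)" suffix as it unwinds. An undefined variable that previously reported only

    variable `boop` not found. Did you mean a string literal.

should now also carry a location, roughly (symbol name and position are illustrative):

    variable `boop` not found. Did you mean a string literal.
     at boop (3:7)
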