typed_ffmpeg_compatible-2.1.0-py3-none-any.whl
- typed_ffmpeg/__init__.py +25 -0
- typed_ffmpeg/base.py +114 -0
- typed_ffmpeg/common/__init__.py +0 -0
- typed_ffmpeg/common/schema.py +308 -0
- typed_ffmpeg/common/serialize.py +132 -0
- typed_ffmpeg/dag/__init__.py +13 -0
- typed_ffmpeg/dag/compile.py +51 -0
- typed_ffmpeg/dag/context.py +221 -0
- typed_ffmpeg/dag/factory.py +31 -0
- typed_ffmpeg/dag/global_runnable/__init__.py +0 -0
- typed_ffmpeg/dag/global_runnable/global_args.py +178 -0
- typed_ffmpeg/dag/global_runnable/runnable.py +174 -0
- typed_ffmpeg/dag/io/__init__.py +0 -0
- typed_ffmpeg/dag/io/_input.py +197 -0
- typed_ffmpeg/dag/io/_output.py +318 -0
- typed_ffmpeg/dag/io/output_args.py +327 -0
- typed_ffmpeg/dag/nodes.py +479 -0
- typed_ffmpeg/dag/schema.py +210 -0
- typed_ffmpeg/dag/utils.py +41 -0
- typed_ffmpeg/dag/validate.py +172 -0
- typed_ffmpeg/exceptions.py +42 -0
- typed_ffmpeg/filters.py +3510 -0
- typed_ffmpeg/probe.py +43 -0
- typed_ffmpeg/py.typed +0 -0
- typed_ffmpeg/schema.py +29 -0
- typed_ffmpeg/streams/__init__.py +5 -0
- typed_ffmpeg/streams/audio.py +6955 -0
- typed_ffmpeg/streams/av.py +22 -0
- typed_ffmpeg/streams/channel_layout.py +39 -0
- typed_ffmpeg/streams/video.py +12974 -0
- typed_ffmpeg/types.py +119 -0
- typed_ffmpeg/utils/__init__.py +0 -0
- typed_ffmpeg/utils/escaping.py +49 -0
- typed_ffmpeg/utils/lazy_eval/__init__.py +0 -0
- typed_ffmpeg/utils/lazy_eval/operator.py +134 -0
- typed_ffmpeg/utils/lazy_eval/schema.py +211 -0
- typed_ffmpeg/utils/run.py +27 -0
- typed_ffmpeg/utils/snapshot.py +26 -0
- typed_ffmpeg/utils/typing.py +17 -0
- typed_ffmpeg/utils/view.py +64 -0
- typed_ffmpeg_compatible-2.1.0.dist-info/LICENSE +21 -0
- typed_ffmpeg_compatible-2.1.0.dist-info/METADATA +183 -0
- typed_ffmpeg_compatible-2.1.0.dist-info/RECORD +45 -0
- typed_ffmpeg_compatible-2.1.0.dist-info/WHEEL +4 -0
- typed_ffmpeg_compatible-2.1.0.dist-info/entry_points.txt +3 -0
typed_ffmpeg/dag/context.py
@@ -0,0 +1,221 @@
from __future__ import annotations

from collections import defaultdict
from dataclasses import dataclass
from functools import cached_property
from typing import Any, TypeVar

from ..utils.typing import override
from .nodes import FilterNode, InputNode
from .schema import Node, Stream

T = TypeVar("T")


def _remove_duplicates(seq: list[T]) -> list[T]:
    """
    Remove duplicates from a list while preserving order.

    Args:
        seq: The list to remove duplicates from.

    Returns:
        The list with duplicates removed.
    """
    seen = set()
    output = []

    for x in seq:
        if x not in seen:
            output.append(x)
            seen.add(x)

    return output


def _collect(node: Node) -> tuple[list[Node], list[Stream]]:
    """
    Collect all nodes and streams that are upstream of the given node.

    Args:
        node: The node to collect from.

    Returns:
        A tuple of all nodes and streams that are upstream of the given node.
    """
    nodes, streams = [node], [*node.inputs]

    for stream in node.inputs:
        _nodes, _streams = _collect(stream.node)
        nodes += _nodes
        streams += _streams

    return nodes, streams


@dataclass(frozen=True, kw_only=True)
class DAGContext:
    """
    A context for a directed acyclic graph (DAG).
    """

    node: Node
    """
    The root node (the destination) of the DAG.
    """

    nodes: tuple[Node, ...]
    """
    All nodes in the graph.
    """

    streams: tuple[Stream, ...]
    """
    All streams in the graph.
    """

    @classmethod
    def build(cls, node: Node) -> DAGContext:
        """
        Create a DAG context based on the given node.

        Args:
            node: The root node of the DAG.

        Returns:
            A DAG context based on the given node.
        """
        nodes, streams = _collect(node)

        return cls(
            node=node,
            nodes=tuple(_remove_duplicates(nodes)),
            streams=tuple(_remove_duplicates(streams)),
        )

    @cached_property
    def all_nodes(self) -> list[Node]:
        """
        All nodes in the graph sorted by the number of upstream nodes.
        """
        return sorted(self.nodes, key=lambda node: len(node.upstream_nodes))

    @cached_property
    def all_streams(self) -> list[Stream]:
        """
        All streams in the graph sorted by the number of upstream nodes and the index of the stream.
        """
        return sorted(self.streams, key=lambda stream: (len(stream.node.upstream_nodes), stream.index))

    @cached_property
    def outgoing_nodes(self) -> dict[Stream, list[tuple[Node, int]]]:
        """
        A dictionary of outgoing nodes for each stream.
        """
        outgoing_nodes: dict[Stream, list[tuple[Node, int]]] = defaultdict(list)

        for node in self.nodes:
            for idx, stream in enumerate(node.inputs):
                outgoing_nodes[stream].append((node, idx))

        return outgoing_nodes

    @cached_property
    def outgoing_streams(self) -> dict[Node, list[Stream]]:
        """
        A dictionary of outgoing streams for each node.
        """

        outgoing_streams: dict[Node, list[Stream]] = defaultdict(list)

        for stream in self.streams:
            outgoing_streams[stream.node].append(stream)

        return outgoing_streams

    @cached_property
    def node_labels(self) -> dict[Node, str]:
        """
        A dictionary of labels for each node.
        """

        input_node_index = 0
        filter_node_index = 0
        node_labels: dict[Node, str] = {}

        for node in sorted(self.nodes, key=lambda node: node.max_depth):
            if isinstance(node, InputNode):
                node_labels[node] = str(input_node_index)
                input_node_index += 1
            elif isinstance(node, FilterNode):
                node_labels[node] = f"s{filter_node_index}"
                filter_node_index += 1
            else:
                node_labels[node] = "out"

        return node_labels

    @override
    def get_outgoing_nodes(self, stream: Stream) -> list[tuple[Node, int]]:
        """
        Get all outgoing nodes of the stream.

        Args:
            stream: The stream to get the outgoing nodes of.

        Returns:
            The outgoing nodes of the stream.
        """
        return self.outgoing_nodes[stream]

    @override
    def get_node_label(self, node: Node) -> str:
        """
        Get the label of the node.

        Args:
            node: The node to get the label of.

        Returns:
            The label of the node.
        """

        assert isinstance(node, (InputNode, FilterNode)), "Only input and filter nodes have labels"
        return self.node_labels[node]

    @override
    def get_outgoing_streams(self, node: Node) -> list[Stream]:
        """
        Extract a node's outgoing streams from the graph's streams, because a node only knows its incoming streams.

        Args:
            node: The node to get the outgoing streams of.

        Returns:
            The outgoing streams of the node.
        """
        return self.outgoing_streams[node]

    def render(self, obj: Any) -> Any:
        """
        Render the object to a string.

        Args:
            obj: The object to render.

        Returns:
            The rendered object.
        """

        if isinstance(obj, (list, tuple)):
            return [self.render(o) for o in obj]
        elif isinstance(obj, dict):
            return {self.render(k): self.render(v) for k, v in obj.items()}

        if isinstance(obj, Node):
            return f"Node({obj.repr()}#{self.node_labels[obj]})"

        if isinstance(obj, Stream):
            return f"Stream({self.render(obj.node)}#{obj.index})"

        return obj
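The traversal above is the core of DAGContext.build: walk a node's inputs recursively, then de-duplicate while preserving discovery order. A minimal, self-contained sketch of that idea follows; ToyNode and ToyStream are illustrative stand-ins, not the library's Node/Stream types.

# Stand-alone sketch of the collect-then-dedupe traversal used by DAGContext.build.
# ToyNode/ToyStream are made-up stand-ins, not classes from typed_ffmpeg.
from __future__ import annotations

from dataclasses import dataclass


@dataclass(frozen=True)
class ToyStream:
    node: ToyNode
    index: int


@dataclass(frozen=True)
class ToyNode:
    name: str
    inputs: tuple[ToyStream, ...] = ()


def collect(node: ToyNode) -> tuple[list[ToyNode], list[ToyStream]]:
    # Depth-first walk over the inputs; duplicates are expected when one node
    # feeds several downstream consumers.
    nodes, streams = [node], [*node.inputs]
    for stream in node.inputs:
        sub_nodes, sub_streams = collect(stream.node)
        nodes += sub_nodes
        streams += sub_streams
    return nodes, streams


def dedupe(seq: list) -> list:
    # Order-preserving de-duplication, mirroring _remove_duplicates above.
    seen, out = set(), []
    for x in seq:
        if x not in seen:
            out.append(x)
            seen.add(x)
    return out


src = ToyNode("input0")
flip = ToyNode("hflip", inputs=(ToyStream(src, 0),))
sink = ToyNode("output", inputs=(ToyStream(flip, 0), ToyStream(src, 0)))

nodes, streams = collect(sink)
print([n.name for n in dedupe(nodes)])  # ['output', 'hflip', 'input0']
print(len(dedupe(streams)))             # 2 distinct streams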
typed_ffmpeg/dag/factory.py
@@ -0,0 +1,31 @@
import re
from typing import Any

from ..common.schema import FFMpegFilterDef, StreamType
from ..schema import Auto
from ..utils.run import ignore_default
from .nodes import FilterableStream, FilterNode


def filter_node_factory(filter: FFMpegFilterDef, *inputs: FilterableStream, **kwargs: Any) -> FilterNode:
    """
    Create a FilterNode from a filter definition, resolving Auto arguments and
    string-encoded stream typings by evaluating them against the inputs and kwargs.
    """
    for k, v in kwargs.items():
        if isinstance(v, Auto):
            kwargs[k] = eval(v, {"StreamType": StreamType, "re": re, **kwargs, "streams": inputs})

    if isinstance(filter.typings_input, str):
        input_typings = tuple(eval(filter.typings_input, {"StreamType": StreamType, "re": re, **kwargs}))
    else:
        input_typings = tuple(StreamType.video if k == "video" else StreamType.audio for k in filter.typings_input)

    if isinstance(filter.typings_output, str):
        output_typings = tuple(eval(filter.typings_output, {"StreamType": StreamType, "re": re, **kwargs}))
    else:
        output_typings = tuple(StreamType.video if k == "video" else StreamType.audio for k in filter.typings_output)

    return FilterNode(
        name=filter.name,
        input_typings=input_typings,
        output_typings=output_typings,
        inputs=inputs,
        kwargs=ignore_default(kwargs),
    )
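Both the Auto arguments and the typings_input/typings_output fields can be plain strings that are evaluated against a small namespace. A minimal sketch of that mechanism; the typing expression and kwargs below are made up for illustration and are not a filter definition shipped in this wheel.

# Sketch: resolving a string-encoded stream-typing expression with eval(),
# as filter_node_factory does above. Expression and kwargs are illustrative.
from enum import Enum


class StreamType(Enum):
    audio = "audio"
    video = "video"


typings_input = "[StreamType.audio] * int(inputs)"  # e.g. "this filter takes N audio streams"
kwargs = {"inputs": 3}

input_typings = tuple(eval(typings_input, {"StreamType": StreamType, **kwargs}))
print(input_typings)
# (<StreamType.audio: 'audio'>, <StreamType.audio: 'audio'>, <StreamType.audio: 'audio'>)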
typed_ffmpeg/dag/global_runnable/__init__.py
File without changes
typed_ffmpeg/dag/global_runnable/global_args.py
@@ -0,0 +1,178 @@
# NOTE: this file is auto-generated, do not modify
from __future__ import annotations

from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Any

from ...types import Boolean, Float, Func, Int

if TYPE_CHECKING:
    from ..nodes import GlobalNode, GlobalStream, OutputStream


class GlobalArgs(ABC):
    @abstractmethod
    def _global_node(self, *streams: OutputStream, **kwargs: Any) -> GlobalNode:
        ...

    def global_args(
        self,
        *,
        loglevel: Func = None,
        v: Func = None,
        report: Func = None,
        max_alloc: Func = None,
        cpuflags: Func = None,
        cpucount: Func = None,
        hide_banner: Boolean = None,
        y: Boolean = None,
        n: Boolean = None,
        ignore_unknown: Boolean = None,
        copy_unknown: Boolean = None,
        recast_media: Boolean = None,
        benchmark: Boolean = None,
        benchmark_all: Boolean = None,
        progress: Func = None,
        stdin: Boolean = None,
        timelimit: Func = None,
        dump: Boolean = None,
        hex: Boolean = None,
        frame_drop_threshold: Float = None,
        copyts: Boolean = None,
        start_at_zero: Boolean = None,
        copytb: Int = None,
        dts_delta_threshold: Float = None,
        dts_error_threshold: Float = None,
        xerror: Boolean = None,
        abort_on: Func = None,
        filter_threads: Func = None,
        filter_complex: Func = None,
        filter_complex_threads: Int = None,
        lavfi: Func = None,
        filter_complex_script: Func = None,
        auto_conversion_filters: Boolean = None,
        stats: Boolean = None,
        stats_period: Func = None,
        debug_ts: Boolean = None,
        max_error_rate: Float = None,
        vstats: Func = None,
        vstats_file: Func = None,
        vstats_version: Int = None,
        init_hw_device: Func = None,
        filter_hw_device: Func = None,
        adrift_threshold: Func = None,
        qphist: Func = None,
        vsync: Func = None,
        **kwargs: Any,
    ) -> GlobalStream:
        """
        Set global options.

        Args:
            loglevel: set logging level
            v: set logging level
            report: generate a report
            max_alloc: set maximum size of a single allocated block
            cpuflags: force specific cpu flags
            cpucount: force specific cpu count
            hide_banner: do not show program banner
            y: overwrite output files
            n: never overwrite output files
            ignore_unknown: Ignore unknown stream types
            copy_unknown: Copy unknown stream types
            recast_media: allow recasting stream type in order to force a decoder of different media type
            benchmark: add timings for benchmarking
            benchmark_all: add timings for each task
            progress: write program-readable progress information
            stdin: enable or disable interaction on standard input
            timelimit: set max runtime in seconds in CPU user time
            dump: dump each input packet
            hex: when dumping packets, also dump the payload
            frame_drop_threshold: frame drop threshold
            copyts: copy timestamps
            start_at_zero: shift input timestamps to start at 0 when using copyts
            copytb: copy input stream time base when stream copying
            dts_delta_threshold: timestamp discontinuity delta threshold
            dts_error_threshold: timestamp error delta threshold
            xerror: exit on error
            abort_on: abort on the specified condition flags
            filter_threads: number of non-complex filter threads
            filter_complex: create a complex filtergraph
            filter_complex_threads: number of threads for -filter_complex
            lavfi: create a complex filtergraph
            filter_complex_script: deprecated, use -/filter_complex instead
            auto_conversion_filters: enable automatic conversion filters globally
            stats: print progress report during encoding
            stats_period: set the period at which ffmpeg updates stats and -progress output
            debug_ts: print timestamp debugging info
            max_error_rate: ratio of decoding errors (0.0: no errors, 1.0: 100% errors) above which ffmpeg returns an error instead of success.
            vstats: dump video coding statistics to file
            vstats_file: dump video coding statistics to file
            vstats_version: Version of the vstats format to use.
            init_hw_device: initialise hardware device
            filter_hw_device: set hardware device used when filtering
            adrift_threshold: deprecated, does nothing
            qphist: deprecated, does nothing
            vsync: set video sync method globally; deprecated, use -fps_mode
            **kwargs: Additional options

        Returns:
            GlobalStream: GlobalStream instance
        """

        return self._global_node(
            **(
                {
                    k: v
                    for k, v in {
                        "loglevel": loglevel,
                        "v": v,
                        "report": report,
                        "max_alloc": max_alloc,
                        "cpuflags": cpuflags,
                        "cpucount": cpucount,
                        "hide_banner": hide_banner,
                        "y": y,
                        "n": n,
                        "ignore_unknown": ignore_unknown,
                        "copy_unknown": copy_unknown,
                        "recast_media": recast_media,
                        "benchmark": benchmark,
                        "benchmark_all": benchmark_all,
                        "progress": progress,
                        "stdin": stdin,
                        "timelimit": timelimit,
                        "dump": dump,
                        "hex": hex,
                        "frame_drop_threshold": frame_drop_threshold,
                        "copyts": copyts,
                        "start_at_zero": start_at_zero,
                        "copytb": copytb,
                        "dts_delta_threshold": dts_delta_threshold,
                        "dts_error_threshold": dts_error_threshold,
                        "xerror": xerror,
                        "abort_on": abort_on,
                        "filter_threads": filter_threads,
                        "filter_complex": filter_complex,
                        "filter_complex_threads": filter_complex_threads,
                        "lavfi": lavfi,
                        "filter_complex_script": filter_complex_script,
                        "auto_conversion_filters": auto_conversion_filters,
                        "stats": stats,
                        "stats_period": stats_period,
                        "debug_ts": debug_ts,
                        "max_error_rate": max_error_rate,
                        "vstats": vstats,
                        "vstats_file": vstats_file,
                        "vstats_version": vstats_version,
                        "init_hw_device": init_hw_device,
                        "filter_hw_device": filter_hw_device,
                        "adrift_threshold": adrift_threshold,
                        "qphist": qphist,
                        "vsync": vsync,
                    }.items()
                    if v is not None
                }
                | kwargs
            ),
        ).stream()
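The return expression above boils down to a simple pattern: keep only the options that were explicitly set, then let the free-form kwargs extend or override them. The same pattern in isolation, with purely illustrative values and names:

# The filter-and-merge pattern used by global_args(), in isolation.
named = {"loglevel": "error", "y": True, "n": None, "hide_banner": None}
extra = {"benchmark": True}

merged = {k: v for k, v in named.items() if v is not None} | extra
print(merged)  # {'loglevel': 'error', 'y': True, 'benchmark': True}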
typed_ffmpeg/dag/global_runnable/runnable.py
@@ -0,0 +1,174 @@
from __future__ import annotations

import logging
import subprocess
from typing import TYPE_CHECKING

from ...exceptions import FFMpegExecuteError
from ...utils.run import command_line
from .global_args import GlobalArgs

if TYPE_CHECKING:
    from ..nodes import GlobalStream, OutputStream

logger = logging.getLogger(__name__)


class GlobalRunable(GlobalArgs):
    def merge_outputs(self, *streams: OutputStream) -> GlobalStream:
        """
        Merge multiple output streams into one.

        Args:
            *streams: The output streams to merge.

        Returns:
            The merged output stream.
        """
        return self._global_node(*streams).stream()

    def overwrite_output(self) -> GlobalStream:
        """
        Overwrite output files without asking (ffmpeg `-y` option).

        Returns:
            the output stream
        """
        return self._global_node(y=True).stream()

    def compile(
        self,
        cmd: str | list[str] = "ffmpeg",
        overwrite_output: bool | None = None,
        auto_fix: bool = True,
    ) -> list[str]:
        """
        Build command-line for invoking ffmpeg.

        Args:
            cmd: the command to invoke ffmpeg
            overwrite_output: whether to overwrite output files without asking
            auto_fix: whether to automatically fix the stream

        Returns:
            the command-line
        """
        from ..compile import compile

        if isinstance(cmd, str):
            cmd = [cmd]

        if overwrite_output is True:
            return self.global_args(y=True).compile(cmd, auto_fix=auto_fix)
        elif overwrite_output is False:
            return self.global_args(n=True).compile(cmd, auto_fix=auto_fix)

        return cmd + compile(self._global_node().stream(), auto_fix=auto_fix)

    def compile_line(
        self,
        cmd: str | list[str] = "ffmpeg",
        overwrite_output: bool | None = None,
        auto_fix: bool = True,
    ) -> str:
        """
        Build command-line for invoking ffmpeg.

        Args:
            cmd: the command to invoke ffmpeg
            overwrite_output: whether to overwrite output files without asking
            auto_fix: whether to automatically fix the stream

        Returns:
            the command-line
        """
        return command_line(self.compile(cmd, overwrite_output=overwrite_output, auto_fix=auto_fix))

    def run_async(
        self,
        cmd: str | list[str] = "ffmpeg",
        pipe_stdin: bool = False,
        pipe_stdout: bool = False,
        pipe_stderr: bool = False,
        quiet: bool = False,
        overwrite_output: bool | None = None,
        auto_fix: bool = True,
    ) -> subprocess.Popen[bytes]:
        """
        Run ffmpeg asynchronously.

        Args:
            cmd: the command to invoke ffmpeg
            pipe_stdin: whether to pipe stdin
            pipe_stdout: whether to pipe stdout
            pipe_stderr: whether to pipe stderr
            quiet: whether to pipe stderr to stdout
            overwrite_output: whether to overwrite output files without asking
            auto_fix: whether to automatically fix the stream

        Returns:
            the process
        """

        args = self.compile(cmd, overwrite_output=overwrite_output, auto_fix=auto_fix)
        stdin_stream = subprocess.PIPE if pipe_stdin else None
        stdout_stream = subprocess.PIPE if pipe_stdout or quiet else None
        stderr_stream = subprocess.PIPE if pipe_stderr or quiet else None

        logger.info(f"Running command: {self.compile_line(cmd, overwrite_output=overwrite_output, auto_fix=auto_fix)}")

        return subprocess.Popen(
            args,
            stdin=stdin_stream,
            stdout=stdout_stream,
            stderr=stderr_stream,
        )

    def run(
        self,
        cmd: str | list[str] = "ffmpeg",
        capture_stdout: bool = False,
        capture_stderr: bool = False,
        input: bytes | None = None,
        quiet: bool = False,
        overwrite_output: bool | None = None,
        auto_fix: bool = True,
    ) -> tuple[bytes, bytes]:
        """
        Run ffmpeg synchronously.

        Args:
            cmd: the command to invoke ffmpeg
            capture_stdout: whether to capture stdout
            capture_stderr: whether to capture stderr
            input: the input
            quiet: whether to pipe stderr to stdout
            overwrite_output: whether to overwrite output files without asking
            auto_fix: whether to automatically fix the stream

        Returns:
            stdout: the stdout
            stderr: the stderr
        """

        process = self.run_async(
            cmd,
            pipe_stdin=input is not None,
            pipe_stdout=capture_stdout,
            pipe_stderr=capture_stderr,
            quiet=quiet,
            overwrite_output=overwrite_output,
            auto_fix=auto_fix,
        )
        stdout, stderr = process.communicate(input)
        retcode = process.poll()

        if retcode:
            raise FFMpegExecuteError(
                retcode=retcode,
                cmd=self.compile_line(cmd, overwrite_output=overwrite_output, auto_fix=auto_fix),
                stdout=stdout,
                stderr=stderr,
            )

        return stdout, stderr
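For reference, a hedged end-to-end usage sketch of the methods above. The top-level input()/output() builders come from other files in this wheel, and their exact signatures, as well as the argument order in the compiled command line, are assumptions here rather than facts confirmed by this hunk.

import typed_ffmpeg as ffmpeg  # assumption: the wheel re-exports input()/output() builders at the top level

# Build a small graph, then inspect the command line or execute it.
out = ffmpeg.input("in.mp4").output(filename="out.mp4")

print(out.compile_line(overwrite_output=True))
# e.g. "ffmpeg -i in.mp4 out.mp4 -y"  (exact argument order depends on the compiler)

stdout, stderr = out.run(capture_stderr=True, overwrite_output=True)  # raises FFMpegExecuteError on a non-zero exit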
typed_ffmpeg/dag/io/__init__.py
File without changes