mulmocast 0.0.2 → 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +27 -9
- package/assets/font/NotoSansJP-Regular.ttf +0 -0
- package/assets/html/chart.html +1 -10
- package/assets/html/mermaid.html +1 -13
- package/assets/templates/business.json +16 -27
- package/assets/templates/coding.json +58 -21
- package/lib/actions/audio.d.ts +1 -1
- package/lib/actions/audio.js +43 -27
- package/lib/actions/images.js +20 -26
- package/lib/actions/index.d.ts +5 -0
- package/lib/actions/index.js +5 -0
- package/lib/actions/movie.d.ts +9 -1
- package/lib/actions/movie.js +97 -38
- package/lib/actions/pdf.d.ts +2 -0
- package/lib/actions/pdf.js +211 -0
- package/lib/actions/pdf2.d.ts +2 -0
- package/lib/actions/pdf2.js +203 -0
- package/lib/actions/translate.js +22 -9
- package/lib/agents/anthropic_agent.d.ts +23 -0
- package/lib/agents/anthropic_agent.js +162 -0
- package/lib/agents/combine_audio_files_agent.js +13 -22
- package/lib/agents/nested_agent.d.ts +9 -0
- package/lib/agents/nested_agent.js +138 -0
- package/lib/cli/args.d.ts +3 -1
- package/lib/cli/args.js +49 -34
- package/lib/cli/cli.d.ts +14 -0
- package/lib/cli/cli.js +48 -46
- package/lib/cli/tool-args.d.ts +2 -0
- package/lib/cli/tool-args.js +12 -2
- package/lib/cli/tool-cli.js +6 -4
- package/lib/methods/index.d.ts +1 -0
- package/lib/methods/index.js +1 -0
- package/lib/methods/mulmo_media_source.d.ts +4 -0
- package/lib/methods/mulmo_media_source.js +21 -0
- package/lib/methods/mulmo_script.d.ts +2 -6
- package/lib/methods/mulmo_script.js +12 -5
- package/lib/tools/create_mulmo_script_interactively.d.ts +1 -1
- package/lib/tools/create_mulmo_script_interactively.js +61 -20
- package/lib/types/index.d.ts +1 -0
- package/lib/types/index.js +1 -0
- package/lib/types/schema.d.ts +3626 -3162
- package/lib/types/schema.js +75 -41
- package/lib/types/type.d.ts +28 -1
- package/lib/utils/const.d.ts +2 -0
- package/lib/utils/const.js +2 -0
- package/lib/utils/file.d.ts +4 -1
- package/lib/utils/file.js +15 -1
- package/lib/utils/filters.js +1 -1
- package/lib/utils/image_plugins/chart.d.ts +3 -0
- package/lib/utils/image_plugins/chart.js +18 -0
- package/lib/utils/image_plugins/image.d.ts +2 -0
- package/lib/utils/image_plugins/image.js +3 -0
- package/lib/utils/image_plugins/index.d.ts +7 -0
- package/lib/utils/image_plugins/index.js +7 -0
- package/lib/utils/image_plugins/markdown.d.ts +3 -0
- package/lib/utils/image_plugins/markdown.js +11 -0
- package/lib/utils/image_plugins/mermaid.d.ts +3 -0
- package/lib/utils/image_plugins/mermaid.js +21 -0
- package/lib/utils/image_plugins/movie.d.ts +2 -0
- package/lib/utils/image_plugins/movie.js +3 -0
- package/lib/utils/image_plugins/source.d.ts +4 -0
- package/lib/utils/image_plugins/source.js +15 -0
- package/lib/utils/image_plugins/text_slide.d.ts +3 -0
- package/lib/utils/image_plugins/text_slide.js +12 -0
- package/lib/utils/image_plugins/type_guards.d.ts +6 -0
- package/lib/utils/image_plugins/type_guards.js +21 -0
- package/lib/utils/markdown.js +4 -1
- package/lib/utils/pdf.d.ts +8 -0
- package/lib/utils/pdf.js +75 -0
- package/lib/utils/preprocess.d.ts +58 -128
- package/lib/utils/preprocess.js +37 -37
- package/lib/utils/utils.d.ts +12 -0
- package/lib/utils/utils.js +34 -0
- package/package.json +13 -4

package/lib/agents/anthropic_agent.js
ADDED
@@ -0,0 +1,162 @@
+import Anthropic from "@anthropic-ai/sdk";
+import { getMergeValue } from "@graphai/llm_utils";
+const convToolCall = (tool_call) => {
+    const { id, name, input } = tool_call;
+    return { id, name, arguments: input };
+};
+// https://docs.anthropic.com/ja/api/messages
+const convertOpenAIChatCompletion = (response, messages) => {
+    // SDK bug https://github.com/anthropics/anthropic-sdk-typescript/issues/432
+    const text = response.content[0].text;
+    const functionResponses = response.content.filter((content) => content.type === "tool_use") ?? [];
+    const tool_calls = functionResponses.map(convToolCall);
+    const tool = tool_calls[0] ? tool_calls[0] : undefined;
+    const message = { role: response.role, content: text };
+    messages.push(message);
+    return { ...response, choices: [{ message }], text, tool, tool_calls, message, messages };
+};
+export const anthropicAgent = async ({ params, namedInputs, filterParams, config, }) => {
+    const { verbose, system, temperature, tools, tool_choice, max_tokens, prompt, messages } = { ...params, ...namedInputs };
+    const { apiKey, stream, forWeb, model } = {
+        ...params,
+        ...(config || {}),
+    };
+    console.log({ stream });
+    const userPrompt = getMergeValue(namedInputs, params, "mergeablePrompts", prompt);
+    const systemPrompt = getMergeValue(namedInputs, params, "mergeableSystem", system);
+    const messagesCopy = messages ? messages.map((m) => m) : [];
+    if (userPrompt) {
+        messagesCopy.push({
+            role: "user",
+            content: userPrompt,
+        });
+    }
+    if (verbose) {
+        console.log(messagesCopy);
+    }
+    const anthropic_tools = tools && tools.length > 0
+        ? tools.map((tool) => {
+            const { function: func } = tool;
+            const { name, description, parameters } = func;
+            return {
+                name,
+                description,
+                input_schema: parameters,
+            };
+        })
+        : undefined;
+    const anthropic = new Anthropic({ apiKey, dangerouslyAllowBrowser: !!forWeb });
+    const chatParams = {
+        model: model ?? "claude-3-5-sonnet-latest",
+        messages: messagesCopy,
+        tools: anthropic_tools,
+        tool_choice,
+        system: systemPrompt,
+        temperature: temperature ?? 0.7,
+        max_tokens: max_tokens ?? 1024,
+    };
+    if (!stream) {
+        const messageResponse = await anthropic.messages.create(chatParams);
+        return convertOpenAIChatCompletion(messageResponse, messagesCopy);
+    }
+    try {
+        console.log("###");
+        const chatStream = await anthropic.messages.create({
+            ...chatParams,
+            stream: true,
+        });
+        console.log("###2");
+        const contents = [];
+        const partials = [];
+        let streamResponse = null;
+        console.log("###3");
+        for await (const messageStreamEvent of chatStream) {
+            console.log("AA");
+            if (messageStreamEvent.type === "message_start") {
+                streamResponse = messageStreamEvent.message;
+            }
+            if (messageStreamEvent.type === "content_block_start") {
+                if (streamResponse) {
+                    streamResponse.content.push(messageStreamEvent.content_block);
+                }
+                partials.push("");
+            }
+            if (messageStreamEvent.type === "content_block_delta") {
+                const { index, delta } = messageStreamEvent;
+                if (delta.type === "input_json_delta") {
+                    partials[index] = partials[index] + delta.partial_json;
+                }
+                if (delta.type === "text_delta") {
+                    partials[index] = partials[index] + delta.text;
+                }
+            }
+            if (messageStreamEvent.type === "content_block_delta" && messageStreamEvent.delta.type === "text_delta") {
+                const token = messageStreamEvent.delta.text;
+                contents.push(token);
+                if (filterParams && filterParams.streamTokenCallback && token) {
+                    filterParams.streamTokenCallback(token);
+                }
+            }
+        }
+        if (streamResponse === null) {
+            throw new Error("Anthoropic no response");
+        }
+        partials.forEach((partial, index) => {
+            if (streamResponse.content[index].type === "text") {
+                streamResponse.content[index].text = partial;
+            }
+            if (streamResponse.content[index].type === "tool_use") {
+                streamResponse.content[index].input = JSON.parse(partial);
+            }
+        });
+        return convertOpenAIChatCompletion(streamResponse, messagesCopy);
+        /*
+
+        const content = contents.join("");
+        const message = { role: "assistant" as const, content: content };
+        messagesCopy.push(message);
+        return { choices: [{ message }], text: content, message, messages: messagesCopy };
+        */
+    }
+    catch (e) {
+        console.log(e);
+        throw new Error("ant Error");
+    }
+};
+const anthropicAgentInfo = {
+    name: "anthropicAgent",
+    agent: anthropicAgent,
+    mock: anthropicAgent,
+    inputs: {
+        type: "object",
+        properties: {
+            model: { type: "string" },
+            system: { type: "string" },
+            max_tokens: { type: "number" },
+            temperature: { type: "number" },
+            prompt: {
+                type: "string",
+                description: "query string",
+            },
+            messages: {
+                anyOf: [{ type: "string" }, { type: "integer" }, { type: "object" }, { type: "array" }],
+                description: "chat messages",
+            },
+        },
+    },
+    output: {
+        type: "object",
+    },
+    samples: [],
+    description: "Anthropic Agent",
+    category: ["llm"],
+    author: "Receptron team",
+    repository: "https://github.com/receptron/graphai",
+    source: "https://github.com/receptron/graphai/blob/main/llm_agents/anthropic_agent/src/anthropic_agent.ts",
+    package: "@graphai/anthropic_agent",
+    license: "MIT",
+    stream: true,
+    environmentVariables: ["ANTHROPIC_API_KEY"],
+    npms: ["@anthropic-ai/sdk"],
+};
+export default anthropicAgentInfo;

package/lib/agents/combine_audio_files_agent.js
CHANGED
@@ -1,11 +1,10 @@
 import { GraphAILogger } from "graphai";
 import ffmpeg from "fluent-ffmpeg";
-import { silentPath, silentLastPath
-import { MulmoStudioContextMethods } from "../methods/index.js";
+import { silentPath, silentLastPath } from "../utils/file.js";
 const combineAudioFilesAgent = async ({ namedInputs }) => {
     const { context, combinedFileName, audioDirPath } = namedInputs;
     const command = ffmpeg();
-    const getDuration = (filePath,
+    const getDuration = (filePath, isLastGap) => {
         return new Promise((resolve, reject) => {
             ffmpeg.ffprobe(filePath, (err, metadata) => {
                 if (err) {
@@ -13,30 +12,22 @@ const combineAudioFilesAgent = async ({ namedInputs }) => {
                     reject(err);
                 }
                 else {
-
+                    // TODO: Remove hard-coded 0.8 and 0.3
+                    resolve(metadata.format.duration + (isLastGap ? 0.8 : 0.3));
                 }
             });
         });
     };
-
-
-
-
-
-
-
-
-
+    await Promise.all(context.studio.beats.map(async (studioBeat, index) => {
+        const isLastGap = index === context.studio.beats.length - 2;
+        if (studioBeat.audioFile) {
+            command.input(studioBeat.audioFile);
+            command.input(isLastGap ? silentLastPath : silentPath);
+            studioBeat.duration = await getDuration(studioBeat.audioFile, isLastGap);
+        }
+        else {
+            GraphAILogger.error("Missing studioBeat.audioFile:", index);
         }
-        return getAudioSegmentFilePath(audioDirPath, context.studio.filename, mulmoBeat.audioFile ?? "");
-    };
-    await Promise.all(context.studio.beats.map(async (mulmoBeat, index) => {
-        const filePath = resolveAudioFilePath(context, mulmoBeat, audioDirPath);
-        const isLast = index === context.studio.beats.length - 2;
-        command.input(filePath);
-        command.input(isLast ? silentLastPath : silentPath);
-        // Measure and log the timestamp of each section
-        context.studio.beats[index]["duration"] = await getDuration(filePath, isLast);
     }));
     await new Promise((resolve, reject) => {
         command

package/lib/agents/nested_agent.d.ts
ADDED
@@ -0,0 +1,9 @@
+import type { AgentFunction, AgentFunctionInfo, AgentFunctionContext, GraphData, ResultData, DefaultResultData } from "graphai";
+import type { GraphAISupressError, GraphAIOnError } from "@graphai/agent_utils";
+type NestedAgentGeneratorOption = {
+    resultNodeId: string;
+};
+export declare const nestedAgentGenerator: (graphData: GraphData, options?: NestedAgentGeneratorOption) => (context: AgentFunctionContext) => Promise<ResultData<DefaultResultData> | GraphAIOnError>;
+export declare const nestedAgent: AgentFunction<Partial<GraphAISupressError> & NestedAgentGeneratorOption>;
+declare const nestedAgentInfo: AgentFunctionInfo;
+export default nestedAgentInfo;

package/lib/agents/nested_agent.js
ADDED
@@ -0,0 +1,138 @@
+import { GraphAI, assert, graphDataLatestVersion } from "graphai";
+export const nestedAgentGenerator = (graphData, options) => {
+    return async (context) => {
+        const { namedInputs, log, debugInfo, params, forNestedGraph } = context;
+        assert(!!forNestedGraph, "Please update graphai to 0.5.19 or higher");
+        const { agents, graphOptions, onLogCallback, callbacks } = forNestedGraph;
+        const { taskManager } = graphOptions;
+        const supressError = params.supressError ?? false;
+        if (taskManager) {
+            const status = taskManager.getStatus(false);
+            assert(status.concurrency > status.running, `nestedAgent: Concurrency is too low: ${status.concurrency}`);
+        }
+        assert(!!graphData, "nestedAgent: graph is required");
+        const { nodes } = graphData;
+        const newNodes = Object.keys(nodes).reduce((tmp, key) => {
+            const node = nodes[key];
+            if ("agent" in node) {
+                tmp[key] = node;
+            }
+            else {
+                const { value, update, isResult, console } = node;
+                tmp[key] = { value, update, isResult, console };
+            }
+            return tmp;
+        }, {});
+        const nestedGraphData = { ...graphData, nodes: newNodes, version: graphDataLatestVersion }; // deep enough copy
+        const nodeIds = Object.keys(namedInputs);
+        if (nodeIds.length > 0) {
+            nodeIds.forEach((nodeId) => {
+                if (nestedGraphData.nodes[nodeId] === undefined) {
+                    // If the input node does not exist, automatically create a static node
+                    nestedGraphData.nodes[nodeId] = { value: namedInputs[nodeId] };
+                }
+                else {
+                    // Otherwise, inject the proper data here (instead of calling injectTo method later)
+                    if (namedInputs[nodeId] !== undefined) {
+                        nestedGraphData.nodes[nodeId]["value"] = namedInputs[nodeId];
+                    }
+                }
+            });
+        }
+        try {
+            if (nestedGraphData.version === undefined && debugInfo.version) {
+                nestedGraphData.version = debugInfo.version;
+            }
+            const graphAI = new GraphAI(nestedGraphData, agents || {}, graphOptions);
+            // for backward compatibility. Remove 'if' later
+            if (onLogCallback) {
+                graphAI.onLogCallback = onLogCallback;
+            }
+            if (callbacks) {
+                graphAI.callbacks = callbacks;
+            }
+            debugInfo.subGraphs.set(graphAI.graphId, graphAI);
+            const results = await graphAI.run(false);
+            debugInfo.subGraphs.delete(graphAI.graphId);
+            log?.push(...graphAI.transactionLogs());
+            console.log("FFF");
+            if (options && options.resultNodeId) {
+                return results[options.resultNodeId];
+            }
+            return results;
+        }
+        catch (error) {
+            console.log("FFF2");
+            if (error instanceof Error && supressError) {
+                console.log("FFF3");
+                return {
+                    onError: {
+                        message: error.message,
+                        error,
+                    },
+                };
+            }
+            console.log("FFF4", error);
+            throw error;
+        }
+    };
+};
+export const nestedAgent = async (context) => {
+    const { forNestedGraph, params } = context;
+    const { graphData } = forNestedGraph ?? { graphData: { nodes: {} } };
+    assert(!!graphData, "No GraphData");
+    return await nestedAgentGenerator(graphData, params)(context);
+};
+const nestedAgentInfo = {
+    name: "nestedAgent",
+    agent: nestedAgent,
+    mock: nestedAgent,
+    samples: [
+        {
+            inputs: {
+                message: "hello",
+            },
+            params: {},
+            result: {
+                test: ["hello"],
+            },
+            graph: {
+                nodes: {
+                    test: {
+                        agent: "copyAgent",
+                        params: { namedKey: "messages" },
+                        inputs: { messages: [":message"] },
+                        isResult: true,
+                    },
+                },
+            },
+        },
+        {
+            inputs: {
+                message: "hello",
+            },
+            params: {
+                resultNodeId: "test",
+            },
+            result: ["hello"],
+            graph: {
+                nodes: {
+                    test: {
+                        agent: "copyAgent",
+                        params: { namedKey: "messages" },
+                        inputs: { messages: [":message"] },
+                        isResult: true,
+                    },
+                },
+            },
+        },
+    ],
+    description: "nested Agent",
+    category: ["graph"],
+    author: "Receptron team",
+    repository: "https://github.com/receptron/graphai",
+    source: "https://github.com/receptron/graphai/blob/main/agents/vanilla_agents/src/graph_agents/nested_agent.ts",
+    package: "@graphai/vanilla",
+    license: "MIT",
+};
+export default nestedAgentInfo;

package/lib/cli/args.d.ts
CHANGED
@@ -1,4 +1,4 @@
-export declare const
+export declare const getArgs: () => {
     [x: string]: unknown;
     v: boolean;
     o: string | undefined;
@@ -6,6 +6,8 @@ export declare const args: {
     a: string | undefined;
     i: string | undefined;
     f: boolean;
+    pdf_mode: string;
+    pdf_size: string;
     _: (string | number)[];
     $0: string;
 };

package/lib/cli/args.js
CHANGED
@@ -1,40 +1,55 @@
 import yargs from "yargs";
 import { hideBin } from "yargs/helpers";
 import { commonOptions } from "./common.js";
-
-
-    .
-
-
-
-
-
-    .option("i", {
-        alias: "imagedir",
-        description: "image dir",
-        demandOption: false,
-        type: "string",
-    })
-    .option("f", {
-        alias: "force",
-        description: "force generate",
-        demandOption: false,
-        default: false,
-        type: "boolean",
-    })
-    .command("$0 <action> <file>", "Run mulmocast", (yargs) => {
-        return yargs
-            .positional("action", {
-                describe: "action to perform",
-                choices: ["translate", "audio", "images", "movie", "preprocess"],
+import { pdf_modes, pdf_sizes } from "../utils/const.js";
+export const getArgs = () => {
+    return commonOptions(yargs(hideBin(process.argv)))
+        .scriptName("mulmo")
+        .option("a", {
+            alias: "audiodir",
+            description: "audio dir",
+            demandOption: false,
             type: "string",
         })
-    .
-
+        .option("i", {
+            alias: "imagedir",
+            description: "image dir",
+            demandOption: false,
            type: "string",
-    })
-
-
-
-
-
+        })
+        .option("f", {
+            alias: "force",
+            description: "force generate",
+            demandOption: false,
+            default: false,
+            type: "boolean",
+        })
+        .option("pdf_mode", {
+            description: "pdf mode",
+            demandOption: false,
+            choices: pdf_modes,
+            type: "string",
+            default: "slide",
+        })
+        .option("pdf_size", {
+            choices: pdf_sizes,
+            default: "letter",
+            describe: "PDF paper size (default: letter for US standard)",
+        })
+        .command("$0 <action> <file>", "Run mulmocast", (yargs) => {
+            return yargs
+                .positional("action", {
+                    describe: "action to perform",
+                    choices: ["translate", "audio", "images", "movie", "pdf", "preprocess"],
+                    type: "string",
+                })
+                .positional("file", {
+                    describe: "Mulmo Script File",
+                    type: "string",
+                });
+        })
+        .strict()
+        .help()
+        .alias("help", "h")
+        .parseSync();
+};

package/lib/cli/cli.d.ts
CHANGED
@@ -1,2 +1,16 @@
 #!/usr/bin/env node
 import "dotenv/config";
+export declare const getFileObject: (_args: {
+    [x: string]: unknown;
+}) => {
+    baseDirPath: string;
+    mulmoFilePath: string;
+    mulmoFileDirPath: string;
+    outDirPath: string;
+    imageDirPath: string;
+    audioDirPath: string;
+    isHttpPath: boolean;
+    fileOrUrl: string;
+    outputStudioFilePath: string;
+    fileName: string;
+};

package/lib/cli/cli.js
CHANGED
@@ -2,35 +2,47 @@
 import "dotenv/config";
 import fs from "fs";
 import path from "path";
+import { fileURLToPath } from "url";
 import { GraphAILogger } from "graphai";
-import {
+import { getArgs } from "./args.js";
 import { createOrUpdateStudioData } from "../utils/preprocess.js";
 import { outDirName, imageDirName, audioDirName } from "../utils/const.js";
-import {
-import {
-import {
-
-
-import { getBaseDirPath, getFullPath, readMulmoScriptFile, fetchMulmoScriptFile } from "../utils/file.js";
-import { mulmoScriptSchema } from "../types/schema.js";
-const isHttp = (fileOrUrl) => {
-    return /^https?:\/\//.test(fileOrUrl);
-};
-const getFileObject = () => {
-    const { basedir, file, outdir, imagedir, audiodir } = args;
+import { translate, audio, images, movie, pdf } from "../../src/actions/index.js";
+import { getBaseDirPath, getFullPath, readMulmoScriptFile, fetchMulmoScriptFile, getOutputStudioFilePath } from "../utils/file.js";
+import { isHttp } from "../utils/utils.js";
+export const getFileObject = (_args) => {
+    const { basedir, file, outdir, imagedir, audiodir } = _args;
     const baseDirPath = getBaseDirPath(basedir);
     const fileOrUrl = file ?? "";
+    const fileName = path.parse(fileOrUrl).name;
     const isHttpPath = isHttp(fileOrUrl);
     const mulmoFilePath = isHttpPath ? "" : getFullPath(baseDirPath, fileOrUrl);
     const mulmoFileDirPath = path.dirname(isHttpPath ? baseDirPath : mulmoFilePath);
     const outDirPath = getFullPath(baseDirPath, outdir ?? outDirName);
     const imageDirPath = getFullPath(outDirPath, imagedir ?? imageDirName);
     const audioDirPath = getFullPath(outDirPath, audiodir ?? audioDirName);
-
+    const outputStudioFilePath = getOutputStudioFilePath(outDirPath, fileName);
+    return { baseDirPath, mulmoFilePath, mulmoFileDirPath, outDirPath, imageDirPath, audioDirPath, isHttpPath, fileOrUrl, outputStudioFilePath, fileName };
+};
+const fetchScript = async (isHttpPath, mulmoFilePath, fileOrUrl) => {
+    if (isHttpPath) {
+        const res = await fetchMulmoScriptFile(fileOrUrl);
+        if (!res.result || !res.script) {
+            GraphAILogger.info(`ERROR: HTTP error! ${res.status} ${fileOrUrl}`);
+            process.exit(1);
+        }
+        return res.script;
+    }
+    if (!fs.existsSync(mulmoFilePath)) {
+        GraphAILogger.info(`ERROR: File not exists ${mulmoFilePath}`);
+        process.exit(1);
+    }
+    return readMulmoScriptFile(mulmoFilePath, "ERROR: File does not exist " + mulmoFilePath).mulmoData;
 };
 const main = async () => {
-    const
-    const
+    const args = getArgs();
+    const files = getFileObject(args);
+    const { mulmoFilePath, isHttpPath, fileOrUrl, fileName, outputStudioFilePath } = files;
     if (args.v) {
         GraphAILogger.info(files);
     }
@@ -39,37 +51,20 @@ const main = async () => {
         GraphAILogger.setLevelEnabled("log", false);
         GraphAILogger.setLevelEnabled("warn", false);
     }
-    const { action, force } = args;
-    const
-
-
-
-
-
-
-    return {
-        mulmoData: res.script,
-        fileName: path.parse(fileOrUrl).name,
-    };
+    const { action, force, pdf_mode, pdf_size } = args;
+    const mulmoScript = await fetchScript(isHttpPath, mulmoFilePath, fileOrUrl);
+    // Create or update MulmoStudio file with MulmoScript
+    const currentStudio = readMulmoScriptFile(outputStudioFilePath);
+    const studio = (() => {
+        try {
+            // validate mulmoStudioSchema. skip if __test_invalid__ is true
+            return createOrUpdateStudioData(mulmoScript, currentStudio?.mulmoData, fileName);
         }
-
-        GraphAILogger.info(
+        catch (error) {
+            GraphAILogger.info(`Error: invalid MulmoScript Schema: ${isHttpPath ? fileOrUrl : mulmoFilePath} \n ${error}`);
            process.exit(1);
        }
-    return readMulmoScriptFile(mulmoFilePath, "ERROR: File does not exist " + mulmoFilePath);
     })();
-    const { mulmoData: mulmoScript, fileName } = readData;
-    // validate mulmoStudioSchema. skip if __test_invalid__ is true
-    try {
-        if (!mulmoScript?.__test_invalid__) {
-            mulmoScriptSchema.parse(mulmoScript);
-        }
-    }
-    catch (error) {
-        GraphAILogger.info(`Error: invalid MulmoScript Schema: ${isHttpPath ? fileOrUrl : mulmoFilePath} \n ${error}`);
-        process.exit(1);
-    }
-    const studio = createOrUpdateStudioData(mulmoScript, fileName, files);
     const context = {
         studio,
         fileDirs: files,
@@ -79,15 +74,22 @@ const main = async () => {
         await translate(context);
     }
     if (action === "audio") {
-        await audio(context
+        await audio(context);
     }
     if (action === "images") {
         await images(context);
     }
     if (action === "movie") {
-        await audio(context
+        await audio(context);
         await images(context);
         await movie(context);
     }
+    if (action === "pdf") {
+        await images(context);
+        await pdf(context, pdf_mode, pdf_size);
+    }
 };
-
+const __filename = fileURLToPath(import.meta.url);
+if (process.argv[1] === __filename) {
+    main();
+}

package/lib/cli/tool-args.d.ts
CHANGED
package/lib/cli/tool-args.js
CHANGED
@@ -1,9 +1,8 @@
 import yargs from "yargs";
 import { hideBin } from "yargs/helpers";
 import { commonOptions } from "./common.js";
-import { GraphAILogger } from "graphai";
 import { getAvailableTemplates } from "../utils/file.js";
-
+import { llmAgents } from "../utils/utils.js";
 const availableTemplateNames = getAvailableTemplates().map((template) => template.filename);
 export const args = commonOptions(yargs(hideBin(process.argv)))
     .scriptName("mulmo-tool")
@@ -40,6 +39,17 @@ export const args = commonOptions(yargs(hideBin(process.argv)))
         demandOption: false,
         default: "script",
         type: "string",
+    })
+    .option("llm_agent", {
+        description: "llm agent",
+        demandOption: false,
+        choices: llmAgents,
+        type: "string",
+    })
+    .option("llm_model", {
+        description: "llm model",
+        demandOption: false,
+        type: "string",
     })
     .command("$0 <action>", "Run mulmocast tool", (yargs) => {
     return yargs.positional("action", {