@contextstream/mcp-server 0.4.42 → 0.4.44
This diff shows the changes between two publicly released versions of the package, as published to one of the supported registries. The information is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
- package/dist/index.js +760 -20
- package/package.json +1 -1
package/dist/index.js
CHANGED
@@ -7689,8 +7689,10 @@ var ContextStreamClient = class {
         project_id: withDefaults.project_id,
         ...versionNotice2 ? { version_notice: versionNotice2 } : {},
         ...Array.isArray(data?.errors) ? { errors: data.errors } : {},
+        ...Array.isArray(data?.warnings) && data.warnings.length > 0 ? { warnings: data.warnings } : {},
         ...this.indexRefreshInProgress ? { index_status: "refreshing" } : {},
-        ...data?.context_pressure ? { context_pressure: data.context_pressure } : {}
+        ...data?.context_pressure ? { context_pressure: data.context_pressure } : {},
+        ...data?.semantic_intent ? { semantic_intent: data.semantic_intent } : {}
       };
     } catch (err) {
       const message2 = err instanceof Error ? err.message : String(err);
@@ -8830,6 +8832,145 @@ W:${wsHint}
     uuidSchema.parse(params.task_id);
     return request(this.config, `/tasks/${params.task_id}`, { method: "DELETE" });
   }
+  // ============================================================================
+  // Media/Content Methods (for video, audio, image indexing)
+  // ============================================================================
+  /**
+   * Initialize a media upload and get a presigned URL.
+   * After calling this, upload the file to the returned upload_url with the specified headers.
+   */
+  async mediaInitUpload(params) {
+    const withDefaults = this.withDefaults(params);
+    if (!withDefaults.workspace_id) {
+      throw new Error("workspace_id is required for media upload");
+    }
+    const body = {
+      filename: params.filename,
+      size_bytes: params.size_bytes,
+      content_type: params.content_type.toLowerCase(),
+      // Backend uses snake_case (lowercase)
+      mime_type: params.mime_type,
+      title: params.title,
+      tags: params.tags || []
+    };
+    const result = await request(
+      this.config,
+      `/workspaces/${withDefaults.workspace_id}/content/uploads/init`,
+      { method: "POST", body }
+    );
+    return unwrapApiResponse(result);
+  }
+  /**
+   * Complete a media upload and trigger indexing.
+   * Call this after successfully uploading the file to the presigned URL.
+   */
+  async mediaCompleteUpload(params) {
+    const withDefaults = this.withDefaults(params);
+    if (!withDefaults.workspace_id) {
+      throw new Error("workspace_id is required to complete upload");
+    }
+    uuidSchema.parse(params.content_id);
+    const result = await request(
+      this.config,
+      `/workspaces/${withDefaults.workspace_id}/content/${params.content_id}/complete-upload`,
+      { method: "POST" }
+    );
+    return unwrapApiResponse(result);
+  }
+  /**
+   * Get the status of a content item (for checking indexing progress).
+   */
+  async mediaGetContent(params) {
+    const withDefaults = this.withDefaults(params);
+    if (!withDefaults.workspace_id) {
+      throw new Error("workspace_id is required for getting content");
+    }
+    uuidSchema.parse(params.content_id);
+    const result = await request(
+      this.config,
+      `/workspaces/${withDefaults.workspace_id}/content/${params.content_id}`,
+      { method: "GET" }
+    );
+    return unwrapApiResponse(result);
+  }
+  /**
+   * List content items in a workspace.
+   */
+  async mediaListContent(params) {
+    const withDefaults = this.withDefaults(params || {});
+    if (!withDefaults.workspace_id) {
+      throw new Error("workspace_id is required for listing content");
+    }
+    const query = new URLSearchParams();
+    if (params?.content_type) query.set("content_type", params.content_type);
+    if (params?.status) query.set("status", params.status);
+    if (params?.limit) query.set("limit", String(params.limit));
+    if (params?.offset) query.set("offset", String(params.offset));
+    const suffix = query.toString() ? `?${query.toString()}` : "";
+    const result = await request(
+      this.config,
+      `/workspaces/${withDefaults.workspace_id}/content${suffix}`,
+      { method: "GET" }
+    );
+    return unwrapApiResponse(result);
+  }
+  /**
+   * Search content by semantic query (transcripts, descriptions, etc.).
+   */
+  async mediaSearchContent(params) {
+    const withDefaults = this.withDefaults(params);
+    if (!withDefaults.workspace_id) {
+      throw new Error("workspace_id is required for searching content");
+    }
+    const query = new URLSearchParams();
+    query.set("q", params.query);
+    if (params.content_type) query.set("content_type", params.content_type);
+    if (params.limit) query.set("limit", String(params.limit));
+    if (params.offset) query.set("offset", String(params.offset));
+    const result = await request(
+      this.config,
+      `/workspaces/${withDefaults.workspace_id}/content/search?${query.toString()}`,
+      { method: "GET" }
+    );
+    return unwrapApiResponse(result);
+  }
+  /**
+   * Get a specific clip/segment from indexed content.
+   */
+  async mediaGetClip(params) {
+    const withDefaults = this.withDefaults(params);
+    if (!withDefaults.workspace_id) {
+      throw new Error("workspace_id is required for getting clips");
+    }
+    uuidSchema.parse(params.content_id);
+    const query = new URLSearchParams();
+    if (params.start_time !== void 0) query.set("start_time", String(params.start_time));
+    if (params.end_time !== void 0) query.set("end_time", String(params.end_time));
+    if (params.format) query.set("format", params.format);
+    const suffix = query.toString() ? `?${query.toString()}` : "";
+    const result = await request(
+      this.config,
+      `/workspaces/${withDefaults.workspace_id}/content/${params.content_id}/clip${suffix}`,
+      { method: "GET" }
+    );
+    return unwrapApiResponse(result);
+  }
+  /**
+   * Delete a content item.
+   */
+  async mediaDeleteContent(params) {
+    const withDefaults = this.withDefaults(params);
+    if (!withDefaults.workspace_id) {
+      throw new Error("workspace_id is required for deleting content");
+    }
+    uuidSchema.parse(params.content_id);
+    const result = await request(
+      this.config,
+      `/workspaces/${withDefaults.workspace_id}/content/${params.content_id}`,
+      { method: "DELETE" }
+    );
+    return unwrapApiResponse(result);
+  }
 };

 // src/tools.ts
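The client methods added above implement a three-step upload protocol: init (get a presigned URL), PUT the bytes, complete (trigger indexing), then poll status. A minimal sketch of that flow, assuming an already-constructed ContextStreamClient instance; the response field names (upload_url, headers, content_id, status) are inferred from how the MCP tool handler later in this diff consumes these methods:

```ts
import { readFile, stat } from "fs/promises";

// Sketch only: `client` stands in for a ContextStreamClient; exact response
// shapes are defined server-side and may differ.
async function uploadAndIndex(client: any, workspaceId: string, filePath: string) {
  const info = await stat(filePath);
  // 1. Ask the backend for a presigned upload URL.
  const init = await client.mediaInitUpload({
    workspace_id: workspaceId,
    filename: filePath.split("/").pop() ?? filePath,
    size_bytes: info.size,
    content_type: "video", // or "audio" / "image" / "document"
    mime_type: "video/mp4",
  });
  // 2. PUT the raw bytes to the presigned URL with the returned headers.
  const res = await fetch(init.upload_url, {
    method: "PUT",
    headers: init.headers,
    body: await readFile(filePath),
  });
  if (!res.ok) throw new Error(`upload failed: ${res.status}`);
  // 3. Tell the backend the upload finished; this triggers ML indexing.
  await client.mediaCompleteUpload({ workspace_id: workspaceId, content_id: init.content_id });
  // 4. Poll until the content is indexed (status values taken from the handler below).
  let status = "processing";
  while (status !== "indexed" && status !== "ready") {
    if (status === "error" || status === "failed") throw new Error("indexing failed");
    await new Promise((r) => setTimeout(r, 5000));
    ({ status } = await client.mediaGetContent({ workspace_id: workspaceId, content_id: init.content_id }));
  }
  return init.content_id;
}
```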
@@ -8871,6 +9012,38 @@ function applyMcpToolPrefix(markdown, toolPrefix) {
   const toolRegex = new RegExp(`(?<!__)\\b(${toolPattern})\\b(?=\\s*\\()`, "g");
   return markdown.replace(toolRegex, `${toolPrefix}$1`);
 }
+var CONTEXTSTREAM_RULES_DYNAMIC = `
+## ContextStream Dynamic Rules (Powered by our SmartRouter)
+
+All rules are served dynamically via \`context_smart\`. This file is minimal by design.
+
+### Every Message Protocol
+
+| Message | What to Call |
+|---------|--------------|
+| **1st message** | \`session_init(folder_path="<cwd>", context_hint="<msg>")\` then \`context_smart(user_message="<msg>")\` |
+| **2nd+ messages** | \`context_smart(user_message="<msg>")\` |
+
+### Follow the Instructions Field
+
+The \`context_smart\` response includes an \`instructions\` field with context-aware guidance.
+**Follow these instructions.** They are dynamically matched to your query and include:
+- Search guidance (when/how to search)
+- Git workflow rules (commit, PR, safety)
+- Planning rules (use ContextStream plans, not file-based)
+- Media/code analysis guidance
+- Lessons from past mistakes
+- And more...
+
+### Notices
+
+Handle notices from \`context_smart\` response:
+- **[VERSION_NOTICE]**: Tell user to update MCP
+- **[RULES_NOTICE]**: Run \`generate_rules()\`
+- **[LESSONS_WARNING]**: Apply lessons immediately
+
+Rules Version: ${RULES_VERSION}
+`.trim();
 var CONTEXTSTREAM_RULES_FULL = `
 ## ContextStream Rules

@@ -8995,6 +9168,7 @@ Rules Version: ${RULES_VERSION}
 | **Code search** | \`search(mode="hybrid", query="...")\` \u2014 BEFORE Glob/Grep/Read |
 | **After significant work** | \`session(action="capture", event_type="decision", ...)\` |
 | **User correction** | \`session(action="capture_lesson", ...)\` |
+| **\u26A0\uFE0F When warnings received** | **STOP**, acknowledge, explain mitigation, then proceed |

 **How to detect simple utility operations:**
 - Single-word commands: "list", "show", "version", "help"
@@ -9138,6 +9312,24 @@ session(action="capture", event_type="session_snapshot", title="Pre-compaction s

 **NEVER fall back to local tools (Glob/Grep/Read) just because search returned 0 results on first try.** Retry first.

+### Enhanced Context (Server-Side Warnings)
+
+\`context_smart\` now includes **intelligent server-side filtering** that proactively surfaces relevant warnings:
+
+**Response fields:**
+- \`warnings\`: Array of warning strings (displayed with \u26A0\uFE0F prefix)
+
+**What triggers warnings:**
+- **Lessons**: Past mistakes relevant to the current query (via semantic matching)
+- **Risky actions**: Detected high-risk operations (deployments, migrations, destructive commands)
+- **Breaking changes**: When modifications may impact other parts of the codebase
+
+**When you receive warnings:**
+1. **STOP** and read each warning carefully
+2. **Acknowledge** the warning to the user
+3. **Explain** how you will avoid the issue
+4. Only proceed after addressing the warnings
+
 ### Search & Code Intelligence (ContextStream-first)

 \u26A0\uFE0F **STOP: Before using Search/Glob/Grep/Read/Explore** \u2192 Call \`search(mode="hybrid")\` FIRST. Use local tools ONLY if ContextStream returns 0 results.
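A small illustration of how those server-side warnings reach the model: the handler later in this diff prefixes each entry with ⚠️ and prepends the block to the response text. The sketch below mirrors that formatting (the `warnings` array is the field documented above):

```ts
// Mirrors serverWarningsLine in the context_smart handler further down this diff.
function formatWarnings(warnings: string[] | undefined): string {
  if (!warnings || warnings.length === 0) return "";
  return "\n\n" + warnings.map((w) => `\u26A0\uFE0F ${w}`).join("\n");
}

// Example (hypothetical warning text):
// formatWarnings(["Lesson: a prod migration was run by mistake last week"])
// => "\n\n⚠️ Lesson: a prod migration was run by mistake last week"
```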
@@ -9395,6 +9587,11 @@ ContextStream search is **indexed** and returns semantic matches + context in ON
 - If \`context_smart\` returns high/critical \`context_pressure\`: call \`session(action="capture", ...)\` to save state
 - PreCompact hooks automatically save snapshots before compaction (if installed)

+### Enhanced Context (Warnings)
+
+\`context_smart\` returns server-side \`warnings\` for lessons, risky actions, and breaking changes.
+When warnings are present: **STOP**, acknowledge them, explain mitigation, then proceed.
+
 ### Automatic Context Restoration

 **Context restoration is now enabled by default.** Every \`session_init\` call automatically:
@@ -9503,8 +9700,8 @@ function getTemplate(editor) {
 function generateRuleContent(editor, options) {
   const template = getTemplate(editor);
   if (!template) return null;
-  const mode = options?.mode || "
-  const rules = mode === "full" ? CONTEXTSTREAM_RULES_FULL : CONTEXTSTREAM_RULES_MINIMAL;
+  const mode = options?.mode || "dynamic";
+  const rules = mode === "full" ? CONTEXTSTREAM_RULES_FULL : mode === "minimal" ? CONTEXTSTREAM_RULES_MINIMAL : CONTEXTSTREAM_RULES_DYNAMIC;
   let content = template.build(rules);
   if (options?.workspaceName || options?.projectName) {
     const header = `
@@ -9611,6 +9808,17 @@ var TOOL_CATALOG = [
       { name: "contradictions", hint: "conflicts" }
     ]
   },
+  {
+    name: "Media",
+    tools: [
+      { name: "index", hint: "add-media" },
+      { name: "status", hint: "progress" },
+      { name: "search", hint: "find-clip" },
+      { name: "get_clip", hint: "get-segment" },
+      { name: "list", hint: "browse" },
+      { name: "delete", hint: "remove" }
+    ]
+  },
   {
     name: "Workspace",
     tools: [
@@ -9875,6 +10083,78 @@ def main():
     print(json.dumps({"hookSpecificOutput": {"hookEventName": "UserPromptSubmit", "additionalContext": REMINDER}}))
     sys.exit(0)

+if __name__ == "__main__":
+    main()
+`;
+var MEDIA_AWARE_HOOK_SCRIPT = `#!/usr/bin/env python3
+"""
+ContextStream Media-Aware Hook for Claude Code
+
+Detects media-related prompts and injects context about the media tool.
+"""
+
+import json
+import sys
+import os
+import re
+
+ENABLED = os.environ.get("CONTEXTSTREAM_MEDIA_HOOK_ENABLED", "true").lower() == "true"
+
+# Media patterns (case-insensitive)
+PATTERNS = [
+    r"\\b(video|videos|clip|clips|footage|keyframe)s?\\b",
+    r"\\b(remotion|timeline|video\\s*edit)\\b",
+    r"\\b(image|images|photo|photos|picture|thumbnail)s?\\b",
+    r"\\b(audio|podcast|transcript|transcription|voice)\\b",
+    r"\\b(media|asset|assets|creative|b-roll)\\b",
+    r"\\b(find|search|show).*(clip|video|image|audio|footage|media)\\b",
+]
+
+COMPILED = [re.compile(p, re.IGNORECASE) for p in PATTERNS]
+
+MEDIA_CONTEXT = """[MEDIA TOOLS AVAILABLE]
+Your workspace may have indexed media. Use ContextStream media tools:
+
+- **Search**: \`mcp__contextstream__media(action="search", query="description")\`
+- **Get clip**: \`mcp__contextstream__media(action="get_clip", content_id="...", start="1:34", end="2:15", output_format="remotion|ffmpeg|raw")\`
+- **List assets**: \`mcp__contextstream__media(action="list")\`
+- **Index**: \`mcp__contextstream__media(action="index", file_path="...", content_type="video|audio|image|document")\`
+
+For Remotion: use \`output_format="remotion"\` to get frame-based props.
+[END MEDIA TOOLS]"""
+
+def matches(text):
+    return any(p.search(text) for p in COMPILED)
+
+def main():
+    if not ENABLED:
+        sys.exit(0)
+
+    try:
+        data = json.load(sys.stdin)
+    except:
+        sys.exit(0)
+
+    prompt = data.get("prompt", "")
+    if not prompt:
+        session = data.get("session", {})
+        for msg in reversed(session.get("messages", [])):
+            if msg.get("role") == "user":
+                content = msg.get("content", "")
+                prompt = content if isinstance(content, str) else ""
+                if isinstance(content, list):
+                    for b in content:
+                        if isinstance(b, dict) and b.get("type") == "text":
+                            prompt = b.get("text", "")
+                            break
+                break
+
+    if not prompt or not matches(prompt):
+        sys.exit(0)
+
+    print(json.dumps({"hookSpecificOutput": {"hookEventName": "UserPromptSubmit", "additionalContext": MEDIA_CONTEXT}}))
+    sys.exit(0)
+
 if __name__ == "__main__":
     main()
 `;
@@ -10123,6 +10403,31 @@ function buildHooksConfig(options) {
   const preToolUsePath = path5.join(hooksDir, "contextstream-redirect.py");
   const userPromptPath = path5.join(hooksDir, "contextstream-reminder.py");
   const preCompactPath = path5.join(hooksDir, "contextstream-precompact.py");
+  const mediaAwarePath = path5.join(hooksDir, "contextstream-media-aware.py");
+  const userPromptHooks = [
+    {
+      matcher: "*",
+      hooks: [
+        {
+          type: "command",
+          command: `python3 "${userPromptPath}"`,
+          timeout: 5
+        }
+      ]
+    }
+  ];
+  if (options?.includeMediaAware !== false) {
+    userPromptHooks.push({
+      matcher: "*",
+      hooks: [
+        {
+          type: "command",
+          command: `python3 "${mediaAwarePath}"`,
+          timeout: 5
+        }
+      ]
+    });
+  }
   const config = {
     PreToolUse: [
       {
@@ -10136,18 +10441,7 @@ function buildHooksConfig(options) {
         ]
       }
     ],
-    UserPromptSubmit:
-      {
-        matcher: "*",
-        hooks: [
-          {
-            type: "command",
-            command: `python3 "${userPromptPath}"`,
-            timeout: 5
-          }
-        ]
-      }
-    ]
+    UserPromptSubmit: userPromptHooks
   };
   if (options?.includePreCompact) {
     config.PreCompact = [
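For reference, the UserPromptSubmit section that buildHooksConfig now produces contains two command hooks when includeMediaAware is not disabled. A rough sketch of that shape, with illustrative (not real) script paths:

```ts
// Illustrative shape only; the actual object is assembled by buildHooksConfig above,
// and the hook script paths come from the resolved hooksDir.
const userPromptSubmit = [
  {
    matcher: "*",
    hooks: [{ type: "command", command: `python3 "/path/to/hooks/contextstream-reminder.py"`, timeout: 5 }],
  },
  {
    matcher: "*",
    hooks: [{ type: "command", command: `python3 "/path/to/hooks/contextstream-media-aware.py"`, timeout: 5 }],
  },
];
```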
@@ -10172,6 +10466,7 @@ async function installHookScripts(options) {
   const preToolUsePath = path5.join(hooksDir, "contextstream-redirect.py");
   const userPromptPath = path5.join(hooksDir, "contextstream-reminder.py");
   const preCompactPath = path5.join(hooksDir, "contextstream-precompact.py");
+  const mediaAwarePath = path5.join(hooksDir, "contextstream-media-aware.py");
   await fs4.writeFile(preToolUsePath, PRETOOLUSE_HOOK_SCRIPT, { mode: 493 });
   await fs4.writeFile(userPromptPath, USER_PROMPT_HOOK_SCRIPT, { mode: 493 });
   const result = {
@@ -10182,6 +10477,10 @@ async function installHookScripts(options) {
     await fs4.writeFile(preCompactPath, PRECOMPACT_HOOK_SCRIPT, { mode: 493 });
     result.preCompact = preCompactPath;
   }
+  if (options?.includeMediaAware !== false) {
+    await fs4.writeFile(mediaAwarePath, MEDIA_AWARE_HOOK_SCRIPT, { mode: 493 });
+    result.mediaAware = mediaAwarePath;
+  }
   return result;
 }
 async function readClaudeSettings(scope, projectPath) {
@@ -10857,6 +11156,7 @@ var TOOL_DISPLAY_NAMES = {
   search: "search",
   memory: "memory",
   graph: "graph",
+  media: "media",
   ai: "ai",
   generate_rules: "rules",
   generate_editor_rules: "rules",
@@ -12240,6 +12540,8 @@ var CONSOLIDATED_TOOLS = /* @__PURE__ */ new Set([
   // Consolidates reminders_list, reminders_create, etc.
   "integration",
   // Consolidates slack_*, github_*, notion_*, integrations_*
+  "media",
+  // Consolidates media indexing, search, and clip retrieval for Remotion/FFmpeg
   "help"
   // Consolidates session_tools, auth_me, mcp_server_version, etc.
 ]);
@@ -16430,8 +16732,11 @@ The conversation may compact soon. Save important decisions, insights, and progr
 Action: ${cp.suggested_action === "prepare_save" ? "Consider saving important decisions and conversation state soon." : cp.suggested_action}`;
       }
     }
+    const serverWarnings = result.warnings || [];
+    const serverWarningsLine = serverWarnings.length > 0 ? "\n\n" + serverWarnings.map((w) => `\u26A0\uFE0F ${w}`).join("\n") : "";
     const allWarnings = [
-      lessonsWarningLine,
+      serverWarningsLine || lessonsWarningLine,
+      // Server warnings OR client-side lesson detection
       rulesWarningLine ? `

 ${rulesWarningLine}` : "",
@@ -17042,6 +17347,8 @@ Returns: the created page ID, URL, title, and timestamps.
 Use this to save notes, documentation, or any content to Notion.
 Supports Markdown content which is automatically converted to Notion blocks.

+IMPORTANT: If using parent_database_id, you MUST call integration(provider="notion", action="list_databases") FIRST to get valid database IDs. Do NOT use database IDs from memory or previous conversations - they may be stale or inaccessible.
+
 Example prompts:
 - "Create a Notion page with today's meeting notes"
 - "Save this documentation to Notion"
@@ -17051,7 +17358,7 @@ Example prompts:
       project_id: external_exports.string().uuid().optional().describe("Project ID (uses session default if not provided). If provided, the memory event will be scoped to this project."),
       title: external_exports.string().describe("Page title"),
       content: external_exports.string().optional().describe("Page content in Markdown format"),
-      parent_database_id: external_exports.string().optional().describe("Parent database ID to
+      parent_database_id: external_exports.string().optional().describe("Parent database ID. MUST call integration(provider='notion', action='list_databases') first to get valid IDs - do NOT use IDs from memory"),
       parent_page_id: external_exports.string().optional().describe("Parent page ID to create page under")
     })
   },
@@ -19167,7 +19474,7 @@ ${formatContent(result)}`
       title: external_exports.string().optional().describe("Page/database title (for Notion create_page/update_page/create_database)"),
       content: external_exports.string().optional().describe("Page content in Markdown (for Notion create_page/update_page)"),
       description: external_exports.string().optional().describe("Database description (for Notion create_database)"),
-      parent_database_id: external_exports.string().optional().describe("Parent database ID (for Notion create_page)"),
+      parent_database_id: external_exports.string().optional().describe("Parent database ID (for Notion create_page). MUST call list_databases first - do NOT use IDs from memory"),
       parent_page_id: external_exports.string().optional().describe("Parent page ID (for Notion create_page/create_database)"),
       page_id: external_exports.string().optional().describe("Page ID (for Notion get_page/update_page)"),
       database_id: external_exports.string().optional().describe("Database ID (for Notion query_database/search_pages/activity)"),
@@ -19633,6 +19940,430 @@ Last edited: ${updatedPage.last_edited_time}`
       }
     }
   );
+  registerTool(
+    "media",
+    {
+      title: "Media",
+      description: `Media operations for video/audio/image assets. Enables AI agents to index, search, and retrieve media with semantic understanding - solving the "LLM as video editor has no context" problem for tools like Remotion.
+
+Actions:
+- index: Index a local media file or external URL. Triggers ML processing (Whisper transcription, CLIP embeddings, keyframe extraction).
+- status: Check indexing progress for a content_id. Returns transcript_available, keyframe_count, duration.
+- search: Semantic search across indexed media. Returns timestamps, transcript excerpts, keyframe URLs.
+- get_clip: Get clip details for a time range. Supports output_format: remotion (frame-based props), ffmpeg (timecodes), raw.
+- list: List indexed media assets.
+- delete: Remove a media asset from the index.
+
+Example workflow:
+1. media(action="index", file_path="/path/to/video.mp4") \u2192 get content_id
+2. media(action="status", content_id="...") \u2192 wait for indexed
+3. media(action="search", query="where John explains authentication") \u2192 get timestamps
+4. media(action="get_clip", content_id="...", start="1:34", end="2:15", output_format="remotion") \u2192 get Remotion props`,
+      inputSchema: external_exports.object({
+        action: external_exports.enum(["index", "status", "search", "get_clip", "list", "delete"]).describe("Action to perform"),
+        workspace_id: external_exports.string().uuid().optional(),
+        project_id: external_exports.string().uuid().optional(),
+        // Index params
+        file_path: external_exports.string().optional().describe("Local path to media file for indexing"),
+        external_url: external_exports.string().url().optional().describe("External URL to media file for indexing"),
+        content_type: external_exports.enum(["video", "audio", "image", "document"]).optional().describe("Type of media content (auto-detected if not provided)"),
+        // Status/get_clip/delete params
+        content_id: external_exports.string().uuid().optional().describe("Content ID from index operation"),
+        // Search params
+        query: external_exports.string().optional().describe("Semantic search query for media content"),
+        content_types: external_exports.array(external_exports.enum(["video", "audio", "image", "document"])).optional().describe("Filter search to specific content types"),
+        // Get clip params
+        start: external_exports.string().optional().describe('Start time for clip. Formats: "1:34", "94s", or seconds as string'),
+        end: external_exports.string().optional().describe('End time for clip. Formats: "2:15", "135s", or seconds as string'),
+        output_format: external_exports.enum(["remotion", "ffmpeg", "raw"]).optional().describe(
+          "Output format: remotion (frame-based props for Video component), ffmpeg (timecodes), raw (seconds)"
+        ),
+        fps: external_exports.number().optional().describe("Frames per second for remotion format (default: 30)"),
+        // Common params
+        tags: external_exports.array(external_exports.string()).optional().describe("Tags to associate with media"),
+        limit: external_exports.number().optional().describe("Maximum results to return")
+      })
+    },
+    async (input) => {
+      const workspaceId = resolveWorkspaceId(input.workspace_id);
+      const projectId = resolveProjectId(input.project_id);
+      switch (input.action) {
+        case "index": {
+          if (!input.file_path && !input.external_url) {
+            return errorResult("index requires: file_path or external_url");
+          }
+          if (!workspaceId) {
+            return errorResult(
+              "Error: workspace_id is required. Please call session_init first or provide workspace_id explicitly."
+            );
+          }
+          if (input.file_path) {
+            const fs8 = await import("fs/promises");
+            const pathModule = await import("path");
+            const filePath = input.file_path.startsWith("~") ? input.file_path.replace("~", process.env.HOME || "") : input.file_path;
+            const resolvedPath = pathModule.resolve(filePath);
+            let fileStats;
+            try {
+              fileStats = await fs8.stat(resolvedPath);
+            } catch {
+              return errorResult(`File not found: ${resolvedPath}`);
+            }
+            if (!fileStats.isFile()) {
+              return errorResult(`Not a file: ${resolvedPath}`);
+            }
+            const ext = pathModule.extname(resolvedPath).toLowerCase();
+            const mimeTypes = {
+              ".mp4": "video/mp4",
+              ".webm": "video/webm",
+              ".mov": "video/quicktime",
+              ".avi": "video/x-msvideo",
+              ".mkv": "video/x-matroska",
+              ".mp3": "audio/mpeg",
+              ".wav": "audio/wav",
+              ".ogg": "audio/ogg",
+              ".flac": "audio/flac",
+              ".m4a": "audio/mp4",
+              ".png": "image/png",
+              ".jpg": "image/jpeg",
+              ".jpeg": "image/jpeg",
+              ".gif": "image/gif",
+              ".webp": "image/webp",
+              ".svg": "image/svg+xml"
+            };
+            const mimeType = mimeTypes[ext] || "application/octet-stream";
+            let contentType = "other";
+            if (input.content_type) {
+              contentType = input.content_type;
+            } else if (mimeType.startsWith("video/")) {
+              contentType = "video";
+            } else if (mimeType.startsWith("audio/")) {
+              contentType = "audio";
+            } else if (mimeType.startsWith("image/")) {
+              contentType = "image";
+            }
+            const filename = pathModule.basename(resolvedPath);
+            try {
+              const uploadInit = await client.mediaInitUpload({
+                workspace_id: workspaceId,
+                filename,
+                size_bytes: fileStats.size,
+                content_type: contentType,
+                mime_type: mimeType,
+                tags: input.tags
+              });
+              const fileBuffer = await fs8.readFile(resolvedPath);
+              const uploadResponse = await fetch(uploadInit.upload_url, {
+                method: "PUT",
+                headers: uploadInit.headers,
+                body: fileBuffer
+              });
+              if (!uploadResponse.ok) {
+                return errorResult(
+                  `Failed to upload file: ${uploadResponse.status} ${uploadResponse.statusText}`
+                );
+              }
+              await client.mediaCompleteUpload({
+                workspace_id: workspaceId,
+                content_id: uploadInit.content_id
+              });
+              const result2 = {
+                status: "uploaded",
+                message: "Media file uploaded successfully. Indexing has been triggered.",
+                content_id: uploadInit.content_id,
+                filename,
+                content_type: contentType,
+                size_bytes: fileStats.size,
+                mime_type: mimeType,
+                note: "Use media(action='status', content_id='...') to check indexing progress."
+              };
+              return {
+                content: [
+                  {
+                    type: "text",
+                    text: `\u2705 Media uploaded successfully!
+
+Content ID: ${uploadInit.content_id}
+Filename: ${filename}
+Type: ${contentType}
+Size: ${(fileStats.size / 1024 / 1024).toFixed(2)} MB
+
+Indexing has been triggered. Use media(action='status', content_id='${uploadInit.content_id}') to check progress.`
+                  }
+                ],
+                structuredContent: toStructured(result2)
+              };
+            } catch (err) {
+              const errMsg = err instanceof Error ? err.message : String(err);
+              return errorResult(`Failed to upload media: ${errMsg}`);
+            }
+          }
+          const result = {
+            status: "not_implemented",
+            message: "External URL indexing is not yet implemented. Please use file_path for local files instead.",
+            action: "index",
+            external_url: input.external_url,
+            content_type: input.content_type
+          };
+          return {
+            content: [{ type: "text", text: formatContent(result) }],
+            structuredContent: toStructured(result)
+          };
+        }
+        case "status": {
+          if (!input.content_id) {
+            return errorResult("status requires: content_id");
+          }
+          if (!workspaceId) {
+            return errorResult(
+              "Error: workspace_id is required. Please call session_init first or provide workspace_id explicitly."
+            );
+          }
+          try {
+            const content = await client.mediaGetContent({
+              workspace_id: workspaceId,
+              content_id: input.content_id
+            });
+            let statusEmoji = "\u23F3";
+            let statusMessage = "Pending";
+            if (content.status === "indexed" || content.status === "ready") {
+              statusEmoji = "\u2705";
+              statusMessage = "Indexed and ready";
+            } else if (content.status === "processing" || content.status === "indexing") {
+              statusEmoji = "\u{1F504}";
+              statusMessage = `Processing${content.indexing_progress ? ` (${content.indexing_progress}%)` : ""}`;
+            } else if (content.status === "error" || content.status === "failed") {
+              statusEmoji = "\u274C";
+              statusMessage = content.indexing_error || "Indexing failed";
+            }
+            return {
+              content: [
+                {
+                  type: "text",
+                  text: `${statusEmoji} ${content.filename}
+
+Status: ${statusMessage}
+Content ID: ${content.id}
+Type: ${content.content_type}
+Size: ${(content.size_bytes / 1024 / 1024).toFixed(2)} MB
+Created: ${content.created_at}`
+                }
+              ],
+              structuredContent: toStructured(content)
+            };
+          } catch (err) {
+            const errMsg = err instanceof Error ? err.message : String(err);
+            return errorResult(`Failed to get content status: ${errMsg}`);
+          }
+        }
+        case "search": {
+          if (!input.query) {
+            return errorResult("search requires: query");
+          }
+          if (!workspaceId) {
+            return errorResult(
+              "Error: workspace_id is required. Please call session_init first or provide workspace_id explicitly."
+            );
+          }
+          try {
+            const searchResult = await client.mediaSearchContent({
+              workspace_id: workspaceId,
+              query: input.query,
+              content_type: input.content_types?.[0],
+              // API accepts single type for now
+              limit: input.limit
+            });
+            if (searchResult.results.length === 0) {
+              return {
+                content: [
+                  {
+                    type: "text",
+                    text: `No results found for: "${input.query}"
+
+Try a different query or check that you have indexed media content.`
+                  }
+                ],
+                structuredContent: toStructured(searchResult)
+              };
+            }
+            const resultsText = searchResult.results.map((r, i) => {
+              let timeInfo = "";
+              if (r.timestamp_start !== void 0) {
+                const startMin = Math.floor(r.timestamp_start / 60);
+                const startSec = Math.floor(r.timestamp_start % 60);
+                timeInfo = ` @ ${startMin}:${startSec.toString().padStart(2, "0")}`;
+                if (r.timestamp_end !== void 0) {
+                  const endMin = Math.floor(r.timestamp_end / 60);
+                  const endSec = Math.floor(r.timestamp_end % 60);
+                  timeInfo += `-${endMin}:${endSec.toString().padStart(2, "0")}`;
+                }
+              }
+              const matchText = r.match_text ? `
+"${r.match_text}"` : "";
+              return `${i + 1}. ${r.filename} (${r.content_type})${timeInfo}${matchText}`;
+            }).join("\n\n");
+            return {
+              content: [
+                {
+                  type: "text",
+                  text: `Found ${searchResult.total} result(s) for: "${input.query}"
+
+${resultsText}`
+                }
+              ],
+              structuredContent: toStructured(searchResult)
+            };
+          } catch (err) {
+            const errMsg = err instanceof Error ? err.message : String(err);
+            return errorResult(`Failed to search media: ${errMsg}`);
+          }
+        }
+        case "get_clip": {
+          if (!input.content_id) {
+            return errorResult("get_clip requires: content_id");
+          }
+          if (!workspaceId) {
+            return errorResult(
+              "Error: workspace_id is required. Please call session_init first or provide workspace_id explicitly."
+            );
+          }
+          const parseTime = (t) => {
+            if (!t) return void 0;
+            if (t.includes(":")) {
+              const parts = t.split(":").map(Number);
+              if (parts.length === 2) return parts[0] * 60 + parts[1];
+              if (parts.length === 3) return parts[0] * 3600 + parts[1] * 60 + parts[2];
+            }
+            if (t.endsWith("s")) return parseFloat(t.slice(0, -1));
+            return parseFloat(t);
+          };
+          const startTime = parseTime(input.start);
+          const endTime = parseTime(input.end);
+          try {
+            const clipResult = await client.mediaGetClip({
+              workspace_id: workspaceId,
+              content_id: input.content_id,
+              start_time: startTime,
+              end_time: endTime,
+              format: input.output_format === "remotion" ? "remotion" : input.output_format === "ffmpeg" ? "ffmpeg" : "json"
+            });
+            let outputText = `\u{1F4F9} Clip from: ${clipResult.filename}
+
+`;
+            outputText += `Time range: ${clipResult.clip.start_time}s - ${clipResult.clip.end_time}s
+`;
+            if (clipResult.clip.transcript) {
+              outputText += `
+Transcript:
+"${clipResult.clip.transcript}"
+`;
+            }
+            if (clipResult.remotion_props && input.output_format === "remotion") {
+              outputText += `
+### Remotion Props:
+\`\`\`json
+${JSON.stringify(clipResult.remotion_props, null, 2)}
+\`\`\``;
+            }
+            if (clipResult.ffmpeg_command && input.output_format === "ffmpeg") {
+              outputText += `
+### FFmpeg Command:
+\`\`\`bash
+${clipResult.ffmpeg_command}
+\`\`\``;
+            }
+            return {
+              content: [{ type: "text", text: outputText }],
+              structuredContent: toStructured(clipResult)
+            };
+          } catch (err) {
+            const errMsg = err instanceof Error ? err.message : String(err);
+            return errorResult(`Failed to get clip: ${errMsg}`);
+          }
+        }
+        case "list": {
+          if (!workspaceId) {
+            return errorResult(
+              "Error: workspace_id is required. Please call session_init first or provide workspace_id explicitly."
+            );
+          }
+          try {
+            const listResult = await client.mediaListContent({
+              workspace_id: workspaceId,
+              content_type: input.content_types?.[0],
+              // API accepts single type for now
+              limit: input.limit
+            });
+            if (listResult.items.length === 0) {
+              return {
+                content: [
+                  {
+                    type: "text",
+                    text: "No media content found.\n\nUse media(action='index', file_path='...') to index media files."
+                  }
+                ],
+                structuredContent: toStructured(listResult)
+              };
+            }
+            const itemsText = listResult.items.map((item, i) => {
+              const statusEmoji = item.status === "indexed" || item.status === "ready" ? "\u2705" : item.status === "processing" ? "\u{1F504}" : item.status === "error" ? "\u274C" : "\u23F3";
+              const size = (item.size_bytes / 1024 / 1024).toFixed(2);
+              return `${i + 1}. ${statusEmoji} ${item.filename}
+Type: ${item.content_type} | Size: ${size} MB | Status: ${item.status}
+ID: ${item.id}`;
+            }).join("\n\n");
+            return {
+              content: [
+                {
+                  type: "text",
+                  text: `\u{1F4DA} Media Library (${listResult.total} items)
+
+${itemsText}`
+                }
+              ],
+              structuredContent: toStructured(listResult)
+            };
+          } catch (err) {
+            const errMsg = err instanceof Error ? err.message : String(err);
+            return errorResult(`Failed to list media: ${errMsg}`);
+          }
+        }
+        case "delete": {
+          if (!input.content_id) {
+            return errorResult("delete requires: content_id");
+          }
+          if (!workspaceId) {
+            return errorResult(
+              "Error: workspace_id is required. Please call session_init first or provide workspace_id explicitly."
+            );
+          }
+          try {
+            const deleteResult = await client.mediaDeleteContent({
+              workspace_id: workspaceId,
+              content_id: input.content_id
+            });
+            return {
+              content: [
+                {
+                  type: "text",
+                  text: deleteResult.deleted ? `\u2705 Content deleted successfully.
+
+Content ID: ${input.content_id}` : `\u26A0\uFE0F Content may not have been deleted.
+
+Content ID: ${input.content_id}`
+                }
+              ],
+              structuredContent: toStructured(deleteResult)
+            };
+          } catch (err) {
+            const errMsg = err instanceof Error ? err.message : String(err);
+            return errorResult(`Failed to delete content: ${errMsg}`);
+          }
+        }
+        default:
+          return errorResult(`Unknown action: ${input.action}`);
+      }
+    }
+  );
   registerTool(
     "help",
     {
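The get_clip handler above accepts human-friendly time strings. A typed restatement of its inline parseTime helper, with worked examples, for clarity (the behaviour is copied directly from the handler; the original is untyped in dist/index.js):

```ts
// Same logic as the inline parseTime in the get_clip case above.
function parseTime(t?: string): number | undefined {
  if (!t) return undefined;
  if (t.includes(":")) {
    const parts = t.split(":").map(Number);
    if (parts.length === 2) return parts[0] * 60 + parts[1];                    // "1:34"    -> 94
    if (parts.length === 3) return parts[0] * 3600 + parts[1] * 60 + parts[2];  // "1:02:03" -> 3723
  }
  if (t.endsWith("s")) return parseFloat(t.slice(0, -1));                       // "94s"     -> 94
  return parseFloat(t);                                                         // "94"      -> 94
}
```

So a call with start="1:34" and end="2:15" is sent to the backend as start_time=94 and end_time=135 before the requested output_format is applied.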
@@ -22163,7 +22894,8 @@ function buildClientConfig(params) {
     defaultWorkspaceId: void 0,
     defaultProjectId: void 0,
     userAgent: `contextstream-mcp/setup/${VERSION}`,
-    contextPackEnabled: true
+    contextPackEnabled: true,
+    showTiming: false
   };
 }
 async function runSetupWizard(args) {
@@ -22410,7 +23142,15 @@ Code: ${device.user_code}`);
       }
     }
   }
-
+  console.log("\nRules mode (how rules are delivered to the AI):");
+  console.log(" 1) Dynamic (recommended) \u2014 minimal rules file, context_smart delivers rules dynamically");
+  console.log(" Best for: efficiency, better results, rules always up-to-date");
+  console.log(" The rules file just tells the AI to call session_init + context_smart");
+  console.log(" 2) Full \u2014 complete rules embedded in file");
+  console.log(" Best for: offline use, debugging, or if you prefer static rules");
+  console.log("");
+  const ruleModeChoice = normalizeInput(await rl.question("Choose [1/2] (default 1): ")) || "1";
+  const mode = ruleModeChoice === "2" ? "full" : "dynamic";
   const detectedPlanName = await client.getPlanName();
   const detectedGraphTier = await client.getGraphTier();
   const graphTierLabel = detectedGraphTier === "full" ? "full graph" : detectedGraphTier === "lite" ? "graph-lite" : "none";
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@contextstream/mcp-server",
   "mcpName": "io.github.contextstreamio/mcp-server",
-  "version": "0.4.42",
+  "version": "0.4.44",
   "description": "ContextStream MCP server - v0.4.x with consolidated domain tools (~11 tools, ~75% token reduction). Code context, memory, search, and AI tools.",
   "type": "module",
   "license": "MIT",