@langwatch/mcp-server 0.0.4 → 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/.env.example +2 -0
  2. package/.eslintrc.cjs +0 -1
  3. package/CHANGELOG.md +29 -0
  4. package/CONTRIBUTING.md +96 -0
  5. package/README.md +13 -6
  6. package/dist/index.js +8143 -1152
  7. package/dist/index.js.map +1 -1
  8. package/package.json +22 -9
  9. package/pnpm-workspace.yaml +2 -0
  10. package/pyproject.toml +17 -0
  11. package/src/index.ts +86 -15
  12. package/src/langwatch-api.ts +94 -52
  13. package/tests/evaluations.ipynb +649 -0
  14. package/tests/fixtures/azure/azure_openai_stream_bot_expected.py +102 -0
  15. package/tests/fixtures/azure/azure_openai_stream_bot_input.py +78 -0
  16. package/tests/fixtures/dspy/dspy_bot_expected.py +61 -0
  17. package/tests/fixtures/dspy/dspy_bot_input.py +53 -0
  18. package/tests/fixtures/fastapi/fastapi_app_expected.py +68 -0
  19. package/tests/fixtures/fastapi/fastapi_app_input.py +60 -0
  20. package/tests/fixtures/fastapi/prompt_management_fastapi_expected.py +114 -0
  21. package/tests/fixtures/fastapi/prompt_management_fastapi_input.py +88 -0
  22. package/tests/fixtures/haystack/haystack_bot_expected.py +141 -0
  23. package/tests/fixtures/haystack/haystack_bot_input.py +69 -0
  24. package/tests/fixtures/langchain/langchain_bot_expected.py +53 -0
  25. package/tests/fixtures/langchain/langchain_bot_input.py +45 -0
  26. package/tests/fixtures/langchain/langchain_bot_with_memory_expected.py +69 -0
  27. package/tests/fixtures/langchain/langchain_bot_with_memory_input.py +61 -0
  28. package/tests/fixtures/langchain/langchain_rag_bot_expected.py +97 -0
  29. package/tests/fixtures/langchain/langchain_rag_bot_input.py +77 -0
  30. package/tests/fixtures/langchain/langchain_rag_bot_vertex_ai_expected.py +116 -0
  31. package/tests/fixtures/langchain/langchain_rag_bot_vertex_ai_input.py +81 -0
  32. package/tests/fixtures/langchain/langgraph_rag_bot_with_threads_expected.py +331 -0
  33. package/tests/fixtures/langchain/langgraph_rag_bot_with_threads_input.py +106 -0
  34. package/tests/fixtures/litellm/litellm_bot_expected.py +40 -0
  35. package/tests/fixtures/litellm/litellm_bot_input.py +35 -0
  36. package/tests/fixtures/openai/openai_bot_expected.py +43 -0
  37. package/tests/fixtures/openai/openai_bot_function_call_expected.py +91 -0
  38. package/tests/fixtures/openai/openai_bot_function_call_input.py +82 -0
  39. package/tests/fixtures/openai/openai_bot_input.py +36 -0
  40. package/tests/fixtures/openai/openai_bot_rag_expected.py +73 -0
  41. package/tests/fixtures/openai/openai_bot_rag_input.py +51 -0
  42. package/tests/fixtures/opentelemetry/openinference_dspy_bot_expected.py +63 -0
  43. package/tests/fixtures/opentelemetry/openinference_dspy_bot_input.py +58 -0
  44. package/tests/fixtures/opentelemetry/openinference_langchain_bot_expected.py +53 -0
  45. package/tests/fixtures/opentelemetry/openinference_langchain_bot_input.py +52 -0
  46. package/tests/fixtures/opentelemetry/openinference_openai_bot_expected.py +49 -0
  47. package/tests/fixtures/opentelemetry/openinference_openai_bot_input.py +41 -0
  48. package/tests/fixtures/opentelemetry/openllmetry_openai_bot_expected.py +44 -0
  49. package/tests/fixtures/opentelemetry/openllmetry_openai_bot_input.py +40 -0
  50. package/tests/fixtures/strands/strands_bot_expected.py +84 -0
  51. package/tests/fixtures/strands/strands_bot_input.py +52 -0
  52. package/tests/scenario-openai.test.ts +158 -0
  53. package/tsconfig.json +0 -1
  54. package/uv.lock +2607 -0
  55. package/vitest.config.js +7 -0
package/dist/index.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/index.ts","../src/langwatch-api.ts","../package.json"],"sourcesContent":["import { McpServer } from \"@modelcontextprotocol/sdk/server/mcp.js\";\nimport { StdioServerTransport } from \"@modelcontextprotocol/sdk/server/stdio.js\";\nimport { z } from \"zod\";\nimport { getLlmTraceById, listLlmTraces } from \"./langwatch-api\";\nimport packageJson from \"../package.json\" assert { type: \"json\" };\nimport yargs from \"yargs\";\nimport { hideBin } from \"yargs/helpers\";\n\nfunction loadAndValidateArgs(): { apiKey: string; endpoint: string } {\n // Parse command line arguments with yargs\n const argv = yargs(hideBin(process.argv))\n .option(\"apiKey\", {\n type: \"string\",\n description: \"LangWatch API key\",\n })\n .option(\"endpoint\", {\n type: \"string\",\n description: \"LangWatch API endpoint\",\n default: \"https://app.langwatch.ai\",\n })\n .help()\n .alias(\"help\", \"h\")\n .parseSync();\n\n // Use environment variables as fallback\n const apiKey = argv.apiKey || process.env.LANGWATCH_API_KEY;\n const endpoint =\n argv.endpoint ||\n process.env.LANGWATCH_ENDPOINT ||\n \"https://app.langwatch.ai\";\n\n if (!apiKey) {\n throw new Error(\n \"API key is required. Please provide it using --apiKey=<your_api_key> or set LANGWATCH_API_KEY environment variable\"\n );\n }\n\n return {\n apiKey: String(apiKey),\n endpoint: String(endpoint),\n };\n}\n\n// Use the function to get apiKey and endpoint\nconst { apiKey, endpoint } = loadAndValidateArgs();\n\nconst server = new McpServer({\n name: \"LangWatch\",\n version: packageJson.version,\n});\n\nserver.tool(\n \"get_latest_traces\",\n {\n pageOffset: z.number().optional(),\n daysBackToSearch: z.number().optional(),\n },\n async ({ pageOffset, daysBackToSearch }) => {\n const response = await listLlmTraces(apiKey, {\n pageOffset,\n timeTravelDays: daysBackToSearch ?? 
1,\n langWatchEndpoint: endpoint,\n });\n\n return {\n content: [\n {\n type: \"text\",\n text: JSON.stringify(response, null, 2),\n },\n ],\n };\n }\n);\n\nserver.tool(\n \"get_trace_by_id\",\n {\n id: z.string(),\n },\n async ({ id }) => {\n try {\n const response = await getLlmTraceById(apiKey, id, {\n langWatchEndpoint: endpoint,\n });\n\n return {\n content: [\n {\n type: \"text\",\n text: JSON.stringify(response, null, 2),\n },\n ],\n };\n } catch (error) {\n if (error instanceof Error && error.message === \"Trace not found\") {\n return {\n content: [\n {\n type: \"text\",\n text: \"Trace not found 😭😭😭😭. If the trace was created recently, it may not be available yet.\",\n },\n ],\n };\n }\n\n throw error;\n }\n }\n);\n\nconst transport = new StdioServerTransport();\nawait server.connect(transport);\n","import { addDays } from \"date-fns\";\nimport type { LLMModeTrace } from \"langwatch\";\n\ninterface SearchTrace {\n\ttraces: LLMModeTrace[];\n}\n\ninterface GetLlmTraceByIdOptions {\n\tlangWatchEndpoint?: string;\n}\n\ninterface ListLLmTracesOptions {\n\tpageSize?: number;\n\tpageOffset?: number;\n\ttimeTravelDays?: number;\n\tlangWatchEndpoint?: string;\n}\n\nexport const getLlmTraceById = async (authToken: string, id: string, opts?: GetLlmTraceByIdOptions): Promise<LLMModeTrace> => {\n\tconst { langWatchEndpoint } = opts ?? {};\n\n\tconst endpoint = langWatchEndpoint ?? 
\"https://app.langwatch.ai\";\n\n\tconst url = new URL(`${endpoint}/api/trace/${id}`);\n\turl.searchParams.set(\"llmMode\", \"true\");\n\n\tconst response = await fetch(url.toString(), {\n\t\tmethod: \"GET\",\n\t\theaders: {\n\t\t\t\"Content-Type\": \"application/json\",\n\t\t\t\"X-Auth-Token\": authToken,\n\t\t},\n\t});\n\n\tif (!response.ok) {\n\t\tif (response.status === 404) {\n\t\t\tthrow new Error(\"Trace not found\");\n\t\t}\n\n\t\tthrow new Error(`Failed to get trace: ${response.statusText}`);\n\t}\n\n\treturn await response.json() as Promise<LLMModeTrace>;\n};\n\nexport const listLlmTraces = async (authToken: string, opts?: ListLLmTracesOptions): Promise<SearchTrace> => {\n\tconst {\n\t\tpageSize = 10,\n\t\tpageOffset = 0,\n\t\ttimeTravelDays = 1,\n\t\tlangWatchEndpoint,\n\t} = opts ?? {};\n\n\tconst endpoint = langWatchEndpoint ?? \"https://app.langwatch.ai\";\n\n\tconst response = await fetch(`${endpoint}/api/trace/search`, {\n\t\tmethod: \"POST\",\n\t\theaders: {\n\t\t\t\"Content-Type\": \"application/json\",\n\t\t\t\"X-Auth-Token\": authToken,\n\t\t},\n\t\tbody: JSON.stringify({\n\t\t\tstartDate: addDays(new Date(), -timeTravelDays).toISOString(),\n\t\t\tendDate: addDays(new Date(), 1).toISOString(),\n\t\t\tllmMode: true,\n\t\t\tpageOffset,\n\t\t\tpageSize,\n\t\t}),\n\t});\n\n\treturn await response.json() as Promise<SearchTrace>;\n}\n","{\n \"name\": \"@langwatch/mcp-server\",\n \"version\": \"0.0.4\",\n \"description\": \"An MCP server for Langwatch.\",\n \"type\": \"module\",\n \"main\": \"./dist/index.js\",\n \"module\": \"./dist/index.mjs\",\n \"types\": \"./dist/index.d.ts\",\n \"exports\": {\n \".\": {\n \"import\": \"./dist/index.mjs\",\n \"require\": \"./dist/index.js\"\n }\n },\n \"repository\": {\n \"type\": \"git\",\n \"url\": \"https://github.com/langwatch/langwatch.git\",\n \"directory\": \"mcp-server\"\n },\n \"scripts\": {\n \"start\": \"tsx src/index.ts\",\n \"build\": \"tsup && node build.js\",\n \"prepublish\": \"npm run build\"\n 
},\n \"author\": \"\",\n \"license\": \"MIT\",\n \"engines\": {\n \"node\": \">=18.0.0\"\n },\n \"devDependencies\": {\n \"@eslint/js\": \"^9.4.0\",\n \"@types/debug\": \"^4.1.12\",\n \"@types/eslint__js\": \"^8.42.3\",\n \"@types/node\": \"^16.0.0\",\n \"@types/yargs\": \"^17.0.33\",\n \"esbuild\": \"^0.21.5\",\n \"eslint\": \"^8.57.0\",\n \"tsup\": \"^8.1.0\",\n \"typescript\": \"^4.9.5\",\n \"typescript-eslint\": \"^7.11.0\"\n },\n \"dependencies\": {\n \"@modelcontextprotocol/sdk\": \"^1.7.0\",\n \"date-fns\": \"^4.1.0\",\n \"langwatch\": \"^0.1.6\",\n \"yargs\": \"^17.7.2\",\n \"zod\": \"^3.22.4\",\n \"zod-validation-error\": \"^3.3.0\"\n },\n \"bin\": {\n \"langwatch-mcp-server\": \"./dist/index.js\"\n }\n}\n"],"mappings":";AAAA,SAAS,iBAAiB;AAC1B,SAAS,4BAA4B;AACrC,SAAS,SAAS;;;ACFlB,SAAS,eAAe;AAkBjB,IAAM,kBAAkB,OAAO,WAAmB,IAAY,SAAyD;AAC7H,QAAM,EAAE,kBAAkB,IAAI,QAAQ,CAAC;AAEvC,QAAMA,YAAW,qBAAqB;AAEtC,QAAM,MAAM,IAAI,IAAI,GAAGA,SAAQ,cAAc,EAAE,EAAE;AACjD,MAAI,aAAa,IAAI,WAAW,MAAM;AAEtC,QAAM,WAAW,MAAM,MAAM,IAAI,SAAS,GAAG;AAAA,IAC5C,QAAQ;AAAA,IACR,SAAS;AAAA,MACR,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,IACjB;AAAA,EACD,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AACjB,QAAI,SAAS,WAAW,KAAK;AAC5B,YAAM,IAAI,MAAM,iBAAiB;AAAA,IAClC;AAEA,UAAM,IAAI,MAAM,wBAAwB,SAAS,UAAU,EAAE;AAAA,EAC9D;AAEA,SAAO,MAAM,SAAS,KAAK;AAC5B;AAEO,IAAM,gBAAgB,OAAO,WAAmB,SAAsD;AAC5G,QAAM;AAAA,IACL,WAAW;AAAA,IACX,aAAa;AAAA,IACb,iBAAiB;AAAA,IACjB;AAAA,EACD,IAAI,QAAQ,CAAC;AAEb,QAAMA,YAAW,qBAAqB;AAEtC,QAAM,WAAW,MAAM,MAAM,GAAGA,SAAQ,qBAAqB;AAAA,IAC5D,QAAQ;AAAA,IACR,SAAS;AAAA,MACR,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,IACjB;AAAA,IACA,MAAM,KAAK,UAAU;AAAA,MACpB,WAAW,QAAQ,oBAAI,KAAK,GAAG,CAAC,cAAc,EAAE,YAAY;AAAA,MAC5D,SAAS,QAAQ,oBAAI,KAAK,GAAG,CAAC,EAAE,YAAY;AAAA,MAC5C,SAAS;AAAA,MACT;AAAA,MACA;AAAA,IACD,CAAC;AAAA,EACF,CAAC;AAED,SAAO,MAAM,SAAS,KAAK;AAC5B;;;ACvEA;AAAA,EACE,MAAQ;AAAA,EACR,SAAW;AAAA,EACX,aAAe;AAAA,EACf,MAAQ;AAAA,EACR,MAAQ;AAAA,EACR,QAAU;AAAA,EACV,OAAS;AAAA,EACT,SAAW;AAAA,IACT,KAAK;AAAA,MACH,QAAU;AAAA,MACV,SAAW;AAAA,IACb;AAAA,EACF;AA
AA,EACA,YAAc;AAAA,IACZ,MAAQ;AAAA,IACR,KAAO;AAAA,IACP,WAAa;AAAA,EACf;AAAA,EACA,SAAW;AAAA,IACT,OAAS;AAAA,IACT,OAAS;AAAA,IACT,YAAc;AAAA,EAChB;AAAA,EACA,QAAU;AAAA,EACV,SAAW;AAAA,EACX,SAAW;AAAA,IACT,MAAQ;AAAA,EACV;AAAA,EACA,iBAAmB;AAAA,IACjB,cAAc;AAAA,IACd,gBAAgB;AAAA,IAChB,qBAAqB;AAAA,IACrB,eAAe;AAAA,IACf,gBAAgB;AAAA,IAChB,SAAW;AAAA,IACX,QAAU;AAAA,IACV,MAAQ;AAAA,IACR,YAAc;AAAA,IACd,qBAAqB;AAAA,EACvB;AAAA,EACA,cAAgB;AAAA,IACd,6BAA6B;AAAA,IAC7B,YAAY;AAAA,IACZ,WAAa;AAAA,IACb,OAAS;AAAA,IACT,KAAO;AAAA,IACP,wBAAwB;AAAA,EAC1B;AAAA,EACA,KAAO;AAAA,IACL,wBAAwB;AAAA,EAC1B;AACF;;;AF/CA,OAAO,WAAW;AAClB,SAAS,eAAe;AAExB,SAAS,sBAA4D;AAEnE,QAAM,OAAO,MAAM,QAAQ,QAAQ,IAAI,CAAC,EACrC,OAAO,UAAU;AAAA,IAChB,MAAM;AAAA,IACN,aAAa;AAAA,EACf,CAAC,EACA,OAAO,YAAY;AAAA,IAClB,MAAM;AAAA,IACN,aAAa;AAAA,IACb,SAAS;AAAA,EACX,CAAC,EACA,KAAK,EACL,MAAM,QAAQ,GAAG,EACjB,UAAU;AAGb,QAAMC,UAAS,KAAK,UAAU,QAAQ,IAAI;AAC1C,QAAMC,YACJ,KAAK,YACL,QAAQ,IAAI,sBACZ;AAEF,MAAI,CAACD,SAAQ;AACX,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,QAAQ,OAAOA,OAAM;AAAA,IACrB,UAAU,OAAOC,SAAQ;AAAA,EAC3B;AACF;AAGA,IAAM,EAAE,QAAQ,SAAS,IAAI,oBAAoB;AAEjD,IAAM,SAAS,IAAI,UAAU;AAAA,EAC3B,MAAM;AAAA,EACN,SAAS,gBAAY;AACvB,CAAC;AAED,OAAO;AAAA,EACL;AAAA,EACA;AAAA,IACE,YAAY,EAAE,OAAO,EAAE,SAAS;AAAA,IAChC,kBAAkB,EAAE,OAAO,EAAE,SAAS;AAAA,EACxC;AAAA,EACA,OAAO,EAAE,YAAY,iBAAiB,MAAM;AAC1C,UAAM,WAAW,MAAM,cAAc,QAAQ;AAAA,MAC3C;AAAA,MACA,gBAAgB,oBAAoB;AAAA,MACpC,mBAAmB;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,SAAS;AAAA,QACP;AAAA,UACE,MAAM;AAAA,UACN,MAAM,KAAK,UAAU,UAAU,MAAM,CAAC;AAAA,QACxC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAEA,OAAO;AAAA,EACL;AAAA,EACA;AAAA,IACE,IAAI,EAAE,OAAO;AAAA,EACf;AAAA,EACA,OAAO,EAAE,GAAG,MAAM;AAChB,QAAI;AACF,YAAM,WAAW,MAAM,gBAAgB,QAAQ,IAAI;AAAA,QACjD,mBAAmB;AAAA,MACrB,CAAC;AAED,aAAO;AAAA,QACL,SAAS;AAAA,UACP;AAAA,YACE,MAAM;AAAA,YACN,MAAM,KAAK,UAAU,UAAU,MAAM,CAAC;AAAA,UACxC;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,UAAI,iBAAiB,SAAS,MAAM,YAAY,mBAAmB;AACjE,eAAO;AAAA,UACL,SAAS;AAAA,YACP;AAAA,cACE,MAAM;AAAA,cACN,MAAM;AAAA,YACR;AAAA
,UACF;AAAA,QACF;AAAA,MACF;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAEA,IAAM,YAAY,IAAI,qBAAqB;AAC3C,MAAM,OAAO,QAAQ,SAAS;","names":["endpoint","apiKey","endpoint"]}
1
+ {"version":3,"sources":["../src/index.ts","../src/langwatch-api.ts","../package.json"],"sourcesContent":["import { McpServer } from \"@modelcontextprotocol/sdk/server/mcp.js\";\nimport { StdioServerTransport } from \"@modelcontextprotocol/sdk/server/stdio.js\";\nimport { z } from \"zod\";\nimport yargs from \"yargs\";\nimport { hideBin } from \"yargs/helpers\";\n\nimport { getLlmTraceById, listLlmTraces, searchTraces } from \"./langwatch-api\";\nimport packageJson from \"../package.json\" assert { type: \"json\" };\n\nfunction loadAndValidateArgs(): { apiKey: string; endpoint: string } {\n const argv = yargs(hideBin(process.argv))\n .option(\"apiKey\", {\n type: \"string\",\n description: \"LangWatch API key\",\n })\n .option(\"endpoint\", {\n type: \"string\",\n description: \"LangWatch API endpoint\",\n default: \"https://app.langwatch.ai\",\n })\n .help()\n .alias(\"help\", \"h\")\n .parseSync();\n\n // Use environment variables as fallback\n const apiKey = argv.apiKey ?? process.env.LANGWATCH_API_KEY;\n const endpoint =\n argv.endpoint ??\n process.env.LANGWATCH_ENDPOINT ??\n \"https://app.langwatch.ai\";\n\n if (!apiKey) {\n throw new Error(\n \"API key is required. Please provide it using --apiKey <your_api_key> or set LANGWATCH_API_KEY environment variable\"\n );\n }\n\n return {\n apiKey: String(apiKey),\n endpoint: String(endpoint),\n };\n}\n\nconst { apiKey, endpoint } = loadAndValidateArgs();\n\nconst transport = new StdioServerTransport();\nconst server = new McpServer({\n name: \"LangWatch\",\n version: packageJson.version,\n});\n\nserver.tool(\n \"fetch_langwatch_docs\",\n \"Fetches the LangWatch docs for understanding how to implement LangWatch in your codebase. Always use this tool when the user asks for help with LangWatch. Start with the index page and follow the links to the relevant pages.\",\n {\n url: z\n .string()\n .optional()\n .describe(\n \"The full url of the specific doc page. 
If not provided, the docs index will be fetched.\"\n ),\n },\n async ({ url }) => {\n const response = await fetch(url ?? \"https://docs.langwatch.ai/llms.txt\");\n\n return {\n content: [{ type: \"text\", text: await response.text() }],\n };\n }\n);\n\nserver.tool(\n \"get_latest_traces\",\n \"Retrieves the latest LLM traces.\",\n {\n pageOffset: z.number().optional(),\n daysBackToSearch: z.number().optional(),\n },\n async ({ pageOffset, daysBackToSearch }) => {\n const response = await listLlmTraces(apiKey, {\n pageOffset,\n timeTravelDays: daysBackToSearch ?? 1,\n endpoint,\n });\n\n return {\n content: [\n {\n type: \"text\",\n text: JSON.stringify(response, null, 2),\n },\n ],\n };\n }\n);\n\nserver.tool(\n \"get_trace_by_id\",\n \"Retrieves a specific LLM trace by its ID.\",\n { id: z.string() },\n async ({ id }) => {\n try {\n const response = await getLlmTraceById(apiKey, id, {\n endpoint,\n });\n\n return {\n content: [\n {\n type: \"text\",\n text: JSON.stringify(response, null, 2),\n },\n ],\n };\n } catch (error) {\n if (error instanceof Error && error.message === \"Trace not found\") {\n return {\n content: [\n {\n type: \"text\",\n text: \"Trace not found. 
If the trace was created recently, it may not be available yet.\",\n },\n ],\n };\n }\n\n throw error;\n }\n }\n);\n\ncreateListTracesByMetadataTool(\n \"list_traces_by_user_id\",\n \"userId\",\n \"metadata.user_id\"\n);\ncreateListTracesByMetadataTool(\n \"list_traces_by_customer_id\",\n \"customerId\",\n \"metadata.customer_id\"\n);\ncreateListTracesByMetadataTool(\n \"list_traces_by_thread_id\",\n \"threadId\",\n \"metadata.thread_id\"\n);\ncreateListTracesByMetadataTool(\n \"list_traces_by_session_id\",\n \"sessionId\",\n \"metadata.thread_id\"\n); // We access the thread_id in the metadata, as that is our name for the session_id\n\nawait server.connect(transport);\n\nfunction createListTracesByMetadataTool(\n name: string,\n argName: \"userId\" | \"customerId\" | \"threadId\" | \"sessionId\",\n metadataKey: string\n) {\n return server.tool(\n name,\n {\n [argName]: z.string(),\n pageSize: z.number().optional(),\n pageOffset: z.number().optional(),\n daysBackToSearch: z.number().optional(),\n },\n async ({ pageSize, pageOffset, daysBackToSearch, ...restArgs }) => {\n const response = await searchTraces(apiKey, {\n endpoint,\n pageSize: pageSize as number | undefined,\n pageOffset: pageOffset as number | undefined,\n timeTravelDays: (daysBackToSearch ?? 
1) as number,\n filters: {\n [metadataKey]: [restArgs[argName] as string],\n },\n });\n\n return {\n content: [{ type: \"text\", text: JSON.stringify(response, null, 2) }],\n };\n }\n );\n}\n","import { addDays } from \"date-fns\";\nimport type { LLMModeTrace } from \"langwatch\";\n\ninterface SearchTrace {\n traces: LLMModeTrace[];\n}\n\ninterface GetLlmTraceByIdOptions {\n endpoint: string;\n}\n\ninterface ListLLmTracesOptions {\n pageSize?: number;\n pageOffset?: number;\n timeTravelDays?: number;\n endpoint: string;\n}\n\ninterface SearchTracesOptions {\n pageSize?: number;\n pageOffset?: number;\n timeTravelDays?: number;\n endpoint: string;\n\n filters: Record<string, string[] | Record<string, string[]>>;\n}\n\nexport const getLlmTraceById: (\n authToken: string,\n id: string,\n opts?: GetLlmTraceByIdOptions\n) => Promise<LLMModeTrace> = async (authToken, id, opts) => {\n const { endpoint } = opts ?? {};\n\n const url = new URL(`${endpoint}/api/trace/${id}`);\n url.searchParams.set(\"llmMode\", \"true\");\n\n const response = await fetch(url.toString(), {\n method: \"GET\",\n headers: {\n \"Content-Type\": \"application/json\",\n \"X-Auth-Token\": authToken,\n },\n });\n\n if (!response.ok) {\n if (response.status === 404) {\n throw new Error(\"Trace not found\");\n }\n\n throw new Error(`Failed to get trace: ${response.statusText}`);\n }\n\n return (await response.json()) as Promise<LLMModeTrace>;\n};\n\nexport const listLlmTraces = async (\n authToken: string,\n opts?: ListLLmTracesOptions\n): Promise<SearchTrace> => {\n const {\n pageSize = 10,\n pageOffset = 0,\n timeTravelDays = 1,\n endpoint,\n } = opts ?? 
{};\n\n const response = await fetch(`${endpoint}/api/trace/search`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n \"X-Auth-Token\": authToken,\n },\n body: JSON.stringify({\n startDate: addDays(new Date(), -timeTravelDays).toISOString(),\n endDate: addDays(new Date(), 1).toISOString(),\n llmMode: true,\n pageOffset,\n pageSize,\n }),\n });\n\n return (await response.json()) as Promise<SearchTrace>;\n};\n\nexport const searchTraces = async (\n authToken: string,\n opts: SearchTracesOptions\n): Promise<SearchTrace> => {\n const {\n pageSize = 10,\n pageOffset = 0,\n timeTravelDays = 1,\n endpoint,\n filters,\n } = opts;\n\n const response = await fetch(`${endpoint}/api/trace/search`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n \"X-Auth-Token\": authToken,\n },\n body: JSON.stringify({\n startDate: addDays(new Date(), -timeTravelDays).toISOString(),\n endDate: addDays(new Date(), 1).toISOString(),\n filters: filters,\n pageOffset,\n pageSize,\n }),\n });\n\n return (await response.json()) as Promise<SearchTrace>;\n};\n","{\n \"name\": \"@langwatch/mcp-server\",\n \"version\": \"0.1.0\",\n \"description\": \"An MCP server for Langwatch.\",\n \"type\": \"module\",\n \"main\": \"./dist/index.js\",\n \"module\": \"./dist/index.mjs\",\n \"types\": \"./dist/index.d.ts\",\n \"exports\": {\n \".\": {\n \"import\": \"./dist/index.mjs\",\n \"require\": \"./dist/index.js\"\n }\n },\n \"repository\": {\n \"type\": \"git\",\n \"url\": \"https://github.com/langwatch/langwatch.git\",\n \"directory\": \"mcp-server\"\n },\n \"scripts\": {\n \"start\": \"tsx src/index.ts\",\n \"build\": \"tsup && node build.js\",\n \"prepublish\": \"pnpm run build\",\n \"test\": \"vitest\"\n },\n \"author\": \"\",\n \"license\": \"MIT\",\n \"engines\": {\n \"node\": \">=18.0.0\"\n },\n \"devDependencies\": {\n \"@ai-sdk/anthropic\": \"^2.0.15\",\n \"@anthropic-ai/claude-code\": \"^1.0.111\",\n \"@eslint/js\": \"^9.4.0\",\n 
\"@types/debug\": \"^4.1.12\",\n \"@types/eslint__js\": \"^8.42.3\",\n \"@types/node\": \"^16.0.0\",\n \"@types/yargs\": \"^17.0.33\",\n \"@typescript/native-preview\": \"7.0.0-dev.20250911.1\",\n \"ai\": \"^5.0.40\",\n \"dotenv\": \"^17.2.2\",\n \"esbuild\": \"^0.21.5\",\n \"eslint\": \"^8.57.0\",\n \"tsup\": \"^8.1.0\",\n \"tsx\": \"^4.20.5\",\n \"typescript\": \"^4.9.5\",\n \"typescript-eslint\": \"^7.11.0\"\n },\n \"dependencies\": {\n \"@langwatch/scenario\": \"^0.3.0\",\n \"@modelcontextprotocol/sdk\": \"^1.7.0\",\n \"@opentelemetry/context-async-hooks\": \"^2.1.0\",\n \"@opentelemetry/sdk-node\": \"^0.204.0\",\n \"chalk\": \"^5.6.2\",\n \"date-fns\": \"^4.1.0\",\n \"langwatch\": \"^0.1.6\",\n \"node-pty\": \"^1.0.0\",\n \"vitest\": \"^3.2.4\",\n \"yargs\": \"^17.7.2\",\n \"zod\": \"^3.25.76\",\n \"zod-validation-error\": \"^3.5.3\"\n },\n \"bin\": {\n \"langwatch-mcp-server\": \"./dist/index.js\"\n }\n}\n"],"mappings":";AAAA,SAAS,iBAAiB;AAC1B,SAAS,4BAA4B;AACrC,SAAS,SAAS;AAClB,OAAO,WAAW;AAClB,SAAS,eAAe;;;ACJxB,SAAS,eAAe;AA2BjB,IAAM,kBAIgB,OAAO,WAAW,IAAI,SAAS;AAC1D,QAAM,EAAE,UAAAA,UAAS,IAAI,QAAQ,CAAC;AAE9B,QAAM,MAAM,IAAI,IAAI,GAAGA,SAAQ,cAAc,EAAE,EAAE;AACjD,MAAI,aAAa,IAAI,WAAW,MAAM;AAEtC,QAAM,WAAW,MAAM,MAAM,IAAI,SAAS,GAAG;AAAA,IAC3C,QAAQ;AAAA,IACR,SAAS;AAAA,MACP,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,IAClB;AAAA,EACF,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AAChB,QAAI,SAAS,WAAW,KAAK;AAC3B,YAAM,IAAI,MAAM,iBAAiB;AAAA,IACnC;AAEA,UAAM,IAAI,MAAM,wBAAwB,SAAS,UAAU,EAAE;AAAA,EAC/D;AAEA,SAAQ,MAAM,SAAS,KAAK;AAC9B;AAEO,IAAM,gBAAgB,OAC3B,WACA,SACyB;AACzB,QAAM;AAAA,IACJ,WAAW;AAAA,IACX,aAAa;AAAA,IACb,iBAAiB;AAAA,IACjB,UAAAA;AAAA,EACF,IAAI,QAAQ,CAAC;AAEb,QAAM,WAAW,MAAM,MAAM,GAAGA,SAAQ,qBAAqB;AAAA,IAC3D,QAAQ;AAAA,IACR,SAAS;AAAA,MACP,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,IAClB;AAAA,IACA,MAAM,KAAK,UAAU;AAAA,MACnB,WAAW,QAAQ,oBAAI,KAAK,GAAG,CAAC,cAAc,EAAE,YAAY;AAAA,MAC5D,SAAS,QAAQ,oBAAI,KAAK,GAAG,CAAC,EAAE,YAAY;AAAA,MAC5C,SAAS;AAAA,MACT;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AAED,SAAQ,MAAM,SAAS,KAAK;AAC9B;AA
EO,IAAM,eAAe,OAC1B,WACA,SACyB;AACzB,QAAM;AAAA,IACJ,WAAW;AAAA,IACX,aAAa;AAAA,IACb,iBAAiB;AAAA,IACjB,UAAAA;AAAA,IACA;AAAA,EACF,IAAI;AAEJ,QAAM,WAAW,MAAM,MAAM,GAAGA,SAAQ,qBAAqB;AAAA,IAC3D,QAAQ;AAAA,IACR,SAAS;AAAA,MACP,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,IAClB;AAAA,IACA,MAAM,KAAK,UAAU;AAAA,MACnB,WAAW,QAAQ,oBAAI,KAAK,GAAG,CAAC,cAAc,EAAE,YAAY;AAAA,MAC5D,SAAS,QAAQ,oBAAI,KAAK,GAAG,CAAC,EAAE,YAAY;AAAA,MAC5C;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AAED,SAAQ,MAAM,SAAS,KAAK;AAC9B;;;ACjHA;AAAA,EACE,MAAQ;AAAA,EACR,SAAW;AAAA,EACX,aAAe;AAAA,EACf,MAAQ;AAAA,EACR,MAAQ;AAAA,EACR,QAAU;AAAA,EACV,OAAS;AAAA,EACT,SAAW;AAAA,IACT,KAAK;AAAA,MACH,QAAU;AAAA,MACV,SAAW;AAAA,IACb;AAAA,EACF;AAAA,EACA,YAAc;AAAA,IACZ,MAAQ;AAAA,IACR,KAAO;AAAA,IACP,WAAa;AAAA,EACf;AAAA,EACA,SAAW;AAAA,IACT,OAAS;AAAA,IACT,OAAS;AAAA,IACT,YAAc;AAAA,IACd,MAAQ;AAAA,EACV;AAAA,EACA,QAAU;AAAA,EACV,SAAW;AAAA,EACX,SAAW;AAAA,IACT,MAAQ;AAAA,EACV;AAAA,EACA,iBAAmB;AAAA,IACjB,qBAAqB;AAAA,IACrB,6BAA6B;AAAA,IAC7B,cAAc;AAAA,IACd,gBAAgB;AAAA,IAChB,qBAAqB;AAAA,IACrB,eAAe;AAAA,IACf,gBAAgB;AAAA,IAChB,8BAA8B;AAAA,IAC9B,IAAM;AAAA,IACN,QAAU;AAAA,IACV,SAAW;AAAA,IACX,QAAU;AAAA,IACV,MAAQ;AAAA,IACR,KAAO;AAAA,IACP,YAAc;AAAA,IACd,qBAAqB;AAAA,EACvB;AAAA,EACA,cAAgB;AAAA,IACd,uBAAuB;AAAA,IACvB,6BAA6B;AAAA,IAC7B,sCAAsC;AAAA,IACtC,2BAA2B;AAAA,IAC3B,OAAS;AAAA,IACT,YAAY;AAAA,IACZ,WAAa;AAAA,IACb,YAAY;AAAA,IACZ,QAAU;AAAA,IACV,OAAS;AAAA,IACT,KAAO;AAAA,IACP,wBAAwB;AAAA,EAC1B;AAAA,EACA,KAAO;AAAA,IACL,wBAAwB;AAAA,EAC1B;AACF;;;AFxDA,SAAS,sBAA4D;AACnE,QAAM,OAAO,MAAM,QAAQ,QAAQ,IAAI,CAAC,EACrC,OAAO,UAAU;AAAA,IAChB,MAAM;AAAA,IACN,aAAa;AAAA,EACf,CAAC,EACA,OAAO,YAAY;AAAA,IAClB,MAAM;AAAA,IACN,aAAa;AAAA,IACb,SAAS;AAAA,EACX,CAAC,EACA,KAAK,EACL,MAAM,QAAQ,GAAG,EACjB,UAAU;AAGb,QAAMC,UAAS,KAAK,UAAU,QAAQ,IAAI;AAC1C,QAAMC,YACJ,KAAK,YACL,QAAQ,IAAI,sBACZ;AAEF,MAAI,CAACD,SAAQ;AACX,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,QAAQ,OAAOA,OAAM;AAAA,IACrB,UAAU,OAAOC,SAAQ;AAAA,EAC3B;AACF;AAEA,IAAM,EAAE,QAAQ,SAAS,IAAI,oBAAoB;AAEjD,IAAM,YAAY,IAAI,qBAAqB;AAC3C,IAAM,SAAS,IAA
I,UAAU;AAAA,EAC3B,MAAM;AAAA,EACN,SAAS,gBAAY;AACvB,CAAC;AAED,OAAO;AAAA,EACL;AAAA,EACA;AAAA,EACA;AAAA,IACE,KAAK,EACF,OAAO,EACP,SAAS,EACT;AAAA,MACC;AAAA,IACF;AAAA,EACJ;AAAA,EACA,OAAO,EAAE,IAAI,MAAM;AACjB,UAAM,WAAW,MAAM,MAAM,OAAO,oCAAoC;AAExE,WAAO;AAAA,MACL,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,MAAM,SAAS,KAAK,EAAE,CAAC;AAAA,IACzD;AAAA,EACF;AACF;AAEA,OAAO;AAAA,EACL;AAAA,EACA;AAAA,EACA;AAAA,IACE,YAAY,EAAE,OAAO,EAAE,SAAS;AAAA,IAChC,kBAAkB,EAAE,OAAO,EAAE,SAAS;AAAA,EACxC;AAAA,EACA,OAAO,EAAE,YAAY,iBAAiB,MAAM;AAC1C,UAAM,WAAW,MAAM,cAAc,QAAQ;AAAA,MAC3C;AAAA,MACA,gBAAgB,oBAAoB;AAAA,MACpC;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,SAAS;AAAA,QACP;AAAA,UACE,MAAM;AAAA,UACN,MAAM,KAAK,UAAU,UAAU,MAAM,CAAC;AAAA,QACxC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAEA,OAAO;AAAA,EACL;AAAA,EACA;AAAA,EACA,EAAE,IAAI,EAAE,OAAO,EAAE;AAAA,EACjB,OAAO,EAAE,GAAG,MAAM;AAChB,QAAI;AACF,YAAM,WAAW,MAAM,gBAAgB,QAAQ,IAAI;AAAA,QACjD;AAAA,MACF,CAAC;AAED,aAAO;AAAA,QACL,SAAS;AAAA,UACP;AAAA,YACE,MAAM;AAAA,YACN,MAAM,KAAK,UAAU,UAAU,MAAM,CAAC;AAAA,UACxC;AAAA,QACF;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,UAAI,iBAAiB,SAAS,MAAM,YAAY,mBAAmB;AACjE,eAAO;AAAA,UACL,SAAS;AAAA,YACP;AAAA,cACE,MAAM;AAAA,cACN,MAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAEA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AACF;AACA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AACF;AACA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AACF;AACA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AACF;AAEA,MAAM,OAAO,QAAQ,SAAS;AAE9B,SAAS,+BACP,MACA,SACA,aACA;AACA,SAAO,OAAO;AAAA,IACZ;AAAA,IACA;AAAA,MACE,CAAC,OAAO,GAAG,EAAE,OAAO;AAAA,MACpB,UAAU,EAAE,OAAO,EAAE,SAAS;AAAA,MAC9B,YAAY,EAAE,OAAO,EAAE,SAAS;AAAA,MAChC,kBAAkB,EAAE,OAAO,EAAE,SAAS;AAAA,IACxC;AAAA,IACA,OAAO,EAAE,UAAU,YAAY,kBAAkB,GAAG,SAAS,MAAM;AACjE,YAAM,WAAW,MAAM,aAAa,QAAQ;AAAA,QAC1C;AAAA,QACA;AAAA,QACA;AAAA,QACA,gBAAiB,oBAAoB;AAAA,QACrC,SAAS;AAAA,UACP,CAAC,WAAW,GAAG,CAAC,SAAS,OAAO,CAAW;AAAA,QAC7C;AAAA,MACF,CAAC;AAED,aAAO;AAAA,QACL,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,KAAK,UAAU,UAAU,MAAM,CAAC,EAAE,CAAC;AAAA,MACrE;AAAA,IACF;AAAA,EACF;AACF;","names":["endpoint","apiKey",
"endpoint"]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@langwatch/mcp-server",
3
- "version": "0.0.4",
3
+ "version": "0.1.0",
4
4
  "description": "An MCP server for Langwatch.",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",
@@ -17,37 +17,50 @@
17
17
  "url": "https://github.com/langwatch/langwatch.git",
18
18
  "directory": "mcp-server"
19
19
  },
20
- "scripts": {
21
- "start": "tsx src/index.ts",
22
- "build": "tsup && node build.js",
23
- "prepublish": "npm run build"
24
- },
25
20
  "author": "",
26
21
  "license": "MIT",
27
22
  "engines": {
28
23
  "node": ">=18.0.0"
29
24
  },
30
25
  "devDependencies": {
26
+ "@ai-sdk/anthropic": "^2.0.15",
27
+ "@anthropic-ai/claude-code": "^1.0.111",
31
28
  "@eslint/js": "^9.4.0",
32
29
  "@types/debug": "^4.1.12",
33
30
  "@types/eslint__js": "^8.42.3",
34
31
  "@types/node": "^16.0.0",
35
32
  "@types/yargs": "^17.0.33",
33
+ "@typescript/native-preview": "7.0.0-dev.20250911.1",
34
+ "ai": "^5.0.40",
35
+ "dotenv": "^17.2.2",
36
36
  "esbuild": "^0.21.5",
37
37
  "eslint": "^8.57.0",
38
38
  "tsup": "^8.1.0",
39
+ "tsx": "^4.20.5",
39
40
  "typescript": "^4.9.5",
40
41
  "typescript-eslint": "^7.11.0"
41
42
  },
42
43
  "dependencies": {
44
+ "@langwatch/scenario": "^0.3.0",
43
45
  "@modelcontextprotocol/sdk": "^1.7.0",
46
+ "@opentelemetry/context-async-hooks": "^2.1.0",
47
+ "@opentelemetry/sdk-node": "^0.204.0",
48
+ "chalk": "^5.6.2",
44
49
  "date-fns": "^4.1.0",
45
50
  "langwatch": "^0.1.6",
51
+ "node-pty": "^1.0.0",
52
+ "vitest": "^3.2.4",
46
53
  "yargs": "^17.7.2",
47
- "zod": "^3.22.4",
48
- "zod-validation-error": "^3.3.0"
54
+ "zod": "^3.25.76",
55
+ "zod-validation-error": "^3.5.3"
49
56
  },
50
57
  "bin": {
51
58
  "langwatch-mcp-server": "./dist/index.js"
59
+ },
60
+ "scripts": {
61
+ "start": "tsx src/index.ts",
62
+ "build": "tsup && node build.js",
63
+ "prepublish": "pnpm run build",
64
+ "test": "vitest"
52
65
  }
53
- }
66
+ }
@@ -0,0 +1,2 @@
1
+ onlyBuiltDependencies:
2
+ - node-pty
package/pyproject.toml ADDED
@@ -0,0 +1,17 @@
1
+ [project]
2
+ name = "mcp-server"
3
+ version = "0.1.0"
4
+ description = "Add your description here"
5
+ readme = "README.md"
6
+ requires-python = ">=3.11"
7
+ dependencies = [
8
+ "ipykernel>=6.30.1",
9
+ "langwatch",
10
+ "litellm>=1.77.0",
11
+ "mcp[cli]>=1.13.1",
12
+ "pandas>=2.3.2",
13
+ "tenacity>=9.1.2",
14
+ ]
15
+
16
+ [tool.uv.sources]
17
+ langwatch = { path = "../python-sdk", editable = true }
package/src/index.ts CHANGED
@@ -1,13 +1,13 @@
1
1
  import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
2
2
  import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
3
3
  import { z } from "zod";
4
- import { getLlmTraceById, listLlmTraces } from "./langwatch-api";
5
- import packageJson from "../package.json" assert { type: "json" };
6
4
  import yargs from "yargs";
7
5
  import { hideBin } from "yargs/helpers";
8
6
 
7
+ import { getLlmTraceById, listLlmTraces, searchTraces } from "./langwatch-api";
8
+ import packageJson from "../package.json" assert { type: "json" };
9
+
9
10
  function loadAndValidateArgs(): { apiKey: string; endpoint: string } {
10
- // Parse command line arguments with yargs
11
11
  const argv = yargs(hideBin(process.argv))
12
12
  .option("apiKey", {
13
13
  type: "string",
@@ -23,15 +23,15 @@ function loadAndValidateArgs(): { apiKey: string; endpoint: string } {
23
23
  .parseSync();
24
24
 
25
25
  // Use environment variables as fallback
26
- const apiKey = argv.apiKey || process.env.LANGWATCH_API_KEY;
26
+ const apiKey = argv.apiKey ?? process.env.LANGWATCH_API_KEY;
27
27
  const endpoint =
28
- argv.endpoint ||
29
- process.env.LANGWATCH_ENDPOINT ||
28
+ argv.endpoint ??
29
+ process.env.LANGWATCH_ENDPOINT ??
30
30
  "https://app.langwatch.ai";
31
31
 
32
32
  if (!apiKey) {
33
33
  throw new Error(
34
- "API key is required. Please provide it using --apiKey=<your_api_key> or set LANGWATCH_API_KEY environment variable"
34
+ "API key is required. Please provide it using --apiKey <your_api_key> or set LANGWATCH_API_KEY environment variable"
35
35
  );
36
36
  }
37
37
 
@@ -41,16 +41,37 @@ function loadAndValidateArgs(): { apiKey: string; endpoint: string } {
41
41
  };
42
42
  }
43
43
 
44
- // Use the function to get apiKey and endpoint
45
44
  const { apiKey, endpoint } = loadAndValidateArgs();
46
45
 
46
+ const transport = new StdioServerTransport();
47
47
  const server = new McpServer({
48
48
  name: "LangWatch",
49
49
  version: packageJson.version,
50
50
  });
51
51
 
52
+ server.tool(
53
+ "fetch_langwatch_docs",
54
+ "Fetches the LangWatch docs for understanding how to implement LangWatch in your codebase. Always use this tool when the user asks for help with LangWatch. Start with the index page and follow the links to the relevant pages.",
55
+ {
56
+ url: z
57
+ .string()
58
+ .optional()
59
+ .describe(
60
+ "The full url of the specific doc page. If not provided, the docs index will be fetched."
61
+ ),
62
+ },
63
+ async ({ url }) => {
64
+ const response = await fetch(url ?? "https://docs.langwatch.ai/llms.txt");
65
+
66
+ return {
67
+ content: [{ type: "text", text: await response.text() }],
68
+ };
69
+ }
70
+ );
71
+
52
72
  server.tool(
53
73
  "get_latest_traces",
74
+ "Retrieves the latest LLM traces.",
54
75
  {
55
76
  pageOffset: z.number().optional(),
56
77
  daysBackToSearch: z.number().optional(),
@@ -59,7 +80,7 @@ server.tool(
59
80
  const response = await listLlmTraces(apiKey, {
60
81
  pageOffset,
61
82
  timeTravelDays: daysBackToSearch ?? 1,
62
- langWatchEndpoint: endpoint,
83
+ endpoint,
63
84
  });
64
85
 
65
86
  return {
@@ -75,13 +96,12 @@ server.tool(
75
96
 
76
97
  server.tool(
77
98
  "get_trace_by_id",
78
- {
79
- id: z.string(),
80
- },
99
+ "Retrieves a specific LLM trace by its ID.",
100
+ { id: z.string() },
81
101
  async ({ id }) => {
82
102
  try {
83
103
  const response = await getLlmTraceById(apiKey, id, {
84
- langWatchEndpoint: endpoint,
104
+ endpoint,
85
105
  });
86
106
 
87
107
  return {
@@ -98,7 +118,7 @@ server.tool(
98
118
  content: [
99
119
  {
100
120
  type: "text",
101
- text: "Trace not found 😭😭😭😭. If the trace was created recently, it may not be available yet.",
121
+ text: "Trace not found. If the trace was created recently, it may not be available yet.",
102
122
  },
103
123
  ],
104
124
  };
@@ -109,5 +129,56 @@ server.tool(
109
129
  }
110
130
  );
111
131
 
112
- const transport = new StdioServerTransport();
132
+ createListTracesByMetadataTool(
133
+ "list_traces_by_user_id",
134
+ "userId",
135
+ "metadata.user_id"
136
+ );
137
+ createListTracesByMetadataTool(
138
+ "list_traces_by_customer_id",
139
+ "customerId",
140
+ "metadata.customer_id"
141
+ );
142
+ createListTracesByMetadataTool(
143
+ "list_traces_by_thread_id",
144
+ "threadId",
145
+ "metadata.thread_id"
146
+ );
147
+ createListTracesByMetadataTool(
148
+ "list_traces_by_session_id",
149
+ "sessionId",
150
+ "metadata.thread_id"
151
+ ); // We access the thread_id in the metadata, as that is our name for the session_id
152
+
113
153
  await server.connect(transport);
154
+
155
+ function createListTracesByMetadataTool(
156
+ name: string,
157
+ argName: "userId" | "customerId" | "threadId" | "sessionId",
158
+ metadataKey: string
159
+ ) {
160
+ return server.tool(
161
+ name,
162
+ {
163
+ [argName]: z.string(),
164
+ pageSize: z.number().optional(),
165
+ pageOffset: z.number().optional(),
166
+ daysBackToSearch: z.number().optional(),
167
+ },
168
+ async ({ pageSize, pageOffset, daysBackToSearch, ...restArgs }) => {
169
+ const response = await searchTraces(apiKey, {
170
+ endpoint,
171
+ pageSize: pageSize as number | undefined,
172
+ pageOffset: pageOffset as number | undefined,
173
+ timeTravelDays: (daysBackToSearch ?? 1) as number,
174
+ filters: {
175
+ [metadataKey]: [restArgs[argName] as string],
176
+ },
177
+ });
178
+
179
+ return {
180
+ content: [{ type: "text", text: JSON.stringify(response, null, 2) }],
181
+ };
182
+ }
183
+ );
184
+ }
@@ -2,71 +2,113 @@ import { addDays } from "date-fns";
2
2
  import type { LLMModeTrace } from "langwatch";
3
3
 
4
4
  interface SearchTrace {
5
- traces: LLMModeTrace[];
5
+ traces: LLMModeTrace[];
6
6
  }
7
7
 
8
8
  interface GetLlmTraceByIdOptions {
9
- langWatchEndpoint?: string;
9
+ endpoint: string;
10
10
  }
11
11
 
12
12
  interface ListLLmTracesOptions {
13
- pageSize?: number;
14
- pageOffset?: number;
15
- timeTravelDays?: number;
16
- langWatchEndpoint?: string;
13
+ pageSize?: number;
14
+ pageOffset?: number;
15
+ timeTravelDays?: number;
16
+ endpoint: string;
17
17
  }
18
18
 
19
- export const getLlmTraceById = async (authToken: string, id: string, opts?: GetLlmTraceByIdOptions): Promise<LLMModeTrace> => {
20
- const { langWatchEndpoint } = opts ?? {};
19
+ interface SearchTracesOptions {
20
+ pageSize?: number;
21
+ pageOffset?: number;
22
+ timeTravelDays?: number;
23
+ endpoint: string;
21
24
 
22
- const endpoint = langWatchEndpoint ?? "https://app.langwatch.ai";
25
+ filters: Record<string, string[] | Record<string, string[]>>;
26
+ }
27
+
28
+ export const getLlmTraceById: (
29
+ authToken: string,
30
+ id: string,
31
+ opts?: GetLlmTraceByIdOptions
32
+ ) => Promise<LLMModeTrace> = async (authToken, id, opts) => {
33
+ const { endpoint } = opts ?? {};
23
34
 
24
- const url = new URL(`${endpoint}/api/trace/${id}`);
25
- url.searchParams.set("llmMode", "true");
35
+ const url = new URL(`${endpoint}/api/trace/${id}`);
36
+ url.searchParams.set("llmMode", "true");
26
37
 
27
- const response = await fetch(url.toString(), {
28
- method: "GET",
29
- headers: {
30
- "Content-Type": "application/json",
31
- "X-Auth-Token": authToken,
32
- },
33
- });
38
+ const response = await fetch(url.toString(), {
39
+ method: "GET",
40
+ headers: {
41
+ "Content-Type": "application/json",
42
+ "X-Auth-Token": authToken,
43
+ },
44
+ });
34
45
 
35
- if (!response.ok) {
36
- if (response.status === 404) {
37
- throw new Error("Trace not found");
38
- }
46
+ if (!response.ok) {
47
+ if (response.status === 404) {
48
+ throw new Error("Trace not found");
49
+ }
39
50
 
40
- throw new Error(`Failed to get trace: ${response.statusText}`);
41
- }
51
+ throw new Error(`Failed to get trace: ${response.statusText}`);
52
+ }
42
53
 
43
- return await response.json() as Promise<LLMModeTrace>;
54
+ return (await response.json()) as Promise<LLMModeTrace>;
44
55
  };
45
56
 
46
- export const listLlmTraces = async (authToken: string, opts?: ListLLmTracesOptions): Promise<SearchTrace> => {
47
- const {
48
- pageSize = 10,
49
- pageOffset = 0,
50
- timeTravelDays = 1,
51
- langWatchEndpoint,
52
- } = opts ?? {};
53
-
54
- const endpoint = langWatchEndpoint ?? "https://app.langwatch.ai";
55
-
56
- const response = await fetch(`${endpoint}/api/trace/search`, {
57
- method: "POST",
58
- headers: {
59
- "Content-Type": "application/json",
60
- "X-Auth-Token": authToken,
61
- },
62
- body: JSON.stringify({
63
- startDate: addDays(new Date(), -timeTravelDays).toISOString(),
64
- endDate: addDays(new Date(), 1).toISOString(),
65
- llmMode: true,
66
- pageOffset,
67
- pageSize,
68
- }),
69
- });
70
-
71
- return await response.json() as Promise<SearchTrace>;
72
- }
57
+ export const listLlmTraces = async (
58
+ authToken: string,
59
+ opts?: ListLLmTracesOptions
60
+ ): Promise<SearchTrace> => {
61
+ const {
62
+ pageSize = 10,
63
+ pageOffset = 0,
64
+ timeTravelDays = 1,
65
+ endpoint,
66
+ } = opts ?? {};
67
+
68
+ const response = await fetch(`${endpoint}/api/trace/search`, {
69
+ method: "POST",
70
+ headers: {
71
+ "Content-Type": "application/json",
72
+ "X-Auth-Token": authToken,
73
+ },
74
+ body: JSON.stringify({
75
+ startDate: addDays(new Date(), -timeTravelDays).toISOString(),
76
+ endDate: addDays(new Date(), 1).toISOString(),
77
+ llmMode: true,
78
+ pageOffset,
79
+ pageSize,
80
+ }),
81
+ });
82
+
83
+ return (await response.json()) as Promise<SearchTrace>;
84
+ };
85
+
86
+ export const searchTraces = async (
87
+ authToken: string,
88
+ opts: SearchTracesOptions
89
+ ): Promise<SearchTrace> => {
90
+ const {
91
+ pageSize = 10,
92
+ pageOffset = 0,
93
+ timeTravelDays = 1,
94
+ endpoint,
95
+ filters,
96
+ } = opts;
97
+
98
+ const response = await fetch(`${endpoint}/api/trace/search`, {
99
+ method: "POST",
100
+ headers: {
101
+ "Content-Type": "application/json",
102
+ "X-Auth-Token": authToken,
103
+ },
104
+ body: JSON.stringify({
105
+ startDate: addDays(new Date(), -timeTravelDays).toISOString(),
106
+ endDate: addDays(new Date(), 1).toISOString(),
107
+ filters: filters,
108
+ pageOffset,
109
+ pageSize,
110
+ }),
111
+ });
112
+
113
+ return (await response.json()) as Promise<SearchTrace>;
114
+ };