@dcluttr/dclare-mcp 0.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,32 @@
1
import { z } from "zod";

/**
 * Boolean env-var parser.
 *
 * BUG FIX: the previous `z.coerce.boolean()` applies `Boolean(value)`, so the
 * strings "false" and "0" coerce to `true` — e.g. REQUIRE_AUTH_TOKEN=false or
 * OTEL_ENABLED=false could never actually disable those features. Parse the
 * usual falsy spellings explicitly instead.
 *
 * @param {boolean} defaultValue - value used when the variable is unset
 */
const booleanFromEnv = (defaultValue) => z.preprocess((value) => {
    if (typeof value === "string") {
        return !["false", "0", "no", "off", ""].includes(value.trim().toLowerCase());
    }
    return value;
}, z.boolean()).default(defaultValue);

// Schema for every environment variable the server reads; all have safe
// defaults except secrets/keys, which stay optional.
const envSchema = z.object({
    NODE_ENV: z.enum(["development", "test", "production"]).default("development"),
    MCP_SERVER_NAME: z.string().default("@dcluttr/dclare-mcp"),
    MCP_SERVER_VERSION: z.string().default("0.1.0"),
    JWT_SECRET: z.string().min(16).optional(),
    REQUIRE_AUTH_TOKEN: booleanFromEnv(true),
    DEFAULT_BRAND_ID: z.string().default("1131"),
    MAX_ROWS: z.coerce.number().int().positive().default(40_000),
    MAX_PREVIEW_ROWS: z.coerce.number().int().positive().default(200),
    CUBE_API_URL: z.string().url().default("https://apollo.dcluttr.ai/cubejs-api"),
    CUBE_API_TOKEN: z.string().optional(),
    CUBE_QUERY_TIMEOUT_MS: z.coerce.number().int().positive().default(20_000),
    METADATA_PATH: z.string().default("context/datasets.json"),
    REDIS_URL: z.string().url().optional(),
    CACHE_TTL_SECONDS: z.coerce.number().int().positive().default(120),
    CACHE_PREFIX: z.string().default("ttd:mcp:query"),
    OTEL_ENABLED: booleanFromEnv(true),
    OTEL_CONSOLE_EXPORTER: booleanFromEnv(false),
    LANGFUSE_ENABLED: booleanFromEnv(false),
    LANGFUSE_BASE_URL: z.string().url().default("https://cloud.langfuse.com"),
    LANGFUSE_PUBLIC_KEY: z.string().optional(),
    LANGFUSE_SECRET_KEY: z.string().optional(),
    LANGFUSE_TIMEOUT_MS: z.coerce.number().int().positive().default(5000),
    PLANNER_PROVIDER: z.enum(["heuristic", "openai"]).default("heuristic"),
    OPENAI_API_KEY: z.string().optional(),
    OPENAI_BASE_URL: z.string().url().default("https://api.openai.com/v1"),
    OPENAI_MODEL: z.string().default("gpt-4.1-mini"),
    AUTH_LOGIN_URL: z.string().url().default("https://auth.dcluttr.ai/auth/login"),
    AUTH_LOGIN_TIMEOUT_MS: z.coerce.number().int().positive().default(15000)
});

// Validated once at module load; a malformed environment fails fast at startup.
export const env = envSchema.parse(process.env);
package/dist/index.js ADDED
@@ -0,0 +1,212 @@
1
+ #!/usr/bin/env node
2
+ import { McpServer, ResourceTemplate } from "@modelcontextprotocol/sdk/server/mcp.js";
3
+ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
4
+ import { z } from "zod";
5
+ import { env } from "./config/env.js";
6
+ import { AppError } from "./utils/errors.js";
7
+ import { textResult } from "./utils/result.js";
8
+ import { AuthService } from "./services/auth-service.js";
9
+ import { CubeClient } from "./services/cube-client.js";
10
+ import { LangfuseHook } from "./services/langfuse-hook.js";
11
+ import { MetadataStore } from "./services/metadata-store.js";
12
+ import { ObservabilityService } from "./services/observability-service.js";
13
+ import { PlannerExecutor } from "./services/planner-executor.js";
14
+ import { PlannerService } from "./services/planner-service.js";
15
+ import { QueryCache } from "./services/query-cache.js";
16
+ import { QueryGuardrails } from "./services/query-guardrails.js";
17
+ import { QueryService } from "./services/query-service.js";
18
+ import { ResultProfiler } from "./services/result-profiler.js";
19
+ import { plannerOutputSchema, queryFilterSchema, queryOrderSchema, queryTimeDimensionSchema } from "./types/planner.js";
20
// --- Service wiring --------------------------------------------------------
// Process-wide singletons shared by every tool handler registered below.
const metadataStore = new MetadataStore();
const authService = new AuthService();
const observability = new ObservabilityService();
const langfuse = new LangfuseHook();
// QueryService orchestrates guardrails, Cube execution, result profiling,
// caching, and observability/Langfuse hooks around each semantic query.
const queryService = new QueryService(metadataStore, new QueryGuardrails(), new CubeClient(), new ResultProfiler(), new QueryCache(), observability, langfuse);
const plannerService = new PlannerService(metadataStore);
const plannerExecutor = new PlannerExecutor(queryService);
// Server identity (name/version) is environment-driven; see config/env.js.
const server = new McpServer({
    name: env.MCP_SERVER_NAME,
    version: env.MCP_SERVER_VERSION
});
31
// Tool: list_datasets — enumerate every configured dataset with its metrics
// and dimensions. Takes no input.
server.registerTool(
    "list_datasets",
    {
        title: "List datasets",
        description: "Lists datasets, metrics, and dimensions available to the MCP client."
    },
    async () => {
        const available = metadataStore.listDatasets();
        return textResult(`Found ${available.length} dataset(s).`, { datasets: available });
    }
);
38
// Tool: search_datasets — keyword search across dataset metadata, returning
// the top 10 ranked matches.
server.registerTool("search_datasets", {
    title: "Search datasets",
    description: "Searches datasets by keyword against names, descriptions, metrics, and column names. Returns the top 10 matches ranked by relevance. Use this BEFORE ask_data_question or run_semantic_query to find the correct dataset name.",
    inputSchema: {
        query: z.string().min(1).describe("Search keywords, e.g. 'blinkit city sku' or 'zepto ads keywords'")
    }
}, async (args) => {
    const results = metadataStore.searchDatasets(args.query);
    if (results.length === 0) {
        return textResult("No datasets matched your search. Try broader keywords.");
    }
    // BUG FIX: the message previously interpolated JSON.stringify(results)
    // ("Found [entire JSON payload] matching dataset(s)."); report the count
    // instead — the full results are already returned as structured data.
    return textResult(`Found ${results.length} matching dataset(s).`, { datasets: results });
});
51
// Tool: get_dataset_context — full metadata (columns, metrics, joined
// dimensions) for a single dataset.
server.registerTool(
    "get_dataset_context",
    {
        title: "Get dataset context",
        description: "Returns full context, columns, metrics, examples, and available dimensions for a dataset.",
        inputSchema: {
            dataset: z.string().min(1)
        }
    },
    async (args) => {
        try {
            const resolved = metadataStore.getDatasetWithJoins(args.dataset);
            const summary = `Loaded context for '${resolved.name}'. ${resolved.joinedDimensions.length} joined dimension(s) available.`;
            return textResult(summary, { dataset: resolved });
        }
        catch (error) {
            return toToolError(error);
        }
    }
);
66
// Tool: run_semantic_query — execute a caller-provided semantic query under
// tenant guardrails. The caller supplies the complete query shape; the
// planner tools construct this shape automatically.
server.registerTool("run_semantic_query", {
    title: "Run semantic query",
    description: "Runs a semantic query against the data platform with guardrails. " +
        "IMPORTANT: Brand-specific datasets contain both the user's brand AND competitor data. " +
        "When the user asks about their own brand's performance, include the 'curr_brand' segment to filter to their brand only. " +
        "When the user asks for competitor analysis or market-wide comparison, do NOT include 'curr_brand' so all brands are returned.",
    inputSchema: {
        // Optional explicit JWT; falls back to the session cached by login_brand.
        tenantToken: z.string().optional(),
        query: z.object({
            dataset: z.string(),
            metrics: z.array(z.string()).optional(),
            dimensions: z.array(z.string()).optional(),
            segments: z.array(z.string()).optional(),
            filters: z.array(queryFilterSchema).optional(),
            order: z.array(queryOrderSchema).optional(),
            timeDimensions: z.array(queryTimeDimensionSchema).optional(),
            limit: z.number().int().positive().optional()
        })
    }
}, async (args) => {
    try {
        // Resolve brand/role for guardrails, then a bearer token for Cube.
        const tenant = authService.resolveTenantContext(args.tenantToken);
        const token = authService.getToken(args.tenantToken);
        const result = await queryService.runQuery(args.query, tenant, token);
        return textResult(`Query executed for dataset '${result.dataset}'. Returned ${result.rowCount} row(s).\n\n${JSON.stringify(result.previewRows, null, 2)}`, result);
    }
    catch (error) {
        // Normalize all failures into MCP tool errors (no stack traces leaked).
        return toToolError(error);
    }
});
96
// Tool: ask_data_question — natural language -> validated semantic plan,
// optionally executed. The whole pipeline runs inside one tracing span.
server.registerTool("ask_data_question", {
    title: "Ask data question",
    description: "Converts natural language to a strict semantic query plan and optionally executes it with tenant guardrails. " +
        "IMPORTANT: Always call search_datasets first to resolve the exact dataset name and pass it as datasetHint. Do NOT guess dataset names. " +
        "Brand-specific datasets contain both the user's brand AND competitor data. " +
        "When the question is about the user's own brand, the plan should include the 'curr_brand' segment. " +
        "When the question involves competitor analysis or market-wide data, do NOT include 'curr_brand'.",
    inputSchema: {
        tenantToken: z.string().optional(),
        question: z.string().min(5),
        datasetHint: z.string().optional(),
        limitHint: z.number().int().positive().optional(),
        // When false, only the plan is returned (dry run; no query executed).
        execute: z.boolean().default(true)
    },
    outputSchema: {
        plan: plannerOutputSchema,
        executed: z.boolean(),
        execution: z.unknown().optional()
    }
}, async (args) => {
    try {
        // One span covers planning AND execution so they share a trace.
        return await observability.span("planner.ask_data_question", {
            "planner.provider": env.PLANNER_PROVIDER,
            "planner.execute": args.execute
        }, async () => {
            const tenant = authService.resolveTenantContext(args.tenantToken);
            const token = authService.getToken(args.tenantToken);
            // Re-validate the planner output against the schema before trusting it.
            const plan = plannerOutputSchema.parse(await plannerService.plan({
                question: args.question,
                datasetHint: args.datasetHint,
                limitHint: args.limitHint
            }));
            // Best-effort analytics event; LangfuseHook swallows its own errors.
            await langfuse.capture({
                name: "planner.generated",
                input: {
                    question: args.question,
                    datasetHint: args.datasetHint
                },
                output: plan,
                metadata: {
                    provider: env.PLANNER_PROVIDER,
                    tenant: tenant.brandId
                }
            });
            if (!args.execute) {
                return textResult("Generated semantic plan only.", {
                    plan,
                    executed: false
                });
            }
            const execution = await plannerExecutor.executePlan(plan, tenant, token);
            return textResult(`Planned and executed query for dataset '${execution.dataset}'. Returned ${execution.rowCount} row(s).\n\n${JSON.stringify(execution.previewRows, null, 2)}`, {
                plan,
                execution,
                executed: true
            });
        });
    }
    catch (error) {
        // Planning, validation, and execution errors all surface uniformly.
        return toToolError(error);
    }
});
158
// Tool: login_brand — password login; caches the session in AuthService so
// later tool calls need no explicit token.
server.registerTool(
    "login_brand",
    {
        title: "Login brand",
        description: "Authenticates with the Dcluttr auth service using email and password. Stores the session so subsequent queries run under the correct brand without requiring a manual token.",
        inputSchema: {
            email: z.string().email(),
            password: z.string().min(1)
        }
    },
    async (args) => {
        try {
            const session = await authService.login(args.email, args.password);
            return textResult(`Logged in successfully. Active brand ID: ${session.brandId}.`, { brandId: session.brandId });
        }
        catch (error) {
            return toToolError(error);
        }
    }
);
174
// Resource: catalog://datasets — the full metadata catalog as pretty-printed JSON.
server.resource("catalog", new ResourceTemplate("catalog://datasets", { list: undefined }), {
    title: "Dataset catalog",
    description: "Complete metadata catalog for dataset, columns, and metric context.",
    mimeType: "application/json"
}, async () => {
    const entry = {
        uri: "catalog://datasets",
        mimeType: "application/json",
        text: JSON.stringify(metadataStore.getCatalog(), null, 2)
    };
    return { contents: [entry] };
});
187
/**
 * Entry point: serve MCP requests over stdio. connect() resolves once the
 * transport is wired up; the process then stays alive handling requests.
 */
async function main() {
    const stdio = new StdioServerTransport();
    await server.connect(stdio);
}
191
/**
 * Map an internal failure to an MCP tool error payload, hiding stack traces
 * and internals from the client. AppError messages are user-safe by design;
 * everything else collapses to a generic message.
 */
function toToolError(error) {
    let message;
    if (error instanceof AppError) {
        message = error.message;
    }
    else if (error instanceof z.ZodError) {
        message = "Invalid input. Please check your query parameters and try again.";
    }
    else {
        message = "Something went wrong. Please try again.";
    }
    return {
        isError: true,
        ...textResult(message)
    };
}
209
// Startup failure handler. BUG FIX: the caught error was previously discarded,
// so crashes printed only a generic line and were impossible to diagnose.
// Detail goes to stderr — stdout is reserved for the MCP stdio transport.
main().catch((error) => {
    process.stderr.write("Failed to start MCP server.\n");
    const detail = error instanceof Error ? error.stack ?? error.message : String(error);
    process.stderr.write(`${detail}\n`);
    process.exit(1);
});
@@ -0,0 +1,9 @@
1
import jwt from "jsonwebtoken";

// CLI helper: mint a 12-hour brand JWT for local testing.
// Usage: JWT_SECRET=... node <script> [brandId] [role]
const secret = process.env.JWT_SECRET;
if (!secret) {
    throw new Error("JWT_SECRET is required");
}
const [, , brandArg, roleArg] = process.argv;
const brandId = brandArg ?? "demo-brand";
const role = roleArg ?? "brand_user";
const token = jwt.sign({ brandId, role }, secret, { expiresIn: "12h" });
process.stdout.write(`${token}\n`);
@@ -0,0 +1,84 @@
1
+ import { env } from "../config/env.js";
2
+ import { AppError } from "../utils/errors.js";
3
/**
 * Brand authentication: password login against the Dcluttr auth service,
 * per-process session caching, and extraction of tenant context
 * (brandId/role) from JWT payloads.
 */
export class AuthService {
    // Active session: { token, brandId, role } | null until login succeeds.
    session = null;
    /**
     * Authenticate with email/password and cache the resulting session.
     * @param {string} email
     * @param {string} password
     * @returns {Promise<{brandId: string}>} the brand the session is scoped to
     * @throws {AppError} AUTH_FAILED on bad credentials, missing token in the
     *   response, timeout, or network failure
     */
    async login(email, password) {
        // Abort if the auth service hangs past the configured timeout.
        const controller = new AbortController();
        const timeout = setTimeout(() => controller.abort(), env.AUTH_LOGIN_TIMEOUT_MS);
        try {
            const response = await fetch(env.AUTH_LOGIN_URL, {
                method: "POST",
                headers: {
                    "Content-Type": "application/json",
                    Accept: "application/json"
                },
                body: JSON.stringify({ email, password }),
                signal: controller.signal
            });
            if (!response.ok) {
                throw new AppError("AUTH_FAILED", "Login failed. Please check your email and password.", 401);
            }
            const data = (await response.json());
            // Different auth deployments nest the token differently; probe
            // the known top-level and data-nested shapes in order.
            const nested = (data.data ?? {});
            const token = String(data.token ?? data.access_token ?? data.accessToken ?? nested.accessToken ?? nested.access_token ?? nested.token ?? "");
            if (!token) {
                throw new AppError("AUTH_FAILED", "Login succeeded but session could not be established. Please try again.", 401);
            }
            const payload = this.decodeJwtPayload(token);
            const { brandId, role } = this.extractTenant(payload);
            this.session = { token, brandId, role };
            return { brandId };
        }
        catch (error) {
            // Keep deliberate AppErrors; wrap everything else (network, abort,
            // JSON parse) as a connection failure.
            if (error instanceof AppError)
                throw error;
            throw new AppError("AUTH_FAILED", "Failed to connect to the authentication service.", 502);
        }
        finally {
            clearTimeout(timeout);
        }
    }
    /**
     * Return a usable bearer token: an explicitly supplied token wins,
     * otherwise the cached session token.
     * @throws {AppError} AUTH_REQUIRED when neither is available
     */
    getToken(explicitToken) {
        if (explicitToken)
            return explicitToken;
        if (this.session)
            return this.session.token;
        throw new AppError("AUTH_REQUIRED", "Not logged in. Use the login_brand tool to authenticate first.", 401);
    }
    /**
     * Resolve { brandId, role } for the current call. Precedence: explicit
     * token > cached session > configured default brand (only when
     * REQUIRE_AUTH_TOKEN is disabled).
     * @throws {AppError} AUTH_REQUIRED when auth is required and absent
     */
    resolveTenantContext(token) {
        if (token) {
            const payload = this.decodeJwtPayload(token);
            return this.extractTenant(payload);
        }
        if (this.session) {
            return { brandId: this.session.brandId, role: this.session.role };
        }
        if (!env.REQUIRE_AUTH_TOKEN) {
            return { brandId: env.DEFAULT_BRAND_ID, role: "brand_user" };
        }
        throw new AppError("AUTH_REQUIRED", "No authentication token provided. Use the login_brand tool to authenticate first.", 401);
    }
    /**
     * Pull brandId and role from a decoded JWT payload. Only the FIRST brand
     * authority is used — multi-brand tokens collapse to their first entry.
     * @throws {AppError} AUTH_INVALID_TOKEN when the payload has no brandId
     */
    extractTenant(payload) {
        const brandEntry = payload.organizationAuthorities?.organizationBrandAuthorities?.[0];
        const brandId = String(brandEntry?.brandId ?? "");
        if (!brandId) {
            throw new AppError("AUTH_INVALID_TOKEN", "Token does not contain a brandId.", 401);
        }
        const roles = payload.authorities?.map((a) => a.role) ?? [];
        // Prefer write_user when present; otherwise first listed role, else default.
        const role = roles.includes("write_user") ? "write_user" : roles[0] ?? "brand_user";
        return { brandId, role };
    }
    /**
     * Decode a JWT's payload segment WITHOUT verifying its signature — this
     * only extracts claims; trust comes from the upstream issuer.
     * @throws {AppError} AUTH_INVALID_TOKEN for malformed tokens
     */
    decodeJwtPayload(token) {
        try {
            const parts = token.split(".");
            if (parts.length !== 3)
                throw new Error("Not a JWT");
            // base64url -> base64 (Buffer tolerates the missing '=' padding).
            const padded = parts[1].replace(/-/g, "+").replace(/_/g, "/");
            const json = Buffer.from(padded, "base64").toString("utf8");
            return JSON.parse(json);
        }
        catch {
            throw new AppError("AUTH_INVALID_TOKEN", "Failed to decode authentication token.", 401);
        }
    }
}
@@ -0,0 +1,30 @@
1
+ import axios from "axios";
2
+ import { env } from "../config/env.js";
3
+ import { AppError } from "../utils/errors.js";
4
/**
 * Thin HTTP client for the Cube.js REST API.
 */
export class CubeClient {
    /**
     * POST a semantic query to Cube's /v1/load endpoint.
     * @param {object} query - Cube query payload
     * @param {string} brandId - tenant brand id, sent as the `brandid` header
     * @param {string} authToken - bearer token, sent as the Authorization header
     * @returns {Promise<object>} raw Cube response body
     * @throws {AppError} QUERY_ERROR (401 for expired auth, 502 otherwise)
     */
    async load(query, brandId, authToken) {
        const url = `${env.CUBE_API_URL}/v1/load`;
        const requestConfig = {
            headers: {
                "Content-Type": "application/json",
                Authorization: authToken,
                brandid: brandId
            },
            timeout: env.CUBE_QUERY_TIMEOUT_MS
        };
        try {
            const response = await axios.post(url, { query }, requestConfig);
            return response.data;
        }
        catch (error) {
            // Pass deliberate AppErrors through untouched.
            if (error instanceof AppError) {
                throw error;
            }
            const status = axios.isAxiosError(error) ? error.response?.status : undefined;
            if (status === 401 || status === 403) {
                throw new AppError("QUERY_ERROR", "Authentication expired. Please login again using login_brand.", 401);
            }
            throw new AppError("QUERY_ERROR", "Failed to execute query. Please try again.", 502);
        }
    }
}
@@ -0,0 +1,46 @@
1
+ import crypto from "node:crypto";
2
+ import { env } from "../config/env.js";
3
/**
 * Fire-and-forget event ingestion into Langfuse. Deliberately best-effort:
 * capture() never throws, so observability can never break tool execution.
 */
export class LangfuseHook {
    /**
     * Send one event to the Langfuse ingestion API.
     * @param {{name: string, input?: unknown, output?: unknown, metadata?: unknown, level?: string}} event
     */
    async capture(event) {
        // Skip silently unless Langfuse is enabled AND fully configured.
        const configured = env.LANGFUSE_ENABLED && env.LANGFUSE_PUBLIC_KEY && env.LANGFUSE_SECRET_KEY;
        if (!configured) {
            return;
        }
        const credentials = Buffer.from(`${env.LANGFUSE_PUBLIC_KEY}:${env.LANGFUSE_SECRET_KEY}`).toString("base64");
        const payload = {
            batch: [
                {
                    id: crypto.randomUUID(),
                    type: "event-create",
                    timestamp: new Date().toISOString(),
                    body: {
                        name: event.name,
                        input: event.input,
                        output: event.output,
                        metadata: event.metadata,
                        level: event.level ?? "DEFAULT"
                    }
                }
            ]
        };
        const controller = new AbortController();
        const abortTimer = setTimeout(() => controller.abort(), env.LANGFUSE_TIMEOUT_MS);
        try {
            await fetch(`${env.LANGFUSE_BASE_URL}/api/public/ingestion`, {
                method: "POST",
                headers: {
                    Authorization: `Basic ${credentials}`,
                    "Content-Type": "application/json"
                },
                body: JSON.stringify(payload),
                signal: controller.signal
            });
        }
        catch {
            // Best-effort hook: never fail tool execution on observability side-effects.
        }
        finally {
            clearTimeout(abortTimer);
        }
    }
}
@@ -0,0 +1,103 @@
1
+ import fs from "node:fs";
2
+ import path from "node:path";
3
+ import { fileURLToPath } from "node:url";
4
+ import { env } from "../config/env.js";
5
+ import { AppError } from "../utils/errors.js";
6
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// Package root (two levels above dist/services/); relative metadata paths
// resolve against it.
const PKG_ROOT = path.resolve(__dirname, "../..");

/**
 * In-memory dataset catalog, loaded once from the metadata JSON file.
 * Provides lookup, join resolution, and keyword search over datasets.
 */
export class MetadataStore {
    // Parsed catalog: { datasets: [...] }.
    catalog;
    constructor() {
        const metadataPath = path.isAbsolute(env.METADATA_PATH)
            ? env.METADATA_PATH
            : path.resolve(PKG_ROOT, env.METADATA_PATH);
        this.catalog = JSON.parse(fs.readFileSync(metadataPath, "utf8"));
    }
    /** The raw catalog object. */
    getCatalog() {
        return this.catalog;
    }
    /** Compact summaries (names only for metrics/dimensions/joins). */
    listDatasets() {
        const summaries = [];
        for (const dataset of this.catalog.datasets) {
            summaries.push({
                name: dataset.name,
                description: dataset.description,
                grain: dataset.grain,
                owner: dataset.owner,
                metrics: dataset.metrics.map((metric) => metric.name),
                dimensions: dataset.columns.map((column) => column.name),
                joins: dataset.joins?.map((join) => join.cube)
            });
        }
        return summaries;
    }
    /**
     * Exact-name lookup.
     * @throws {AppError} DATASET_NOT_FOUND when the name is not configured
     */
    getDataset(name) {
        const match = this.catalog.datasets.find((item) => item.name === name);
        if (!match) {
            throw new AppError("DATASET_NOT_FOUND", `Dataset '${name}' is not configured.`, 404);
        }
        return match;
    }
    /** Dataset plus the dimensions reachable through its joins. */
    getDatasetWithJoins(name) {
        const base = this.getDataset(name);
        return {
            ...base,
            joinedDimensions: this.resolveJoinedDimensions(base)
        };
    }
    /** Flatten each joined cube's columns into qualified dimension entries. */
    resolveJoinedDimensions(dataset) {
        const dimensions = [];
        for (const join of dataset.joins ?? []) {
            const target = this.catalog.datasets.find((d) => d.name === join.cube);
            if (!target) {
                continue;
            }
            for (const col of target.columns) {
                dimensions.push({
                    name: `${join.cube}.${col.name}`,
                    dataType: col.dataType,
                    description: col.description,
                    fromCube: join.cube,
                    relationship: join.relationship
                });
            }
        }
        return dimensions;
    }
    /**
     * Rank datasets by the fraction of query tokens (length > 1) found in
     * their searchable text; return the top 10 with score > 0. An empty
     * token set degrades to listing everything.
     */
    searchDatasets(query) {
        const tokens = query
            .toLowerCase()
            .split(/\s+/)
            .filter((token) => token.length > 1);
        if (tokens.length === 0) {
            return this.listDatasets();
        }
        const ranked = [];
        for (const dataset of this.catalog.datasets) {
            const searchText = [
                dataset.name,
                dataset.description,
                dataset.owner,
                ...dataset.metrics.map((m) => `${m.name} ${m.description}`),
                ...dataset.columns.map((c) => `${c.name} ${c.description} ${(c.synonyms ?? []).join(" ")}`),
                ...(dataset.joins ?? []).map((j) => j.cube)
            ]
                .join(" ")
                .toLowerCase();
            const matched = tokens.filter((token) => searchText.includes(token)).length;
            const score = matched / tokens.length;
            if (score > 0) {
                ranked.push({ dataset, score });
            }
        }
        ranked.sort((a, b) => b.score - a.score);
        return ranked.slice(0, 10).map(({ dataset, score }) => ({
            name: dataset.name,
            description: dataset.description,
            grain: dataset.grain,
            owner: dataset.owner,
            score,
            metrics: dataset.metrics.map((m) => m.name),
            dimensions: dataset.columns.map((c) => c.name),
            joins: dataset.joins?.map((j) => j.cube),
            joinedDimensions: this.resolveJoinedDimensions(dataset).map((d) => d.name)
        }));
    }
    /** True when a dataset with this exact name exists in the catalog. */
    isDatasetAllowed(name) {
        return this.catalog.datasets.some((dataset) => dataset.name === name);
    }
}
@@ -0,0 +1,49 @@
1
+ import { trace, context, SpanStatusCode } from "@opentelemetry/api";
2
+ import { BasicTracerProvider, BatchSpanProcessor, ConsoleSpanExporter } from "@opentelemetry/sdk-trace-base";
3
+ import { Resource } from "@opentelemetry/resources";
4
+ import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION } from "@opentelemetry/semantic-conventions";
5
+ import { env } from "../config/env.js";
6
// One-time OpenTelemetry bootstrap guard for this module.
let initialized = false;
/**
 * Install a global tracer provider on first use. No-op when tracing is
 * disabled (the flag stays false so a later call re-checks) or when the
 * provider has already been registered.
 */
function ensureTracerProvider() {
    if (initialized) {
        return;
    }
    if (!env.OTEL_ENABLED) {
        return;
    }
    const resource = new Resource({
        [ATTR_SERVICE_NAME]: env.MCP_SERVER_NAME,
        [ATTR_SERVICE_VERSION]: env.MCP_SERVER_VERSION
    });
    const provider = new BasicTracerProvider({ resource });
    if (env.OTEL_CONSOLE_EXPORTER) {
        provider.addSpanProcessor(new BatchSpanProcessor(new ConsoleSpanExporter()));
    }
    trace.setGlobalTracerProvider(provider);
    initialized = true;
}
23
/**
 * Tracing facade: ensures the OTel provider exists and runs async work
 * inside named spans with status + exception recording.
 */
export class ObservabilityService {
    // Tracer for this service; assigned in the constructor once the global
    // provider is guaranteed to be installed. FIX: the previous field
    // initializer `trace.getTracer("talk-to-data-mcp")` was dead code —
    // immediately overwritten by the constructor with the env-based name.
    tracer;
    constructor() {
        ensureTracerProvider();
        this.tracer = trace.getTracer(env.MCP_SERVER_NAME);
    }
    /**
     * Run `fn` inside a span named `name` with the given attributes. Sets
     * OK/ERROR status, records exceptions, always ends the span, and
     * re-throws failures to the caller.
     * @param {string} name - span name
     * @param {object} attributes - initial span attributes
     * @param {() => Promise<T>} fn - work to trace
     * @returns {Promise<T>} fn's result
     */
    async span(name, attributes, fn) {
        return await this.tracer.startActiveSpan(name, { attributes }, async (span) => {
            try {
                // startActiveSpan already makes the span's context active for
                // this callback; the former `context.with(context.active(), fn)`
                // wrapper re-activated the current context — a no-op.
                const result = await fn();
                span.setStatus({ code: SpanStatusCode.OK });
                return result;
            }
            catch (error) {
                span.setStatus({
                    code: SpanStatusCode.ERROR,
                    message: error instanceof Error ? error.message : String(error)
                });
                span.recordException(error);
                throw error;
            }
            finally {
                span.end();
            }
        });
    }
}
@@ -0,0 +1,9 @@
1
/**
 * Bridges planner output to query execution: runs the semantic query a plan
 * carries through the shared QueryService.
 */
export class PlannerExecutor {
    // Service exposing runQuery(query, tenant, authToken).
    queryService;
    constructor(queryService) {
        this.queryService = queryService;
    }
    /**
     * Execute the semantic query embedded in a planner output.
     * @param {{semanticQuery: object}} plan - validated planner output
     * @param {object} tenant - tenant context ({ brandId, role })
     * @param {string} authToken - bearer token forwarded to the data platform
     */
    async executePlan(plan, tenant, authToken) {
        const { semanticQuery } = plan;
        return this.queryService.runQuery(semanticQuery, tenant, authToken);
    }
}