@daghis/teamcity-mcp 1.9.2 → 1.9.4

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
@@ -0,0 +1,19 @@
+ {
+   "packages": {
+     ".": {
+       "release-type": "node",
+       "extra-files": [
+         {
+           "type": "json",
+           "path": "server.json",
+           "jsonpath": "$.version"
+         },
+         {
+           "type": "json",
+           "path": "server.json",
+           "jsonpath": "$.packages[0].version"
+         }
+       ]
+     }
+   }
+ }
@@ -0,0 +1,3 @@
+ {
+   ".": "1.9.4"
+ }
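The two files added above are not named in this diff, but they match the shape of a release-please configuration and version manifest: the `extra-files` entries ask the release tooling to bump two extra version fields inside `server.json`, addressed by the JSONPath expressions `$.version` and `$.packages[0].version`. A minimal sketch of what that update amounts to, assuming `server.json` is a manifest with a top-level `version` and a `packages` array (the interface below is an assumption, not the file's actual schema):

```ts
import { readFileSync, writeFileSync } from 'node:fs';

// Hypothetical shape implied by the two JSONPath expressions above
// ($.version and $.packages[0].version); the real manifest may have more fields.
interface ServerManifest {
  version: string;
  packages: Array<{ version: string }>;
}

// Rough equivalent of what the release tooling does for these extra-files entries.
function bumpServerJson(path: string, nextVersion: string): void {
  const manifest = JSON.parse(readFileSync(path, 'utf8')) as ServerManifest;
  manifest.version = nextVersion;             // $.version
  manifest.packages[0].version = nextVersion; // $.packages[0].version
  writeFileSync(path, `${JSON.stringify(manifest, null, 2)}\n`);
}

bumpServerJson('server.json', '1.9.4');
```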
package/CHANGELOG.md CHANGED
@@ -1,5 +1,18 @@
  # Changelog

+ ## [1.9.4](https://github.com/Daghis/teamcity-mcp/compare/v1.9.3...v1.9.4) (2025-09-25)
+
+ ### Bug Fixes
+
+ * **tools:** merge build step defaults during update (199) ([#203](https://github.com/Daghis/teamcity-mcp/issues/203)) ([43a668f](https://github.com/Daghis/teamcity-mcp/commit/43a668f0bdc3b0f69c6a3bca0e2a47c49f1cd1a7))
+
+ ## [1.9.3](https://github.com/Daghis/teamcity-mcp/compare/v1.9.2...v1.9.3) (2025-09-21)
+
+
+ ### Bug Fixes
+
+ * **teamcity:** resolve nested artifact downloads ([#188](https://github.com/Daghis/teamcity-mcp/issues/188)) ([e309b90](https://github.com/Daghis/teamcity-mcp/commit/e309b90de42fe121f072fe5b549ab25df3a91aaf))
+
  ## [1.9.2](https://github.com/Daghis/teamcity-mcp/compare/v1.9.1...v1.9.2) (2025-09-21)


package/dist/index.js CHANGED
@@ -655,6 +655,7 @@ var import_server = require("@modelcontextprotocol/sdk/server/index.js");
  var import_types = require("@modelcontextprotocol/sdk/types.js");

  // src/utils/logger/index.ts
+ var import_node_fs = require("node:fs");
  var import_winston = __toESM(require("winston"));
  var TeamCityLogger = class _TeamCityLogger {
  winston;
@@ -758,9 +759,8 @@ var TeamCityLogger = class _TeamCityLogger {
  */
  ensureLogDirectory(directory) {
  try {
- const fs2 = require("fs");
- if (fs2.existsSync(directory) === false) {
- fs2.mkdirSync(directory, { recursive: true });
+ if ((0, import_node_fs.existsSync)(directory) === false) {
+ (0, import_node_fs.mkdirSync)(directory, { recursive: true });
  }
  } catch (error2) {
  this.winston?.warn?.("Failed to create log directory, using current directory", { error: error2 });
@@ -922,7 +922,7 @@ function debug(message, meta) {

  // src/tools.ts
  var import_node_crypto = require("node:crypto");
- var import_node_fs = require("node:fs");
+ var import_node_fs2 = require("node:fs");
  var import_node_os = require("node:os");
  var import_node_path = require("node:path");
  var import_promises = require("node:stream/promises");
@@ -1216,26 +1216,72 @@ var ArtifactManager = class _ArtifactManager {
  }
  return payload;
  }
- parseArtifacts(data, buildId, includeNested, baseUrl) {
+ parseArtifacts(data, buildId, includeNested, baseUrl, parentSegments = []) {
  const artifacts = [];
  const files = data.file ?? [];
  for (const file of files) {
- if (file.children && includeNested) {
- const nested = this.parseArtifacts(file.children, buildId, includeNested, baseUrl);
- artifacts.push(...nested);
- } else if (!file.children) {
- artifacts.push({
- name: file.name ?? "",
- path: file.fullName ?? file.name ?? "",
- size: file.size ?? 0,
- modificationTime: file.modificationTime ?? "",
- downloadUrl: `${baseUrl}/app/rest/builds/id:${buildId}/artifacts/content/${file.fullName ?? file.name ?? ""}`,
- isDirectory: false
- });
- }
+ const pathSegments = this.buildArtifactSegments(file, parentSegments);
+ const resolvedPath = pathSegments.join("/");
+ const isDirectory = Boolean(file.children);
+ if (isDirectory) {
+ if (includeNested && file.children) {
+ const nested = this.parseArtifacts(
+ file.children,
+ buildId,
+ includeNested,
+ baseUrl,
+ pathSegments
+ );
+ artifacts.push(...nested);
+ }
+ continue;
+ }
+ if (!resolvedPath) {
+ continue;
+ }
+ artifacts.push({
+ name: file.name ?? pathSegments[pathSegments.length - 1] ?? "",
+ path: resolvedPath,
+ size: file.size ?? 0,
+ modificationTime: file.modificationTime ?? "",
+ downloadUrl: `${baseUrl}/app/rest/builds/id:${buildId}/artifacts/content/${this.encodeArtifactPath(pathSegments)}`,
+ isDirectory: false
+ });
  }
  return artifacts;
  }
+ buildArtifactSegments(file, parentSegments) {
+ const fullName = typeof file.fullName === "string" ? file.fullName : void 0;
+ const name = typeof file.name === "string" ? file.name : void 0;
+ const segmentsFromFullName = fullName ? fullName.split("/").filter((segment) => segment.length > 0) : [];
+ if (segmentsFromFullName.length === 0) {
+ if (name && name.length > 0) {
+ return [...parentSegments, name];
+ }
+ return [...parentSegments];
+ }
+ if (parentSegments.length === 0) {
+ return segmentsFromFullName;
+ }
+ if (this.segmentsStartWithParent(segmentsFromFullName, parentSegments)) {
+ return segmentsFromFullName;
+ }
+ return [...parentSegments, ...segmentsFromFullName];
+ }
+ segmentsStartWithParent(segments, parent) {
+ if (parent.length === 0 || segments.length < parent.length) {
+ return false;
+ }
+ for (let i = 0; i < parent.length; i += 1) {
+ if (segments[i] !== parent[i]) {
+ return false;
+ }
+ }
+ return true;
+ }
+ encodeArtifactPath(segments) {
+ return segments.map((segment) => encodeURIComponent(segment)).join("/");
+ }
  ensureBinaryBuffer(payload) {
  if (Buffer.isBuffer(payload)) {
  return payload;
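The hunk above is the nested-artifact fix released in 1.9.3 (#188): directory entries now pass their path segments down to their children, and each segment of the download URL is URI-encoded individually. A simplified, standalone sketch of the path handling, with illustrative types and an assumed base URL (it omits the parent-prefix reconciliation that `segmentsStartWithParent` performs):

```ts
// Illustrative shape of a TeamCity artifact listing entry; not a package export.
interface TeamCityArtifactFile {
  name?: string;
  fullName?: string;
  children?: { file?: TeamCityArtifactFile[] };
}

// Prefer fullName (already a path); otherwise fall back to parent segments + name.
function artifactSegments(file: TeamCityArtifactFile, parent: string[]): string[] {
  const fromFullName = (file.fullName ?? '').split('/').filter((s) => s.length > 0);
  if (fromFullName.length > 0) return fromFullName;
  return file.name ? [...parent, file.name] : [...parent];
}

// Each segment is URI-encoded on its own so the "/" separators survive intact.
function buildDownloadUrl(baseUrl: string, buildId: string, segments: string[]): string {
  const encoded = segments.map(encodeURIComponent).join('/');
  return `${baseUrl}/app/rest/builds/id:${buildId}/artifacts/content/${encoded}`;
}

// Example: a file two directories deep in the artifact tree.
const segments = artifactSegments({ name: 'coverage.html' }, ['reports', 'html']);
console.log(buildDownloadUrl('https://tc.example.com', '123', segments));
// https://tc.example.com/app/rest/builds/id:123/artifacts/content/reports/html/coverage.html
```

Encoding per segment escapes characters such as spaces inside individual file names while keeping the directory separators usable in the REST URL.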
@@ -2717,7 +2763,7 @@ function formatError(err, context) {
  }
  if (err instanceof import_zod2.z.ZodError) {
  error("Validation Error", err, {
- errors: err.errors,
+ errors: err.issues,
  ...context
  });
  return {
@@ -2725,7 +2771,7 @@ function formatError(err, context) {
  error: {
  message: "Validation failed",
  code: "VALIDATION_ERROR",
- data: err.errors
+ data: err.issues
  }
  };
  }
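Both hunks above read validation problems from `err.issues` rather than `err.errors`, consistent with Zod 4, where `issues` is the canonical array on `ZodError`. A short sketch of reading issues after a failed parse (the schema here is illustrative):

```ts
import { z } from 'zod';

const schema = z.object({ buildTypeId: z.string().min(1) });

const result = schema.safeParse({ buildTypeId: 42 });
if (!result.success) {
  // ZodError exposes its problems on `issues`; each entry has a path, code, and message.
  for (const issue of result.error.issues) {
    console.log(issue.path.join('.'), issue.code, issue.message);
  }
}
```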
@@ -37191,7 +37237,7 @@ var buildRandomFileName = (artifactName) => {
  };
  var sanitizePathSegments = (artifactPath, fallbackName) => {
  const rawSegments = artifactPath?.split("/") ?? [];
- const sanitizedSegments = rawSegments.map((segment) => segment.trim()).filter((segment) => segment && segment !== "." && segment !== "..").map((segment) => segment.replace(/[^a-zA-Z0-9._-]/g, "_"));
+ const sanitizedSegments = rawSegments.map((segment) => segment.trim()).filter((segment) => segment.length > 0 && segment !== "." && segment !== "..").map((segment) => segment.replace(/[^a-zA-Z0-9._-]/g, "_"));
  if (sanitizedSegments.length === 0) {
  const { sanitizedBase } = sanitizeFileName(fallbackName);
  sanitizedSegments.push(sanitizedBase);
@@ -37204,7 +37250,7 @@ var ensureUniquePath = async (candidate) => {
  const probe = async (attempt) => {
  const next = attempt === 0 ? candidate : `${stem}-${attempt}${ext}`;
  try {
- const handle = await import_node_fs.promises.open(next, "wx");
+ const handle = await import_node_fs2.promises.open(next, "wx");
  await handle.close();
  return next;
  } catch (error2) {
@@ -37218,9 +37264,10 @@ var ensureUniquePath = async (candidate) => {
  return probe(0);
  };
  var resolveStreamOutputPath = async (artifact, options) => {
- if (options.explicitOutputPath) {
- const target = options.explicitOutputPath;
- await import_node_fs.promises.mkdir((0, import_node_path.dirname)(target), { recursive: true });
+ const { explicitOutputPath } = options;
+ if (typeof explicitOutputPath === "string" && explicitOutputPath.length > 0) {
+ const target = explicitOutputPath;
+ await import_node_fs2.promises.mkdir((0, import_node_path.dirname)(target), { recursive: true });
  return target;
  }
  if (options.outputDir) {
@@ -37233,17 +37280,17 @@ var resolveStreamOutputPath = async (artifact, options) => {
  if (relativePath.startsWith("..") || (0, import_node_path.isAbsolute)(relativePath)) {
  throw new Error("Resolved artifact path escapes the configured output directory");
  }
- await import_node_fs.promises.mkdir((0, import_node_path.dirname)(candidate), { recursive: true });
+ await import_node_fs2.promises.mkdir((0, import_node_path.dirname)(candidate), { recursive: true });
  return ensureUniquePath(candidate);
  }
  const tempFilePath = (0, import_node_path.join)((0, import_node_os.tmpdir)(), buildRandomFileName(artifact.name));
- await import_node_fs.promises.mkdir((0, import_node_path.dirname)(tempFilePath), { recursive: true });
+ await import_node_fs2.promises.mkdir((0, import_node_path.dirname)(tempFilePath), { recursive: true });
  return tempFilePath;
  };
  var writeArtifactStreamToDisk = async (artifact, stream, options) => {
  const targetPath = await resolveStreamOutputPath(artifact, options);
- await (0, import_promises.pipeline)(stream, (0, import_node_fs.createWriteStream)(targetPath));
- const stats = await import_node_fs.promises.stat(targetPath);
+ await (0, import_promises.pipeline)(stream, (0, import_node_fs2.createWriteStream)(targetPath));
+ const stats = await import_node_fs2.promises.stat(targetPath);
  return { outputPath: targetPath, bytesWritten: stats.size };
  };
  var buildArtifactPayload = async (artifact, encoding, options) => {
@@ -37982,9 +38029,9 @@ var DEV_TOOLS = [
  const safeBuildId = effectiveBuildId.replace(/[^a-zA-Z0-9._-]/g, "_") || "build";
  const defaultFileName = `build-log-${safeBuildId}-${startLine}-${(0, import_node_crypto.randomUUID)()}.log`;
  const targetPath = typed.outputPath ?? (0, import_node_path.join)((0, import_node_os.tmpdir)(), defaultFileName);
- await import_node_fs.promises.mkdir((0, import_node_path.dirname)(targetPath), { recursive: true });
- await (0, import_promises.pipeline)(stream, (0, import_node_fs.createWriteStream)(targetPath));
- const stats = await import_node_fs.promises.stat(targetPath);
+ await import_node_fs2.promises.mkdir((0, import_node_path.dirname)(targetPath), { recursive: true });
+ await (0, import_promises.pipeline)(stream, (0, import_node_fs2.createWriteStream)(targetPath));
+ const stats = await import_node_fs2.promises.stat(targetPath);
  const page = Math.floor(startLine / effectivePageSize) + 1;
  return json({
  encoding: "stream",
@@ -40446,7 +40493,7 @@ var FULL_MODE_TOOLS = [
  stepId: import_zod4.z.string().min(1).optional(),
  name: import_zod4.z.string().optional(),
  type: import_zod4.z.string().optional(),
- properties: import_zod4.z.record(import_zod4.z.unknown()).optional()
+ properties: import_zod4.z.record(import_zod4.z.string(), import_zod4.z.unknown()).optional()
  }).superRefine((value, ctx) => {
  if (value.action === "update" || value.action === "delete") {
  if (!value.stepId || value.stepId.trim() === "") {
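The `properties` schema above now passes an explicit key schema, matching the two-argument `z.record(keySchema, valueSchema)` form that Zod 4 expects. A small sketch of the resulting shape, with example argument values:

```ts
import { z } from 'zod';

// Explicit key schema: z.record(keySchema, valueSchema).
const propertiesSchema = z.record(z.string(), z.unknown()).optional();

const stepArgs = z.object({
  action: z.enum(['add', 'update', 'delete']),
  stepId: z.string().min(1).optional(),
  properties: propertiesSchema,
});

// Values stay `unknown` at the schema level; the tool handler stringifies them
// before building the TeamCity payload.
const parsed = stepArgs.parse({
  action: 'update',
  stepId: 'RUNNER_1',
  properties: { 'script.content': 'echo hello', 'use.custom.script': true },
});
console.log(parsed.properties);
```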
@@ -40493,24 +40540,56 @@ var FULL_MODE_TOOLS = [
  });
  }
  case "update": {
+ const existingStepResponse = await adapter.modules.buildTypes.getBuildStep(
+ typedArgs.buildTypeId,
+ typedArgs.stepId,
+ "id,name,type,disabled,properties(property(name,value))",
+ {
+ headers: {
+ Accept: "application/json"
+ }
+ }
+ );
+ const existingStep = existingStepResponse.data;
+ const toRecord2 = (collection) => {
+ if (!collection || !Array.isArray(collection.property)) {
+ return {};
+ }
+ const entries = collection.property.filter((item) => {
+ return Boolean(item?.name);
+ }).map((item) => {
+ return [item.name, item.value != null ? String(item.value) : ""];
+ });
+ return Object.fromEntries(entries);
+ };
+ const existingProperties = toRecord2(existingStep?.properties);
  const updatePayload = {};
- if (typedArgs.name != null) {
- updatePayload["name"] = typedArgs.name;
+ const mergedName = typedArgs.name ?? existingStep?.name;
+ if (mergedName != null) {
+ updatePayload["name"] = mergedName;
  }
- if (typedArgs.type != null) {
- updatePayload["type"] = typedArgs.type;
+ const mergedType = typedArgs.type ?? existingStep?.type;
+ if (mergedType != null) {
+ updatePayload["type"] = mergedType;
+ }
+ if (existingStep?.disabled != null) {
+ updatePayload["disabled"] = existingStep.disabled;
  }
  const rawProps = typedArgs.properties ?? {};
- const stepProps = Object.fromEntries(
+ const providedProps = Object.fromEntries(
  Object.entries(rawProps).map(([k, v]) => [k, String(v)])
  );
- if (stepProps["script.content"]) {
- stepProps["use.custom.script"] = stepProps["use.custom.script"] ?? "true";
- stepProps["script.type"] = stepProps["script.type"] ?? "customScript";
+ const mergedProps = {
+ ...existingProperties,
+ ...providedProps
+ };
+ if (mergedProps["script.content"] && mergedType === "simpleRunner") {
+ mergedProps["use.custom.script"] = mergedProps["use.custom.script"] ?? "true";
+ mergedProps["script.type"] = mergedProps["script.type"] ?? "customScript";
  }
- if (Object.keys(stepProps).length > 0) {
+ if (Object.keys(mergedProps).length > 0) {
  updatePayload["properties"] = {
- property: Object.entries(stepProps).map(([name, value]) => ({ name, value }))
+ property: Object.entries(mergedProps).map(([name, value]) => ({ name, value }))
  };
  }
  if (Object.keys(updatePayload).length === 0) {
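This hunk is the 1.9.4 fix from the changelog (#203): an `update` now fetches the existing build step first and merges its current name, type, disabled flag, and properties with the caller's partial input, so updating a single property no longer drops the rest of the step's configuration. A sketch of the merge semantics as a pure function (types and helper names here are illustrative, not package exports):

```ts
type StepProps = Record<string, string>;

interface StepDefaults {
  name?: string;
  type?: string;
  disabled?: boolean;
  properties: StepProps;
}

interface StepUpdate {
  name?: string;
  type?: string;
  properties?: Record<string, unknown>;
}

function mergeStepUpdate(existing: StepDefaults, update: StepUpdate) {
  // Caller-supplied values win; anything omitted falls back to the existing step.
  const properties: StepProps = {
    ...existing.properties,
    ...Object.fromEntries(
      Object.entries(update.properties ?? {}).map(([k, v]) => [k, String(v)]),
    ),
  };
  return {
    name: update.name ?? existing.name,
    type: update.type ?? existing.type,
    disabled: existing.disabled,
    properties,
  };
}

// Updating only script.content now preserves the step's other properties.
const merged = mergeStepUpdate(
  {
    name: 'Build',
    type: 'simpleRunner',
    disabled: false,
    properties: { 'use.custom.script': 'true', 'script.content': 'echo old' },
  },
  { properties: { 'script.content': 'echo new' } },
);
console.log(merged.properties);
// { 'use.custom.script': 'true', 'script.content': 'echo new' }
```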
@@ -40664,7 +40743,9 @@ var FULL_MODE_TOOLS = [
  const queue = await adapter.modules.buildQueue.getAllQueuedBuilds();
  const builds = queue.data?.build ?? [];
  const ids = new Set(typed.buildTypeIds);
- const toCancel = builds.filter((b) => b.buildTypeId && ids.has(b.buildTypeId));
+ const toCancel = builds.filter(
+ (build) => typeof build.buildTypeId === "string" && ids.has(build.buildTypeId)
+ );
  for (const b of toCancel) {
  if (b.id == null) continue;
  await adapter.modules.buildQueue.deleteQueuedBuild(String(b.id));
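The queued-build filter above now narrows `buildTypeId` with an explicit `typeof` check instead of a truthiness test, making the string narrowing explicit before the `Set` lookup. A small sketch of the pattern (the `QueuedBuild` shape is an assumption for illustration):

```ts
interface QueuedBuild {
  id?: number;
  buildTypeId?: string;
}

function selectBuildsToCancel(builds: QueuedBuild[], buildTypeIds: string[]): QueuedBuild[] {
  const ids = new Set(buildTypeIds);
  // `typeof ... === "string"` narrows buildTypeId from `string | undefined` to `string`,
  // so the Set<string> lookup type-checks without a cast.
  return builds.filter(
    (build) => typeof build.buildTypeId === 'string' && ids.has(build.buildTypeId),
  );
}

console.log(selectBuildsToCancel(
  [{ id: 1, buildTypeId: 'Project_Build' }, { id: 2 }],
  ['Project_Build'],
));
// [ { id: 1, buildTypeId: 'Project_Build' } ]
```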