@openontology/opencode-palantir 0.1.5 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3) hide show
  1. package/README.md +11 -8
  2. package/dist/index.js +892 -468
  3. package/package.json +12 -12
package/dist/index.js CHANGED
@@ -1,6 +1,6 @@
1
1
  // @bun
2
2
  // src/index.ts
3
- import path4 from "path";
3
+ import path5 from "path";
4
4
  import { tool } from "@opencode-ai/plugin/tool";
5
5
 
6
6
  // node_modules/hyparquet/src/constants.js
@@ -71,11 +71,6 @@ var PageTypes = [
71
71
  "DICTIONARY_PAGE",
72
72
  "DATA_PAGE_V2"
73
73
  ];
74
- var BoundaryOrders = [
75
- "UNORDERED",
76
- "ASCENDING",
77
- "DESCENDING"
78
- ];
79
74
  var EdgeInterpolationAlgorithms = [
80
75
  "SPHERICAL",
81
76
  "VINCENTY",
@@ -872,30 +867,6 @@ function pageLocation(loc) {
872
867
  }
873
868
 
874
869
  // node_modules/hyparquet/src/utils.js
875
- function toJson(obj) {
876
- if (obj === undefined)
877
- return null;
878
- if (typeof obj === "bigint")
879
- return Number(obj);
880
- if (Object.is(obj, -0))
881
- return 0;
882
- if (Array.isArray(obj))
883
- return obj.map(toJson);
884
- if (obj instanceof Uint8Array)
885
- return Array.from(obj);
886
- if (obj instanceof Date)
887
- return obj.toISOString();
888
- if (obj instanceof Object) {
889
- const newObj = {};
890
- for (const key of Object.keys(obj)) {
891
- if (obj[key] === undefined)
892
- continue;
893
- newObj[key] = toJson(obj[key]);
894
- }
895
- return newObj;
896
- }
897
- return obj;
898
- }
899
870
  function concat(aaa, bbb) {
900
871
  const chunk = 1e4;
901
872
  for (let i = 0;i < bbb.length; i += chunk) {
@@ -953,9 +924,9 @@ function columnsNeededForFilter(filter) {
953
924
  } else if ("$nor" in filter && Array.isArray(filter.$nor)) {
954
925
  columns.push(...filter.$nor.flatMap(columnsNeededForFilter));
955
926
  } else {
956
- columns.push(...Object.keys(filter));
927
+ columns.push(...Object.keys(filter).map((key) => key.split(".")[0]));
957
928
  }
958
- return columns;
929
+ return [...new Set(columns)];
959
930
  }
960
931
  function matchFilter(record, filter, strict = true) {
961
932
  if ("$and" in filter && Array.isArray(filter.$and)) {
@@ -968,7 +939,7 @@ function matchFilter(record, filter, strict = true) {
968
939
  return !filter.$nor.some((subQuery) => matchFilter(record, subQuery, strict));
969
940
  }
970
941
  return Object.entries(filter).every(([field, condition]) => {
971
- const value = record[field];
942
+ const value = resolve(record, field);
972
943
  if (typeof condition !== "object" || condition === null || Array.isArray(condition)) {
973
944
  return equals(value, condition, strict);
974
945
  }
@@ -1040,6 +1011,13 @@ function canSkipRowGroup({ rowGroup, physicalColumns, filter, strict = true }) {
1040
1011
  }
1041
1012
  return false;
1042
1013
  }
1014
+ function resolve(record, path) {
1015
+ let value = record;
1016
+ for (const part of path.split(".")) {
1017
+ value = value?.[part];
1018
+ }
1019
+ return value;
1020
+ }
1043
1021
 
1044
1022
  // node_modules/hyparquet/src/plan.js
1045
1023
  var runLimit = 1 << 21;
@@ -2093,6 +2071,7 @@ function readColumn(reader, { groupStart, selectStart, selectEnd }, columnDecode
2093
2071
  let dictionary = undefined;
2094
2072
  let lastChunk = undefined;
2095
2073
  let rowCount = 0;
2074
+ let skipped = 0;
2096
2075
  const emitLastChunk = onPage && (() => {
2097
2076
  lastChunk && onPage({
2098
2077
  pathInSchema,
@@ -2106,23 +2085,29 @@ function readColumn(reader, { groupStart, selectStart, selectEnd }, columnDecode
2106
2085
  break;
2107
2086
  const header = parquetHeader(reader);
2108
2087
  if (header.type === "DICTIONARY_PAGE") {
2109
- dictionary = readPage(reader, header, columnDecoder, dictionary, undefined, 0);
2110
- dictionary = convert(dictionary, columnDecoder);
2088
+ const { data } = readPage(reader, header, columnDecoder, dictionary, undefined, 0);
2089
+ if (data)
2090
+ dictionary = convert(data, columnDecoder);
2111
2091
  } else {
2112
2092
  const lastChunkLength = lastChunk?.length || 0;
2113
- const values = readPage(reader, header, columnDecoder, dictionary, lastChunk, selectStart - rowCount);
2114
- if (lastChunk === values) {
2115
- rowCount += values.length - lastChunkLength;
2116
- } else {
2093
+ const result = readPage(reader, header, columnDecoder, dictionary, lastChunk, selectStart - rowCount);
2094
+ if (result.skipped) {
2095
+ if (!chunks.length) {
2096
+ skipped += result.skipped;
2097
+ }
2098
+ rowCount += result.skipped;
2099
+ } else if (result.data && lastChunk === result.data) {
2100
+ rowCount += result.data.length - lastChunkLength;
2101
+ } else if (result.data && result.data.length) {
2117
2102
  emitLastChunk?.();
2118
- chunks.push(values);
2119
- rowCount += values.length;
2120
- lastChunk = values;
2103
+ chunks.push(result.data);
2104
+ rowCount += result.data.length;
2105
+ lastChunk = result.data;
2121
2106
  }
2122
2107
  }
2123
2108
  }
2124
2109
  emitLastChunk?.();
2125
- return chunks;
2110
+ return { data: chunks, skipped };
2126
2111
  }
2127
2112
  function readPage(reader, header, columnDecoder, dictionary, previousChunk, pageStart) {
2128
2113
  const { type, element, schemaPath, codec, compressors } = columnDecoder;
@@ -2133,31 +2118,34 @@ function readPage(reader, header, columnDecoder, dictionary, previousChunk, page
2133
2118
  if (!daph)
2134
2119
  throw new Error("parquet data page header is undefined");
2135
2120
  if (pageStart > daph.num_values && isFlatColumn(schemaPath)) {
2136
- return new Array(daph.num_values);
2121
+ return { skipped: daph.num_values };
2137
2122
  }
2138
2123
  const page = decompressPage(compressedBytes, Number(header.uncompressed_page_size), codec, compressors);
2139
2124
  const { definitionLevels, repetitionLevels, dataPage } = readDataPage(page, daph, columnDecoder);
2140
2125
  const values = convertWithDictionary(dataPage, dictionary, daph.encoding, columnDecoder);
2141
2126
  const output = Array.isArray(previousChunk) ? previousChunk : [];
2142
- return assembleLists(output, definitionLevels, repetitionLevels, values, schemaPath);
2127
+ const assembled = assembleLists(output, definitionLevels, repetitionLevels, values, schemaPath);
2128
+ return { skipped: 0, data: assembled };
2143
2129
  } else if (header.type === "DATA_PAGE_V2") {
2144
2130
  const daph2 = header.data_page_header_v2;
2145
2131
  if (!daph2)
2146
2132
  throw new Error("parquet data page header v2 is undefined");
2147
2133
  if (pageStart > daph2.num_rows) {
2148
- return new Array(daph2.num_values);
2134
+ return { skipped: daph2.num_values };
2149
2135
  }
2150
2136
  const { definitionLevels, repetitionLevels, dataPage } = readDataPageV2(compressedBytes, header, columnDecoder);
2151
2137
  const values = convertWithDictionary(dataPage, dictionary, daph2.encoding, columnDecoder);
2152
2138
  const output = Array.isArray(previousChunk) ? previousChunk : [];
2153
- return assembleLists(output, definitionLevels, repetitionLevels, values, schemaPath);
2139
+ const assembled = assembleLists(output, definitionLevels, repetitionLevels, values, schemaPath);
2140
+ return { skipped: 0, data: assembled };
2154
2141
  } else if (header.type === "DICTIONARY_PAGE") {
2155
2142
  const diph = header.dictionary_page_header;
2156
2143
  if (!diph)
2157
2144
  throw new Error("parquet dictionary page header is undefined");
2158
2145
  const page = decompressPage(compressedBytes, Number(header.uncompressed_page_size), codec, compressors);
2159
2146
  const reader2 = { view: new DataView(page.buffer, page.byteOffset, page.byteLength), offset: 0 };
2160
- return readPlain(reader2, type, diph.num_values, element.type_length);
2147
+ const dictArray = readPlain(reader2, type, diph.num_values, element.type_length);
2148
+ return { skipped: 0, data: dictArray };
2161
2149
  } else {
2162
2150
  throw new Error(`parquet unsupported page type: ${header.type}`);
2163
2151
  }
@@ -2233,10 +2221,7 @@ function readRowGroup(options, { metadata }, groupPlan) {
2233
2221
  pathInSchema: columnMetadata.path_in_schema,
2234
2222
  data: Promise.resolve(file.slice(chunkPlan.range.startByte, chunkPlan.range.endByte)).then((buffer) => {
2235
2223
  const reader = { view: new DataView(buffer), offset: 0 };
2236
- return {
2237
- pageSkip: 0,
2238
- data: readColumn(reader, groupPlan, columnDecoder, options.onPage)
2239
- };
2224
+ return readColumn(reader, groupPlan, columnDecoder, options.onPage);
2240
2225
  })
2241
2226
  });
2242
2227
  continue;
@@ -2249,7 +2234,7 @@ function readRowGroup(options, { metadata }, groupPlan) {
2249
2234
  const pages = offsetIndex.page_locations;
2250
2235
  let startByte = NaN;
2251
2236
  let endByte = NaN;
2252
- let pageSkip = 0;
2237
+ let skipped = 0;
2253
2238
  for (let i = 0;i < pages.length; i++) {
2254
2239
  const page = pages[i];
2255
2240
  const pageStart = Number(page.first_row_index);
@@ -2257,22 +2242,23 @@ function readRowGroup(options, { metadata }, groupPlan) {
2257
2242
  if (pageStart < selectEnd && pageEnd > selectStart) {
2258
2243
  if (Number.isNaN(startByte)) {
2259
2244
  startByte = Number(page.offset);
2260
- pageSkip = pageStart;
2245
+ skipped = pageStart;
2261
2246
  }
2262
2247
  endByte = Number(page.offset) + page.compressed_page_size;
2263
2248
  }
2264
2249
  }
2265
2250
  const buffer = await file.slice(startByte, endByte);
2266
2251
  const reader = { view: new DataView(buffer), offset: 0 };
2267
- const adjustedGroupPlan = pageSkip ? {
2252
+ const adjustedGroupPlan = skipped ? {
2268
2253
  ...groupPlan,
2269
- groupStart: groupPlan.groupStart + pageSkip,
2270
- selectStart: groupPlan.selectStart - pageSkip,
2271
- selectEnd: groupPlan.selectEnd - pageSkip
2254
+ groupStart: groupPlan.groupStart + skipped,
2255
+ selectStart: groupPlan.selectStart - skipped,
2256
+ selectEnd: groupPlan.selectEnd - skipped
2272
2257
  } : groupPlan;
2258
+ const { data, skipped: columnSkipped } = readColumn(reader, adjustedGroupPlan, columnDecoder, options.onPage);
2273
2259
  return {
2274
- data: readColumn(reader, adjustedGroupPlan, columnDecoder, options.onPage),
2275
- pageSkip
2260
+ data,
2261
+ skipped: skipped + columnSkipped
2276
2262
  };
2277
2263
  })
2278
2264
  });
@@ -2297,8 +2283,8 @@ async function asyncGroupToRows({ asyncColumns }, selectStart, selectEnd, column
2297
2283
  const row = selectStart + selectRow;
2298
2284
  const rowData = {};
2299
2285
  for (let i = 0;i < asyncColumns.length; i++) {
2300
- const { data, pageSkip } = asyncPages[i];
2301
- rowData[asyncColumns[i].pathInSchema[0]] = data[row - pageSkip];
2286
+ const { data, skipped } = asyncPages[i];
2287
+ rowData[asyncColumns[i].pathInSchema[0]] = data[row - skipped];
2302
2288
  }
2303
2289
  groupData2[selectRow] = rowData;
2304
2290
  }
@@ -2311,8 +2297,8 @@ async function asyncGroupToRows({ asyncColumns }, selectStart, selectEnd, column
2311
2297
  for (let i = 0;i < columnOrder.length; i++) {
2312
2298
  const colIdx = columnIndexes[i];
2313
2299
  if (colIdx >= 0) {
2314
- const { data, pageSkip } = asyncPages[colIdx];
2315
- rowData[i] = data[row - pageSkip];
2300
+ const { data, skipped } = asyncPages[colIdx];
2301
+ rowData[i] = data[row - skipped];
2316
2302
  }
2317
2303
  }
2318
2304
  groupData[selectRow] = rowData;
@@ -2338,7 +2324,7 @@ function assembleAsync(asyncRowGroup, schemaTree2, parsers) {
2338
2324
  const flatColumn = flatData.get(child.path.join("."));
2339
2325
  if (!flatColumn)
2340
2326
  throw new Error("parquet column data not assembled");
2341
- return { data: [flatColumn], pageSkip: 0 };
2327
+ return { data: [flatColumn], skipped: 0 };
2342
2328
  });
2343
2329
  assembled.push({ pathInSchema: child.path, data });
2344
2330
  } else {
@@ -2389,8 +2375,8 @@ async function parquetRead(options) {
2389
2375
  if (onChunk) {
2390
2376
  for (const asyncGroup of assembled) {
2391
2377
  for (const asyncColumn of asyncGroup.asyncColumns) {
2392
- asyncColumn.data.then(({ data, pageSkip }) => {
2393
- let rowStart2 = asyncGroup.groupStart + pageSkip;
2378
+ asyncColumn.data.then(({ data, skipped }) => {
2379
+ let rowStart2 = asyncGroup.groupStart + skipped;
2394
2380
  for (const columnData of data) {
2395
2381
  onChunk({
2396
2382
  columnName: asyncColumn.pathInSchema[0],
@@ -3976,6 +3962,108 @@ ByteWriter.prototype.appendZigZag = function(value) {
3976
3962
  }
3977
3963
  };
3978
3964
 
3965
+ // node_modules/hyparquet-writer/node_modules/hyparquet/src/schema.js
3966
+ function schemaTree2(schema, rootIndex, path) {
3967
+ const element = schema[rootIndex];
3968
+ const children = [];
3969
+ let count = 1;
3970
+ if (element.num_children) {
3971
+ while (children.length < element.num_children) {
3972
+ const childElement = schema[rootIndex + count];
3973
+ const child = schemaTree2(schema, rootIndex + count, [...path, childElement.name]);
3974
+ count += child.count;
3975
+ children.push(child);
3976
+ }
3977
+ }
3978
+ return { count, element, children, path };
3979
+ }
3980
+ function getSchemaPath2(schema, name) {
3981
+ let tree = schemaTree2(schema, 0, []);
3982
+ const path = [tree];
3983
+ for (const part of name) {
3984
+ const child = tree.children.find((child2) => child2.element.name === part);
3985
+ if (!child)
3986
+ throw new Error(`parquet schema element not found: ${name}`);
3987
+ path.push(child);
3988
+ tree = child;
3989
+ }
3990
+ return path;
3991
+ }
3992
+
3993
+ // node_modules/hyparquet-writer/node_modules/hyparquet/src/constants.js
3994
+ var ParquetTypes2 = [
3995
+ "BOOLEAN",
3996
+ "INT32",
3997
+ "INT64",
3998
+ "INT96",
3999
+ "FLOAT",
4000
+ "DOUBLE",
4001
+ "BYTE_ARRAY",
4002
+ "FIXED_LEN_BYTE_ARRAY"
4003
+ ];
4004
+ var Encodings2 = [
4005
+ "PLAIN",
4006
+ "GROUP_VAR_INT",
4007
+ "PLAIN_DICTIONARY",
4008
+ "RLE",
4009
+ "BIT_PACKED",
4010
+ "DELTA_BINARY_PACKED",
4011
+ "DELTA_LENGTH_BYTE_ARRAY",
4012
+ "DELTA_BYTE_ARRAY",
4013
+ "RLE_DICTIONARY",
4014
+ "BYTE_STREAM_SPLIT"
4015
+ ];
4016
+ var FieldRepetitionTypes2 = [
4017
+ "REQUIRED",
4018
+ "OPTIONAL",
4019
+ "REPEATED"
4020
+ ];
4021
+ var ConvertedTypes2 = [
4022
+ "UTF8",
4023
+ "MAP",
4024
+ "MAP_KEY_VALUE",
4025
+ "LIST",
4026
+ "ENUM",
4027
+ "DECIMAL",
4028
+ "DATE",
4029
+ "TIME_MILLIS",
4030
+ "TIME_MICROS",
4031
+ "TIMESTAMP_MILLIS",
4032
+ "TIMESTAMP_MICROS",
4033
+ "UINT_8",
4034
+ "UINT_16",
4035
+ "UINT_32",
4036
+ "UINT_64",
4037
+ "INT_8",
4038
+ "INT_16",
4039
+ "INT_32",
4040
+ "INT_64",
4041
+ "JSON",
4042
+ "BSON",
4043
+ "INTERVAL"
4044
+ ];
4045
+ var CompressionCodecs2 = [
4046
+ "UNCOMPRESSED",
4047
+ "SNAPPY",
4048
+ "GZIP",
4049
+ "LZO",
4050
+ "BROTLI",
4051
+ "LZ4",
4052
+ "ZSTD",
4053
+ "LZ4_RAW"
4054
+ ];
4055
+ var PageTypes2 = [
4056
+ "DATA_PAGE",
4057
+ "INDEX_PAGE",
4058
+ "DICTIONARY_PAGE",
4059
+ "DATA_PAGE_V2"
4060
+ ];
4061
+ var BoundaryOrders = [
4062
+ "UNORDERED",
4063
+ "ASCENDING",
4064
+ "DESCENDING"
4065
+ ];
4066
+
3979
4067
  // node_modules/hyparquet-writer/src/delta.js
3980
4068
  var BLOCK_SIZE = 128;
3981
4069
  var MINIBLOCKS_PER_BLOCK = 4;
@@ -4451,6 +4539,24 @@ function bigIntArray(values) {
4451
4539
  throw new Error("Expected bigint array for BYTE_STREAM_SPLIT encoding");
4452
4540
  }
4453
4541
 
4542
+ // node_modules/hyparquet-writer/node_modules/hyparquet/src/thrift.js
4543
+ var CompactType2 = {
4544
+ STOP: 0,
4545
+ TRUE: 1,
4546
+ FALSE: 2,
4547
+ BYTE: 3,
4548
+ I16: 4,
4549
+ I32: 5,
4550
+ I64: 6,
4551
+ DOUBLE: 7,
4552
+ BINARY: 8,
4553
+ LIST: 9,
4554
+ SET: 10,
4555
+ MAP: 11,
4556
+ STRUCT: 12,
4557
+ UUID: 13
4558
+ };
4559
+
4454
4560
  // node_modules/hyparquet-writer/src/thrift.js
4455
4561
  function serializeTCompactProtocol(writer, data) {
4456
4562
  let lastFid = 0;
@@ -4475,55 +4581,55 @@ function serializeTCompactProtocol(writer, data) {
4475
4581
  writeElement(writer, type, value);
4476
4582
  lastFid = fid;
4477
4583
  }
4478
- writer.appendUint8(CompactType.STOP);
4584
+ writer.appendUint8(CompactType2.STOP);
4479
4585
  }
4480
4586
  function getCompactTypeForValue(value) {
4481
4587
  if (value === true)
4482
- return CompactType.TRUE;
4588
+ return CompactType2.TRUE;
4483
4589
  if (value === false)
4484
- return CompactType.FALSE;
4590
+ return CompactType2.FALSE;
4485
4591
  if (Number.isInteger(value))
4486
- return CompactType.I32;
4592
+ return CompactType2.I32;
4487
4593
  if (typeof value === "number")
4488
- return CompactType.DOUBLE;
4594
+ return CompactType2.DOUBLE;
4489
4595
  if (typeof value === "bigint")
4490
- return CompactType.I64;
4596
+ return CompactType2.I64;
4491
4597
  if (typeof value === "string")
4492
- return CompactType.BINARY;
4598
+ return CompactType2.BINARY;
4493
4599
  if (value instanceof Uint8Array)
4494
- return CompactType.BINARY;
4600
+ return CompactType2.BINARY;
4495
4601
  if (Array.isArray(value))
4496
- return CompactType.LIST;
4602
+ return CompactType2.LIST;
4497
4603
  if (value && typeof value === "object")
4498
- return CompactType.STRUCT;
4604
+ return CompactType2.STRUCT;
4499
4605
  throw new Error(`Cannot determine thrift compact type for: ${value}`);
4500
4606
  }
4501
4607
  function writeElement(writer, type, value) {
4502
- if (type === CompactType.TRUE)
4608
+ if (type === CompactType2.TRUE)
4503
4609
  return;
4504
- if (type === CompactType.FALSE)
4610
+ if (type === CompactType2.FALSE)
4505
4611
  return;
4506
- if (type === CompactType.BYTE && typeof value === "number") {
4612
+ if (type === CompactType2.BYTE && typeof value === "number") {
4507
4613
  writer.appendUint8(value);
4508
- } else if (type === CompactType.I32 && typeof value === "number") {
4614
+ } else if (type === CompactType2.I32 && typeof value === "number") {
4509
4615
  const zigzag = value << 1 ^ value >> 31;
4510
4616
  writer.appendVarInt(zigzag);
4511
- } else if (type === CompactType.I64 && typeof value === "bigint") {
4617
+ } else if (type === CompactType2.I64 && typeof value === "bigint") {
4512
4618
  const zigzag = value << 1n ^ value >> 63n;
4513
4619
  writer.appendVarBigInt(zigzag);
4514
- } else if (type === CompactType.DOUBLE && typeof value === "number") {
4620
+ } else if (type === CompactType2.DOUBLE && typeof value === "number") {
4515
4621
  writer.appendFloat64(value);
4516
- } else if (type === CompactType.BINARY && typeof value === "string") {
4622
+ } else if (type === CompactType2.BINARY && typeof value === "string") {
4517
4623
  const bytes = new TextEncoder().encode(value);
4518
4624
  writer.appendVarInt(bytes.length);
4519
4625
  writer.appendBytes(bytes);
4520
- } else if (type === CompactType.BINARY && value instanceof Uint8Array) {
4626
+ } else if (type === CompactType2.BINARY && value instanceof Uint8Array) {
4521
4627
  writer.appendVarInt(value.byteLength);
4522
4628
  writer.appendBytes(value);
4523
- } else if (type === CompactType.LIST && Array.isArray(value)) {
4629
+ } else if (type === CompactType2.LIST && Array.isArray(value)) {
4524
4630
  const size = value.length;
4525
4631
  if (size === 0) {
4526
- writer.appendUint8(0 << 4 | CompactType.BYTE);
4632
+ writer.appendUint8(0 << 4 | CompactType2.BYTE);
4527
4633
  return;
4528
4634
  }
4529
4635
  const elemType = getCompactTypeForValue(value[0]);
@@ -4532,7 +4638,7 @@ function writeElement(writer, type, value) {
4532
4638
  if (size > 14) {
4533
4639
  writer.appendVarInt(size);
4534
4640
  }
4535
- if (elemType === CompactType.TRUE || elemType === CompactType.FALSE) {
4641
+ if (elemType === CompactType2.TRUE || elemType === CompactType2.FALSE) {
4536
4642
  for (const v of value) {
4537
4643
  writer.appendUint8(v ? 1 : 0);
4538
4644
  }
@@ -4541,7 +4647,7 @@ function writeElement(writer, type, value) {
4541
4647
  writeElement(writer, elemType, v);
4542
4648
  }
4543
4649
  }
4544
- } else if (type === CompactType.STRUCT && typeof value === "object") {
4650
+ } else if (type === CompactType2.STRUCT && typeof value === "object") {
4545
4651
  let lastFid = 0;
4546
4652
  for (const [k, v] of Object.entries(value)) {
4547
4653
  if (v === undefined)
@@ -4564,7 +4670,7 @@ function writeElement(writer, type, value) {
4564
4670
  writeElement(writer, t, v);
4565
4671
  lastFid = fid;
4566
4672
  }
4567
- writer.appendUint8(CompactType.STOP);
4673
+ writer.appendUint8(CompactType2.STOP);
4568
4674
  } else {
4569
4675
  throw new Error(`unhandled type in writeElement: ${type} for value ${value}`);
4570
4676
  }
@@ -4646,25 +4752,25 @@ function writeDataPageV2({ writer, values, column, encoding, pageData }) {
4646
4752
  }
4647
4753
  function writePageHeader(writer, header) {
4648
4754
  const compact = {
4649
- field_1: PageTypes.indexOf(header.type),
4755
+ field_1: PageTypes2.indexOf(header.type),
4650
4756
  field_2: header.uncompressed_page_size,
4651
4757
  field_3: header.compressed_page_size,
4652
4758
  field_4: header.crc,
4653
4759
  field_5: header.data_page_header && {
4654
4760
  field_1: header.data_page_header.num_values,
4655
- field_2: Encodings.indexOf(header.data_page_header.encoding),
4656
- field_3: Encodings.indexOf(header.data_page_header.definition_level_encoding),
4657
- field_4: Encodings.indexOf(header.data_page_header.repetition_level_encoding)
4761
+ field_2: Encodings2.indexOf(header.data_page_header.encoding),
4762
+ field_3: Encodings2.indexOf(header.data_page_header.definition_level_encoding),
4763
+ field_4: Encodings2.indexOf(header.data_page_header.repetition_level_encoding)
4658
4764
  },
4659
4765
  field_7: header.dictionary_page_header && {
4660
4766
  field_1: header.dictionary_page_header.num_values,
4661
- field_2: Encodings.indexOf(header.dictionary_page_header.encoding)
4767
+ field_2: Encodings2.indexOf(header.dictionary_page_header.encoding)
4662
4768
  },
4663
4769
  field_8: header.data_page_header_v2 && {
4664
4770
  field_1: header.data_page_header_v2.num_values,
4665
4771
  field_2: header.data_page_header_v2.num_nulls,
4666
4772
  field_3: header.data_page_header_v2.num_rows,
4667
- field_4: Encodings.indexOf(header.data_page_header_v2.encoding),
4773
+ field_4: Encodings2.indexOf(header.data_page_header_v2.encoding),
4668
4774
  field_5: header.data_page_header_v2.definition_levels_byte_length,
4669
4775
  field_6: header.data_page_header_v2.repetition_levels_byte_length,
4670
4776
  field_7: header.data_page_header_v2.is_compressed ? undefined : false
@@ -4798,6 +4904,31 @@ function inferCoordinateDimensions(value) {
4798
4904
  return maxDim || 2;
4799
4905
  }
4800
4906
 
4907
+ // node_modules/hyparquet-writer/node_modules/hyparquet/src/utils.js
4908
+ function toJson(obj) {
4909
+ if (obj === undefined)
4910
+ return null;
4911
+ if (typeof obj === "bigint")
4912
+ return Number(obj);
4913
+ if (Object.is(obj, -0))
4914
+ return 0;
4915
+ if (Array.isArray(obj))
4916
+ return obj.map(toJson);
4917
+ if (obj instanceof Uint8Array)
4918
+ return Array.from(obj);
4919
+ if (obj instanceof Date)
4920
+ return obj.toISOString();
4921
+ if (obj instanceof Object) {
4922
+ const newObj = {};
4923
+ for (const key of Object.keys(obj)) {
4924
+ if (obj[key] === undefined)
4925
+ continue;
4926
+ newObj[key] = toJson(obj[key]);
4927
+ }
4928
+ return newObj;
4929
+ }
4930
+ return obj;
4931
+ }
4801
4932
  // node_modules/hyparquet-writer/src/wkb.js
4802
4933
  function geojsonToWkb(geometry) {
4803
4934
  const writer = new ByteWriter;
@@ -5608,12 +5739,12 @@ function writeMetadata(writer, metadata) {
5608
5739
  const compact = {
5609
5740
  field_1: metadata.version,
5610
5741
  field_2: metadata.schema && metadata.schema.map((element) => ({
5611
- field_1: element.type && ParquetTypes.indexOf(element.type),
5742
+ field_1: element.type && ParquetTypes2.indexOf(element.type),
5612
5743
  field_2: element.type_length,
5613
- field_3: element.repetition_type && FieldRepetitionTypes.indexOf(element.repetition_type),
5744
+ field_3: element.repetition_type && FieldRepetitionTypes2.indexOf(element.repetition_type),
5614
5745
  field_4: element.name,
5615
5746
  field_5: element.num_children,
5616
- field_6: element.converted_type && ConvertedTypes.indexOf(element.converted_type),
5747
+ field_6: element.converted_type && ConvertedTypes2.indexOf(element.converted_type),
5617
5748
  field_7: element.scale,
5618
5749
  field_8: element.precision,
5619
5750
  field_9: element.field_id,
@@ -5625,10 +5756,10 @@ function writeMetadata(writer, metadata) {
5625
5756
  field_1: c.file_path,
5626
5757
  field_2: c.file_offset,
5627
5758
  field_3: c.meta_data && {
5628
- field_1: ParquetTypes.indexOf(c.meta_data.type),
5629
- field_2: c.meta_data.encodings.map((e) => Encodings.indexOf(e)),
5759
+ field_1: ParquetTypes2.indexOf(c.meta_data.type),
5760
+ field_2: c.meta_data.encodings.map((e) => Encodings2.indexOf(e)),
5630
5761
  field_3: c.meta_data.path_in_schema,
5631
- field_4: CompressionCodecs.indexOf(c.meta_data.codec),
5762
+ field_4: CompressionCodecs2.indexOf(c.meta_data.codec),
5632
5763
  field_5: c.meta_data.num_values,
5633
5764
  field_6: c.meta_data.total_uncompressed_size,
5634
5765
  field_7: c.meta_data.total_compressed_size,
@@ -5641,8 +5772,8 @@ function writeMetadata(writer, metadata) {
5641
5772
  field_11: c.meta_data.dictionary_page_offset,
5642
5773
  field_12: c.meta_data.statistics && unconvertStatistics(c.meta_data.statistics, schemaElement(metadata.schema, c.meta_data.path_in_schema, columnIndex + 1)),
5643
5774
  field_13: c.meta_data.encoding_stats && c.meta_data.encoding_stats.map((es) => ({
5644
- field_1: PageTypes.indexOf(es.page_type),
5645
- field_2: Encodings.indexOf(es.encoding),
5775
+ field_1: PageTypes2.indexOf(es.page_type),
5776
+ field_2: Encodings2.indexOf(es.encoding),
5646
5777
  field_3: es.count
5647
5778
  })),
5648
5779
  field_14: c.meta_data.bloom_filter_offset,
@@ -5695,7 +5826,7 @@ function writeMetadata(writer, metadata) {
5695
5826
  }
5696
5827
  function schemaElement(schema, path, fallbackIndex) {
5697
5828
  if (path?.length) {
5698
- const resolved = getSchemaPath(schema, path).at(-1)?.element;
5829
+ const resolved = getSchemaPath2(schema, path).at(-1)?.element;
5699
5830
  if (resolved)
5700
5831
  return resolved;
5701
5832
  }
@@ -5933,7 +6064,7 @@ ParquetWriter.prototype.write = function({ columnData, rowGroupSize = [100, 1000
5933
6064
  for (let j = 0;j < columnData.length; j++) {
5934
6065
  const { name, data, encoding, columnIndex = false, offsetIndex = false } = columnData[j];
5935
6066
  const groupData = data.slice(groupStartIndex, groupStartIndex + groupSize);
5936
- const schemaTreePath = getSchemaPath(this.schema, [name]);
6067
+ const schemaTreePath = getSchemaPath2(this.schema, [name]);
5937
6068
  const leafPaths = getLeafSchemaPaths(schemaTreePath);
5938
6069
  const columnNode = schemaTreePath.at(-1);
5939
6070
  const normalizedData = columnNode ? Array.from(groupData, (row) => normalizeValue(columnNode, row)) : Array.from(groupData);
@@ -6088,6 +6219,9 @@ var BASE_DELAY_MS = 1000;
6088
6219
  var BACKOFF_FACTOR = 2;
6089
6220
  var JITTER_RANGE = 0.25;
6090
6221
  var BATCH_SIZE = 100;
6222
+ function formatError(error) {
6223
+ return error instanceof Error ? error.toString() : String(error);
6224
+ }
6091
6225
  function decompressPagefind(data) {
6092
6226
  const decompressed = gunzipSync(Buffer.from(data));
6093
6227
  if (decompressed.length < PAGEFIND_HEADER_SIZE) {
@@ -6144,7 +6278,9 @@ async function fetchAndParseMeta(langHash) {
6144
6278
  decoded = decode2(decompressed);
6145
6279
  } catch (error) {
6146
6280
  const detail = error instanceof Error ? error.message : String(error);
6147
- throw new Error(`Failed to decode pf_meta: ${detail}. The Pagefind binary format may have changed.`);
6281
+ throw new Error(`Failed to decode pf_meta: ${detail}. The Pagefind binary format may have changed.`, {
6282
+ cause: error
6283
+ });
6148
6284
  }
6149
6285
  if (!Array.isArray(decoded) || !Array.isArray(decoded[1])) {
6150
6286
  throw new Error("Failed to decode pf_meta: unexpected structure. The Pagefind binary format may have changed.");
@@ -6176,10 +6312,10 @@ async function withConcurrencyLimit(tasks, limit) {
6176
6312
  let running = 0;
6177
6313
  let index = 0;
6178
6314
  let completed = 0;
6179
- return new Promise((resolve) => {
6315
+ return new Promise((resolve2) => {
6180
6316
  function next() {
6181
6317
  if (completed === tasks.length) {
6182
- resolve(results);
6318
+ resolve2(results);
6183
6319
  return;
6184
6320
  }
6185
6321
  while (running < limit && index < tasks.length) {
@@ -6202,8 +6338,11 @@ async function withConcurrencyLimit(tasks, limit) {
6202
6338
  next();
6203
6339
  });
6204
6340
  }
6205
- async function fetchAllDocs(dbPath) {
6341
+ async function fetchAllDocs(dbPath, options = {}) {
6206
6342
  const entry = await fetchEntryPoint();
6343
+ const onProgress = options.onProgress;
6344
+ const concurrency = typeof options.concurrency === "number" && options.concurrency > 0 ? options.concurrency : DEFAULT_CONCURRENCY;
6345
+ const progressEvery = typeof options.progressEvery === "number" && options.progressEvery > 0 ? Math.floor(options.progressEvery) : BATCH_SIZE;
6207
6346
  const langKey = Object.keys(entry.languages)[0];
6208
6347
  if (!langKey) {
6209
6348
  throw new Error("No languages found in Pagefind entry");
@@ -6211,17 +6350,25 @@ async function fetchAllDocs(dbPath) {
6211
6350
  const langHash = entry.languages[langKey].hash;
6212
6351
  const pageHashes = await fetchAndParseMeta(langHash);
6213
6352
  const totalPages = pageHashes.length;
6353
+ onProgress?.({ type: "discovered", totalPages });
6214
6354
  const fetchedRecords = [];
6215
6355
  const failedUrls = [];
6216
- let done = 0;
6217
- const tasks = pageHashes.map((hash) => () => fetchFragment(hash).then((record) => {
6218
- done++;
6219
- if (done % BATCH_SIZE === 0 || done === totalPages) {
6220
- console.log(`Fetched ${done}/${totalPages} pages...`);
6356
+ let processedPages = 0;
6357
+ const tasks = pageHashes.map((hash) => () => fetchFragment(hash).catch((error) => {
6358
+ const url = `${PAGEFIND_BASE}/fragment/${hash}.pf_fragment`;
6359
+ onProgress?.({
6360
+ type: "page-failed",
6361
+ url,
6362
+ error: formatError(error)
6363
+ });
6364
+ throw error;
6365
+ }).finally(() => {
6366
+ processedPages += 1;
6367
+ if (processedPages % progressEvery === 0 || processedPages === totalPages) {
6368
+ onProgress?.({ type: "progress", processedPages, totalPages });
6221
6369
  }
6222
- return record;
6223
6370
  }));
6224
- const results = await withConcurrencyLimit(tasks, DEFAULT_CONCURRENCY);
6371
+ const results = await withConcurrencyLimit(tasks, concurrency);
6225
6372
  for (let i = 0;i < results.length; i++) {
6226
6373
  const result = results[i];
6227
6374
  if (result.status === "fulfilled") {
@@ -6229,10 +6376,15 @@ async function fetchAllDocs(dbPath) {
6229
6376
  } else {
6230
6377
  const url = `${PAGEFIND_BASE}/fragment/${pageHashes[i]}.pf_fragment`;
6231
6378
  failedUrls.push(url);
6232
- console.log(`[ERROR] Failed to fetch ${url}: ${result.reason.message}`);
6233
6379
  }
6234
6380
  }
6235
6381
  await writeParquet(fetchedRecords, dbPath);
6382
+ onProgress?.({
6383
+ type: "completed",
6384
+ totalPages,
6385
+ fetchedPages: fetchedRecords.length,
6386
+ failedPages: failedUrls.length
6387
+ });
6236
6388
  return {
6237
6389
  totalPages,
6238
6390
  fetchedPages: fetchedRecords.length,
@@ -6241,70 +6393,243 @@ async function fetchAllDocs(dbPath) {
6241
6393
  };
6242
6394
  }
6243
6395
 
6396
+ // src/docs/snapshot.ts
6397
+ import fs from "fs/promises";
6398
+ import path from "path";
6399
+ var DEFAULT_DOCS_SNAPSHOT_URLS = [
6400
+ "https://raw.githubusercontent.com/anand-testcompare/opencode-palantir/main/data/docs.parquet"
6401
+ ];
6402
+ var MIN_SNAPSHOT_BYTES = 64;
6403
+ var inFlightByPath = new Map;
6404
+ function formatError2(err) {
6405
+ return err instanceof Error ? err.toString() : String(err);
6406
+ }
6407
+ function emit(onEvent, event) {
6408
+ if (!onEvent)
6409
+ return;
6410
+ onEvent(event);
6411
+ }
6412
+ function normalizeSnapshotUrls(customUrls) {
6413
+ const envSingleRaw = process.env.OPENCODE_PALANTIR_DOCS_SNAPSHOT_URL;
6414
+ const envManyRaw = process.env.OPENCODE_PALANTIR_DOCS_SNAPSHOT_URLS;
6415
+ const envSingle = typeof envSingleRaw === "string" && envSingleRaw.trim().length > 0 ? [envSingleRaw.trim()] : [];
6416
+ const envMany = typeof envManyRaw === "string" && envManyRaw.trim().length > 0 ? envManyRaw.split(",").map((x) => x.trim()).filter((x) => x.length > 0) : [];
6417
+ const resolved = customUrls ?? [...envMany, ...envSingle, ...DEFAULT_DOCS_SNAPSHOT_URLS];
6418
+ return Array.from(new Set(resolved));
6419
+ }
6420
+ async function ensureDirectoryExists(dbPath) {
6421
+ await fs.mkdir(path.dirname(dbPath), { recursive: true });
6422
+ }
6423
+ async function statIfExists(filePath) {
6424
+ try {
6425
+ return await fs.stat(filePath);
6426
+ } catch (err) {
6427
+ if (err.code === "ENOENT")
6428
+ return null;
6429
+ throw err;
6430
+ }
6431
+ }
6432
+ function assertValidSnapshotSize(bytes, source) {
6433
+ if (bytes < MIN_SNAPSHOT_BYTES) {
6434
+ throw new Error(`Snapshot from ${source} is unexpectedly small (${bytes} bytes). Expected at least ${MIN_SNAPSHOT_BYTES} bytes.`);
6435
+ }
6436
+ }
6437
+ function tempPathFor(dbPath) {
6438
+ const base = path.basename(dbPath);
6439
+ return path.join(path.dirname(dbPath), `.${base}.tmp.${process.pid}.${Date.now()}`);
6440
+ }
6441
+ async function writeBufferAtomic(dbPath, bytes) {
6442
+ const tmp = tempPathFor(dbPath);
6443
+ await fs.writeFile(tmp, bytes);
6444
+ await fs.rename(tmp, dbPath);
6445
+ }
6446
+ async function copyFileAtomic(sourcePath, dbPath) {
6447
+ const tmp = tempPathFor(dbPath);
6448
+ await fs.copyFile(sourcePath, tmp);
6449
+ await fs.rename(tmp, dbPath);
6450
+ }
6451
+ function bundledSnapshotCandidates(dbPath, pluginDirectory) {
6452
+ const candidates = [];
6453
+ if (pluginDirectory && pluginDirectory.trim().length > 0) {
6454
+ candidates.push(path.resolve(pluginDirectory, "data", "docs.parquet"));
6455
+ } else {
6456
+ candidates.push(path.resolve(import.meta.dir, "..", "..", "data", "docs.parquet"));
6457
+ }
6458
+ const target = path.resolve(dbPath);
6459
+ const deduped = Array.from(new Set(candidates.map((x) => path.resolve(x))));
6460
+ return deduped.filter((candidate) => candidate !== target);
6461
+ }
6462
+ async function tryDownloadSnapshot(dbPath, urls, onEvent) {
6463
+ const errors = [];
6464
+ for (const url of urls) {
6465
+ emit(onEvent, { type: "download-start", url });
6466
+ try {
6467
+ const response = await fetch(url);
6468
+ if (!response.ok) {
6469
+ const reason = `HTTP ${response.status} ${response.statusText}`.trim();
6470
+ emit(onEvent, { type: "download-failed", url, error: reason });
6471
+ errors.push(`${url}: ${reason}`);
6472
+ continue;
6473
+ }
6474
+ const bytes = new Uint8Array(await response.arrayBuffer());
6475
+ assertValidSnapshotSize(bytes.byteLength, url);
6476
+ await writeBufferAtomic(dbPath, bytes);
6477
+ emit(onEvent, { type: "download-success", url, bytes: bytes.byteLength });
6478
+ return {
6479
+ dbPath,
6480
+ changed: true,
6481
+ source: "download",
6482
+ bytes: bytes.byteLength,
6483
+ downloadUrl: url
6484
+ };
6485
+ } catch (err) {
6486
+ const reason = formatError2(err);
6487
+ emit(onEvent, { type: "download-failed", url, error: reason });
6488
+ errors.push(`${url}: ${reason}`);
6489
+ }
6490
+ }
6491
+ if (errors.length === 0)
6492
+ return null;
6493
+ throw new Error([
6494
+ "Unable to download prebuilt docs snapshot from configured source URLs.",
6495
+ ...errors.map((line) => `- ${line}`)
6496
+ ].join(`
6497
+ `));
6498
+ }
6499
+ async function tryCopyBundledSnapshot(dbPath, pluginDirectory, onEvent) {
6500
+ const candidates = bundledSnapshotCandidates(dbPath, pluginDirectory);
6501
+ for (const sourcePath of candidates) {
6502
+ const stat = await statIfExists(sourcePath);
6503
+ if (!stat || !stat.isFile())
6504
+ continue;
6505
+ emit(onEvent, { type: "copy-start", sourcePath });
6506
+ assertValidSnapshotSize(stat.size, sourcePath);
6507
+ await copyFileAtomic(sourcePath, dbPath);
6508
+ emit(onEvent, { type: "copy-success", sourcePath, bytes: stat.size });
6509
+ return {
6510
+ dbPath,
6511
+ changed: true,
6512
+ source: "bundled-copy",
6513
+ bytes: stat.size
6514
+ };
6515
+ }
6516
+ return null;
6517
+ }
6518
+ async function ensureDocsParquetInternal(options) {
6519
+ const dbPath = path.resolve(options.dbPath);
6520
+ const force = options.force === true;
6521
+ const onEvent = options.onEvent;
6522
+ emit(onEvent, { type: "start", force });
6523
+ await ensureDirectoryExists(dbPath);
6524
+ if (!force) {
6525
+ const existing = await statIfExists(dbPath);
6526
+ if (existing && existing.isFile()) {
6527
+ assertValidSnapshotSize(existing.size, dbPath);
6528
+ const result = {
6529
+ dbPath,
6530
+ changed: false,
6531
+ source: "existing",
6532
+ bytes: existing.size
6533
+ };
6534
+ emit(onEvent, { type: "skip-existing", bytes: existing.size });
6535
+ emit(onEvent, { type: "done", result });
6536
+ return result;
6537
+ }
6538
+ }
6539
+ const snapshotUrls = normalizeSnapshotUrls(options.snapshotUrls);
6540
+ let downloadError = null;
6541
+ try {
6542
+ const downloaded = await tryDownloadSnapshot(dbPath, snapshotUrls, onEvent);
6543
+ if (downloaded) {
6544
+ emit(onEvent, { type: "done", result: downloaded });
6545
+ return downloaded;
6546
+ }
6547
+ } catch (err) {
6548
+ downloadError = err instanceof Error ? err : new Error(String(err));
6549
+ }
6550
+ const copied = await tryCopyBundledSnapshot(dbPath, options.pluginDirectory, onEvent);
6551
+ if (copied) {
6552
+ emit(onEvent, { type: "done", result: copied });
6553
+ return copied;
6554
+ }
6555
+ const fallbackHint = "No bundled snapshot was found. You can run /refresh-docs-rescrape as a fallback.";
6556
+ if (downloadError) {
6557
+ throw new Error(`${downloadError.message}
6558
+ ${fallbackHint}`);
6559
+ }
6560
+ throw new Error(`No docs snapshot sources were available. ${fallbackHint} ` + `Checked URLs=${snapshotUrls.length}, bundled candidates=${bundledSnapshotCandidates(dbPath, options.pluginDirectory).length}.`);
6561
+ }
6562
+ async function ensureDocsParquet(options) {
6563
+ const dbPath = path.resolve(options.dbPath);
6564
+ const existing = inFlightByPath.get(dbPath);
6565
+ if (existing)
6566
+ return existing;
6567
+ let promise;
6568
+ promise = ensureDocsParquetInternal({ ...options, dbPath }).finally(() => {
6569
+ if (inFlightByPath.get(dbPath) === promise) {
6570
+ inFlightByPath.delete(dbPath);
6571
+ }
6572
+ });
6573
+ inFlightByPath.set(dbPath, promise);
6574
+ return promise;
6575
+ }
6576
+
6244
6577
  // src/palantir-mcp/commands.ts
6245
- import path3 from "path";
6578
+ import path4 from "path";
6246
6579
 
6247
6580
  // src/palantir-mcp/allowlist.ts
6248
- function isMutatingTool(toolName) {
6249
- const re = /(?:^|[_-])(create|update|delete|remove|set|write|put|post|patch|deploy|publish|commit|run|execute|trigger|start|stop|cancel|schedule|grant|revoke|upload|import|export)(?:$|[_-])/i;
6250
- return re.test(toolName);
6251
- }
6252
- function isReadOnlyTool(toolName) {
6253
- const re = /(?:^|[_-])(get|list|search|query|describe|read|fetch|inspect|schema|metadata|lineage|preview|validate|diff)(?:$|[_-])/i;
6254
- return re.test(toolName);
6255
- }
6256
6581
  function matchesAny(toolName, patterns) {
6257
6582
  return patterns.some((p) => p.test(toolName));
6258
6583
  }
6584
+ var LIBRARIAN_MUTATION_DENY = [
6585
+ /(?:^|[_-])(create|update|delete|remove|set|write|put|post|patch|deploy|publish|commit|run|execute|trigger|start|stop|cancel|schedule|grant|revoke|upload|import|export|connect|convert|install|clone|close|replace|abort)(?:$|[_-])/i
6586
+ ];
6587
+ var FOUNDRY_DISCOVERY_DENY = [
6588
+ /documentation/i,
6589
+ /(?:^|[_-])search_foundry_(documentation|ontology|functions)(?:$|[_-])/i,
6590
+ /(?:^|[_-])list_platform_sdk_apis(?:$|[_-])/i,
6591
+ /(?:^|[_-])get_platform_sdk_api_reference(?:$|[_-])/i,
6592
+ /(?:^|[_-])get_ontology_sdk_(context|examples)(?:$|[_-])/i,
6593
+ /(?:^|[_-])view_osdk_definition(?:$|[_-])/i
6594
+ ];
6595
+ var PROFILE_POLICIES = {
6596
+ pipelines_transforms: {
6597
+ librarianDeny: [...LIBRARIAN_MUTATION_DENY],
6598
+ foundryDeny: [
6599
+ ...FOUNDRY_DISCOVERY_DENY,
6600
+ /(?:^|[_-])(convert_to_osdk_react|install_sdk_package|generate_new_ontology_sdk_version)(?:$|[_-])/i
6601
+ ]
6602
+ },
6603
+ osdk_functions_ts: {
6604
+ librarianDeny: [...LIBRARIAN_MUTATION_DENY],
6605
+ foundryDeny: [
6606
+ ...FOUNDRY_DISCOVERY_DENY,
6607
+ /(?:^|[_-])(create_python_transforms_code_repository|get_python_transforms_documentation)(?:$|[_-])/i
6608
+ ]
6609
+ },
6610
+ default: {
6611
+ librarianDeny: [...LIBRARIAN_MUTATION_DENY],
6612
+ foundryDeny: [...FOUNDRY_DISCOVERY_DENY]
6613
+ }
6614
+ };
6259
6615
  function computeAllowedTools(profile, toolNames) {
6260
6616
  const uniqueSortedTools = Array.from(new Set(toolNames)).sort((a, b) => a.localeCompare(b));
6617
+ const policy = PROFILE_POLICIES[profile];
6261
6618
  const librarianAllow = new Set;
6262
- const pipelinesBoost = [
6263
- /pipeline/i,
6264
- /transform/i,
6265
- /job/i,
6266
- /dataset/i,
6267
- /ontology/i,
6268
- /object/i,
6269
- /action/i,
6270
- /lineage/i,
6271
- /schema/i,
6272
- /preview/i
6273
- ];
6274
- const osdkBoost = [
6275
- /osdk/i,
6276
- /function/i,
6277
- /artifact/i,
6278
- /package/i,
6279
- /release/i,
6280
- /deploy/i
6281
- ];
6619
+ const foundryAllow = new Set;
6282
6620
  for (const name of uniqueSortedTools) {
6283
- if (isMutatingTool(name))
6284
- continue;
6285
- if (profile === "all") {
6286
- librarianAllow.add(name);
6287
- continue;
6288
- }
6289
- if (isReadOnlyTool(name)) {
6621
+ if (!matchesAny(name, policy.librarianDeny)) {
6290
6622
  librarianAllow.add(name);
6291
- continue;
6292
- }
6293
- if (profile === "pipelines_transforms" && matchesAny(name, pipelinesBoost)) {
6294
- librarianAllow.add(name);
6295
- continue;
6296
6623
  }
6297
- if (profile === "osdk_functions_ts" && matchesAny(name, osdkBoost)) {
6298
- librarianAllow.add(name);
6299
- continue;
6624
+ if (!matchesAny(name, policy.foundryDeny)) {
6625
+ foundryAllow.add(name);
6300
6626
  }
6301
6627
  }
6302
- const foundryAllow = new Set(librarianAllow);
6303
6628
  return { librarianAllow, foundryAllow };
6304
6629
  }
6305
6630
 
6306
6631
  // src/palantir-mcp/mcp-client.ts
6307
- function formatError(err) {
6632
+ function formatError3(err) {
6308
6633
  return err instanceof Error ? err.toString() : String(err);
6309
6634
  }
6310
6635
  function withTimeout(p, ms, label) {
@@ -6421,8 +6746,8 @@ ${errText}`));
6421
6746
  const req = { jsonrpc: "2.0", id, method, params };
6422
6747
  const line = `${JSON.stringify(req)}
6423
6748
  `;
6424
- const p = new Promise((resolve, reject) => {
6425
- pending.set(id, { resolve, reject });
6749
+ const p = new Promise((resolve2, reject) => {
6750
+ pending.set(id, { resolve: resolve2, reject });
6426
6751
  });
6427
6752
  await writeStdin(line);
6428
6753
  return p;
@@ -6445,8 +6770,10 @@ ${errText}`));
6445
6770
  return Array.from(new Set(names)).sort((a, b) => a.localeCompare(b));
6446
6771
  } catch (err) {
6447
6772
  const stderrText = stderrChunks.join("");
6448
- throw new Error(`[ERROR] Failed to list palantir-mcp tools: ${formatError(err)}
6449
- ${stderrText}`);
6773
+ throw new Error(`[ERROR] Failed to list palantir-mcp tools: ${formatError3(err)}
6774
+ ${stderrText}`, {
6775
+ cause: err
6776
+ });
6450
6777
  } finally {
6451
6778
  try {
6452
6779
  proc.kill();
@@ -6484,8 +6811,8 @@ function normalizeFoundryBaseUrl(raw) {
6484
6811
  }
6485
6812
 
6486
6813
  // src/palantir-mcp/opencode-config.ts
6487
- import fs from "fs/promises";
6488
- import path from "path";
6814
+ import fs2 from "fs/promises";
6815
+ import path2 from "path";
6489
6816
 
6490
6817
  // node_modules/jsonc-parser/lib/esm/impl/scanner.js
6491
6818
  function createScanner(text, ignoreTrivia = false) {
@@ -7297,28 +7624,28 @@ var OPENCODE_JSON_FILENAME = "opencode.json";
7297
7624
  function isRecord(value) {
7298
7625
  return !!value && typeof value === "object" && !Array.isArray(value);
7299
7626
  }
7300
- function formatError2(err) {
7627
+ function formatError4(err) {
7301
7628
  return err instanceof Error ? err.toString() : String(err);
7302
7629
  }
7303
7630
  async function pathExists(p) {
7304
7631
  try {
7305
- await fs.access(p);
7632
+ await fs2.access(p);
7306
7633
  return true;
7307
7634
  } catch {
7308
7635
  return false;
7309
7636
  }
7310
7637
  }
7311
7638
  async function readOpencodeJsonc(worktree) {
7312
- const configPath = path.join(worktree, OPENCODE_JSONC_FILENAME);
7639
+ const configPath = path2.join(worktree, OPENCODE_JSONC_FILENAME);
7313
7640
  if (!await pathExists(configPath))
7314
7641
  return { ok: false, missing: true };
7315
7642
  let text;
7316
7643
  try {
7317
- text = await fs.readFile(configPath, "utf8");
7644
+ text = await fs2.readFile(configPath, "utf8");
7318
7645
  } catch (err) {
7319
7646
  return {
7320
7647
  ok: false,
7321
- error: `[ERROR] Failed reading ${OPENCODE_JSONC_FILENAME}: ${formatError2(err)}`
7648
+ error: `[ERROR] Failed reading ${OPENCODE_JSONC_FILENAME}: ${formatError4(err)}`
7322
7649
  };
7323
7650
  }
7324
7651
  const errors = [];
@@ -7333,16 +7660,16 @@ async function readOpencodeJsonc(worktree) {
7333
7660
  return { ok: true, path: configPath, text, data };
7334
7661
  }
7335
7662
  async function readLegacyOpencodeJson(worktree) {
7336
- const legacyPath = path.join(worktree, OPENCODE_JSON_FILENAME);
7663
+ const legacyPath = path2.join(worktree, OPENCODE_JSON_FILENAME);
7337
7664
  if (!await pathExists(legacyPath))
7338
7665
  return { ok: false, missing: true };
7339
7666
  let text;
7340
7667
  try {
7341
- text = await fs.readFile(legacyPath, "utf8");
7668
+ text = await fs2.readFile(legacyPath, "utf8");
7342
7669
  } catch (err) {
7343
7670
  return {
7344
7671
  ok: false,
7345
- error: `[ERROR] Failed reading ${OPENCODE_JSON_FILENAME}: ${formatError2(err)}`
7672
+ error: `[ERROR] Failed reading ${OPENCODE_JSON_FILENAME}: ${formatError4(err)}`
7346
7673
  };
7347
7674
  }
7348
7675
  let data;
@@ -7351,7 +7678,7 @@ async function readLegacyOpencodeJson(worktree) {
7351
7678
  } catch (err) {
7352
7679
  return {
7353
7680
  ok: false,
7354
- error: `[ERROR] Failed parsing ${OPENCODE_JSON_FILENAME}: ${formatError2(err)}`
7681
+ error: `[ERROR] Failed parsing ${OPENCODE_JSON_FILENAME}: ${formatError4(err)}`
7355
7682
  };
7356
7683
  }
7357
7684
  return { ok: true, path: legacyPath, text, data };
@@ -7380,24 +7707,24 @@ function mergeLegacyIntoJsonc(legacyData, jsoncData) {
7380
7707
  return deepMergePreferTarget(base, legacy);
7381
7708
  }
7382
7709
  async function writeFileAtomic(filePath, text) {
7383
- const dir = path.dirname(filePath);
7384
- const base = path.basename(filePath);
7385
- const tmp = path.join(dir, `.${base}.tmp.${process.pid}.${Date.now()}`);
7386
- await fs.writeFile(tmp, text, "utf8");
7387
- await fs.rename(tmp, filePath);
7710
+ const dir = path2.dirname(filePath);
7711
+ const base = path2.basename(filePath);
7712
+ const tmp = path2.join(dir, `.${base}.tmp.${process.pid}.${Date.now()}`);
7713
+ await fs2.writeFile(tmp, text, "utf8");
7714
+ await fs2.rename(tmp, filePath);
7388
7715
  }
7389
7716
  async function renameLegacyToBak(worktree) {
7390
- const legacyPath = path.join(worktree, OPENCODE_JSON_FILENAME);
7717
+ const legacyPath = path2.join(worktree, OPENCODE_JSON_FILENAME);
7391
7718
  if (!await pathExists(legacyPath))
7392
7719
  return null;
7393
- const baseBak = path.join(worktree, `${OPENCODE_JSON_FILENAME}.bak`);
7720
+ const baseBak = path2.join(worktree, `${OPENCODE_JSON_FILENAME}.bak`);
7394
7721
  let bakPath = baseBak;
7395
7722
  let i = 1;
7396
7723
  while (await pathExists(bakPath)) {
7397
7724
  bakPath = `${baseBak}.${i}`;
7398
7725
  i += 1;
7399
7726
  }
7400
- await fs.rename(legacyPath, bakPath);
7727
+ await fs2.rename(legacyPath, bakPath);
7401
7728
  return bakPath;
7402
7729
  }
7403
7730
  function toolKey(toolName) {
@@ -7470,9 +7797,11 @@ function ensureAgentBase(data, agentName) {
7470
7797
  }
7471
7798
  function ensureAgentDefaults(agent, agentName) {
7472
7799
  const defaultDescription = agentName === "foundry-librarian" ? "Foundry exploration and context gathering (parallel-friendly)" : "Foundry execution agent (uses only enabled palantir-mcp tools)";
7800
+ const defaultMode = agentName === "foundry" ? "all" : "subagent";
7473
7801
  const mode = agent["mode"];
7474
- if (typeof mode !== "string")
7475
- agent["mode"] = "subagent";
7802
+ if (mode !== "subagent" && mode !== "primary" && mode !== "all") {
7803
+ agent["mode"] = defaultMode;
7804
+ }
7476
7805
  if (typeof agent["hidden"] !== "boolean")
7477
7806
  agent["hidden"] = false;
7478
7807
  if (typeof agent["description"] !== "string") {
@@ -7637,11 +7966,11 @@ function stringifyJsonc(data) {
7637
7966
  }
7638
7967
 
7639
7968
  // src/palantir-mcp/repo-scan.ts
7640
- import fs2 from "fs/promises";
7641
- import path2 from "path";
7969
+ import fs3 from "fs/promises";
7970
+ import path3 from "path";
7642
7971
  async function pathExists2(p) {
7643
7972
  try {
7644
- await fs2.access(p);
7973
+ await fs3.access(p);
7645
7974
  return true;
7646
7975
  } catch {
7647
7976
  return false;
@@ -7649,7 +7978,7 @@ async function pathExists2(p) {
7649
7978
  }
7650
7979
  async function readTextFileBounded(p, maxBytes) {
7651
7980
  try {
7652
- const file = await fs2.open(p, "r");
7981
+ const file = await fs3.open(p, "r");
7653
7982
  try {
7654
7983
  const buf = Buffer.alloc(maxBytes);
7655
7984
  const { bytesRead } = await file.read(buf, 0, maxBytes, 0);
@@ -7661,209 +7990,120 @@ async function readTextFileBounded(p, maxBytes) {
7661
7990
  return null;
7662
7991
  }
7663
7992
  }
7664
- function addScore(scores, reasons, profile, delta, reason) {
7665
- scores[profile] += delta;
7666
- reasons.push(reason);
7667
- }
7668
- function pickBestProfile(scores) {
7669
- const threshold = 3;
7670
- const ordered = ["all", "pipelines_transforms", "osdk_functions_ts", "unknown"];
7671
- let best = "unknown";
7672
- let bestScore = -1;
7673
- for (const p of ordered) {
7674
- const s = scores[p];
7675
- if (s > bestScore) {
7676
- best = p;
7677
- bestScore = s;
7678
- }
7679
- }
7680
- if (bestScore < threshold)
7681
- return "unknown";
7682
- return best;
7683
- }
7684
- async function parsePackageJson(p) {
7685
- const text = await readTextFileBounded(p, 200000);
7686
- if (!text)
7993
+ async function getPackageJson(root, context) {
7994
+ if (context.packageJsonLoaded)
7995
+ return context.packageJson;
7996
+ context.packageJsonLoaded = true;
7997
+ const packageJsonPath = path3.join(root, "package.json");
7998
+ const text = await readTextFileBounded(packageJsonPath, 200000);
7999
+ if (!text) {
8000
+ context.packageJson = null;
7687
8001
  return null;
8002
+ }
7688
8003
  try {
7689
- return JSON.parse(text);
8004
+ context.packageJson = JSON.parse(text);
7690
8005
  } catch {
7691
- return null;
7692
- }
7693
- }
7694
- function getAllDependencyKeys(pkg) {
7695
- const deps = pkg.dependencies ?? {};
7696
- const dev = pkg.devDependencies ?? {};
7697
- const peer = pkg.peerDependencies ?? {};
7698
- return Object.keys({ ...deps, ...dev, ...peer });
7699
- }
7700
- async function collectSampleFiles(root, limit) {
7701
- const ignoreDirs = new Set([
7702
- ".git",
7703
- "node_modules",
7704
- "dist",
7705
- ".opencode",
7706
- "data",
7707
- ".memory",
7708
- ".sisyphus",
7709
- ".zed",
7710
- ".mise",
7711
- "coverage",
7712
- "build"
7713
- ]);
7714
- const allowedExts = new Set([
7715
- ".md",
7716
- ".ts",
7717
- ".js",
7718
- ".py",
7719
- ".yaml",
7720
- ".yml",
7721
- ".toml",
7722
- ".json"
7723
- ]);
7724
- const results = [];
7725
- const queue = [root];
7726
- const maxDirs = 1500;
7727
- let visitedDirs = 0;
7728
- while (queue.length > 0 && results.length < limit && visitedDirs < maxDirs) {
7729
- const dir = queue.shift() ?? "";
7730
- if (!dir)
7731
- continue;
7732
- visitedDirs += 1;
7733
- let entries;
7734
- try {
7735
- entries = await fs2.readdir(dir, { withFileTypes: true });
7736
- } catch {
7737
- continue;
8006
+ context.packageJson = null;
8007
+ }
8008
+ return context.packageJson;
8009
+ }
8010
+ async function getPyprojectText(root, context) {
8011
+ if (context.pyprojectLoaded)
8012
+ return context.pyprojectText;
8013
+ context.pyprojectLoaded = true;
8014
+ context.pyprojectText = await readTextFileBounded(path3.join(root, "pyproject.toml"), 200000);
8015
+ return context.pyprojectText;
8016
+ }
8017
+ async function getRequirementsText(root, context) {
8018
+ if (context.requirementsLoaded)
8019
+ return context.requirementsText;
8020
+ context.requirementsLoaded = true;
8021
+ context.requirementsText = await readTextFileBounded(path3.join(root, "requirements.txt"), 200000);
8022
+ return context.requirementsText;
8023
+ }
8024
+ function listDependencyNames(pkg) {
8025
+ if (!pkg)
8026
+ return [];
8027
+ const dependencies = pkg.dependencies ?? {};
8028
+ const devDependencies = pkg.devDependencies ?? {};
8029
+ const peerDependencies = pkg.peerDependencies ?? {};
8030
+ return Object.keys({ ...dependencies, ...devDependencies, ...peerDependencies });
8031
+ }
8032
+ var HARD_SIGNATURES = [
8033
+ {
8034
+ profile: "pipelines_transforms",
8035
+ reason: "Found transforms/ directory.",
8036
+ matches: async (root) => pathExists2(path3.join(root, "transforms"))
8037
+ },
8038
+ {
8039
+ profile: "pipelines_transforms",
8040
+ reason: "Found pipelines/ directory.",
8041
+ matches: async (root) => pathExists2(path3.join(root, "pipelines"))
8042
+ },
8043
+ {
8044
+ profile: "pipelines_transforms",
8045
+ reason: "Found internal/transforms/ directory.",
8046
+ matches: async (root) => pathExists2(path3.join(root, "internal", "transforms"))
8047
+ },
8048
+ {
8049
+ profile: "pipelines_transforms",
8050
+ reason: "Found internal/pipeline/ directory.",
8051
+ matches: async (root) => pathExists2(path3.join(root, "internal", "pipeline"))
8052
+ },
8053
+ {
8054
+ profile: "pipelines_transforms",
8055
+ reason: "pyproject.toml references transforms.api.",
8056
+ matches: async (root, context) => {
8057
+ const text = await getPyprojectText(root, context);
8058
+ if (!text)
8059
+ return false;
8060
+ return /\btransforms\.api\b/i.test(text);
7738
8061
  }
7739
- for (const ent of entries) {
7740
- if (results.length >= limit)
7741
- break;
7742
- const full = path2.join(dir, ent.name);
7743
- if (ent.isDirectory()) {
7744
- if (ignoreDirs.has(ent.name))
7745
- continue;
7746
- queue.push(full);
7747
- continue;
7748
- }
7749
- if (!ent.isFile())
7750
- continue;
7751
- const ext = path2.extname(ent.name);
7752
- if (!allowedExts.has(ext))
7753
- continue;
7754
- results.push(full);
8062
+ },
8063
+ {
8064
+ profile: "pipelines_transforms",
8065
+ reason: "requirements.txt references transforms.api.",
8066
+ matches: async (root, context) => {
8067
+ const text = await getRequirementsText(root, context);
8068
+ if (!text)
8069
+ return false;
8070
+ return /\btransforms\.api\b/i.test(text);
8071
+ }
8072
+ },
8073
+ {
8074
+ profile: "osdk_functions_ts",
8075
+ reason: "package.json includes an @osdk/* dependency.",
8076
+ matches: async (root, context) => {
8077
+ const depNames = listDependencyNames(await getPackageJson(root, context));
8078
+ return depNames.some((name) => name.startsWith("@osdk/"));
7755
8079
  }
7756
8080
  }
7757
- return results;
7758
- }
8081
+ ];
7759
8082
  async function scanRepoForProfile(root) {
7760
- const scores = {
7761
- pipelines_transforms: 0,
7762
- osdk_functions_ts: 0,
7763
- all: 0,
7764
- unknown: 0
8083
+ const context = {
8084
+ packageJson: null,
8085
+ packageJsonLoaded: false,
8086
+ pyprojectText: null,
8087
+ pyprojectLoaded: false,
8088
+ requirementsText: null,
8089
+ requirementsLoaded: false
7765
8090
  };
7766
- const reasons = [];
7767
- const candidates = [
7768
- { p: "pnpm-workspace.yaml", profile: "all", score: 3, reason: "Found pnpm-workspace.yaml" },
7769
- { p: "turbo.json", profile: "all", score: 3, reason: "Found turbo.json" },
7770
- { p: "nx.json", profile: "all", score: 3, reason: "Found nx.json" },
7771
- { p: "lerna.json", profile: "all", score: 3, reason: "Found lerna.json" }
7772
- ];
7773
- for (const c of candidates) {
7774
- if (await pathExists2(path2.join(root, c.p))) {
7775
- addScore(scores, reasons, c.profile, c.score, c.reason);
7776
- }
7777
- }
7778
- const packageJsonPath = path2.join(root, "package.json");
7779
- const pyprojectPath = path2.join(root, "pyproject.toml");
7780
- const requirementsPath = path2.join(root, "requirements.txt");
7781
- const hasPackageJson = await pathExists2(packageJsonPath);
7782
- const hasPyproject = await pathExists2(pyprojectPath);
7783
- if (hasPackageJson && hasPyproject) {
7784
- addScore(scores, reasons, "all", 2, "Found both package.json and pyproject.toml");
7785
- }
7786
- if (hasPackageJson) {
7787
- const pkg = await parsePackageJson(packageJsonPath);
7788
- if (pkg) {
7789
- const depKeys = getAllDependencyKeys(pkg);
7790
- if (depKeys.some((d) => d.toLowerCase().includes("osdk") || d.startsWith("@osdk/"))) {
7791
- addScore(scores, reasons, "osdk_functions_ts", 5, "package.json includes OSDK dependency");
7792
- }
7793
- if (depKeys.some((d) => d.toLowerCase().includes("palantir") || d.toLowerCase().includes("foundry"))) {
7794
- addScore(scores, reasons, "pipelines_transforms", 1, "package.json references palantir/foundry");
7795
- }
7796
- }
7797
- }
7798
- if (hasPyproject) {
7799
- const text = await readTextFileBounded(pyprojectPath, 200000);
7800
- if (text) {
7801
- if (/foundry/i.test(text)) {
7802
- addScore(scores, reasons, "pipelines_transforms", 1, "pyproject.toml mentions foundry");
7803
- }
7804
- if (/transform/i.test(text)) {
7805
- addScore(scores, reasons, "pipelines_transforms", 1, "pyproject.toml mentions transform");
7806
- }
7807
- }
7808
- }
7809
- if (await pathExists2(requirementsPath)) {
7810
- const text = await readTextFileBounded(requirementsPath, 200000);
7811
- if (text) {
7812
- if (/foundry/i.test(text)) {
7813
- addScore(scores, reasons, "pipelines_transforms", 1, "requirements.txt mentions foundry");
7814
- }
7815
- if (/transform/i.test(text)) {
7816
- addScore(scores, reasons, "pipelines_transforms", 1, "requirements.txt mentions transform");
7817
- }
8091
+ for (const signature of HARD_SIGNATURES) {
8092
+ if (await signature.matches(root, context)) {
8093
+ return {
8094
+ profile: signature.profile,
8095
+ reasons: [signature.reason]
8096
+ };
7818
8097
  }
7819
8098
  }
7820
- if (await pathExists2(path2.join(root, "pipelines"))) {
7821
- addScore(scores, reasons, "pipelines_transforms", 3, "Found pipelines/ directory");
7822
- }
7823
- if (await pathExists2(path2.join(root, "transforms"))) {
7824
- addScore(scores, reasons, "pipelines_transforms", 3, "Found transforms/ directory");
7825
- }
7826
- if (await pathExists2(path2.join(root, "internal", "pipeline"))) {
7827
- addScore(scores, reasons, "pipelines_transforms", 3, "Found internal/pipeline/ directory");
7828
- }
7829
- if (await pathExists2(path2.join(root, "internal", "transforms"))) {
7830
- addScore(scores, reasons, "pipelines_transforms", 3, "Found internal/transforms/ directory");
7831
- }
7832
- if (await pathExists2(path2.join(root, "functions"))) {
7833
- addScore(scores, reasons, "osdk_functions_ts", 2, "Found functions/ directory");
7834
- }
7835
- if (await pathExists2(path2.join(root, "src", "functions"))) {
7836
- addScore(scores, reasons, "osdk_functions_ts", 2, "Found src/functions/ directory");
7837
- }
7838
- const sampleFiles = await collectSampleFiles(root, 50);
7839
- const maxTotalBytes = 200000;
7840
- let consumedBytes = 0;
7841
- let pipelinesHits = 0;
7842
- let osdkHits = 0;
7843
- for (const p of sampleFiles) {
7844
- if (consumedBytes >= maxTotalBytes)
7845
- break;
7846
- const text = await readTextFileBounded(p, 8000);
7847
- if (!text)
7848
- continue;
7849
- consumedBytes += text.length;
7850
- if (/\b(pipeline|pipelines|transform|transforms)\b/i.test(text))
7851
- pipelinesHits += 1;
7852
- if (/\bosdk\b/i.test(text))
7853
- osdkHits += 1;
7854
- }
7855
- if (pipelinesHits >= 3) {
7856
- addScore(scores, reasons, "pipelines_transforms", 2, `Keyword sample hits pipelines/transforms (${pipelinesHits})`);
7857
- }
7858
- if (osdkHits >= 2) {
7859
- addScore(scores, reasons, "osdk_functions_ts", 2, `Keyword sample hits osdk (${osdkHits})`);
7860
- }
7861
- const profile = pickBestProfile(scores);
7862
- return { profile, scores, reasons };
8099
+ return {
8100
+ profile: "default",
8101
+ reasons: ["No hard signature matched. Falling back to default profile."]
8102
+ };
7863
8103
  }
7864
8104
 
7865
8105
  // src/palantir-mcp/commands.ts
7866
- function formatError3(err) {
8106
+ function formatError5(err) {
7867
8107
  return err instanceof Error ? err.toString() : String(err);
7868
8108
  }
7869
8109
  function isRecord2(value) {
@@ -7905,15 +8145,33 @@ function formatPatchSummary(patch) {
7905
8145
  return lines.join(`
7906
8146
  `);
7907
8147
  }
8148
+ function getTrimmedEnvVar(name) {
8149
+ const raw = process.env[name];
8150
+ if (!raw)
8151
+ return null;
8152
+ const trimmed = raw.trim();
8153
+ return trimmed.length > 0 ? trimmed : null;
8154
+ }
8155
+ function formatMissingEnvGuidance(missingVars) {
8156
+ const exports = missingVars.map((name) => `export ${name}=...`).join(`
8157
+ `);
8158
+ return [
8159
+ `[ERROR] Missing required environment: ${missingVars.join(", ")}`,
8160
+ "",
8161
+ "Foundry MCP setup is inactive until these are exported.",
8162
+ "Local docs tools (`get_doc_page`, `list_all_docs`) remain available.",
8163
+ "",
8164
+ "Set:",
8165
+ ` ${exports}`
8166
+ ].join(`
8167
+ `);
8168
+ }
7908
8169
  async function resolveProfile(worktree) {
7909
8170
  try {
7910
8171
  const scan = await scanRepoForProfile(worktree);
7911
- return { profile: scan.profile, reasons: scan.reasons };
8172
+ return scan.profile;
7912
8173
  } catch (err) {
7913
- return {
7914
- profile: "unknown",
7915
- reasons: [`Repo scan failed; falling back to unknown: ${formatError3(err)}`]
7916
- };
8174
+ return "default";
7917
8175
  }
7918
8176
  }
7919
8177
  function hasPalantirToolToggles(data, agentName) {
@@ -7937,11 +8195,11 @@ function isAutoBootstrapAlreadyComplete(data) {
7937
8195
  }
7938
8196
  async function autoBootstrapPalantirMcpIfConfigured(worktree) {
7939
8197
  try {
7940
- const tokenRaw = process.env.FOUNDRY_TOKEN;
7941
- const urlRaw = process.env.FOUNDRY_URL;
7942
- if (!tokenRaw || tokenRaw.trim().length === 0)
8198
+ const tokenRaw = getTrimmedEnvVar("FOUNDRY_TOKEN");
8199
+ const urlRaw = getTrimmedEnvVar("FOUNDRY_URL");
8200
+ if (!tokenRaw)
7943
8201
  return;
7944
- if (!urlRaw || urlRaw.trim().length === 0)
8202
+ if (!urlRaw)
7945
8203
  return;
7946
8204
  const normalized = normalizeFoundryBaseUrl(urlRaw);
7947
8205
  if ("error" in normalized)
@@ -7959,7 +8217,7 @@ async function autoBootstrapPalantirMcpIfConfigured(worktree) {
7959
8217
  return;
7960
8218
  const existingMcpUrlRaw = extractFoundryApiUrlFromMcpConfig(merged);
7961
8219
  const existingMcpUrlNorm = existingMcpUrlRaw ? normalizeFoundryBaseUrl(existingMcpUrlRaw) : null;
7962
- const { profile } = await resolveProfile(worktree);
8220
+ const profile = await resolveProfile(worktree);
7963
8221
  const discoveryUrl = existingMcpUrlNorm && "url" in existingMcpUrlNorm ? existingMcpUrlNorm.url : normalized.url;
7964
8222
  const toolNames = await listPalantirMcpTools(discoveryUrl);
7965
8223
  if (toolNames.length === 0)
@@ -7976,7 +8234,7 @@ async function autoBootstrapPalantirMcpIfConfigured(worktree) {
7976
8234
  const changed = needsMigration || stableJsonStringify(merged) !== stableJsonStringify(patch.data);
7977
8235
  if (!changed)
7978
8236
  return;
7979
- const outPath = path3.join(worktree, OPENCODE_JSONC_FILENAME);
8237
+ const outPath = path4.join(worktree, OPENCODE_JSONC_FILENAME);
7980
8238
  const text = stringifyJsonc(patch.data);
7981
8239
  await writeFileAtomic(outPath, text);
7982
8240
  if (readLegacy.ok) {
@@ -7988,18 +8246,21 @@ async function autoBootstrapPalantirMcpIfConfigured(worktree) {
7988
8246
  }
7989
8247
  async function setupPalantirMcp(worktree, rawArgs) {
7990
8248
  const urlFromArgs = rawArgs.trim();
7991
- const urlFromEnvRaw = process.env.FOUNDRY_URL;
7992
- const urlFromEnv = typeof urlFromEnvRaw === "string" ? urlFromEnvRaw.trim() : "";
7993
- const urlArg = urlFromArgs || urlFromEnv;
8249
+ const urlFromEnv = getTrimmedEnvVar("FOUNDRY_URL");
8250
+ const token = getTrimmedEnvVar("FOUNDRY_TOKEN");
8251
+ const urlArg = urlFromArgs || urlFromEnv || "";
7994
8252
  if (!urlArg) {
8253
+ const missingVars = ["FOUNDRY_URL"];
8254
+ if (!token)
8255
+ missingVars.push("FOUNDRY_TOKEN");
7995
8256
  return [
7996
- "[ERROR] Missing Foundry base URL.",
8257
+ formatMissingEnvGuidance(missingVars),
7997
8258
  "",
7998
8259
  "Usage:",
7999
8260
  " /setup-palantir-mcp <foundry_api_url>",
8000
8261
  "",
8001
- "Or set:",
8002
- " export FOUNDRY_URL=<foundry_api_url>",
8262
+ "Or pass URL directly:",
8263
+ " /setup-palantir-mcp https://YOUR-STACK.palantirfoundry.com",
8003
8264
  "",
8004
8265
  "Example:",
8005
8266
  " /setup-palantir-mcp https://23dimethyl.usw-3.palantirfoundry.com"
@@ -8009,17 +8270,8 @@ async function setupPalantirMcp(worktree, rawArgs) {
8009
8270
  const normalized = normalizeFoundryBaseUrl(urlArg);
8010
8271
  if ("error" in normalized)
8011
8272
  return `[ERROR] ${normalized.error}`;
8012
- if (!process.env.FOUNDRY_TOKEN) {
8013
- return [
8014
- "[ERROR] FOUNDRY_TOKEN is not set in your environment.",
8015
- "",
8016
- "palantir-mcp tool discovery requires a token. Export FOUNDRY_TOKEN and retry.",
8017
- "",
8018
- "Tip: if `echo $FOUNDRY_TOKEN` prints a value but this still errors, it is likely " + "not exported.",
8019
- "Run `export FOUNDRY_TOKEN` (or set `export FOUNDRY_TOKEN=...` in your shell " + "secrets) and retry."
8020
- ].join(`
8021
- `);
8022
- }
8273
+ if (!token)
8274
+ return formatMissingEnvGuidance(["FOUNDRY_TOKEN"]);
8023
8275
  const readJsonc = await readOpencodeJsonc(worktree);
8024
8276
  if (!readJsonc.ok && "error" in readJsonc)
8025
8277
  return readJsonc.error;
@@ -8031,13 +8283,13 @@ async function setupPalantirMcp(worktree, rawArgs) {
8031
8283
  const merged = readLegacy.ok ? mergeLegacyIntoJsonc(readLegacy.data, base) : { ...base };
8032
8284
  const existingMcpUrlRaw = extractFoundryApiUrlFromMcpConfig(merged);
8033
8285
  const existingMcpUrlNorm = existingMcpUrlRaw ? normalizeFoundryBaseUrl(existingMcpUrlRaw) : null;
8034
- const { profile } = await resolveProfile(worktree);
8286
+ const profile = await resolveProfile(worktree);
8035
8287
  const discoveryUrl = existingMcpUrlNorm && "url" in existingMcpUrlNorm ? existingMcpUrlNorm.url : normalized.url;
8036
8288
  let toolNames;
8037
8289
  try {
8038
8290
  toolNames = await listPalantirMcpTools(discoveryUrl);
8039
8291
  } catch (err) {
8040
- return `[ERROR] ${formatError3(err)}`;
8292
+ return `[ERROR] ${formatError5(err)}`;
8041
8293
  }
8042
8294
  if (toolNames.length === 0)
8043
8295
  return "[ERROR] palantir-mcp tool discovery returned no tools.";
@@ -8048,12 +8300,12 @@ async function setupPalantirMcp(worktree, rawArgs) {
8048
8300
  profile,
8049
8301
  allowlist
8050
8302
  });
8051
- const outPath = path3.join(worktree, OPENCODE_JSONC_FILENAME);
8303
+ const outPath = path4.join(worktree, OPENCODE_JSONC_FILENAME);
8052
8304
  const text = stringifyJsonc(patch.data);
8053
8305
  try {
8054
8306
  await writeFileAtomic(outPath, text);
8055
8307
  } catch (err) {
8056
- return `[ERROR] Failed writing ${OPENCODE_JSONC_FILENAME}: ${formatError3(err)}`;
8308
+ return `[ERROR] Failed writing ${OPENCODE_JSONC_FILENAME}: ${formatError5(err)}`;
8057
8309
  }
8058
8310
  let bakInfo = "";
8059
8311
  if (readLegacy.ok) {
@@ -8064,7 +8316,7 @@ async function setupPalantirMcp(worktree, rawArgs) {
8064
8316
  Migrated legacy ${readLegacy.path} -> ${bakPath}`;
8065
8317
  } catch (err) {
8066
8318
  bakInfo = `
8067
- [ERROR] Wrote ${OPENCODE_JSONC_FILENAME}, but failed to rename legacy ${readLegacy.path}: ${formatError3(err)}`;
8319
+ [ERROR] Wrote ${OPENCODE_JSONC_FILENAME}, but failed to rename legacy ${readLegacy.path}: ${formatError5(err)}`;
8068
8320
  }
8069
8321
  }
8070
8322
  const warnings = [...normalized.warnings, ...patch.warnings];
@@ -8081,17 +8333,8 @@ Migrated legacy ${readLegacy.path} -> ${bakPath}`;
8081
8333
  `);
8082
8334
  }
8083
8335
  async function rescanPalantirMcpTools(worktree) {
8084
- if (!process.env.FOUNDRY_TOKEN) {
8085
- return [
8086
- "[ERROR] FOUNDRY_TOKEN is not set in your environment.",
8087
- "",
8088
- "palantir-mcp tool discovery requires a token. Export FOUNDRY_TOKEN and retry.",
8089
- "",
8090
- "Tip: if `echo $FOUNDRY_TOKEN` prints a value but this still errors, it is likely " + "not exported.",
8091
- "Run `export FOUNDRY_TOKEN` (or set `export FOUNDRY_TOKEN=...` in your shell " + "secrets) and retry."
8092
- ].join(`
8093
- `);
8094
- }
8336
+ if (!getTrimmedEnvVar("FOUNDRY_TOKEN"))
8337
+ return formatMissingEnvGuidance(["FOUNDRY_TOKEN"]);
8095
8338
  const readJsonc = await readOpencodeJsonc(worktree);
8096
8339
  if (!readJsonc.ok) {
8097
8340
  if ("missing" in readJsonc) {
@@ -8113,23 +8356,23 @@ async function rescanPalantirMcpTools(worktree) {
8113
8356
  const normalized = normalizeFoundryBaseUrl(foundryUrlRaw);
8114
8357
  if ("error" in normalized)
8115
8358
  return `[ERROR] Invalid Foundry URL in config: ${normalized.error}`;
8116
- const { profile } = await resolveProfile(worktree);
8359
+ const profile = await resolveProfile(worktree);
8117
8360
  let toolNames;
8118
8361
  try {
8119
8362
  toolNames = await listPalantirMcpTools(normalized.url);
8120
8363
  } catch (err) {
8121
- return `[ERROR] ${formatError3(err)}`;
8364
+ return `[ERROR] ${formatError5(err)}`;
8122
8365
  }
8123
8366
  if (toolNames.length === 0)
8124
8367
  return "[ERROR] palantir-mcp tool discovery returned no tools.";
8125
8368
  const allowlist = computeAllowedTools(profile, toolNames);
8126
8369
  const patch = patchConfigForRescan(baseData, { toolNames, profile, allowlist });
8127
- const outPath = path3.join(worktree, OPENCODE_JSONC_FILENAME);
8370
+ const outPath = path4.join(worktree, OPENCODE_JSONC_FILENAME);
8128
8371
  const text = stringifyJsonc(patch.data);
8129
8372
  try {
8130
8373
  await writeFileAtomic(outPath, text);
8131
8374
  } catch (err) {
8132
- return `[ERROR] Failed writing ${OPENCODE_JSONC_FILENAME}: ${formatError3(err)}`;
8375
+ return `[ERROR] Failed writing ${OPENCODE_JSONC_FILENAME}: ${formatError5(err)}`;
8133
8376
  }
8134
8377
  const warnings = [...normalized.warnings, ...patch.warnings];
8135
8378
  return [
@@ -8142,37 +8385,77 @@ async function rescanPalantirMcpTools(worktree) {
8142
8385
  }
8143
8386
 
8144
8387
  // src/index.ts
8145
- var NO_DB_MESSAGE = "Documentation database not found. Run /refresh-docs to download Palantir Foundry documentation.";
8146
8388
  var plugin = async (input) => {
8147
- const dbPath = path4.join(input.worktree, "data", "docs.parquet");
8389
+ const dbPath = path5.join(input.worktree, "data", "docs.parquet");
8148
8390
  let dbInstance = null;
8149
- let autoBootstrapStarted = false;
8391
+ let dbInitPromise = null;
8392
+ let autoBootstrapMcpStarted = false;
8393
+ let autoBootstrapDocsStarted = false;
8394
/**
 * Render an unknown thrown value as a short message string.
 * @param {unknown} err - Value caught from a throw/rejection.
 * @returns {string} "Name: message" for Error instances, String(err) otherwise.
 */
function formatError6(err) {
  if (err instanceof Error) {
    return err.toString();
  }
  return String(err);
}
8397
/**
 * Format a byte count as a human-readable size (B/KB/MB/GB, 1024-based).
 * Non-finite or negative input renders as "0 B". One decimal is shown
 * only for scaled values below 10 (e.g. "1.5 KB" but "512 B", "10 KB").
 * @param {number} bytes - Raw byte count.
 * @returns {string} Formatted size string.
 */
function formatBytes(bytes) {
  if (!Number.isFinite(bytes) || bytes < 0)
    return "0 B";
  const units = ["B", "KB", "MB", "GB"];
  let value = bytes;
  let index = 0;
  // Scale down by 1024 until under 1024 or the largest unit is reached.
  for (; value >= 1024 && index < units.length - 1; index += 1) {
    value /= 1024;
  }
  const wholeNumber = value >= 10 || index === 0;
  return `${value.toFixed(wholeNumber ? 0 : 1)} ${units[index]}`;
}
8409
+ }
8410
/**
 * List which of the required Foundry env vars are absent or blank.
 * @returns {string[]} Subset of ["FOUNDRY_URL", "FOUNDRY_TOKEN"], in that
 *   order, containing the unset/empty/whitespace-only ones.
 */
function getMissingFoundryEnvVars() {
  return ["FOUNDRY_URL", "FOUNDRY_TOKEN"].filter((name) => {
    const value = process.env[name];
    return !value || value.trim().length === 0;
  });
}
8150
8420
/**
 * Ensure the plugin's slash-command definitions exist on the opencode config.
 * Only fills in missing entries — user-defined commands are never overwritten.
 * Descriptions are suffixed with missing-env hints when FOUNDRY_URL /
 * FOUNDRY_TOKEN are not exported.
 * @param {object} cfg - Mutable opencode config object.
 */
function ensureCommandDefinitions(cfg) {
  if (!cfg.command)
    cfg.command = {};
  const missingEnvVars = getMissingFoundryEnvVars();
  const hasMissingToken = missingEnvVars.includes("FOUNDRY_TOKEN");
  // Env hints surfaced inline so users see setup blockers in /help output.
  const setupEnvSuffix = missingEnvVars.length === 0 ? "" : ` Missing env: ${missingEnvVars.join(", ")}. Local docs tools remain available.`;
  const rescanEnvSuffix = hasMissingToken ? " Missing env: FOUNDRY_TOKEN. Local docs tools remain available." : "";
  const defaults = {
    "refresh-docs": {
      template: "Refresh Palantir docs snapshot (recommended).",
      description: "Force refresh data/docs.parquet from a prebuilt snapshot (download/copy; no rescrape)."
    },
    "refresh-docs-rescrape": {
      template: "Refresh docs by live rescrape (unsafe/experimental).",
      description: "Explicit fallback: rescrape palantir.com docs and rebuild data/docs.parquet. Slower and less reliable than /refresh-docs."
    },
    "setup-palantir-mcp": {
      template: "Set up palantir-mcp for this repo.",
      description: `Guided MCP setup for Foundry. Usage: /setup-palantir-mcp <foundry_api_url>. Requires FOUNDRY_TOKEN for tool discovery.${setupEnvSuffix}`
    },
    "rescan-palantir-mcp-tools": {
      template: "Re-scan palantir-mcp tools and patch tool gating.",
      description: `Re-discovers the palantir-mcp tool list and adds missing palantir-mcp_* toggles (does not overwrite existing toggles). Requires FOUNDRY_TOKEN.${rescanEnvSuffix}`
    }
  };
  for (const [name, definition] of Object.entries(defaults)) {
    if (!cfg.command[name])
      cfg.command[name] = definition;
  }
}
8172
8452
  function ensureAgentDefaults2(agent, agentName) {
8173
- const defaultDescription = agentName === "foundry-librarian" ? "Foundry exploration and context gathering (parallel-friendly)" : "Foundry execution agent (uses only enabled palantir-mcp tools)";
8453
+ const missingEnvVars = getMissingFoundryEnvVars();
8454
+ const envSuffix = missingEnvVars.length === 0 ? "" : ` (inactive until ${missingEnvVars.join(" and ")} ${missingEnvVars.length === 1 ? "is" : "are"} exported)`;
8455
+ const defaultDescription = agentName === "foundry-librarian" ? `Foundry exploration and context gathering (parallel-friendly)${envSuffix}` : `Foundry execution agent (uses only enabled palantir-mcp tools)${envSuffix}`;
8456
+ const defaultMode = agentName === "foundry" ? "all" : "subagent";
8174
8457
  if (agent.mode !== "subagent" && agent.mode !== "primary" && agent.mode !== "all") {
8175
- agent.mode = "subagent";
8458
+ agent.mode = defaultMode;
8176
8459
  }
8177
8460
  if (typeof agent["hidden"] !== "boolean")
8178
8461
  agent["hidden"] = false;
@@ -8220,8 +8503,8 @@ var plugin = async (input) => {
8220
8503
  ensureAgentDefaults2(foundry, "foundry");
8221
8504
  cfg.agent.foundry = foundry;
8222
8505
  }
8223
- function maybeStartAutoBootstrap() {
8224
- if (autoBootstrapStarted)
8506
+ function maybeStartAutoBootstrapMcp() {
8507
+ if (autoBootstrapMcpStarted)
8225
8508
  return;
8226
8509
  const token = process.env.FOUNDRY_TOKEN;
8227
8510
  const url = process.env.FOUNDRY_URL;
@@ -8229,21 +8512,106 @@ var plugin = async (input) => {
8229
8512
  return;
8230
8513
  if (!url || url.trim().length === 0)
8231
8514
  return;
8232
- autoBootstrapStarted = true;
8515
+ autoBootstrapMcpStarted = true;
8233
8516
  autoBootstrapPalantirMcpIfConfigured(input.worktree);
8234
8517
  }
8235
- async function getDb() {
8236
- if (!dbInstance) {
8237
- dbInstance = await createDatabase(dbPath);
8238
- }
8239
- return dbInstance;
8518
/**
 * Kick off the docs-snapshot bootstrap at most once per plugin lifetime.
 * Fire-and-forget: failures are swallowed here because the docs tools
 * re-run ensureDocsAvailable() and surface errors on demand.
 */
function maybeStartAutoBootstrapDocs() {
  if (!autoBootstrapDocsStarted) {
    autoBootstrapDocsStarted = true;
    ensureDocsAvailable().catch(() => {});
  }
}
8241
- async function dbExists() {
8242
- return Bun.file(dbPath).exists();
8524
/**
 * Drop the cached database handle and any in-flight init promise so the
 * next getDb() call re-opens from disk (used after docs refresh).
 *
 * Fix: the references are cleared in a finally block, so even if
 * closeDatabase() throws, the plugin is not left holding a stale handle;
 * the close error still propagates to the caller.
 */
function resetDb() {
  try {
    if (dbInstance)
      closeDatabase(dbInstance);
  } finally {
    dbInstance = null;
    dbInitPromise = null;
  }
}
8530
/**
 * Lazily open the docs database, deduplicating concurrent callers: the
 * first call starts createDatabase(dbPath) and every overlapping call
 * awaits that same in-flight promise. The promise slot is cleared on
 * settle so a failed open can be retried.
 * @returns {Promise<*>} The shared database instance.
 */
async function getDb() {
  if (dbInstance)
    return dbInstance;
  if (!dbInitPromise) {
    dbInitPromise = (async () => {
      try {
        dbInstance = await createDatabase(dbPath);
        return dbInstance;
      } finally {
        // Clear on success *and* failure so errors do not wedge init.
        dbInitPromise = null;
      }
    })();
  }
  return dbInitPromise;
}
8244
8543
/**
 * Append a plain-text part to a hook output's parts list.
 * @param {{ parts: Array<object> }} output - Mutable hook output.
 * @param {string} text - Text content to append.
 */
function pushText(output, text) {
  const part = { type: "text", text };
  output.parts.push(part);
}
/**
 * Build the error message shown when the prebuilt docs snapshot could not
 * be obtained, with recovery steps for the user.
 * @param {unknown} err - The failure from the snapshot pipeline.
 * @returns {string} Multi-line error text.
 */
function formatSnapshotFailure(err) {
  const lines = [];
  lines.push("[ERROR] Unable to obtain Palantir docs snapshot.");
  lines.push("");
  lines.push(`Reason: ${formatError6(err)}`);
  lines.push("");
  lines.push("Next steps:");
  lines.push("- Retry /refresh-docs (recommended prebuilt snapshot path).");
  lines.push("- If snapshot download is blocked, run /refresh-docs-rescrape (unsafe/experimental).");
  return lines.join("\n");
}
+ }
8558
/**
 * Render a snapshot-refresh pipeline event as a single key=value log line.
 * @param {{ type: string }} event - Event emitted by ensureDocsParquet.
 * @returns {string | null} Log line, or null for unrecognized event types.
 */
function formatSnapshotRefreshEvent(event) {
  switch (event.type) {
    case "skip-existing":
      return `snapshot_status=already_present bytes=${event.bytes}`;
    case "download-start":
      return `download_attempt url=${event.url}`;
    case "download-failed":
      return `download_failed url=${event.url} error=${event.error}`;
    case "download-success":
      return `download_succeeded url=${event.url} bytes=${event.bytes}`;
    case "copy-start":
      return `copy_attempt source=${event.sourcePath}`;
    case "copy-success":
      return `copy_succeeded source=${event.sourcePath} bytes=${event.bytes}`;
    default:
      return null;
  }
}
8573
/**
 * Ensure data/docs.parquet exists, delegating to the snapshot pipeline.
 * @param {{ force?: boolean, onEvent?: Function }} [options] - Pass
 *   force=true to re-fetch even if the file exists; onEvent receives
 *   pipeline progress events.
 * @returns {Promise<*>} Result object from ensureDocsParquet.
 */
async function ensureDocsAvailable({ force, onEvent } = {}) {
  return ensureDocsParquet({
    dbPath,
    // Strict comparison: only a literal `true` forces a refresh.
    force: force === true,
    pluginDirectory: input.directory,
    onEvent
  });
}
8581
/**
 * Guard used by the docs tools: make sure the parquet snapshot exists
 * before serving a query.
 * @returns {Promise<string | null>} null when docs are ready, otherwise a
 *   formatted snapshot-failure message to return to the user.
 */
async function ensureDocsReadyForTool() {
  try {
    await ensureDocsAvailable();
  } catch (err) {
    return formatSnapshotFailure(err);
  }
  return null;
}
8589
/**
 * Build the error message for a failed /refresh-docs-rescrape run,
 * pointing the user back at the recommended snapshot flow.
 * @param {unknown} err - The failure from the rescrape pipeline.
 * @returns {string} Multi-line error text.
 */
function formatRescrapeFailure(err) {
  const reason = formatError6(err);
  return `[ERROR] /refresh-docs-rescrape failed.\n\nReason: ${reason}\n\nTry /refresh-docs for the recommended prebuilt snapshot flow.`;
}
8599
/**
 * Accumulate a rescrape progress event into the caller's buffers.
 * "discovered" and "progress" events append to progressLines; at most
 * five "page-failed" events are sampled into failureSamples; any other
 * event type is ignored.
 * @param {{ type: string }} event - Progress event from fetchAllDocs.
 * @param {string[]} progressLines - Mutable progress log (appended to).
 * @param {string[]} failureSamples - Mutable failure sample list (capped at 5).
 */
function formatRescrapeProgressEvent(event, progressLines, failureSamples) {
  switch (event.type) {
    case "discovered":
      progressLines.push(`discovered_pages=${event.totalPages}`);
      break;
    case "progress":
      progressLines.push(`processed_pages=${event.processedPages}/${event.totalPages}`);
      break;
    case "page-failed":
      if (failureSamples.length < 5) {
        failureSamples.push(`url=${event.url} error=${event.error}`);
      }
      break;
    default:
      break;
  }
}
8247
8615
  function toPathname(inputUrl) {
8248
8616
  const trimmed = inputUrl.trim();
8249
8617
  if (trimmed.length === 0)
@@ -8297,8 +8665,8 @@ var plugin = async (input) => {
8297
8665
/**
 * Check whether a docs page URL belongs to the requested scope.
 * The "all" scope matches everything; otherwise the pathname must start
 * with "/<scope>/" or "/docs/<scope>/".
 * @param {string} pageUrl - Page URL (resolved to a pathname via toPathname).
 * @param {string} scope - One of "foundry" | "apollo" | "gotham" | "all".
 * @returns {boolean}
 */
function isInScope(pageUrl, scope) {
  if (scope === "all")
    return true;
  const pathname = toPathname(pageUrl);
  const prefixes = [`/${scope}/`, `/docs/${scope}/`];
  return prefixes.some((prefix) => pathname.startsWith(prefix));
}
8303
8671
  function tokenizeQuery(query) {
8304
8672
  const tokens = query.toLowerCase().trim().split(/[\s/._-]+/g).map((t) => t.trim()).filter((t) => t.length > 0);
@@ -8308,13 +8676,13 @@ var plugin = async (input) => {
8308
8676
  const q = query.toLowerCase().trim();
8309
8677
  if (q.length === 0)
8310
8678
  return 0;
8311
- const path5 = toPathname(page.url).toLowerCase();
8679
+ const path6 = toPathname(page.url).toLowerCase();
8312
8680
  const title = page.title.toLowerCase();
8313
- if (path5 === q)
8681
+ if (path6 === q)
8314
8682
  return 2000;
8315
- if (path5 === toPathname(q).toLowerCase())
8683
+ if (path6 === toPathname(q).toLowerCase())
8316
8684
  return 2000;
8317
- if (path5.includes(q))
8685
+ if (path6.includes(q))
8318
8686
  return 1200;
8319
8687
  if (title.includes(q))
8320
8688
  return 1000;
@@ -8325,10 +8693,10 @@ var plugin = async (input) => {
8325
8693
  for (const t of tokens) {
8326
8694
  if (title.includes(t))
8327
8695
  score += 40;
8328
- if (path5.includes(t))
8696
+ if (path6.includes(t))
8329
8697
  score += 30;
8330
8698
  }
8331
- if (path5.startsWith(q))
8699
+ if (path6.startsWith(q))
8332
8700
  score += 100;
8333
8701
  if (title.startsWith(q))
8334
8702
  score += 100;
@@ -8338,7 +8706,8 @@ var plugin = async (input) => {
8338
8706
  config: async (cfg) => {
8339
8707
  ensureCommandDefinitions(cfg);
8340
8708
  ensureAgentDefinitions(cfg);
8341
- maybeStartAutoBootstrap();
8709
+ maybeStartAutoBootstrapMcp();
8710
+ maybeStartAutoBootstrapDocs();
8342
8711
  },
8343
8712
  tool: {
8344
8713
  get_doc_page: tool({
@@ -8349,8 +8718,9 @@ var plugin = async (input) => {
8349
8718
  scope: tool.schema.enum(["foundry", "apollo", "gotham", "all"]).optional().describe("Scope to search within when using query or fuzzy matching (default: foundry).")
8350
8719
  },
8351
8720
  async execute(args) {
8352
- if (!await dbExists())
8353
- return NO_DB_MESSAGE;
8721
+ const docsError = await ensureDocsReadyForTool();
8722
+ if (docsError)
8723
+ return docsError;
8354
8724
  const scope = parseScope(args.scope);
8355
8725
  if (!scope) {
8356
8726
  return [
@@ -8422,8 +8792,9 @@ ${bestPage.content}`;
8422
8792
  query: tool.schema.string().optional().describe("Optional query to filter/rank results by title/URL (case-insensitive).")
8423
8793
  },
8424
8794
  async execute(args) {
8425
- if (!await dbExists())
8426
- return NO_DB_MESSAGE;
8795
+ const docsError = await ensureDocsReadyForTool();
8796
+ if (docsError)
8797
+ return docsError;
8427
8798
  const scope = parseScope(args.scope);
8428
8799
  if (!scope) {
8429
8800
  return [
@@ -8502,12 +8873,65 @@ ${bestPage.content}`;
8502
8873
  },
8503
8874
  "command.execute.before": async (hookInput, output) => {
8504
8875
  if (hookInput.command === "refresh-docs") {
8505
- const result = await fetchAllDocs(dbPath);
8506
- if (dbInstance) {
8507
- closeDatabase(dbInstance);
8508
- dbInstance = null;
8876
+ const progressLines = [];
8877
+ try {
8878
+ const result = await ensureDocsAvailable({
8879
+ force: true,
8880
+ onEvent: (event) => {
8881
+ const line = formatSnapshotRefreshEvent(event);
8882
+ if (line)
8883
+ progressLines.push(line);
8884
+ }
8885
+ });
8886
+ resetDb();
8887
+ const db = await getDb();
8888
+ const indexedPages = getAllPages(db).length;
8889
+ pushText(output, [
8890
+ "refresh-docs complete (recommended snapshot path).",
8891
+ "",
8892
+ ...progressLines.map((line) => `- ${line}`),
8893
+ ...progressLines.length > 0 ? [""] : [],
8894
+ `snapshot_source=${result.source}`,
8895
+ result.downloadUrl ? `snapshot_url=${result.downloadUrl}` : null,
8896
+ `snapshot_bytes=${result.bytes} (${formatBytes(result.bytes)})`,
8897
+ `indexed_pages=${indexedPages}`
8898
+ ].filter((line) => !!line).join(`
8899
+ `));
8900
+ } catch (err) {
8901
+ pushText(output, formatSnapshotFailure(err));
8902
+ }
8903
+ return;
8904
+ }
8905
+ if (hookInput.command === "refresh-docs-rescrape") {
8906
+ const progressLines = [];
8907
+ const failureSamples = [];
8908
+ try {
8909
+ const result = await fetchAllDocs(dbPath, {
8910
+ progressEvery: 250,
8911
+ onProgress: (event) => {
8912
+ formatRescrapeProgressEvent(event, progressLines, failureSamples);
8913
+ }
8914
+ });
8915
+ resetDb();
8916
+ const db = await getDb();
8917
+ const indexedPages = getAllPages(db).length;
8918
+ pushText(output, [
8919
+ "refresh-docs-rescrape complete (unsafe/experimental).",
8920
+ "",
8921
+ "Warning: this command live-scrapes palantir.com and is slower/less reliable than /refresh-docs.",
8922
+ "",
8923
+ ...progressLines.map((line) => `- ${line}`),
8924
+ ...progressLines.length > 0 ? [""] : [],
8925
+ `total_pages=${result.totalPages}`,
8926
+ `fetched_pages=${result.fetchedPages}`,
8927
+ `failed_pages=${result.failedUrls.length}`,
8928
+ `indexed_pages=${indexedPages}`,
8929
+ ...failureSamples.length > 0 ? ["", "failure_samples:", ...failureSamples.map((line) => `- ${line}`)] : []
8930
+ ].join(`
8931
+ `));
8932
+ } catch (err) {
8933
+ pushText(output, formatRescrapeFailure(err));
8509
8934
  }
8510
- pushText(output, `Refreshed documentation: ${result.fetchedPages}/${result.totalPages} pages fetched. ${result.failedUrls.length} failures.`);
8511
8935
  return;
8512
8936
  }
8513
8937
  if (hookInput.command === "setup-palantir-mcp") {