@superblocksteam/vite-plugin-file-sync 2.0.51 → 2.0.52-next.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ai-service/agent/prompts/build-base-system-prompt.d.ts.map +1 -1
- package/dist/ai-service/agent/prompts/build-base-system-prompt.js +36 -7
- package/dist/ai-service/agent/prompts/build-base-system-prompt.js.map +1 -1
- package/dist/ai-service/agent/subagents/apis/generate-api-source.d.ts +28 -3
- package/dist/ai-service/agent/subagents/apis/generate-api-source.d.ts.map +1 -1
- package/dist/ai-service/agent/subagents/apis/generate-api-source.js +102 -8
- package/dist/ai-service/agent/subagents/apis/generate-api-source.js.map +1 -1
- package/dist/ai-service/agent/subagents/apis/prompt-builder.d.ts +1 -1
- package/dist/ai-service/agent/subagents/apis/prompt-builder.js +2 -2
- package/dist/ai-service/agent/subagents/apis/state.d.ts +10 -0
- package/dist/ai-service/agent/subagents/apis/state.d.ts.map +1 -1
- package/dist/ai-service/agent/subagents/apis/system-prompt.d.ts +1 -1
- package/dist/ai-service/agent/subagents/apis/system-prompt.d.ts.map +1 -1
- package/dist/ai-service/agent/subagents/apis/system-prompt.js +294 -5
- package/dist/ai-service/agent/subagents/apis/system-prompt.js.map +1 -1
- package/dist/ai-service/agent/tools/apis/build-api.d.ts +2 -0
- package/dist/ai-service/agent/tools/apis/build-api.d.ts.map +1 -1
- package/dist/ai-service/agent/tools/apis/build-api.js +96 -13
- package/dist/ai-service/agent/tools/apis/build-api.js.map +1 -1
- package/dist/ai-service/agent/tools/apis/finalize-api.d.ts.map +1 -1
- package/dist/ai-service/agent/tools/apis/finalize-api.js +13 -4
- package/dist/ai-service/agent/tools/apis/finalize-api.js.map +1 -1
- package/dist/ai-service/agent/tools/build-debug.d.ts.map +1 -1
- package/dist/ai-service/agent/tools/build-debug.js +64 -2
- package/dist/ai-service/agent/tools/build-debug.js.map +1 -1
- package/dist/ai-service/agent/tools/build-edit-file.d.ts.map +1 -1
- package/dist/ai-service/agent/tools/build-edit-file.js +12 -0
- package/dist/ai-service/agent/tools/build-edit-file.js.map +1 -1
- package/dist/ai-service/agent/tools/build-finalize.d.ts.map +1 -1
- package/dist/ai-service/agent/tools/build-finalize.js +43 -0
- package/dist/ai-service/agent/tools/build-finalize.js.map +1 -1
- package/dist/ai-service/agent/tools/build-multi-edit-file.d.ts.map +1 -1
- package/dist/ai-service/agent/tools/build-multi-edit-file.js +6 -0
- package/dist/ai-service/agent/tools/build-multi-edit-file.js.map +1 -1
- package/dist/ai-service/agent/tools/build-write-file.d.ts.map +1 -1
- package/dist/ai-service/agent/tools/build-write-file.js +6 -0
- package/dist/ai-service/agent/tools/build-write-file.js.map +1 -1
- package/dist/ai-service/agent/tools/debug-cache.d.ts +42 -0
- package/dist/ai-service/agent/tools/debug-cache.d.ts.map +1 -1
- package/dist/ai-service/agent/tools/debug-cache.js +121 -14
- package/dist/ai-service/agent/tools/debug-cache.js.map +1 -1
- package/dist/ai-service/agent/tools/index.d.ts +1 -0
- package/dist/ai-service/agent/tools/index.d.ts.map +1 -1
- package/dist/ai-service/agent/tools/index.js +1 -0
- package/dist/ai-service/agent/tools/index.js.map +1 -1
- package/dist/ai-service/agent/tools/integrations/execute-request.d.ts +9 -5
- package/dist/ai-service/agent/tools/integrations/execute-request.d.ts.map +1 -1
- package/dist/ai-service/agent/tools/integrations/execute-request.js +105 -7
- package/dist/ai-service/agent/tools/integrations/execute-request.js.map +1 -1
- package/dist/ai-service/agent/tools/integrations/index.d.ts +1 -1
- package/dist/ai-service/agent/tools/integrations/index.d.ts.map +1 -1
- package/dist/ai-service/agent/tools/integrations/index.js +1 -1
- package/dist/ai-service/agent/tools/integrations/index.js.map +1 -1
- package/dist/ai-service/agent/tools/integrations/metadata.d.ts +0 -4
- package/dist/ai-service/agent/tools/integrations/metadata.d.ts.map +1 -1
- package/dist/ai-service/agent/tools/integrations/metadata.js +0 -44
- package/dist/ai-service/agent/tools/integrations/metadata.js.map +1 -1
- package/dist/ai-service/agent/tools.d.ts.map +1 -1
- package/dist/ai-service/agent/tools.js +45 -10
- package/dist/ai-service/agent/tools.js.map +1 -1
- package/dist/ai-service/agent/tools2/access-control.d.ts +1 -1
- package/dist/ai-service/agent/tools2/access-control.d.ts.map +1 -1
- package/dist/ai-service/agent/tools2/access-control.js +13 -5
- package/dist/ai-service/agent/tools2/access-control.js.map +1 -1
- package/dist/ai-service/agent/tools2/entity-permissions.d.ts +180 -0
- package/dist/ai-service/agent/tools2/entity-permissions.d.ts.map +1 -0
- package/dist/ai-service/agent/tools2/entity-permissions.js +154 -0
- package/dist/ai-service/agent/tools2/entity-permissions.js.map +1 -0
- package/dist/ai-service/agent/tools2/example.d.ts.map +1 -1
- package/dist/ai-service/agent/tools2/example.js +25 -6
- package/dist/ai-service/agent/tools2/example.js.map +1 -1
- package/dist/ai-service/agent/tools2/index.d.ts +4 -2
- package/dist/ai-service/agent/tools2/index.d.ts.map +1 -1
- package/dist/ai-service/agent/tools2/index.js +2 -1
- package/dist/ai-service/agent/tools2/index.js.map +1 -1
- package/dist/ai-service/agent/tools2/registry.d.ts +47 -0
- package/dist/ai-service/agent/tools2/registry.d.ts.map +1 -1
- package/dist/ai-service/agent/tools2/registry.js +122 -19
- package/dist/ai-service/agent/tools2/registry.js.map +1 -1
- package/dist/ai-service/agent/tools2/test-utils.d.ts +7 -0
- package/dist/ai-service/agent/tools2/test-utils.d.ts.map +1 -0
- package/dist/ai-service/agent/tools2/test-utils.js +10 -0
- package/dist/ai-service/agent/tools2/test-utils.js.map +1 -0
- package/dist/ai-service/agent/tools2/tools/ask-multi-choice.d.ts.map +1 -1
- package/dist/ai-service/agent/tools2/tools/ask-multi-choice.js +17 -9
- package/dist/ai-service/agent/tools2/tools/ask-multi-choice.js.map +1 -1
- package/dist/ai-service/agent/tools2/tools/exit-plan-mode.d.ts +2 -1
- package/dist/ai-service/agent/tools2/tools/exit-plan-mode.d.ts.map +1 -1
- package/dist/ai-service/agent/tools2/tools/exit-plan-mode.js +192 -27
- package/dist/ai-service/agent/tools2/tools/exit-plan-mode.js.map +1 -1
- package/dist/ai-service/agent/tools2/tools/grep-metadata-ripgrep.d.ts +18 -0
- package/dist/ai-service/agent/tools2/tools/grep-metadata-ripgrep.d.ts.map +1 -0
- package/dist/ai-service/agent/tools2/tools/grep-metadata-ripgrep.js +837 -0
- package/dist/ai-service/agent/tools2/tools/grep-metadata-ripgrep.js.map +1 -0
- package/dist/ai-service/agent/tools2/tools/grep-metadata.d.ts +96 -0
- package/dist/ai-service/agent/tools2/tools/grep-metadata.d.ts.map +1 -0
- package/dist/ai-service/agent/tools2/tools/grep-metadata.js +437 -0
- package/dist/ai-service/agent/tools2/tools/grep-metadata.js.map +1 -0
- package/dist/ai-service/agent/tools2/types.d.ts +113 -6
- package/dist/ai-service/agent/tools2/types.d.ts.map +1 -1
- package/dist/ai-service/agent/tools2/types.js +15 -0
- package/dist/ai-service/agent/tools2/types.js.map +1 -1
- package/dist/ai-service/agent/tools2/utils.d.ts +9 -0
- package/dist/ai-service/agent/tools2/utils.d.ts.map +1 -0
- package/dist/ai-service/agent/tools2/utils.js +11 -0
- package/dist/ai-service/agent/tools2/utils.js.map +1 -0
- package/dist/ai-service/chat/chat-session-store.d.ts.map +1 -1
- package/dist/ai-service/chat/chat-session-store.js +12 -0
- package/dist/ai-service/chat/chat-session-store.js.map +1 -1
- package/dist/ai-service/const.d.ts +0 -1
- package/dist/ai-service/const.d.ts.map +1 -1
- package/dist/ai-service/const.js +0 -1
- package/dist/ai-service/const.js.map +1 -1
- package/dist/ai-service/index.d.ts +11 -0
- package/dist/ai-service/index.d.ts.map +1 -1
- package/dist/ai-service/index.js +50 -3
- package/dist/ai-service/index.js.map +1 -1
- package/dist/ai-service/integrations/metadata-storage/index.d.ts +25 -0
- package/dist/ai-service/integrations/metadata-storage/index.d.ts.map +1 -0
- package/dist/ai-service/integrations/metadata-storage/index.js +2 -0
- package/dist/ai-service/integrations/metadata-storage/index.js.map +1 -0
- package/dist/ai-service/integrations/metadata-storage/local.d.ts +53 -0
- package/dist/ai-service/integrations/metadata-storage/local.d.ts.map +1 -0
- package/dist/ai-service/integrations/metadata-storage/local.js +315 -0
- package/dist/ai-service/integrations/metadata-storage/local.js.map +1 -0
- package/dist/ai-service/integrations/store.d.ts +17 -14
- package/dist/ai-service/integrations/store.d.ts.map +1 -1
- package/dist/ai-service/integrations/store.js +124 -131
- package/dist/ai-service/integrations/store.js.map +1 -1
- package/dist/ai-service/llm/utils.js +1 -1
- package/dist/ai-service/llm/utils.js.map +1 -1
- package/dist/ai-service/llmobs/helpers.d.ts.map +1 -1
- package/dist/ai-service/llmobs/helpers.js +17 -23
- package/dist/ai-service/llmobs/helpers.js.map +1 -1
- package/dist/ai-service/llmobs/middleware/stream-text.d.ts.map +1 -1
- package/dist/ai-service/llmobs/middleware/stream-text.js +1 -0
- package/dist/ai-service/llmobs/middleware/stream-text.js.map +1 -1
- package/dist/ai-service/state-machine/clark-fsm.d.ts +5 -2
- package/dist/ai-service/state-machine/clark-fsm.d.ts.map +1 -1
- package/dist/ai-service/state-machine/clark-fsm.js +2 -2
- package/dist/ai-service/state-machine/clark-fsm.js.map +1 -1
- package/dist/ai-service/state-machine/handlers/agent-planning.d.ts.map +1 -1
- package/dist/ai-service/state-machine/handlers/agent-planning.js +5 -2
- package/dist/ai-service/state-machine/handlers/agent-planning.js.map +1 -1
- package/dist/ai-service/state-machine/handlers/llm-generating.d.ts.map +1 -1
- package/dist/ai-service/state-machine/handlers/llm-generating.js +16 -7
- package/dist/ai-service/state-machine/handlers/llm-generating.js.map +1 -1
- package/dist/ai-service/state-machine/handlers/post-processing.d.ts.map +1 -1
- package/dist/ai-service/state-machine/handlers/post-processing.js +11 -2
- package/dist/ai-service/state-machine/handlers/post-processing.js.map +1 -1
- package/dist/ai-service/transform/api-builder/to-sdk-transformer.d.ts.map +1 -1
- package/dist/ai-service/transform/api-builder/to-sdk-transformer.js +0 -1
- package/dist/ai-service/transform/api-builder/to-sdk-transformer.js.map +1 -1
- package/dist/ai-service/types.d.ts +1 -0
- package/dist/ai-service/types.d.ts.map +1 -1
- package/dist/ai-service/types.js.map +1 -1
- package/dist/file-sync-vite-plugin.d.ts.map +1 -1
- package/dist/file-sync-vite-plugin.js +12 -0
- package/dist/file-sync-vite-plugin.js.map +1 -1
- package/dist/file-system-helpers.d.ts.map +1 -1
- package/dist/file-system-helpers.js +1 -0
- package/dist/file-system-helpers.js.map +1 -1
- package/dist/socket-manager.d.ts.map +1 -1
- package/dist/socket-manager.js +6 -0
- package/dist/socket-manager.js.map +1 -1
- package/dist/sync-service/index.d.ts +8 -0
- package/dist/sync-service/index.d.ts.map +1 -1
- package/dist/sync-service/index.js +15 -0
- package/dist/sync-service/index.js.map +1 -1
- package/dist/util/open-api.d.ts +2 -46
- package/dist/util/open-api.d.ts.map +1 -1
- package/dist/util/open-api.js +1 -114
- package/dist/util/open-api.js.map +1 -1
- package/package.json +7 -7
- package/dist/ai-service/integrations/metadata/database.d.ts +0 -15
- package/dist/ai-service/integrations/metadata/database.d.ts.map +0 -1
- package/dist/ai-service/integrations/metadata/database.js +0 -121
- package/dist/ai-service/integrations/metadata/database.js.map +0 -1
- package/dist/ai-service/integrations/metadata/databricks.d.ts +0 -25
- package/dist/ai-service/integrations/metadata/databricks.d.ts.map +0 -1
- package/dist/ai-service/integrations/metadata/databricks.js +0 -174
- package/dist/ai-service/integrations/metadata/databricks.js.map +0 -1
- package/dist/ai-service/integrations/metadata/graphql-based.d.ts +0 -16
- package/dist/ai-service/integrations/metadata/graphql-based.d.ts.map +0 -1
- package/dist/ai-service/integrations/metadata/graphql-based.js +0 -383
- package/dist/ai-service/integrations/metadata/graphql-based.js.map +0 -1
- package/dist/ai-service/integrations/metadata/index.d.ts +0 -9
- package/dist/ai-service/integrations/metadata/index.d.ts.map +0 -1
- package/dist/ai-service/integrations/metadata/index.js +0 -7
- package/dist/ai-service/integrations/metadata/index.js.map +0 -1
- package/dist/ai-service/integrations/metadata/llm-utils.d.ts +0 -24
- package/dist/ai-service/integrations/metadata/llm-utils.d.ts.map +0 -1
- package/dist/ai-service/integrations/metadata/llm-utils.js +0 -45
- package/dist/ai-service/integrations/metadata/llm-utils.js.map +0 -1
- package/dist/ai-service/integrations/metadata/open-api.d.ts +0 -17
- package/dist/ai-service/integrations/metadata/open-api.d.ts.map +0 -1
- package/dist/ai-service/integrations/metadata/open-api.js +0 -62
- package/dist/ai-service/integrations/metadata/open-api.js.map +0 -1
- package/dist/ai-service/integrations/metadata/table-based.d.ts +0 -10
- package/dist/ai-service/integrations/metadata/table-based.d.ts.map +0 -1
- package/dist/ai-service/integrations/metadata/table-based.js +0 -27
- package/dist/ai-service/integrations/metadata/table-based.js.map +0 -1
- package/dist/ai-service/prompt-builder-service/builders/dbschema-intent.d.ts +0 -19
- package/dist/ai-service/prompt-builder-service/builders/dbschema-intent.d.ts.map +0 -1
- package/dist/ai-service/prompt-builder-service/builders/dbschema-intent.js +0 -145
- package/dist/ai-service/prompt-builder-service/builders/dbschema-intent.js.map +0 -1
- package/dist/ai-service/prompt-builder-service/builders/openapi-intent.d.ts +0 -19
- package/dist/ai-service/prompt-builder-service/builders/openapi-intent.d.ts.map +0 -1
- package/dist/ai-service/prompt-builder-service/builders/openapi-intent.js +0 -41
- package/dist/ai-service/prompt-builder-service/builders/openapi-intent.js.map +0 -1
- package/dist/ai-service/prompt-builder-service/index.d.ts +0 -3
- package/dist/ai-service/prompt-builder-service/index.d.ts.map +0 -1
- package/dist/ai-service/prompt-builder-service/index.js +0 -3
- package/dist/ai-service/prompt-builder-service/index.js.map +0 -1
|
@@ -0,0 +1,837 @@
|
|
|
1
|
+
import { exec, spawn } from "node:child_process";
|
|
2
|
+
import { promisify } from "node:util";
|
|
3
|
+
import { getLogger } from "../../../../util/logger.js";
|
|
4
|
+
const execAsync = promisify(exec);
|
|
5
|
+
/**
 * Search a gron file using ripgrep.
 * Returns raw lines from the gron file that match the pattern.
 *
 * Runs `rg` via spawn (no shell): the pattern and path are passed verbatim
 * as argv entries, so no shell-quoting/injection issues for either argument,
 * and stdout is streamed so there is no maxBuffer cap on the match count.
 *
 * @param {string} gronPath - Path to the gron file to search.
 * @param {string} pattern - ripgrep regex pattern.
 * @returns {Promise<string[]>} Matching lines; [] when nothing matches.
 * @throws {Error} When ripgrep is not installed or exits abnormally.
 */
export async function searchGronWithRipgrep(gronPath, pattern) {
    const logger = getLogger();
    // NOTE: We don't use -m (match limit) here! We want ALL matches so we can
    // intelligently extract unique parent objects and then trim to the limit.
    // This prevents broad patterns from hitting the limit and missing results.
    // "--" ensures a pattern beginning with "-" is not parsed as a flag.
    const rg = spawn("rg", ["--no-heading", "--no-line-number", "--", pattern, gronPath], {
        stdio: ["ignore", "pipe", "inherit"],
    });
    const exitPromise = new Promise((resolve, reject) => {
        // "exit" with a null code means killed by signal; normalize to 0.
        rg.on("exit", (code) => resolve(code ?? 0));
        // "error" fires when the process could not be spawned at all.
        rg.on("error", (err) => reject(err));
    });
    // Attach a no-op handler so an early spawn failure cannot surface as an
    // unhandled rejection before we await exitPromise below.
    exitPromise.catch(() => { });
    // Stream all of stdout (no maxBuffer limit, unlike exec).
    const chunks = [];
    for await (const chunk of rg.stdout) {
        chunks.push(chunk);
    }
    let exitCode;
    try {
        exitCode = await exitPromise;
    }
    catch (err) {
        // spawn reports a missing binary as ENOENT (string code, not 127).
        if (err.code === "ENOENT") {
            throw new Error("ripgrep (rg) not found. Please install ripgrep: https://github.com/BurntSushi/ripgrep");
        }
        throw new Error(`ripgrep search failed: ${err.message}`);
    }
    if (exitCode === 1) {
        // ripgrep exit code 1 = no matches (not an error)
        logger.debug(`[ripgrep] No matches in ${gronPath} for pattern: ${pattern}`);
        return [];
    }
    if (exitCode !== 0) {
        // Exit code 2 = real error (bad pattern, unreadable file, ...).
        throw new Error(`ripgrep search failed: exit code ${exitCode}`);
    }
    const lines = Buffer.concat(chunks)
        .toString()
        .trim()
        .split("\n")
        .filter((line) => line.length > 0);
    logger.debug(`[ripgrep] Found ${lines.length} matches in ${gronPath} for pattern: ${pattern}`);
    // Warn if we're getting a huge number of matches (might indicate pattern is too broad)
    if (lines.length > 10000) {
        logger.warn(`[ripgrep] Pattern "${pattern}" matched ${lines.length} lines - consider using a more specific pattern`);
    }
    return lines;
}
|
|
42
|
+
/**
 * Strip surrounding double quotes from a gron key and unescape its content.
 * Example: '"/users"' -> '/users'
 * Keys that are not fully quoted are returned untouched.
 */
function stripQuotes(key) {
    const isQuoted = key.startsWith('"') && key.endsWith('"');
    if (!isQuoted) {
        return key;
    }
    // Drop the surrounding quotes, then collapse backslash escapes
    // (e.g. \" -> ", \\ -> \) in the inner content.
    const inner = key.slice(1, -1);
    return inner.replace(/\\(.)/g, "$1");
}
|
|
53
|
+
/**
 * Extract unique parent prefixes AND track which indices/keys matched.
 * Example: 'json.dbSchema.tables[3].columns[5].name = "email";'
 * Returns: { prefixes: ['json.dbSchema.tables[3].'], matched: { tables: [3], columns: {3: [5]} } }
 *
 * Example: 'json.openApiSpec.paths["/users"].get.summary = "..."'
 * Returns: { prefixes: ['json.openApiSpec.paths["/users"].'], matched: { paths: ["/users"] } }
 *
 * The prefix is the path to the parent container (table, schema, type, etc.)
 * so we can fetch ALL fields of that parent in one go.
 *
 * @param {string[]} matches - Raw gron lines returned by ripgrep.
 * @returns {{prefixes: Set<string>, matched: object}} Parent prefixes plus the
 *   per-category index/key bookkeeping used later to filter reconstructed JSON.
 */
function extractParentPrefixes(matches) {
    const logger = getLogger();
    const prefixes = new Set();
    // Category-specific bookkeeping: numeric indices for arrays (tables,
    // schemas, types), string keys for OpenAPI paths/operations, and Maps
    // keyed by parent index for nested columns/fields.
    const matched = {
        tables: new Set(),
        columns: new Map(),
        schemas: new Set(),
        paths: new Set(),
        operations: new Set(),
        types: new Set(),
        fields: new Map(),
    };
    // Debug: log first few matches
    if (matches.length > 0) {
        logger.debug(`[prefix-extract] Sample matches (first 3): ${matches
            .slice(0, 3)
            .map((m) => m.substring(0, 100))
            .join(" | ")}`);
    }
    for (const match of matches) {
        // Extract table matches: json.{namespace}.tables[N]
        // (namespace varies by integration: schema | dbSchema | databaseSchemaMetadata)
        const tableMatch = match.match(/json\.(?:schema|dbSchema|databaseSchemaMetadata)\.tables\[(\d+)\]/);
        if (tableMatch?.[1]) {
            const tableIdx = parseInt(tableMatch[1], 10);
            matched.tables.add(tableIdx);
            // Check if it's a column match: json.{namespace}.tables[N].columns[M]
            const columnMatch = match.match(/json\.(?:schema|dbSchema|databaseSchemaMetadata)\.tables\[(\d+)\]\.columns\[(\d+)\]/);
            if (columnMatch?.[2]) {
                const colIdx = parseInt(columnMatch[2], 10);
                if (!matched.columns.has(tableIdx)) {
                    matched.columns.set(tableIdx, new Set());
                }
                matched.columns.get(tableIdx).add(colIdx);
            }
        }
        // Extract schema matches: json.dbSchema.schemas[N]
        const schemaMatch = match.match(/json\.dbSchema\.schemas\[(\d+)\]/);
        if (schemaMatch?.[1]) {
            matched.schemas.add(parseInt(schemaMatch[1], 10));
        }
        // Extract path matches: json.openApiSpec.paths["..."] or json.openApiSpec.paths[N]
        // OpenAPI uses string keys like ["/users"], not numeric indices
        const pathMatch = match.match(/json\.openApiSpec\.paths\[((?:"(?:[^"\\]|\\.)+"|[0-9]+))\]/);
        if (pathMatch?.[1]) {
            const pathKey = stripQuotes(pathMatch[1]);
            matched.paths.add(pathKey);
        }
        // Extract operation matches: json.openApiSpec.operations["..."] or json.openApiSpec.operations[N]
        const opMatch = match.match(/json\.openApiSpec\.operations\[((?:"(?:[^"\\]|\\.)+"|[0-9]+))\]/);
        if (opMatch?.[1]) {
            const opKey = stripQuotes(opMatch[1]);
            matched.operations.add(opKey);
        }
        // Extract type matches: json.graphql.data.__schema.types[N]
        const typeMatch = match.match(/json\.graphql\.data\.__schema\.types\[(\d+)\]/);
        if (typeMatch?.[1]) {
            matched.types.add(parseInt(typeMatch[1], 10));
        }
        // Extract field matches: json.graphql.data.__schema.types[N].fields[M]
        // Also handle inputFields for INPUT_OBJECT types
        const fieldMatch = match.match(/json\.graphql\.data\.__schema\.types\[(\d+)\]\.(fields|inputFields)\[(\d+)\]/);
        if (fieldMatch?.[1] && fieldMatch?.[3]) {
            const typeIdx = parseInt(fieldMatch[1], 10);
            const fieldIdx = parseInt(fieldMatch[3], 10);
            if (!matched.fields.has(typeIdx)) {
                matched.fields.set(typeIdx, new Set());
            }
            matched.fields.get(typeIdx).add(fieldIdx);
        }
        // Note: Queries and mutations are not separate arrays in the standard GraphQL introspection format.
        // They are found by:
        // 1. Getting the type name from json.graphql.data.__schema.queryType.name (or mutationType.name)
        // 2. Finding that type in the types[] array
        // 3. Accessing that type's fields[] array
        // This is handled in extractResultsFromJSON by looking at the Query/Mutation type's fields.
        // Extract the parent container prefix
        // Pattern: json.{namespace}.{container}[{index_or_key}].
        // Examples:
        //   json.dbSchema.tables[3].columns[5].name     → json.dbSchema.tables[3].
        //   json.openApiSpec.paths["/users"].get        → json.openApiSpec.paths["/users"].
        //   json.graphqlSchema.types[4].fields[1].name  → json.graphqlSchema.types[4].
        // Match patterns like: json.anything[number_or_"string"].
        // This matches both [123] and ["/users"]
        const match1 = match.match(/^(json\.[^[]+\[(?:\d+|"(?:[^"\\]|\\.)+")]\.)/);
        if (match1?.[1]) {
            prefixes.add(match1[1]);
            // First-array prefix found; skip the nested (grandparent) fallback.
            continue;
        }
        // For nested structures (columns inside tables, fields inside types)
        // We want the grandparent (the table, not the column array)
        // json.dbSchema.tables[3].columns[5].name → json.dbSchema.tables[3].
        const match2 = match.match(/^(json\.[^[]+\[(?:\d+|"(?:[^"\\]|\\.)+")]\.[^[]+)\[(?:\d+|"(?:[^"\\]|\\.)+")]\./);
        if (match2?.[1]) {
            // Extract up to the first array: json.dbSchema.tables[3].
            const grandparent = match2[1].match(/^(json\.[^[]+\[(?:\d+|"(?:[^"\\]|\\.)+")]\.)/)?.[1];
            if (grandparent) {
                prefixes.add(grandparent);
                continue;
            }
        }
    }
    // Warn if we found ripgrep matches but extracted no prefixes
    // (usually means the pattern matched scalar top-level keys, not array elements).
    if (matches.length > 0 && prefixes.size === 0) {
        logger.warn(`[prefix-extract] Found ${matches.length} ripgrep matches but extracted 0 prefixes. ` +
            `This likely means the pattern doesn't match array elements in the gron format. ` +
            `Expected formats: json.dbSchema.tables[N], json.openApiSpec.paths[N], etc.`);
    }
    return { prefixes, matched };
}
|
|
173
|
+
/**
 * Fetch and ungron parent objects in batched calls.
 * Given prefixes like ['json.dbSchema.tables[3].', 'json.dbSchema.tables[12].'],
 * fetches gron lines in batches and ungrons them.
 *
 * Pipeline per batch: ONE ripgrep call (alternation of anchored, escaped
 * prefixes) collects the relevant gron lines, which are then fed to
 * `gron --ungron` to reconstruct a sparse JSON object.
 *
 * @param {string} gronPath - Path to the gron file.
 * @param {Set<string>} prefixes - Parent prefixes from extractParentPrefixes.
 * @param {number} startIndex - Index into the prefix set to start this batch at.
 * @param {number} batchSize - Max prefixes per ripgrep invocation.
 * @returns {Promise<{json: object, processedCount: number}>} Reconstructed JSON
 *   plus how many prefixes this call consumed (for the caller's pagination).
 */
async function ungronMultiplePrefixes(gronPath, prefixes, startIndex = 0, batchSize = 250) {
    const logger = getLogger();
    // Nothing to do: empty prefix set, or caller paged past the end.
    if (prefixes.size === 0 || startIndex >= prefixes.size) {
        return { json: {}, processedCount: 0 };
    }
    const prefixArray = Array.from(prefixes);
    const endIndex = Math.min(startIndex + batchSize, prefixArray.length);
    const batch = prefixArray.slice(startIndex, endIndex);
    logger.debug(`[ungron-batch] Processing batch ${Math.floor(startIndex / batchSize) + 1}: ` +
        `prefixes ${startIndex}-${endIndex - 1} of ${prefixArray.length} ` +
        `(${batch.length} prefixes)`);
    try {
        // Build a regex that matches any of the prefixes in this batch
        // ^json\.dbSchema\.tables\[3\]\.|^json\.dbSchema\.tables\[12\]\.
        // Need to escape ALL regex metacharacters in the prefix
        const escapeRegex = (str) => str.replace(/[.[\]{}()*+?^$|\\]/g, "\\$&");
        const escapedPrefixes = batch.map((p) => `^${escapeRegex(p)}`);
        const prefixRegex = escapedPrefixes.join("|");
        // ONE ripgrep call to get all lines for this batch of prefixes
        // Use spawn to avoid maxBuffer limits
        const rg = spawn("rg", [prefixRegex, gronPath], {
            stdio: ["ignore", "pipe", "inherit"],
        });
        // Wire up exit/error handlers BEFORE consuming stdout so neither
        // event can be missed.
        const rgExitPromise = new Promise((resolve, reject) => {
            rg.on("exit", (code) => {
                // code is null when killed by a signal; treat as success here.
                if (code === null)
                    code = 0;
                resolve(code);
            });
            rg.on("error", (err) => {
                reject(new Error(`Failed to spawn ripgrep: ${err.message}`));
            });
        });
        // Collect ripgrep output (buffered fully in memory for this batch)
        const rgChunks = [];
        for await (const chunk of rg.stdout) {
            rgChunks.push(chunk);
        }
        const grepOutput = Buffer.concat(rgChunks).toString();
        // Wait for ripgrep to exit
        const rgExitCode = await rgExitPromise;
        if (rgExitCode === 1) {
            // ripgrep exit code 1 means no matches found
            logger.debug(`[ungron-batch] No data found for batch`);
            return { json: {}, processedCount: batch.length };
        }
        if (rgExitCode !== 0) {
            throw new Error(`ripgrep exited with code ${rgExitCode}`);
        }
        if (!grepOutput.trim()) {
            logger.debug(`[ungron-batch] No data found for batch`);
            return { json: {}, processedCount: batch.length };
        }
        // Pipe gron output to 'gron --ungron' to reconstruct JSON
        // Use spawn to avoid shell escaping issues with special characters
        const ungron = spawn("gron", ["--ungron"], {
            stdio: ["pipe", "pipe", "inherit"],
        });
        const exitPromise = new Promise((resolve, reject) => {
            ungron.on("exit", (code) => {
                if (code === null)
                    code = 0;
                resolve(code);
            });
            ungron.on("error", (err) => {
                reject(new Error(`Failed to spawn gron --ungron: ${err.message}`));
            });
        });
        // Write gron lines to ungron's stdin
        ungron.stdin.write(grepOutput);
        ungron.stdin.end();
        // Collect JSON output
        const chunks = [];
        for await (const chunk of ungron.stdout) {
            chunks.push(chunk);
        }
        const jsonOutput = Buffer.concat(chunks).toString();
        // Wait for ungron to exit
        const exitCode = await exitPromise;
        if (exitCode !== 0) {
            throw new Error(`gron --ungron exited with code ${exitCode}`);
        }
        // Note: arrays come back sparse — only the matched indices are present.
        const reconstructed = JSON.parse(jsonOutput);
        logger.debug(`[ungron-batch] Reconstructed ${batch.length} parent objects in batch`);
        return { json: reconstructed, processedCount: batch.length };
    }
    catch (err) {
        // NOTE(review): this numeric-code check looks like a leftover from an
        // exec()-based version; spawn-path errors here carry string codes
        // (e.g. ENOENT) or none, so this branch is likely dead — confirm.
        if (err.code === 1) {
            logger.debug(`[ungron-batch] No matches for prefixes`);
            return { json: {}, processedCount: batch.length };
        }
        // Best-effort: a failed batch is logged and skipped, not fatal.
        logger.warn(`[ungron-batch] Error: ${err.message}`);
        return { json: {}, processedCount: batch.length };
    }
}
|
|
275
|
+
/**
|
|
276
|
+
* Extract results from the reconstructed JSON using matched indices.
|
|
277
|
+
* This walks the JSON structure and builds GrepMetadataResult items.
|
|
278
|
+
* Uses the pre-computed matched indices to avoid re-checking with regex.
|
|
279
|
+
*
|
|
280
|
+
* @param startGlobalIndex - Skip results before this global index (for pagination)
|
|
281
|
+
* @param globalIndexOffset - Start numbering results from this offset (for windowed ungronning)
|
|
282
|
+
*/
|
|
283
|
+
function extractResultsFromJSON(json, matched, limit, includeDetails, startGlobalIndex = 0, globalIndexOffset = 0) {
|
|
284
|
+
const results = [];
|
|
285
|
+
const logger = getLogger();
|
|
286
|
+
// Track global result index for pagination (increments for every result)
|
|
287
|
+
// Start from the offset to account for prefixes we skipped in windowed ungronning
|
|
288
|
+
let globalIndex = globalIndexOffset;
|
|
289
|
+
// Debug: log the JSON structure we received
|
|
290
|
+
logger.debug(`[extract] JSON keys: ${Object.keys(json).join(", ")}`);
|
|
291
|
+
if (json.schema) {
|
|
292
|
+
logger.debug(`[extract] json.schema keys: ${Object.keys(json.schema).join(", ")}`);
|
|
293
|
+
}
|
|
294
|
+
if (json.databaseSchemaMetadata) {
|
|
295
|
+
logger.debug(`[extract] json.databaseSchemaMetadata keys: ${Object.keys(json.databaseSchemaMetadata).join(", ")}`);
|
|
296
|
+
}
|
|
297
|
+
if (json.dbSchema) {
|
|
298
|
+
logger.debug(`[extract] json.dbSchema keys: ${Object.keys(json.dbSchema).join(", ")}`);
|
|
299
|
+
}
|
|
300
|
+
// Walk database tables (support multiple field names for compatibility)
|
|
301
|
+
const tables = json.schema?.tables ||
|
|
302
|
+
json.dbSchema?.tables ||
|
|
303
|
+
json.databaseSchemaMetadata?.tables;
|
|
304
|
+
if (tables) {
|
|
305
|
+
const nonNullTables = tables.filter((t) => t != null);
|
|
306
|
+
logger.debug(`[extract] Walking ${tables.length} tables (${nonNullTables.length} non-null) from reconstructed JSON`);
|
|
307
|
+
for (let i = 0; i < tables.length; i++) {
|
|
308
|
+
// Skip sparse array holes efficiently
|
|
309
|
+
if (!(i in tables))
|
|
310
|
+
continue;
|
|
311
|
+
const table = tables[i];
|
|
312
|
+
if (!table)
|
|
313
|
+
continue;
|
|
314
|
+
// Only process tables that matched
|
|
315
|
+
if (!matched.tables.has(i))
|
|
316
|
+
continue;
|
|
317
|
+
// Check if this is a table match (not just a column match)
|
|
318
|
+
const hasColumnMatches = matched.columns.has(i);
|
|
319
|
+
if (!hasColumnMatches) {
|
|
320
|
+
// Skip if before our pagination start point
|
|
321
|
+
if (globalIndex >= startGlobalIndex) {
|
|
322
|
+
// Table itself was matched (not via columns)
|
|
323
|
+
results.push({
|
|
324
|
+
type: "table",
|
|
325
|
+
name: table.name,
|
|
326
|
+
schema: table.schema,
|
|
327
|
+
columns: table.columns?.filter((c) => c != null) || [],
|
|
328
|
+
_globalIndex: globalIndex,
|
|
329
|
+
details: includeDetails
|
|
330
|
+
? {
|
|
331
|
+
columnCount: table.columns?.filter((c) => c != null).length || 0,
|
|
332
|
+
}
|
|
333
|
+
: undefined,
|
|
334
|
+
});
|
|
335
|
+
}
|
|
336
|
+
globalIndex++;
|
|
337
|
+
}
|
|
338
|
+
// Check columns if they were matched
|
|
339
|
+
if (hasColumnMatches && table.columns) {
|
|
340
|
+
const matchedColIndices = matched.columns.get(i);
|
|
341
|
+
for (let colIdx = 0; colIdx < table.columns.length; colIdx++) {
|
|
342
|
+
if (!(colIdx in table.columns))
|
|
343
|
+
continue;
|
|
344
|
+
const column = table.columns[colIdx];
|
|
345
|
+
if (!column)
|
|
346
|
+
continue;
|
|
347
|
+
// Only include columns that matched
|
|
348
|
+
if (!matchedColIndices.has(colIdx))
|
|
349
|
+
continue;
|
|
350
|
+
// Skip if before our pagination start point
|
|
351
|
+
if (globalIndex >= startGlobalIndex) {
|
|
352
|
+
results.push({
|
|
353
|
+
type: "column",
|
|
354
|
+
name: column.name,
|
|
355
|
+
table: table.name,
|
|
356
|
+
schema: table.schema,
|
|
357
|
+
columnType: column.type,
|
|
358
|
+
_globalIndex: globalIndex,
|
|
359
|
+
details: includeDetails
|
|
360
|
+
? {
|
|
361
|
+
type: column.type,
|
|
362
|
+
}
|
|
363
|
+
: undefined,
|
|
364
|
+
});
|
|
365
|
+
}
|
|
366
|
+
globalIndex++;
|
|
367
|
+
}
|
|
368
|
+
}
|
|
369
|
+
}
|
|
370
|
+
}
|
|
371
|
+
// Walk database schemas (separate from table schemas)
|
|
372
|
+
if (json.dbSchema?.schemas) {
|
|
373
|
+
for (let i = 0; i < json.dbSchema.schemas.length; i++) {
|
|
374
|
+
if (!(i in json.dbSchema.schemas))
|
|
375
|
+
continue;
|
|
376
|
+
const schema = json.dbSchema.schemas[i];
|
|
377
|
+
if (!schema)
|
|
378
|
+
continue;
|
|
379
|
+
// Only include matched schemas
|
|
380
|
+
if (!matched.schemas.has(i))
|
|
381
|
+
continue;
|
|
382
|
+
if (globalIndex >= startGlobalIndex) {
|
|
383
|
+
results.push({
|
|
384
|
+
type: "schema",
|
|
385
|
+
name: schema.name,
|
|
386
|
+
_globalIndex: globalIndex,
|
|
387
|
+
details: includeDetails ? {} : undefined,
|
|
388
|
+
});
|
|
389
|
+
}
|
|
390
|
+
globalIndex++;
|
|
391
|
+
}
|
|
392
|
+
}
|
|
393
|
+
// Walk OpenAPI paths (object with string keys, not array)
|
|
394
|
+
if (json.openApiSpec?.paths && typeof json.openApiSpec.paths === "object") {
|
|
395
|
+
for (const [pathKey, pathItem] of Object.entries(json.openApiSpec.paths)) {
|
|
396
|
+
if (!matched.paths.has(pathKey))
|
|
397
|
+
continue;
|
|
398
|
+
if (!pathItem || typeof pathItem !== "object")
|
|
399
|
+
continue;
|
|
400
|
+
// Extract methods from path item (get, post, put, etc.)
|
|
401
|
+
const methods = Object.keys(pathItem).filter((k) => [
|
|
402
|
+
"get",
|
|
403
|
+
"post",
|
|
404
|
+
"put",
|
|
405
|
+
"delete",
|
|
406
|
+
"patch",
|
|
407
|
+
"head",
|
|
408
|
+
"options",
|
|
409
|
+
"trace",
|
|
410
|
+
].includes(k));
|
|
411
|
+
// Return each operation (method) with full parameter details and top-level request/response schemas
|
|
412
|
+
for (const method of methods) {
|
|
413
|
+
const operation = pathItem[method];
|
|
414
|
+
if (!operation || typeof operation !== "object")
|
|
415
|
+
continue;
|
|
416
|
+
const operationName = operation.operationId ||
|
|
417
|
+
operation.summary ||
|
|
418
|
+
`${method.toUpperCase()} ${pathKey}`;
|
|
419
|
+
// NOTE: Could think about just letting clark query the API to see the response structure.
|
|
420
|
+
// Returning the full response schema from the spec often goes over the character limit.
|
|
421
|
+
// Current approach: Extract property names and basic types (1-2 levels deep) as a middle ground.
|
|
422
|
+
/**
|
|
423
|
+
* Extract property names and types from a JSON schema (1-2 levels deep)
|
|
424
|
+
* to avoid massive nested schemas that hit character limits
|
|
425
|
+
*/
|
|
426
|
+
const extractSchemaProperties = (schema) => {
|
|
427
|
+
if (!schema || typeof schema !== "object")
|
|
428
|
+
return undefined;
|
|
429
|
+
const result = {
|
|
430
|
+
type: schema.type,
|
|
431
|
+
};
|
|
432
|
+
// Handle array types - extract item type and properties if it's an object
|
|
433
|
+
if (schema.type === "array" && schema.items) {
|
|
434
|
+
result.items = {
|
|
435
|
+
type: schema.items.type,
|
|
436
|
+
};
|
|
437
|
+
if (schema.items.type === "object" && schema.items.properties) {
|
|
438
|
+
result.items.properties = {};
|
|
439
|
+
for (const [propName, propSchema] of Object.entries(schema.items.properties)) {
|
|
440
|
+
const ps = propSchema;
|
|
441
|
+
result.items.properties[propName] = {
|
|
442
|
+
type: ps.type,
|
|
443
|
+
required: schema.items.required?.includes(propName),
|
|
444
|
+
enum: ps.enum,
|
|
445
|
+
};
|
|
446
|
+
}
|
|
447
|
+
}
|
|
448
|
+
}
|
|
449
|
+
// Handle object types - extract top-level properties only
|
|
450
|
+
if (schema.type === "object" && schema.properties) {
|
|
451
|
+
result.properties = {};
|
|
452
|
+
for (const [propName, propSchema] of Object.entries(schema.properties)) {
|
|
453
|
+
const ps = propSchema;
|
|
454
|
+
result.properties[propName] = {
|
|
455
|
+
type: ps.type,
|
|
456
|
+
required: schema.required?.includes(propName),
|
|
457
|
+
enum: ps.enum,
|
|
458
|
+
};
|
|
459
|
+
}
|
|
460
|
+
}
|
|
461
|
+
return result;
|
|
462
|
+
};
|
|
463
|
+
// Summarize responses (status codes, content types, and top-level schema properties)
|
|
464
|
+
const responsesSummary = {};
|
|
465
|
+
if (operation.responses && typeof operation.responses === "object") {
|
|
466
|
+
for (const [statusCode, response] of Object.entries(operation.responses)) {
|
|
467
|
+
if (response && typeof response === "object") {
|
|
468
|
+
const r = response;
|
|
469
|
+
const contentTypes = r.content ? Object.keys(r.content) : [];
|
|
470
|
+
const primaryContentType = contentTypes[0]; // Usually application/json
|
|
471
|
+
responsesSummary[statusCode] = {
|
|
472
|
+
description: r.description,
|
|
473
|
+
contentType: primaryContentType,
|
|
474
|
+
schema: primaryContentType && r.content?.[primaryContentType]?.schema
|
|
475
|
+
? extractSchemaProperties(r.content[primaryContentType].schema)
|
|
476
|
+
: undefined,
|
|
477
|
+
};
|
|
478
|
+
}
|
|
479
|
+
}
|
|
480
|
+
}
|
|
481
|
+
// Summarize request body (content type and top-level properties)
|
|
482
|
+
let requestBodySummary;
|
|
483
|
+
if (operation.requestBody &&
|
|
484
|
+
typeof operation.requestBody === "object") {
|
|
485
|
+
const rb = operation.requestBody;
|
|
486
|
+
const contentTypes = rb.content ? Object.keys(rb.content) : [];
|
|
487
|
+
const primaryContentType = contentTypes[0]; // Usually application/json
|
|
488
|
+
requestBodySummary = {
|
|
489
|
+
required: rb.required,
|
|
490
|
+
contentType: primaryContentType,
|
|
491
|
+
schema: primaryContentType && rb.content?.[primaryContentType]?.schema
|
|
492
|
+
? extractSchemaProperties(rb.content[primaryContentType].schema)
|
|
493
|
+
: undefined,
|
|
494
|
+
};
|
|
495
|
+
}
|
|
496
|
+
if (globalIndex >= startGlobalIndex) {
|
|
497
|
+
results.push({
|
|
498
|
+
type: "operation",
|
|
499
|
+
name: operationName,
|
|
500
|
+
method: method.toUpperCase(),
|
|
501
|
+
path: pathKey,
|
|
502
|
+
description: operation.description || operation.summary,
|
|
503
|
+
parameters: operation.parameters || [],
|
|
504
|
+
requestBody: requestBodySummary,
|
|
505
|
+
responses: responsesSummary,
|
|
506
|
+
_globalIndex: globalIndex,
|
|
507
|
+
details: includeDetails
|
|
508
|
+
? {
|
|
509
|
+
operationId: operation.operationId,
|
|
510
|
+
method: method.toUpperCase(),
|
|
511
|
+
path: pathKey,
|
|
512
|
+
parameterCount: (operation.parameters || []).length,
|
|
513
|
+
}
|
|
514
|
+
: undefined,
|
|
515
|
+
});
|
|
516
|
+
}
|
|
517
|
+
globalIndex++;
|
|
518
|
+
}
|
|
519
|
+
}
|
|
520
|
+
}
|
|
521
|
+
// OpenAPI schemas: Not currently tracked in matched indices
|
|
522
|
+
// Would need to add to extractParentPrefixes if needed
|
|
523
|
+
// Walk GraphQL types (standard introspection format)
|
|
524
|
+
const graphqlTypes = json.graphql?.data?.__schema?.types;
|
|
525
|
+
if (graphqlTypes && Array.isArray(graphqlTypes)) {
|
|
526
|
+
// only including fields when there aren't too many matches (otherwise we hit the character limit)
|
|
527
|
+
const typeMatchCount = matched.types.size;
|
|
528
|
+
const totalFieldMatches = Array.from(matched.fields.values()).reduce((sum, fields) => sum + fields.size, 0);
|
|
529
|
+
const shouldIncludeFields = totalFieldMatches > 0 || typeMatchCount <= 10;
|
|
530
|
+
// Get Query/Mutation type names to handle them specially
|
|
531
|
+
const queryTypeName = json.graphql?.data?.__schema?.queryType?.name;
|
|
532
|
+
const mutationTypeName = json.graphql?.data?.__schema?.mutationType?.name;
|
|
533
|
+
for (let i = 0; i < graphqlTypes.length; i++) {
|
|
534
|
+
if (!(i in graphqlTypes))
|
|
535
|
+
continue;
|
|
536
|
+
if (!matched.types.has(i))
|
|
537
|
+
continue;
|
|
538
|
+
const type = graphqlTypes[i];
|
|
539
|
+
if (!type)
|
|
540
|
+
continue;
|
|
541
|
+
// Check if any fields in this type were matched
|
|
542
|
+
const hasMatchedFields = matched.fields.has(i);
|
|
543
|
+
// Special handling for Query/Mutation types: never include full fields array
|
|
544
|
+
// because their fields are returned separately as query/mutation results
|
|
545
|
+
const isQueryOrMutationType = type.name === queryTypeName || type.name === mutationTypeName;
|
|
546
|
+
const includeFullFieldsForThisType = shouldIncludeFields && !isQueryOrMutationType;
|
|
547
|
+
// When includeDetails is false, return field names only (compact)
|
|
548
|
+
// When includeDetails is true, return full field objects (detailed)
|
|
549
|
+
// Note: GraphQL INPUT_OBJECT types use 'inputFields' instead of 'fields'
|
|
550
|
+
let fieldNames = [];
|
|
551
|
+
if (!includeDetails) {
|
|
552
|
+
const fieldsArray = type.fields || type.inputFields;
|
|
553
|
+
if (fieldsArray) {
|
|
554
|
+
// For includeDetails: false, extract just the field names (lightweight)
|
|
555
|
+
fieldNames = fieldsArray
|
|
556
|
+
.filter((f) => f != null)
|
|
557
|
+
.map((f) => f.name);
|
|
558
|
+
}
|
|
559
|
+
}
|
|
560
|
+
// Only return the type object if:
|
|
561
|
+
// 1. includeDetails is false (compact mode with fieldNames) - ALWAYS return type, OR
|
|
562
|
+
// 2. Not a field search (searching for types themselves)
|
|
563
|
+
const isFieldSearch = totalFieldMatches > 0;
|
|
564
|
+
const shouldReturnTypeObject = !includeDetails || (!isFieldSearch && !hasMatchedFields);
|
|
565
|
+
if (shouldReturnTypeObject) {
|
|
566
|
+
if (globalIndex >= startGlobalIndex) {
|
|
567
|
+
const fieldsArray = type.fields || type.inputFields;
|
|
568
|
+
results.push({
|
|
569
|
+
type: "graphql_type",
|
|
570
|
+
name: type.name,
|
|
571
|
+
kind: type.kind,
|
|
572
|
+
description: type.description,
|
|
573
|
+
fields: includeDetails && includeFullFieldsForThisType
|
|
574
|
+
? fieldsArray?.filter((f) => f != null) || []
|
|
575
|
+
: undefined,
|
|
576
|
+
fieldNames: !includeDetails && fieldNames.length > 0 ? fieldNames : undefined,
|
|
577
|
+
_globalIndex: globalIndex,
|
|
578
|
+
details: includeDetails
|
|
579
|
+
? {
|
|
580
|
+
kind: type.kind,
|
|
581
|
+
fieldCount: fieldsArray?.filter((f) => f != null).length || 0,
|
|
582
|
+
}
|
|
583
|
+
: undefined,
|
|
584
|
+
});
|
|
585
|
+
}
|
|
586
|
+
globalIndex++;
|
|
587
|
+
}
|
|
588
|
+
// Walk fields within this type if they were matched
|
|
589
|
+
// Handle both 'fields' (for OBJECT types) and 'inputFields' (for INPUT_OBJECT types)
|
|
590
|
+
const fieldsArray = type.fields || type.inputFields;
|
|
591
|
+
if (fieldsArray && Array.isArray(fieldsArray) && matched.fields.has(i)) {
|
|
592
|
+
const matchedFieldIndices = matched.fields.get(i);
|
|
593
|
+
for (let j = 0; j < fieldsArray.length; j++) {
|
|
594
|
+
if (!(j in fieldsArray))
|
|
595
|
+
continue;
|
|
596
|
+
if (!matchedFieldIndices.has(j))
|
|
597
|
+
continue;
|
|
598
|
+
const field = fieldsArray[j];
|
|
599
|
+
if (!field)
|
|
600
|
+
continue;
|
|
601
|
+
if (globalIndex >= startGlobalIndex) {
|
|
602
|
+
results.push({
|
|
603
|
+
type: "field",
|
|
604
|
+
name: field.name,
|
|
605
|
+
typeName: type.name,
|
|
606
|
+
description: field.description,
|
|
607
|
+
returnType: field.type?.name || field.type?.kind,
|
|
608
|
+
args: field.args?.filter((a) => a != null) || [],
|
|
609
|
+
_globalIndex: globalIndex,
|
|
610
|
+
details: includeDetails
|
|
611
|
+
? {
|
|
612
|
+
typeName: type.name,
|
|
613
|
+
fieldType: field.type?.name || field.type?.kind,
|
|
614
|
+
description: field.description,
|
|
615
|
+
argCount: field.args?.filter((a) => a != null).length || 0,
|
|
616
|
+
}
|
|
617
|
+
: undefined,
|
|
618
|
+
});
|
|
619
|
+
}
|
|
620
|
+
globalIndex++;
|
|
621
|
+
}
|
|
622
|
+
}
|
|
623
|
+
}
|
|
624
|
+
}
|
|
625
|
+
// Walk GraphQL queries (standard introspection format)
|
|
626
|
+
// In GraphQL introspection, queries are fields of the Query type
|
|
627
|
+
// First, find the Query type in the types array
|
|
628
|
+
const queryTypeName = json.graphql?.data?.__schema?.queryType?.name;
|
|
629
|
+
const queryTypeIndex = queryTypeName
|
|
630
|
+
? graphqlTypes?.findIndex((t) => t?.name === queryTypeName)
|
|
631
|
+
: -1;
|
|
632
|
+
const queryType = queryTypeIndex >= 0 ? graphqlTypes[queryTypeIndex] : null;
|
|
633
|
+
const graphqlQueries = queryType?.fields;
|
|
634
|
+
// Only process queries if the Query type was matched (i.e., something in it matched the search)
|
|
635
|
+
if (graphqlQueries &&
|
|
636
|
+
Array.isArray(graphqlQueries) &&
|
|
637
|
+
matched.types.has(queryTypeIndex)) {
|
|
638
|
+
// Return all query fields since we already know this type was matched
|
|
639
|
+
for (let i = 0; i < graphqlQueries.length; i++) {
|
|
640
|
+
if (!(i in graphqlQueries))
|
|
641
|
+
continue;
|
|
642
|
+
const query = graphqlQueries[i];
|
|
643
|
+
if (!query)
|
|
644
|
+
continue;
|
|
645
|
+
if (globalIndex >= startGlobalIndex) {
|
|
646
|
+
results.push({
|
|
647
|
+
type: "query",
|
|
648
|
+
name: query.name,
|
|
649
|
+
description: query.description,
|
|
650
|
+
returnType: query.returnType || query.type?.name || query.type,
|
|
651
|
+
args: query.args?.filter((a) => a != null) || [],
|
|
652
|
+
_globalIndex: globalIndex,
|
|
653
|
+
details: includeDetails
|
|
654
|
+
? {
|
|
655
|
+
returnType: query.returnType || query.type?.name,
|
|
656
|
+
argCount: query.args?.filter((a) => a != null).length || 0,
|
|
657
|
+
}
|
|
658
|
+
: undefined,
|
|
659
|
+
});
|
|
660
|
+
}
|
|
661
|
+
globalIndex++;
|
|
662
|
+
}
|
|
663
|
+
}
|
|
664
|
+
// Walk GraphQL mutations (standard introspection format)
|
|
665
|
+
// In GraphQL introspection, mutations are fields of the Mutation type
|
|
666
|
+
// First, find the Mutation type in the types array
|
|
667
|
+
const mutationTypeName = json.graphql?.data?.__schema?.mutationType?.name;
|
|
668
|
+
const mutationTypeIndex = mutationTypeName
|
|
669
|
+
? graphqlTypes?.findIndex((t) => t?.name === mutationTypeName)
|
|
670
|
+
: -1;
|
|
671
|
+
const mutationType = mutationTypeIndex >= 0 ? graphqlTypes[mutationTypeIndex] : null;
|
|
672
|
+
const graphqlMutations = mutationType?.fields;
|
|
673
|
+
// Only process mutations if the Mutation type was matched (i.e., something in it matched the search)
|
|
674
|
+
if (graphqlMutations &&
|
|
675
|
+
Array.isArray(graphqlMutations) &&
|
|
676
|
+
matched.types.has(mutationTypeIndex)) {
|
|
677
|
+
// Return all mutation fields since we already know this type was matched
|
|
678
|
+
for (let i = 0; i < graphqlMutations.length; i++) {
|
|
679
|
+
if (!(i in graphqlMutations))
|
|
680
|
+
continue;
|
|
681
|
+
const mutation = graphqlMutations[i];
|
|
682
|
+
if (!mutation)
|
|
683
|
+
continue;
|
|
684
|
+
if (globalIndex >= startGlobalIndex) {
|
|
685
|
+
results.push({
|
|
686
|
+
type: "mutation",
|
|
687
|
+
name: mutation.name,
|
|
688
|
+
description: mutation.description,
|
|
689
|
+
returnType: mutation.returnType || mutation.type?.name || mutation.type,
|
|
690
|
+
args: mutation.args?.filter((a) => a != null) || [],
|
|
691
|
+
_globalIndex: globalIndex,
|
|
692
|
+
details: includeDetails
|
|
693
|
+
? {
|
|
694
|
+
returnType: mutation.returnType || mutation.type?.name,
|
|
695
|
+
argCount: mutation.args?.filter((a) => a != null).length || 0,
|
|
696
|
+
}
|
|
697
|
+
: undefined,
|
|
698
|
+
});
|
|
699
|
+
}
|
|
700
|
+
globalIndex++;
|
|
701
|
+
}
|
|
702
|
+
}
|
|
703
|
+
// Track if we hit the limit (truncation)
|
|
704
|
+
const totalMatched = matched.tables.size +
|
|
705
|
+
Array.from(matched.columns.values()).reduce((sum, cols) => sum + cols.size, 0) +
|
|
706
|
+
matched.schemas.size +
|
|
707
|
+
matched.paths.size +
|
|
708
|
+
matched.operations.size +
|
|
709
|
+
matched.types.size +
|
|
710
|
+
Array.from(matched.fields.values()).reduce((sum, fields) => sum + fields.size, 0);
|
|
711
|
+
const truncated = results.length >= limit && totalMatched > limit;
|
|
712
|
+
logger.debug(`[extract] Built ${results.length} results from ${totalMatched} total matched items (truncated: ${truncated})`);
|
|
713
|
+
return {
|
|
714
|
+
matches: results,
|
|
715
|
+
totalCount: totalMatched,
|
|
716
|
+
truncated,
|
|
717
|
+
};
|
|
718
|
+
}
|
|
719
|
+
/**
 * Assemble a GrepMetadataResult from gron search matches.
 *
 * Strategy: derive the unique parent prefixes of every match, ungron them in
 * windows of 250 prefixes, deep-merge each window into one accumulated JSON
 * object, and stop early once the merged JSON approaches the character
 * budget. Matched entities are then extracted from the accumulated JSON.
 *
 * PAGINATION NOTE: If we want to implement pagination so that Clark can get
 * all results from this grep, we can return the cursor position and store the
 * prefix array in memory (or ContextManager) so a later call can continue
 * from the next index.
 *
 * @param {string} gronPath - path to the gron data being queried
 * @param {Array} matches - raw gron search matches
 * @param {number} [limit=Infinity] - optional result-count cap (defaults to no count limit)
 * @param {boolean} includeDetails - whether extracted results carry detail objects
 * @param {number} [maxResponseChars=25_000] - soft character budget for the response
 * @param {number} [startIndex=0] - global index to resume from on subsequent calls
 * @returns {Promise<object>} result with matches, totalCount, truncated, hasMore
 */
export async function buildResultFromGronMatches(gronPath, matches, limit = Infinity, includeDetails, maxResponseChars = 25_000, startIndex = 0) {
    const log = getLogger();
    if (matches.length === 0) {
        return { matches: [], totalCount: 0, truncated: false };
    }
    log.debug(`[gron-prefix] Processing ${matches.length} matches (startIndex: ${startIndex})`);
    // Step 1: unique parent prefixes plus the per-category matched indices.
    const { prefixes, matched } = extractParentPrefixes(matches);
    log.debug(`[gron-prefix] Found ${prefixes.size} unique parent prefixes`);
    log.debug(`[gron-prefix] Matched indices: tables=${matched.tables.size}, columns=${matched.columns.size}`);
    if (prefixes.size === 0) {
        return { matches: [], totalCount: 0, truncated: false };
    }
    // Materialize the Set so we can slice windows by index.
    const orderedPrefixes = [...prefixes];
    const prefixCount = orderedPrefixes.length;
    // Step 2: fetch and ungron parent objects in windows, resuming exactly at
    // startIndex to avoid re-processing prefixes. This trades correctness for
    // efficiency: if a prefix spans multiple results we might miss some when
    // starting in the middle of it — an accepted trade-off.
    const PREFIX_BATCH = 250;
    const firstPrefix = startIndex; // Start exactly where Clark left off
    let cursor = firstPrefix;
    let accumulated = {};
    let processed = 0;
    log.debug(`[gron-prefix] Starting ungronning from prefix ${firstPrefix} (startIndex ${startIndex})`);
    for (;;) {
        if (cursor >= orderedPrefixes.length) {
            break;
        }
        // Ungron the next window of prefixes and fold it into the accumulator.
        const sliceEnd = Math.min(cursor + PREFIX_BATCH, orderedPrefixes.length);
        const window = new Set(orderedPrefixes.slice(cursor, sliceEnd));
        const chunk = await ungronMultiplePrefixes(gronPath, window, 0, // Always start at 0 within this window
        window.size);
        accumulated = deepMerge(accumulated, chunk.json);
        processed += chunk.processedCount;
        cursor += chunk.processedCount;
        // Cheap size estimate via stringification of the accumulated JSON.
        const approxSize = JSON.stringify(accumulated).length;
        log.debug(`[gron-prefix] After batch ${Math.floor(processed / PREFIX_BATCH)}: ` +
            `processed ${processed}/${prefixes.size} prefixes, JSON size: ${approxSize} chars`);
        // Stop when nearing the character limit (keep a 20% buffer for formatting).
        if (approxSize > maxResponseChars * 0.8) {
            log.info(`[gron-prefix] Stopping after ${processed}/${prefixes.size} prefixes ` +
                `(JSON size ${approxSize} approaching limit ${maxResponseChars})`);
            break;
        }
        // Optimization: explicit small limits (< 100, i.e. not Infinity) let us
        // bail once we've processed 2x the requested amount, since not every
        // prefix yields a result.
        if (limit < 100 && processed >= limit * 2) {
            break;
        }
        // Also stop when there are no further windows.
        if (cursor >= prefixes.size) {
            break;
        }
    }
    // Step 3: extract results from the accumulated JSON using matched indices.
    // The extraction filters by startIndex internally. IMPORTANT: with windowed
    // ungronning, the global-index offset must equal firstPrefix so the first
    // result from this window is numbered correctly.
    const extracted = extractResultsFromJSON(accumulated, matched, Infinity, // No limit here - we'll slice in grep-metadata.ts
    includeDetails, startIndex, // Filter: only return results with globalIndex >= this
    firstPrefix);
    log.debug(`[gron-prefix] Extracted ${extracted.matches.length} results (starting from global index ${startIndex})`);
    return {
        ...extracted,
        truncated: extracted.truncated || cursor < prefixCount,
        hasMore: cursor < prefixCount,
    };
}
|
|
801
|
+
/**
 * Deep merge utility for combining JSON objects from multiple batches.
 *
 * Mutates and returns `target`, folding `source` into it recursively. Arrays
 * are merged index-by-index so sparse arrays (from windowed ungronning) are
 * preserved rather than overwritten wholesale.
 *
 * Prototype-pollution safe: the keys `__proto__`, `constructor`, and
 * `prototype` are skipped entirely. Ungronned/parsed JSON can carry these as
 * own enumerable keys, and the original recursion (`target[key] || {}`)
 * would have resolved them to `Object.prototype`/`Function` and mutated the
 * global prototype for the whole process. Legitimate data under those keys
 * is intentionally dropped.
 *
 * @param {*} target - accumulator object; mutated in place when both args are objects
 * @param {*} source - object whose properties are folded into target
 * @returns {*} the merged value (target, or source when target is not an object)
 */
function deepMerge(target, source) {
    // Non-object source contributes nothing; non-object target is replaced.
    if (!source || typeof source !== "object") {
        return target;
    }
    if (!target || typeof target !== "object") {
        return source;
    }
    for (const key of Object.keys(source)) {
        // Skip unsafe keys to prevent prototype pollution (see doc comment).
        if (key === "__proto__" || key === "constructor" || key === "prototype") {
            continue;
        }
        if (Array.isArray(source[key])) {
            if (!target[key]) {
                target[key] = [];
            }
            // Merge arrays by index, preserving sparse arrays: holes
            // (undefined slots) in source never clobber existing entries.
            for (let i = 0; i < source[key].length; i++) {
                if (source[key][i] !== undefined) {
                    if (typeof source[key][i] === "object" && source[key][i] !== null) {
                        target[key][i] = deepMerge(target[key][i] || {}, source[key][i]);
                    }
                    else {
                        target[key][i] = source[key][i];
                    }
                }
            }
        }
        else if (typeof source[key] === "object" && source[key] !== null) {
            target[key] = deepMerge(target[key] || {}, source[key]);
        }
        else {
            target[key] = source[key];
        }
    }
    return target;
}
|
|
837
|
+
//# sourceMappingURL=grep-metadata-ripgrep.js.map
|