bluera-knowledge 0.14.8 → 0.15.0
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/.claude-plugin/plugin.json +1 -1
- package/CHANGELOG.md +13 -0
- package/dist/{chunk-SWHYQLSJ.js → chunk-DX5I6U5X.js} +36 -231
- package/dist/chunk-DX5I6U5X.js.map +1 -0
- package/dist/{chunk-TGMFSPTQ.js → chunk-OMC3RAZT.js} +2 -2
- package/dist/{chunk-5VW5DNW4.js → chunk-WYZQUKUD.js} +523 -106
- package/dist/chunk-WYZQUKUD.js.map +1 -0
- package/dist/index.js +7 -7
- package/dist/mcp/server.d.ts +859 -1
- package/dist/mcp/server.js +2 -2
- package/dist/workers/background-worker-cli.js +2 -2
- package/package.json +1 -1
- package/dist/chunk-5VW5DNW4.js.map +0 -1
- package/dist/chunk-SWHYQLSJ.js.map +0 -1
- package/dist/{chunk-TGMFSPTQ.js.map → chunk-OMC3RAZT.js.map} +0 -0
@@ -181,15 +181,15 @@ function getLogDirectory() {
   return getLogDir();
 }
 function shutdownLogger() {
-  return new Promise((
+  return new Promise((resolve4) => {
     if (rootLogger !== null) {
       rootLogger.flush();
       setTimeout(() => {
         rootLogger = null;
-
+        resolve4();
       }, 100);
     } else {
-
+      resolve4();
     }
   });
 }
@@ -2075,10 +2075,106 @@ var ConfigService = class {
   }
 };
 
+// src/services/gitignore.service.ts
+import { readFile as readFile3, writeFile as writeFile3, access as access2 } from "fs/promises";
+import { join as join5 } from "path";
+var REQUIRED_PATTERNS = [
+  ".bluera/",
+  "!.bluera/",
+  "!.bluera/bluera-knowledge/",
+  "!.bluera/bluera-knowledge/stores.config.json",
+  ".bluera/bluera-knowledge/data/"
+];
+var SECTION_HEADER = `
+# Bluera Knowledge
+# Store definitions (stores.config.json) are committed for team sharing
+# Data directory (vector DB, cloned repos) is not committed
+`;
+async function fileExists2(path4) {
+  try {
+    await access2(path4);
+    return true;
+  } catch {
+    return false;
+  }
+}
+var GitignoreService = class {
+  gitignorePath;
+  constructor(projectRoot) {
+    this.gitignorePath = join5(projectRoot, ".gitignore");
+  }
+  /**
+   * Check if all required patterns are present in .gitignore
+   */
+  async hasRequiredPatterns() {
+    const exists = await fileExists2(this.gitignorePath);
+    if (!exists) {
+      return false;
+    }
+    const content = await readFile3(this.gitignorePath, "utf-8");
+    const lines = content.split("\n").map((l) => l.trim());
+    for (const pattern of REQUIRED_PATTERNS) {
+      if (!lines.includes(pattern)) {
+        return false;
+      }
+    }
+    return true;
+  }
+  /**
+   * Ensure required .gitignore patterns are present.
+   *
+   * - Creates .gitignore if it doesn't exist
+   * - Appends missing patterns if .gitignore exists
+   * - Does nothing if all patterns are already present
+   *
+   * @returns Object with updated flag and descriptive message
+   */
+  async ensureGitignorePatterns() {
+    const exists = await fileExists2(this.gitignorePath);
+    if (!exists) {
+      const content = `${SECTION_HEADER.trim()}
+${REQUIRED_PATTERNS.join("\n")}
+`;
+      await writeFile3(this.gitignorePath, content);
+      return {
+        updated: true,
+        message: "Created .gitignore with Bluera Knowledge patterns"
+      };
+    }
+    const existingContent = await readFile3(this.gitignorePath, "utf-8");
+    const lines = existingContent.split("\n").map((l) => l.trim());
+    const missingPatterns = REQUIRED_PATTERNS.filter((pattern) => !lines.includes(pattern));
+    if (missingPatterns.length === 0) {
+      return {
+        updated: false,
+        message: "All Bluera Knowledge patterns already present in .gitignore"
+      };
+    }
+    let newContent = existingContent;
+    if (!newContent.endsWith("\n")) {
+      newContent += "\n";
+    }
+    newContent += SECTION_HEADER;
+    newContent += `${missingPatterns.join("\n")}
+`;
+    await writeFile3(this.gitignorePath, newContent);
+    return {
+      updated: true,
+      message: `Updated .gitignore with ${String(missingPatterns.length)} Bluera Knowledge pattern(s)`
+    };
+  }
+  /**
+   * Get the path to the .gitignore file
+   */
+  getGitignorePath() {
+    return this.gitignorePath;
+  }
+};
+
 // src/services/index.service.ts
 import { createHash as createHash2 } from "crypto";
-import { readFile as
-import { join as
+import { readFile as readFile4, readdir } from "fs/promises";
+import { join as join6, extname, basename } from "path";
 
 // src/services/chunking.service.ts
 var CHUNK_PRESETS = {
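The hunk above introduces a new, self-contained `GitignoreService`. For orientation, here is a minimal usage sketch; it is illustrative only (the class lives inside the bundled chunk and is not part of the export changes at the end of this diff), and the project path is a made-up example.

```js
// Illustrative sketch of the GitignoreService added above; the path is hypothetical.
async function ensureKnowledgeGitignore() {
  const gitignore = new GitignoreService("/path/to/project");
  if (await gitignore.hasRequiredPatterns()) {
    return; // .gitignore already lists every required Bluera Knowledge pattern
  }
  // Creates .gitignore if it is missing, otherwise appends only the missing patterns.
  const result = await gitignore.ensureGitignorePatterns();
  console.log(result.message, "->", gitignore.getGitignorePath());
}
```

The pattern set it enforces keeps `.bluera/` ignored while re-including `stores.config.json` (so store definitions can be committed for team sharing) and explicitly ignoring the `data/` directory.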
@@ -2533,7 +2629,7 @@ var IndexService = class {
    * Extracted for parallel processing.
    */
   async processFile(filePath, store) {
-    const content = await
+    const content = await readFile4(filePath, "utf-8");
     const fileHash = createHash2("md5").update(content).digest("hex");
     const chunks = this.chunker.chunk(content, filePath);
     const ext = extname(filePath).toLowerCase();
@@ -2581,7 +2677,7 @@ var IndexService = class {
     const files = [];
     const entries = await readdir(dir, { withFileTypes: true });
     for (const entry of entries) {
-      const fullPath =
+      const fullPath = join6(dir, entry.name);
       if (entry.isDirectory()) {
         if (!["node_modules", ".git", "dist", "build"].includes(entry.name)) {
           files.push(...await this.scanDirectory(fullPath));
@@ -3780,36 +3876,240 @@ var SearchService = class {
   }
 };
 
+// src/services/store-definition.service.ts
+import { readFile as readFile5, writeFile as writeFile4, mkdir as mkdir3, access as access3 } from "fs/promises";
+import { dirname as dirname4, resolve as resolve2, isAbsolute, join as join7 } from "path";
+
+// src/types/store-definition.ts
+import { z as z2 } from "zod";
+var BaseStoreDefinitionSchema = z2.object({
+  name: z2.string().min(1, "Store name is required"),
+  description: z2.string().optional(),
+  tags: z2.array(z2.string()).optional()
+});
+var FileStoreDefinitionSchema = BaseStoreDefinitionSchema.extend({
+  type: z2.literal("file"),
+  path: z2.string().min(1, "Path is required for file stores")
+});
+var RepoStoreDefinitionSchema = BaseStoreDefinitionSchema.extend({
+  type: z2.literal("repo"),
+  url: z2.url("Valid URL is required for repo stores"),
+  branch: z2.string().optional(),
+  depth: z2.number().int().positive("Depth must be a positive integer").optional()
+});
+var WebStoreDefinitionSchema = BaseStoreDefinitionSchema.extend({
+  type: z2.literal("web"),
+  url: z2.url("Valid URL is required for web stores"),
+  depth: z2.number().int().min(0, "Depth must be non-negative").default(1),
+  maxPages: z2.number().int().positive("maxPages must be a positive integer").optional(),
+  crawlInstructions: z2.string().optional(),
+  extractInstructions: z2.string().optional()
+});
+var StoreDefinitionSchema = z2.discriminatedUnion("type", [
+  FileStoreDefinitionSchema,
+  RepoStoreDefinitionSchema,
+  WebStoreDefinitionSchema
+]);
+var StoreDefinitionsConfigSchema = z2.object({
+  version: z2.literal(1),
+  stores: z2.array(StoreDefinitionSchema)
+});
+function isFileStoreDefinition(def) {
+  return def.type === "file";
+}
+function isRepoStoreDefinition(def) {
+  return def.type === "repo";
+}
+function isWebStoreDefinition(def) {
+  return def.type === "web";
+}
+var DEFAULT_STORE_DEFINITIONS_CONFIG = {
+  version: 1,
+  stores: []
+};
+
+// src/services/store-definition.service.ts
+async function fileExists3(path4) {
+  try {
+    await access3(path4);
+    return true;
+  } catch {
+    return false;
+  }
+}
+var StoreDefinitionService = class {
+  configPath;
+  projectRoot;
+  config = null;
+  constructor(projectRoot) {
+    this.projectRoot = projectRoot ?? ProjectRootService.resolve();
+    this.configPath = join7(this.projectRoot, ".bluera/bluera-knowledge/stores.config.json");
+  }
+  /**
+   * Load store definitions from config file.
+   * Returns empty config if file doesn't exist.
+   * Throws on parse/validation errors (fail fast per CLAUDE.md).
+   */
+  async load() {
+    if (this.config !== null) {
+      return this.config;
+    }
+    const exists = await fileExists3(this.configPath);
+    if (!exists) {
+      this.config = {
+        ...DEFAULT_STORE_DEFINITIONS_CONFIG,
+        stores: [...DEFAULT_STORE_DEFINITIONS_CONFIG.stores]
+      };
+      return this.config;
+    }
+    const content = await readFile5(this.configPath, "utf-8");
+    let parsed;
+    try {
+      parsed = JSON.parse(content);
+    } catch (error) {
+      throw new Error(
+        `Failed to parse store definitions at ${this.configPath}: ${error instanceof Error ? error.message : String(error)}`
+      );
+    }
+    const result = StoreDefinitionsConfigSchema.safeParse(parsed);
+    if (!result.success) {
+      throw new Error(`Invalid store definitions at ${this.configPath}: ${result.error.message}`);
+    }
+    this.config = result.data;
+    return this.config;
+  }
+  /**
+   * Save store definitions to config file.
+   */
+  async save(config) {
+    await mkdir3(dirname4(this.configPath), { recursive: true });
+    await writeFile4(this.configPath, JSON.stringify(config, null, 2));
+    this.config = config;
+  }
+  /**
+   * Add a store definition.
+   * Throws if a definition with the same name already exists.
+   */
+  async addDefinition(definition) {
+    const config = await this.load();
+    const existing = config.stores.find((s) => s.name === definition.name);
+    if (existing !== void 0) {
+      throw new Error(`Store definition "${definition.name}" already exists`);
+    }
+    config.stores.push(definition);
+    await this.save(config);
+  }
+  /**
+   * Remove a store definition by name.
+   * Returns true if removed, false if not found.
+   */
+  async removeDefinition(name) {
+    const config = await this.load();
+    const index = config.stores.findIndex((s) => s.name === name);
+    if (index === -1) {
+      return false;
+    }
+    config.stores.splice(index, 1);
+    await this.save(config);
+    return true;
+  }
+  /**
+   * Update an existing store definition.
+   * Only updates the provided fields, preserving others.
+   * Throws if definition not found.
+   */
+  async updateDefinition(name, updates) {
+    const config = await this.load();
+    const index = config.stores.findIndex((s) => s.name === name);
+    if (index === -1) {
+      throw new Error(`Store definition "${name}" not found`);
+    }
+    const existing = config.stores[index];
+    if (existing === void 0) {
+      throw new Error(`Store definition "${name}" not found at index ${String(index)}`);
+    }
+    if (updates.description !== void 0) {
+      existing.description = updates.description;
+    }
+    if (updates.tags !== void 0) {
+      existing.tags = updates.tags;
+    }
+    await this.save(config);
+  }
+  /**
+   * Get a store definition by name.
+   * Returns undefined if not found.
+   */
+  async getByName(name) {
+    const config = await this.load();
+    return config.stores.find((s) => s.name === name);
+  }
+  /**
+   * Check if any definitions exist.
+   */
+  async hasDefinitions() {
+    const config = await this.load();
+    return config.stores.length > 0;
+  }
+  /**
+   * Resolve a file store path relative to project root.
+   */
+  resolvePath(path4) {
+    if (isAbsolute(path4)) {
+      return path4;
+    }
+    return resolve2(this.projectRoot, path4);
+  }
+  /**
+   * Get the config file path.
+   */
+  getConfigPath() {
+    return this.configPath;
+  }
+  /**
+   * Get the project root.
+   */
+  getProjectRoot() {
+    return this.projectRoot;
+  }
+  /**
+   * Clear the cached config (useful for testing).
+   */
+  clearCache() {
+    this.config = null;
+  }
+};
+
 // src/services/store.service.ts
 import { randomUUID as randomUUID2 } from "crypto";
-import { readFile as
-import { join as
+import { readFile as readFile6, writeFile as writeFile5, mkdir as mkdir5, stat, access as access4 } from "fs/promises";
+import { join as join8, resolve as resolve3 } from "path";
 
 // src/plugin/git-clone.ts
 import { spawn } from "child_process";
-import { mkdir as
+import { mkdir as mkdir4 } from "fs/promises";
 async function cloneRepository(options) {
   const { url, targetDir, branch, depth = 1 } = options;
-  await
+  await mkdir4(targetDir, { recursive: true });
   const args = ["clone", "--depth", String(depth)];
   if (branch !== void 0) {
     args.push("--branch", branch);
   }
   args.push(url, targetDir);
-  return new Promise((
+  return new Promise((resolve4) => {
     const git = spawn("git", args, { stdio: ["ignore", "pipe", "pipe"] });
     let stderr = "";
     git.stderr.on("data", (data) => {
       stderr += data.toString();
     });
     git.on("error", (error) => {
-
+      resolve4(err(error));
     });
     git.on("close", (code) => {
       if (code === 0) {
-
+        resolve4(ok(targetDir));
       } else {
-
+        resolve4(err(new Error(`Git clone failed: ${stderr}`)));
       }
     });
   });
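The Zod schemas above pin down the on-disk shape of `.bluera/bluera-knowledge/stores.config.json` that `StoreDefinitionService` loads and saves: a `version: 1` object holding an array of discriminated `file` / `repo` / `web` definitions. The following is a sketch of a config object that would validate; every name, path, and URL in it is invented for illustration.

```js
// Hypothetical config; only the shape comes from StoreDefinitionsConfigSchema above.
const exampleConfig = {
  version: 1,
  stores: [
    { type: "file", name: "local-docs", path: "docs" },
    { type: "repo", name: "example-repo", url: "https://github.com/example/example", branch: "main", depth: 1 },
    { type: "web", name: "example-site", url: "https://example.com/docs", depth: 1, maxPages: 50 }
  ]
};
// Throws if any entry violates the schema (e.g. a non-positive repo depth).
const validated = StoreDefinitionsConfigSchema.parse(exampleConfig);
```

`StoreDefinitionService.save()` writes this structure back with `JSON.stringify(config, null, 2)`, and `load()` caches the parsed result until `clearCache()` is called.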
@@ -3824,9 +4124,9 @@ function extractRepoName(url) {
 }
 
 // src/services/store.service.ts
-async function
+async function fileExists4(path4) {
   try {
-    await
+    await access4(path4);
     return true;
   } catch {
     return false;
@@ -3835,13 +4135,15 @@ async function fileExists2(path4) {
 var StoreService = class {
   dataDir;
   definitionService;
+  gitignoreService;
   registry = { stores: [] };
   constructor(dataDir, options) {
     this.dataDir = dataDir;
     this.definitionService = options?.definitionService ?? void 0;
+    this.gitignoreService = options?.gitignoreService ?? void 0;
   }
   async initialize() {
-    await
+    await mkdir5(this.dataDir, { recursive: true });
     await this.loadRegistry();
   }
   /**
@@ -3904,7 +4206,7 @@ var StoreService = class {
         if (input.path === void 0) {
           return err(new Error("Path is required for file stores"));
         }
-        const normalizedPath =
+        const normalizedPath = resolve3(input.path);
         try {
           const stats = await stat(normalizedPath);
           if (!stats.isDirectory()) {
@@ -3929,7 +4231,7 @@ var StoreService = class {
       case "repo": {
         let repoPath = input.path;
         if (input.url !== void 0) {
-          const cloneDir =
+          const cloneDir = join8(this.dataDir, "repos", id);
           const result = await cloneRepository({
             url: input.url,
             targetDir: cloneDir,
@@ -3944,7 +4246,7 @@ var StoreService = class {
         if (repoPath === void 0) {
           return err(new Error("Path or URL required for repo stores"));
         }
-        const normalizedRepoPath =
+        const normalizedRepoPath = resolve3(repoPath);
         store = {
           type: "repo",
           id,
@@ -3984,6 +4286,9 @@ var StoreService = class {
     }
     this.registry.stores.push(store);
     await this.saveRegistry();
+    if (this.gitignoreService !== void 0) {
+      await this.gitignoreService.ensureGitignorePatterns();
+    }
     if (this.definitionService !== void 0 && options?.skipDefinitionSync !== true) {
       const definition = this.createDefinitionFromStore(store, input);
       await this.definitionService.addDefinition(definition);
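Taken together, the `StoreService` hunks above add an optional `gitignoreService` collaborator next to the existing `definitionService`, and `create()` now ensures the `.gitignore` patterns right after the registry is saved. Below is a rough wiring sketch that mirrors what `createServices`/`createLazyServices` do later in this diff; the paths are placeholders.

```js
// Sketch only; paths are placeholders and the wiring mirrors createServices below.
async function buildStoreService(projectRoot, dataDir) {
  const store = new StoreService(dataDir, {
    definitionService: new StoreDefinitionService(projectRoot),
    gitignoreService: new GitignoreService(projectRoot)
  });
  await store.initialize();
  // After a successful store.create(...): the registry is written, the .gitignore
  // patterns are ensured, and (unless options.skipDefinitionSync is true) a matching
  // definition is appended to stores.config.json.
  return store;
}
```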
@@ -4053,14 +4358,14 @@ var StoreService = class {
     return ok(void 0);
   }
   async loadRegistry() {
-    const registryPath =
-    const exists = await
+    const registryPath = join8(this.dataDir, "stores.json");
+    const exists = await fileExists4(registryPath);
     if (!exists) {
       this.registry = { stores: [] };
       await this.saveRegistry();
       return;
     }
-    const content = await
+    const content = await readFile6(registryPath, "utf-8");
     try {
       const data = JSON.parse(content);
       this.registry = {
@@ -4078,8 +4383,8 @@ var StoreService = class {
     }
   }
   async saveRegistry() {
-    const registryPath =
-    await
+    const registryPath = join8(this.dataDir, "stores.json");
+    await writeFile5(registryPath, JSON.stringify(this.registry, null, 2));
   }
 };
 
@@ -4093,33 +4398,33 @@ import { fileURLToPath } from "url";
 import { ZodError } from "zod";
 
 // src/crawl/schemas.ts
-import { z as
-var CrawledLinkSchema =
-  href:
-  text:
-  title:
-  base_domain:
-  head_data:
-  head_extraction_status:
-  head_extraction_error:
-  intrinsic_score:
-  contextual_score:
-  total_score:
+import { z as z3 } from "zod";
+var CrawledLinkSchema = z3.object({
+  href: z3.string(),
+  text: z3.string(),
+  title: z3.string().optional(),
+  base_domain: z3.string().optional(),
+  head_data: z3.unknown().optional(),
+  head_extraction_status: z3.unknown().optional(),
+  head_extraction_error: z3.unknown().optional(),
+  intrinsic_score: z3.number().optional(),
+  contextual_score: z3.unknown().optional(),
+  total_score: z3.unknown().optional()
 });
-var CrawlPageSchema =
-  url:
-  title:
-  content:
-  links:
-  crawledAt:
+var CrawlPageSchema = z3.object({
+  url: z3.string(),
+  title: z3.string(),
+  content: z3.string(),
+  links: z3.array(z3.string()),
+  crawledAt: z3.string()
 });
-var CrawlResultSchema =
-  pages:
+var CrawlResultSchema = z3.object({
+  pages: z3.array(CrawlPageSchema)
 });
-var HeadlessResultSchema =
-  html:
-  markdown:
-  links:
+var HeadlessResultSchema = z3.object({
+  html: z3.string(),
+  markdown: z3.string(),
+  links: z3.array(z3.union([CrawledLinkSchema, z3.string()]))
 });
 function validateHeadlessResult(data) {
   return HeadlessResultSchema.parse(data);
@@ -4127,33 +4432,33 @@ function validateHeadlessResult(data) {
 function validateCrawlResult(data) {
   return CrawlResultSchema.parse(data);
 }
-var MethodInfoSchema =
-  name:
-  async:
-  signature:
-  startLine:
-  endLine:
-  calls:
+var MethodInfoSchema = z3.object({
+  name: z3.string(),
+  async: z3.boolean(),
+  signature: z3.string(),
+  startLine: z3.number(),
+  endLine: z3.number(),
+  calls: z3.array(z3.string())
 });
-var CodeNodeSchema =
-  type:
-  name:
-  exported:
-  startLine:
-  endLine:
-  async:
-  signature:
-  calls:
-  methods:
+var CodeNodeSchema = z3.object({
+  type: z3.enum(["function", "class"]),
+  name: z3.string(),
+  exported: z3.boolean(),
+  startLine: z3.number(),
+  endLine: z3.number(),
+  async: z3.boolean().optional(),
+  signature: z3.string().optional(),
+  calls: z3.array(z3.string()).optional(),
+  methods: z3.array(MethodInfoSchema).optional()
 });
-var ImportInfoSchema =
-  source:
-  imported:
-  alias:
+var ImportInfoSchema = z3.object({
+  source: z3.string(),
+  imported: z3.string(),
+  alias: z3.string().optional().nullable()
 });
-var ParsePythonResultSchema =
-  nodes:
-  imports:
+var ParsePythonResultSchema = z3.object({
+  nodes: z3.array(CodeNodeSchema),
+  imports: z3.array(ImportInfoSchema)
 });
 function validateParsePythonResult(data) {
   return ParsePythonResultSchema.parse(data);
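These schema definitions appear unchanged in substance; the bundled `z` identifier is simply renumbered to `z3`. They document the payloads exchanged with the Python bridge, so as an illustration, a result like the following (all values invented) would pass `validateCrawlResult`:

```js
// Invented sample data; only the field names and types come from the schemas above.
const sampleCrawlResult = {
  pages: [
    {
      url: "https://example.com/docs/intro",
      title: "Introduction",
      content: "Plain-text page content...",
      links: ["https://example.com/docs/next"],
      crawledAt: new Date().toISOString()
    }
  ]
};
validateCrawlResult(sampleCrawlResult); // returns the parsed value or throws a ZodError
```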
@@ -4285,7 +4590,7 @@ var PythonBridge = class {
       method: "crawl",
       params: { url }
     };
-    return new Promise((
+    return new Promise((resolve4, reject) => {
       const timeout = setTimeout(() => {
         const pending = this.pending.get(id);
         if (pending) {
@@ -4295,7 +4600,7 @@ var PythonBridge = class {
       }, timeoutMs);
       this.pending.set(id, {
         // eslint-disable-next-line @typescript-eslint/consistent-type-assertions -- Promise resolve type narrowing
-        resolve:
+        resolve: resolve4,
         reject,
         timeout,
         method: "crawl"
@@ -4317,7 +4622,7 @@ var PythonBridge = class {
       method: "fetch_headless",
       params: { url }
     };
-    return new Promise((
+    return new Promise((resolve4, reject) => {
       const timeout = setTimeout(() => {
         const pending = this.pending.get(id);
         if (pending) {
@@ -4327,7 +4632,7 @@ var PythonBridge = class {
       }, timeoutMs);
       this.pending.set(id, {
         // eslint-disable-next-line @typescript-eslint/consistent-type-assertions -- Promise resolve type narrowing
-        resolve:
+        resolve: resolve4,
         reject,
         timeout,
         method: "fetch_headless"
@@ -4349,7 +4654,7 @@ var PythonBridge = class {
       method: "parse_python",
       params: { code, filePath }
     };
-    return new Promise((
+    return new Promise((resolve4, reject) => {
       const timeout = setTimeout(() => {
         const pending = this.pending.get(id);
         if (pending) {
@@ -4361,7 +4666,7 @@ var PythonBridge = class {
       }, timeoutMs);
       this.pending.set(id, {
         // eslint-disable-next-line @typescript-eslint/consistent-type-assertions -- Promise resolve type narrowing
-        resolve:
+        resolve: resolve4,
         reject,
         timeout,
         method: "parse_python"
@@ -4378,7 +4683,7 @@ var PythonBridge = class {
     if (!this.process) {
       return Promise.resolve();
     }
-    return new Promise((
+    return new Promise((resolve4) => {
       this.stoppingIntentionally = true;
       this.rejectAllPending(new Error("Python bridge stopped"));
       if (this.stdoutReadline) {
@@ -4391,11 +4696,11 @@ var PythonBridge = class {
       }
       const proc = this.process;
       if (proc === null) {
-
+        resolve4();
         return;
       }
       const onExit = () => {
-
+        resolve4();
       };
       proc.once("exit", onExit);
       proc.kill();
@@ -4405,7 +4710,7 @@ var PythonBridge = class {
           proc.kill("SIGKILL");
           this.process = null;
         }
-
+        resolve4();
       }, 1e3);
     });
   }
@@ -4420,9 +4725,9 @@ var PythonBridge = class {
 
 // src/db/embeddings.ts
 import { homedir as homedir3 } from "os";
-import { join as
+import { join as join9 } from "path";
 import { pipeline, env } from "@huggingface/transformers";
-env.cacheDir =
+env.cacheDir = join9(homedir3(), ".cache", "huggingface-transformers");
 var EmbeddingEngine = class {
   extractor = null;
   modelName;
@@ -4459,7 +4764,7 @@ var EmbeddingEngine = class {
       const batchResults = await Promise.all(batch.map((text) => this.embed(text)));
       results.push(...batchResults);
       if (i + BATCH_SIZE < texts.length) {
-        await new Promise((
+        await new Promise((resolve4) => setTimeout(resolve4, 100));
       }
     }
     return results;
@@ -4483,17 +4788,17 @@ var EmbeddingEngine = class {
 import * as lancedb from "@lancedb/lancedb";
 
 // src/types/document.ts
-import { z as
-var DocumentTypeSchema =
-var DocumentMetadataSchema =
-  path:
-  url:
+import { z as z4 } from "zod";
+var DocumentTypeSchema = z4.enum(["file", "chunk", "web"]);
+var DocumentMetadataSchema = z4.object({
+  path: z4.string().optional(),
+  url: z4.string().optional(),
   type: DocumentTypeSchema,
-  storeId:
-  indexedAt:
-  fileHash:
-  chunkIndex:
-  totalChunks:
+  storeId: z4.string(),
+  indexedAt: z4.union([z4.string(), z4.date()]),
+  fileHash: z4.string().optional(),
+  chunkIndex: z4.number().optional(),
+  totalChunks: z4.number().optional()
 }).loose();
 
 // src/db/lance.ts
@@ -4618,6 +4923,102 @@ var LanceStore = class {
 
 // src/services/index.ts
 var logger4 = createLogger("services");
+var LazyServiceContainer = class {
+  // Eagerly initialized (lightweight)
+  config;
+  store;
+  lance;
+  pythonBridge;
+  // Configuration for lazy initialization
+  appConfig;
+  dataDir;
+  // Lazily initialized (heavy)
+  _embeddings = null;
+  _codeGraph = null;
+  _search = null;
+  _index = null;
+  constructor(config, appConfig, dataDir, store, lance, pythonBridge) {
+    this.config = config;
+    this.appConfig = appConfig;
+    this.dataDir = dataDir;
+    this.store = store;
+    this.lance = lance;
+    this.pythonBridge = pythonBridge;
+  }
+  /**
+   * EmbeddingEngine is lazily created on first access.
+   * Model loading (3-10s) is deferred until embed() is called.
+   */
+  get embeddings() {
+    if (this._embeddings === null) {
+      logger4.debug("Lazy-initializing EmbeddingEngine");
+      this._embeddings = new EmbeddingEngine(
+        this.appConfig.embedding.model,
+        this.appConfig.embedding.dimensions
+      );
+    }
+    return this._embeddings;
+  }
+  /**
+   * CodeGraphService is lazily created on first access.
+   */
+  get codeGraph() {
+    if (this._codeGraph === null) {
+      logger4.debug("Lazy-initializing CodeGraphService");
+      this._codeGraph = new CodeGraphService(this.dataDir, this.pythonBridge);
+    }
+    return this._codeGraph;
+  }
+  /**
+   * SearchService is lazily created on first access.
+   */
+  get search() {
+    if (this._search === null) {
+      logger4.debug("Lazy-initializing SearchService");
+      this._search = new SearchService(this.lance, this.embeddings, this.codeGraph);
+    }
+    return this._search;
+  }
+  /**
+   * IndexService is lazily created on first access.
+   */
+  get index() {
+    if (this._index === null) {
+      logger4.debug("Lazy-initializing IndexService");
+      this._index = new IndexService(this.lance, this.embeddings, {
+        codeGraphService: this.codeGraph
+      });
+    }
+    return this._index;
+  }
+  /**
+   * Check if embeddings have been initialized (for cleanup purposes).
+   */
+  get hasEmbeddings() {
+    return this._embeddings !== null;
+  }
+};
+async function createLazyServices(configPath, dataDir, projectRoot) {
+  logger4.info({ configPath, dataDir, projectRoot }, "Initializing lazy services");
+  const startTime = Date.now();
+  const config = new ConfigService(configPath, dataDir, projectRoot);
+  const appConfig = await config.load();
+  const resolvedDataDir = config.resolveDataDir();
+  const pythonBridge = new PythonBridge();
+  await pythonBridge.start();
+  const lance = new LanceStore(resolvedDataDir);
+  let storeOptions;
+  if (projectRoot !== void 0) {
+    const definitionService = new StoreDefinitionService(projectRoot);
+    const gitignoreService = new GitignoreService(projectRoot);
+    storeOptions = { definitionService, gitignoreService };
+  }
+  const store = new StoreService(resolvedDataDir, storeOptions);
+  await store.initialize();
+  const durationMs = Date.now() - startTime;
+  logger4.info({ dataDir: resolvedDataDir, durationMs }, "Lazy services initialized");
+  return new LazyServiceContainer(config, appConfig, resolvedDataDir, store, lance, pythonBridge);
+}
 async function createServices(configPath, dataDir, projectRoot) {
   logger4.info({ configPath, dataDir, projectRoot }, "Initializing services");
   const config = new ConfigService(configPath, dataDir, projectRoot);
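`LazyServiceContainer` keeps the lightweight pieces (config, store registry, LanceDB handle, Python bridge) eager while hiding the heavy ones behind getters, so the embedding model is only constructed when `embeddings`, `search`, or `index` is first touched (`codeGraph` defers its own construction the same way). A usage sketch follows; the arguments are placeholders, and `createLazyServices` is added to this chunk's exports at the end of the diff.

```js
// Sketch; configPath, dataDir and projectRoot are placeholder values.
async function run(configPath, dataDir, projectRoot) {
  const services = await createLazyServices(configPath, dataDir, projectRoot);
  // No EmbeddingEngine, CodeGraphService, SearchService or IndexService exists yet.
  const search = services.search; // first access builds embeddings + code graph + search
  // ... use `search` ...
  await destroyServices(services); // disposes embeddings only if they were actually created
}
```

The matching change in `destroyServices` further below checks `hasEmbeddings` on lazy containers, so a run that never touches the heavy getters skips the embedding disposal entirely.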
@@ -4628,7 +5029,13 @@ async function createServices(configPath, dataDir, projectRoot) {
   const lance = new LanceStore(resolvedDataDir);
   const embeddings = new EmbeddingEngine(appConfig.embedding.model, appConfig.embedding.dimensions);
   await embeddings.initialize();
-
+  let storeOptions;
+  if (projectRoot !== void 0) {
+    const definitionService = new StoreDefinitionService(projectRoot);
+    const gitignoreService = new GitignoreService(projectRoot);
+    storeOptions = { definitionService, gitignoreService };
+  }
+  const store = new StoreService(resolvedDataDir, storeOptions);
   await store.initialize();
   const codeGraph = new CodeGraphService(resolvedDataDir, pythonBridge);
   const search = new SearchService(lance, embeddings, codeGraph);
@@ -4655,12 +5062,18 @@ async function destroyServices(services) {
     logger4.error({ error }, "Error stopping Python bridge");
     errors.push(error);
   }
-
-
-
-
-
-
+  const isLazyContainer = services instanceof LazyServiceContainer;
+  const shouldDisposeEmbeddings = !isLazyContainer || services.hasEmbeddings;
+  if (shouldDisposeEmbeddings) {
+    try {
+      await services.embeddings.dispose();
+    } catch (e) {
+      const error = e instanceof Error ? e : new Error(String(e));
+      logger4.error({ error }, "Error disposing EmbeddingEngine");
+      errors.push(error);
+    }
+  } else {
+    logger4.debug("Skipping embeddings disposal (not initialized)");
   }
   try {
     await services.lance.closeAsync();
@@ -4686,15 +5099,19 @@ export {
   PythonBridge,
   ChunkingService,
   ASTParser,
-  ProjectRootService,
   createStoreId,
   createDocumentId,
   ok,
   err,
   classifyWebContentType,
+  isFileStoreDefinition,
+  isRepoStoreDefinition,
+  isWebStoreDefinition,
+  StoreDefinitionService,
   extractRepoName,
   JobService,
+  createLazyServices,
   createServices,
   destroyServices
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-WYZQUKUD.js.map