glab-agent 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +152 -0
- package/bin/glab-agent.mjs +18 -0
- package/package.json +59 -0
- package/src/local-agent/agent-config.ts +315 -0
- package/src/local-agent/agent-provider.ts +59 -0
- package/src/local-agent/agent-runner.ts +244 -0
- package/src/local-agent/claude-runner.ts +136 -0
- package/src/local-agent/cli.ts +1497 -0
- package/src/local-agent/codex-runner.ts +153 -0
- package/src/local-agent/gitlab-glab-client.ts +722 -0
- package/src/local-agent/health-server.ts +56 -0
- package/src/local-agent/heartbeat.ts +33 -0
- package/src/local-agent/log-rotate.ts +56 -0
- package/src/local-agent/logger.ts +92 -0
- package/src/local-agent/metrics.ts +51 -0
- package/src/local-agent/mr-actions.ts +121 -0
- package/src/local-agent/notifier.ts +190 -0
- package/src/local-agent/process-manager.ts +193 -0
- package/src/local-agent/reply-runner.ts +111 -0
- package/src/local-agent/repo-cache.ts +144 -0
- package/src/local-agent/report.ts +183 -0
- package/src/local-agent/skill-import.ts +344 -0
- package/src/local-agent/skill-inject.ts +109 -0
- package/src/local-agent/skill-parse.ts +47 -0
- package/src/local-agent/smoke-test.ts +443 -0
- package/src/local-agent/state-store.ts +186 -0
- package/src/local-agent/token-check.ts +37 -0
- package/src/local-agent/watcher.ts +1226 -0
- package/src/local-agent/wiki-sync.ts +290 -0
- package/src/local-agent/worktree-manager.ts +141 -0
- package/src/text.ts +16 -0
|
@@ -0,0 +1,290 @@
|
|
|
1
|
+
import { mkdir, readdir, stat, copyFile, writeFile, readFile, unlink } from "node:fs/promises";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
|
|
4
|
+
/** Name of the wiki directory kept under `.glab-agent/`. */
export const WIKI_DIR_NAME = "wiki";

// Lock file dropped inside the wiki dir to serialize write-backs.
const LOCK_FILE_NAME = ".lock";
// A lock older than this is treated as abandoned and may be reclaimed.
const LOCK_STALE_MS = 5 * 60 * 1000; // 5 minutes

// JSON payload stored in the lock file.
interface LockEntry {
  // Process that created the lock (informational only — staleness is judged by time).
  pid: number;
  // Epoch milliseconds when the lock was taken.
  timestamp: number;
}

/** Minimal logging surface accepted by the sync functions. */
export interface WikiSyncLogger {
  info(msg: string): void;
  warn(msg: string): void;
}
|
|
18
|
+
|
|
19
|
+
/**
|
|
20
|
+
* Returns the path to the wiki directory inside .glab-agent/.
|
|
21
|
+
*/
|
|
22
|
+
export function wikiPath(repoRoot: string): string {
|
|
23
|
+
return path.join(repoRoot, ".glab-agent", WIKI_DIR_NAME);
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
// ---------------------------------------------------------------------------
|
|
27
|
+
// Lock helpers
|
|
28
|
+
// ---------------------------------------------------------------------------
|
|
29
|
+
|
|
30
|
+
async function acquireLock(wikiDir: string): Promise<boolean> {
|
|
31
|
+
const lockFile = path.join(wikiDir, LOCK_FILE_NAME);
|
|
32
|
+
try {
|
|
33
|
+
// Check for existing lock
|
|
34
|
+
try {
|
|
35
|
+
const raw = await readFile(lockFile, "utf8");
|
|
36
|
+
const entry: LockEntry = JSON.parse(raw);
|
|
37
|
+
const age = Date.now() - entry.timestamp;
|
|
38
|
+
if (age <= LOCK_STALE_MS) {
|
|
39
|
+
// Lock is fresh — not ours to take
|
|
40
|
+
return false;
|
|
41
|
+
}
|
|
42
|
+
// Stale lock — delete it and continue
|
|
43
|
+
await unlink(lockFile).catch(() => { /* ignore */ });
|
|
44
|
+
} catch {
|
|
45
|
+
// No lock file or unreadable — proceed to create
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
const entry: LockEntry = { pid: process.pid, timestamp: Date.now() };
|
|
49
|
+
await writeFile(lockFile, JSON.stringify(entry), "utf8");
|
|
50
|
+
return true;
|
|
51
|
+
} catch {
|
|
52
|
+
return false;
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
async function releaseLock(wikiDir: string): Promise<void> {
|
|
57
|
+
const lockFile = path.join(wikiDir, LOCK_FILE_NAME);
|
|
58
|
+
try {
|
|
59
|
+
await unlink(lockFile);
|
|
60
|
+
} catch { /* ignore */ }
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
// ---------------------------------------------------------------------------
|
|
64
|
+
// Recursive file list helper
|
|
65
|
+
// ---------------------------------------------------------------------------
|
|
66
|
+
|
|
67
|
+
/**
|
|
68
|
+
* Recursively collect all file paths under `dir`, returned as paths relative to `dir`.
|
|
69
|
+
*/
|
|
70
|
+
async function listFilesRecursive(dir: string): Promise<string[]> {
|
|
71
|
+
const results: string[] = [];
|
|
72
|
+
let entries: string[];
|
|
73
|
+
try {
|
|
74
|
+
entries = await readdir(dir);
|
|
75
|
+
} catch {
|
|
76
|
+
return results;
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
for (const entry of entries) {
|
|
80
|
+
if (entry === LOCK_FILE_NAME) continue; // never copy lock file
|
|
81
|
+
const full = path.join(dir, entry);
|
|
82
|
+
let s;
|
|
83
|
+
try {
|
|
84
|
+
s = await stat(full);
|
|
85
|
+
} catch {
|
|
86
|
+
continue;
|
|
87
|
+
}
|
|
88
|
+
if (s.isDirectory()) {
|
|
89
|
+
const sub = await listFilesRecursive(full);
|
|
90
|
+
for (const rel of sub) {
|
|
91
|
+
results.push(path.join(entry, rel));
|
|
92
|
+
}
|
|
93
|
+
} else {
|
|
94
|
+
results.push(entry);
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
return results;
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
// ---------------------------------------------------------------------------
|
|
101
|
+
// Public API
|
|
102
|
+
// ---------------------------------------------------------------------------
|
|
103
|
+
|
|
104
|
+
/**
|
|
105
|
+
* Pre-run: copy `.glab-agent/wiki/` from main repo into the agent worktree.
|
|
106
|
+
* Creates the destination wiki directory as needed.
|
|
107
|
+
* Returns the number of files copied, or 0 if the source wiki dir does not exist.
|
|
108
|
+
* Never throws — logs warnings on I/O errors.
|
|
109
|
+
*/
|
|
110
|
+
export async function syncWikiToWorktree(
|
|
111
|
+
repoRoot: string,
|
|
112
|
+
worktreePath: string,
|
|
113
|
+
logger?: WikiSyncLogger
|
|
114
|
+
): Promise<number> {
|
|
115
|
+
const srcDir = wikiPath(repoRoot);
|
|
116
|
+
const destDir = wikiPath(worktreePath);
|
|
117
|
+
|
|
118
|
+
// Check source exists
|
|
119
|
+
try {
|
|
120
|
+
const s = await stat(srcDir);
|
|
121
|
+
if (!s.isDirectory()) return 0;
|
|
122
|
+
} catch {
|
|
123
|
+
return 0;
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
let count = 0;
|
|
127
|
+
try {
|
|
128
|
+
const files = await listFilesRecursive(srcDir);
|
|
129
|
+
|
|
130
|
+
for (const rel of files) {
|
|
131
|
+
const src = path.join(srcDir, rel);
|
|
132
|
+
const dest = path.join(destDir, rel);
|
|
133
|
+
try {
|
|
134
|
+
await mkdir(path.dirname(dest), { recursive: true });
|
|
135
|
+
await copyFile(src, dest);
|
|
136
|
+
count++;
|
|
137
|
+
} catch (err) {
|
|
138
|
+
logger?.warn(`wiki-sync: failed to copy ${rel} to worktree: ${String(err)}`);
|
|
139
|
+
}
|
|
140
|
+
}
|
|
141
|
+
} catch (err) {
|
|
142
|
+
logger?.warn(`wiki-sync: unexpected error during syncWikiToWorktree: ${String(err)}`);
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
if (count > 0) {
|
|
146
|
+
logger?.info(`wiki-sync: copied ${count} wiki file(s) to worktree`);
|
|
147
|
+
}
|
|
148
|
+
return count;
|
|
149
|
+
}
|
|
150
|
+
|
|
151
|
+
/**
|
|
152
|
+
* Post-run: copy new/modified wiki files from the agent worktree back to the main repo.
|
|
153
|
+
* Only copies files whose mtime in the worktree is newer than in the main repo (or absent).
|
|
154
|
+
* Never deletes files in the main repo.
|
|
155
|
+
* Uses a lockfile to prevent concurrent writes.
|
|
156
|
+
* Returns the number of files synced back, or 0 if the worktree wiki dir does not exist.
|
|
157
|
+
* Never throws — logs warnings on I/O errors.
|
|
158
|
+
*/
|
|
159
|
+
export async function syncWikiFromWorktree(
|
|
160
|
+
worktreePath: string,
|
|
161
|
+
repoRoot: string,
|
|
162
|
+
logger?: WikiSyncLogger
|
|
163
|
+
): Promise<number> {
|
|
164
|
+
const srcDir = wikiPath(worktreePath);
|
|
165
|
+
const destDir = wikiPath(repoRoot);
|
|
166
|
+
|
|
167
|
+
// Check source exists
|
|
168
|
+
try {
|
|
169
|
+
const s = await stat(srcDir);
|
|
170
|
+
if (!s.isDirectory()) return 0;
|
|
171
|
+
} catch {
|
|
172
|
+
return 0;
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
// Ensure destination wiki dir exists before acquiring lock
|
|
176
|
+
try {
|
|
177
|
+
await mkdir(destDir, { recursive: true });
|
|
178
|
+
} catch (err) {
|
|
179
|
+
logger?.warn(`wiki-sync: failed to create wiki dir in main repo: ${String(err)}`);
|
|
180
|
+
return 0;
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
// Acquire lock
|
|
184
|
+
const locked = await acquireLock(destDir);
|
|
185
|
+
if (!locked) {
|
|
186
|
+
logger?.warn("wiki-sync: could not acquire lock for wiki sync — skipping");
|
|
187
|
+
return 0;
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
let count = 0;
|
|
191
|
+
try {
|
|
192
|
+
const files = await listFilesRecursive(srcDir);
|
|
193
|
+
|
|
194
|
+
for (const rel of files) {
|
|
195
|
+
const src = path.join(srcDir, rel);
|
|
196
|
+
const dest = path.join(destDir, rel);
|
|
197
|
+
|
|
198
|
+
try {
|
|
199
|
+
const srcStat = await stat(src);
|
|
200
|
+
|
|
201
|
+
// Compare mtime with destination (if it exists)
|
|
202
|
+
let destMtime = 0;
|
|
203
|
+
try {
|
|
204
|
+
const destStat = await stat(dest);
|
|
205
|
+
destMtime = destStat.mtimeMs;
|
|
206
|
+
} catch {
|
|
207
|
+
// Destination does not exist — always copy
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
if (srcStat.mtimeMs > destMtime) {
|
|
211
|
+
await mkdir(path.dirname(dest), { recursive: true });
|
|
212
|
+
await copyFile(src, dest);
|
|
213
|
+
count++;
|
|
214
|
+
}
|
|
215
|
+
} catch (err) {
|
|
216
|
+
logger?.warn(`wiki-sync: failed to sync ${rel} back to main repo: ${String(err)}`);
|
|
217
|
+
}
|
|
218
|
+
}
|
|
219
|
+
} catch (err) {
|
|
220
|
+
logger?.warn(`wiki-sync: unexpected error during syncWikiFromWorktree: ${String(err)}`);
|
|
221
|
+
} finally {
|
|
222
|
+
await releaseLock(destDir);
|
|
223
|
+
}
|
|
224
|
+
|
|
225
|
+
if (count > 0) {
|
|
226
|
+
logger?.info(`wiki-sync: synced ${count} wiki file(s) back to main repo`);
|
|
227
|
+
}
|
|
228
|
+
return count;
|
|
229
|
+
}
|
|
230
|
+
|
|
231
|
+
/**
|
|
232
|
+
* Initialize the wiki directory structure in the main repo with scaffold files.
|
|
233
|
+
* Creates `.glab-agent/wiki/` and subdirs (patterns/, gotchas/, decisions/, runbooks/).
|
|
234
|
+
* Creates `index.md` and `log.md` only if they do not already exist.
|
|
235
|
+
*/
|
|
236
|
+
export async function initWikiDir(repoRoot: string): Promise<void> {
|
|
237
|
+
const base = wikiPath(repoRoot);
|
|
238
|
+
const subdirs = ["patterns", "gotchas", "decisions", "runbooks"];
|
|
239
|
+
|
|
240
|
+
try {
|
|
241
|
+
await mkdir(base, { recursive: true });
|
|
242
|
+
for (const sub of subdirs) {
|
|
243
|
+
await mkdir(path.join(base, sub), { recursive: true });
|
|
244
|
+
}
|
|
245
|
+
} catch (err) {
|
|
246
|
+
throw new Error(`wiki-sync: failed to create wiki directory structure: ${String(err)}`);
|
|
247
|
+
}
|
|
248
|
+
|
|
249
|
+
const indexPath = path.join(base, "index.md");
|
|
250
|
+
const indexContent = `# Knowledge Wiki
|
|
251
|
+
|
|
252
|
+
> Agent 自动维护的项目知识库。每个条目一行摘要,按需深入。
|
|
253
|
+
|
|
254
|
+
## Patterns
|
|
255
|
+
|
|
256
|
+
## Gotchas
|
|
257
|
+
|
|
258
|
+
## Decisions
|
|
259
|
+
|
|
260
|
+
## Runbooks
|
|
261
|
+
`;
|
|
262
|
+
|
|
263
|
+
try {
|
|
264
|
+
await stat(indexPath);
|
|
265
|
+
// File exists — do not overwrite
|
|
266
|
+
} catch {
|
|
267
|
+
try {
|
|
268
|
+
await writeFile(indexPath, indexContent, "utf8");
|
|
269
|
+
} catch (err) {
|
|
270
|
+
throw new Error(`wiki-sync: failed to write index.md: ${String(err)}`);
|
|
271
|
+
}
|
|
272
|
+
}
|
|
273
|
+
|
|
274
|
+
const logPath = path.join(base, "log.md");
|
|
275
|
+
const logContent = `# Knowledge Log
|
|
276
|
+
|
|
277
|
+
> 时间线记录,append-only。
|
|
278
|
+
`;
|
|
279
|
+
|
|
280
|
+
try {
|
|
281
|
+
await stat(logPath);
|
|
282
|
+
// File exists — do not overwrite
|
|
283
|
+
} catch {
|
|
284
|
+
try {
|
|
285
|
+
await writeFile(logPath, logContent, "utf8");
|
|
286
|
+
} catch (err) {
|
|
287
|
+
throw new Error(`wiki-sync: failed to write log.md: ${String(err)}`);
|
|
288
|
+
}
|
|
289
|
+
}
|
|
290
|
+
}
|
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
import { execFile } from "node:child_process";
|
|
2
|
+
import { mkdir } from "node:fs/promises";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import { promisify } from "node:util";
|
|
5
|
+
|
|
6
|
+
// Promise-based execFile used for all git invocations; injectable for tests.
const execFileAsync = promisify(execFile);

/** Result of ensuring a worktree: the branch it tracks and its on-disk path. */
export interface WorktreeInfo {
  branch: string;
  worktreePath: string;
}

interface WorktreeManagerOptions {
  // Main repository checkout that owns the worktrees (passed to `git -C`).
  repoPath: string;
  // Directory under which per-issue worktrees are created.
  worktreeRoot: string;
  // Test seam — defaults to the real promisified execFile.
  execFileImpl?: typeof execFileAsync;
}
|
|
18
|
+
|
|
19
|
+
export function slugifyTitle(title: string): string {
|
|
20
|
+
const slug = title
|
|
21
|
+
.normalize("NFKD")
|
|
22
|
+
.replace(/[^\w\s-]/g, "")
|
|
23
|
+
.trim()
|
|
24
|
+
.toLowerCase()
|
|
25
|
+
.replace(/[\s_-]+/g, "-")
|
|
26
|
+
.replace(/^-+|-+$/g, "");
|
|
27
|
+
|
|
28
|
+
return (slug || "task").slice(0, 48);
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
export function branchNameForIssue(issueIid: number, title: string): string {
|
|
32
|
+
return `agent/issue-${issueIid}-${slugifyTitle(title)}`;
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
export function worktreePathForIssue(worktreeRoot: string, issueIid: number, title: string): string {
|
|
36
|
+
return path.join(worktreeRoot, `issue-${issueIid}-${slugifyTitle(title)}`);
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
export class WorktreeManager {
|
|
40
|
+
private readonly repoPath: string;
|
|
41
|
+
|
|
42
|
+
private readonly worktreeRoot: string;
|
|
43
|
+
|
|
44
|
+
private readonly execFileImpl: typeof execFileAsync;
|
|
45
|
+
|
|
46
|
+
constructor(options: WorktreeManagerOptions) {
|
|
47
|
+
this.repoPath = options.repoPath;
|
|
48
|
+
this.worktreeRoot = options.worktreeRoot;
|
|
49
|
+
this.execFileImpl = options.execFileImpl ?? execFileAsync;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
async ensureWorktree(issueIid: number, title: string): Promise<WorktreeInfo> {
|
|
53
|
+
const branch = branchNameForIssue(issueIid, title);
|
|
54
|
+
const worktreePath = worktreePathForIssue(this.worktreeRoot, issueIid, title);
|
|
55
|
+
|
|
56
|
+
await mkdir(this.worktreeRoot, { recursive: true });
|
|
57
|
+
|
|
58
|
+
if (await this.worktreeExists(worktreePath)) {
|
|
59
|
+
return { branch, worktreePath };
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
const startPoint = await this.resolveStartPoint();
|
|
63
|
+
const branchExists = await this.branchExists(branch);
|
|
64
|
+
|
|
65
|
+
if (branchExists) {
|
|
66
|
+
await this.runGit(["worktree", "add", worktreePath, branch]);
|
|
67
|
+
} else {
|
|
68
|
+
await this.runGit(["worktree", "add", "-b", branch, worktreePath, startPoint]);
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
return { branch, worktreePath };
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
async pushBranch(branch: string): Promise<void> {
|
|
75
|
+
await this.runGit(["push", "-u", "origin", branch]);
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
async getDefaultBranch(): Promise<string> {
|
|
79
|
+
try {
|
|
80
|
+
const ref = await this.runGit(["symbolic-ref", "--quiet", "refs/remotes/origin/HEAD"]);
|
|
81
|
+
return ref.trim().replace(/^refs\/remotes\/origin\//, "");
|
|
82
|
+
} catch {
|
|
83
|
+
return "main";
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
async rebaseBranch(branch: string): Promise<void> {
|
|
88
|
+
await this.runGit(["fetch", "origin"]);
|
|
89
|
+
const defaultBranch = await this.getDefaultBranch();
|
|
90
|
+
await this.runGit(["rebase", `origin/${defaultBranch}`, branch]);
|
|
91
|
+
await this.runGit(["push", "--force-with-lease", "origin", branch]);
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
async hasDiff(branch: string): Promise<boolean> {
|
|
95
|
+
const defaultBranch = await this.getDefaultBranch();
|
|
96
|
+
try {
|
|
97
|
+
const diff = await this.runGit(["diff", `origin/${defaultBranch}...${branch}`, "--stat"]);
|
|
98
|
+
return diff.trim().length > 0;
|
|
99
|
+
} catch {
|
|
100
|
+
return false;
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
private async resolveStartPoint(): Promise<string> {
|
|
105
|
+
try {
|
|
106
|
+
const remoteHead = await this.runGit(["symbolic-ref", "--quiet", "refs/remotes/origin/HEAD"]);
|
|
107
|
+
return remoteHead.trim().replace(/^refs\/remotes\//, "");
|
|
108
|
+
} catch {
|
|
109
|
+
const currentBranch = await this.runGit(["branch", "--show-current"]);
|
|
110
|
+
return currentBranch.trim() || "HEAD";
|
|
111
|
+
}
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
private async worktreeExists(worktreePath: string): Promise<boolean> {
|
|
115
|
+
const output = await this.runGit(["worktree", "list", "--porcelain"]);
|
|
116
|
+
return output
|
|
117
|
+
.split("\n")
|
|
118
|
+
.some((line) => line.trim() === `worktree ${worktreePath}`);
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
private async branchExists(branch: string): Promise<boolean> {
|
|
122
|
+
try {
|
|
123
|
+
await this.runGit(["show-ref", "--verify", "--quiet", `refs/heads/${branch}`]);
|
|
124
|
+
return true;
|
|
125
|
+
} catch {
|
|
126
|
+
return false;
|
|
127
|
+
}
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
private async runGit(args: string[]): Promise<string> {
|
|
131
|
+
try {
|
|
132
|
+
const { stdout } = await this.execFileImpl("git", ["-C", this.repoPath, ...args], {
|
|
133
|
+
encoding: "utf8",
|
|
134
|
+
maxBuffer: 10 * 1024 * 1024
|
|
135
|
+
});
|
|
136
|
+
return stdout;
|
|
137
|
+
} catch (error) {
|
|
138
|
+
throw new Error(`git command failed (${args.join(" ")}): ${String(error)}`);
|
|
139
|
+
}
|
|
140
|
+
}
|
|
141
|
+
}
|
package/src/text.ts
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
// hello-world
|
|
2
|
+
const DEFAULT_LIMIT = 240;
|
|
3
|
+
|
|
4
|
+
export function truncate(text: string, limit = DEFAULT_LIMIT): string {
|
|
5
|
+
const normalized = text.replace(/\s+/g, " ").trim();
|
|
6
|
+
|
|
7
|
+
if (normalized.length <= limit) {
|
|
8
|
+
return normalized;
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
return `${normalized.slice(0, Math.max(0, limit - 1)).trimEnd()}…`;
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
export function compactText(text: string): string {
|
|
15
|
+
return text.replace(/\s+/g, " ").trim();
|
|
16
|
+
}
|