offwatch 0.5.9 → 0.5.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/offwatch.js +7 -6
- package/package.json +4 -3
- package/src/__tests__/agent-jwt-env.test.ts +79 -0
- package/src/__tests__/allowed-hostname.test.ts +80 -0
- package/src/__tests__/auth-command-registration.test.ts +16 -0
- package/src/__tests__/board-auth.test.ts +53 -0
- package/src/__tests__/common.test.ts +98 -0
- package/src/__tests__/company-delete.test.ts +95 -0
- package/src/__tests__/company-import-export-e2e.test.ts +502 -0
- package/src/__tests__/company-import-url.test.ts +74 -0
- package/src/__tests__/company-import-zip.test.ts +44 -0
- package/src/__tests__/company.test.ts +599 -0
- package/src/__tests__/context.test.ts +70 -0
- package/src/__tests__/data-dir.test.ts +79 -0
- package/src/__tests__/doctor.test.ts +102 -0
- package/src/__tests__/feedback.test.ts +177 -0
- package/src/__tests__/helpers/embedded-postgres.ts +6 -0
- package/src/__tests__/helpers/zip.ts +87 -0
- package/src/__tests__/home-paths.test.ts +44 -0
- package/src/__tests__/http.test.ts +106 -0
- package/src/__tests__/network-bind.test.ts +62 -0
- package/src/__tests__/onboard.test.ts +166 -0
- package/src/__tests__/routines.test.ts +249 -0
- package/src/__tests__/telemetry.test.ts +117 -0
- package/src/__tests__/worktree-merge-history.test.ts +492 -0
- package/src/__tests__/worktree.test.ts +982 -0
- package/src/adapters/http/format-event.ts +4 -0
- package/src/adapters/http/index.ts +7 -0
- package/src/adapters/index.ts +2 -0
- package/src/adapters/process/format-event.ts +4 -0
- package/src/adapters/process/index.ts +7 -0
- package/src/adapters/registry.ts +63 -0
- package/src/checks/agent-jwt-secret-check.ts +40 -0
- package/src/checks/config-check.ts +33 -0
- package/src/checks/database-check.ts +59 -0
- package/src/checks/deployment-auth-check.ts +88 -0
- package/src/checks/index.ts +18 -0
- package/src/checks/llm-check.ts +82 -0
- package/src/checks/log-check.ts +30 -0
- package/src/checks/path-resolver.ts +1 -0
- package/src/checks/port-check.ts +24 -0
- package/src/checks/secrets-check.ts +146 -0
- package/src/checks/storage-check.ts +51 -0
- package/src/client/board-auth.ts +282 -0
- package/src/client/command-label.ts +4 -0
- package/src/client/context.ts +175 -0
- package/src/client/http.ts +255 -0
- package/src/commands/allowed-hostname.ts +40 -0
- package/src/commands/auth-bootstrap-ceo.ts +138 -0
- package/src/commands/client/activity.ts +71 -0
- package/src/commands/client/agent.ts +315 -0
- package/src/commands/client/approval.ts +259 -0
- package/src/commands/client/auth.ts +113 -0
- package/src/commands/client/common.ts +221 -0
- package/src/commands/client/company.ts +1578 -0
- package/src/commands/client/context.ts +125 -0
- package/src/commands/client/dashboard.ts +34 -0
- package/src/commands/client/feedback.ts +645 -0
- package/src/commands/client/issue.ts +411 -0
- package/src/commands/client/plugin.ts +374 -0
- package/src/commands/client/zip.ts +129 -0
- package/src/commands/configure.ts +201 -0
- package/src/commands/db-backup.ts +102 -0
- package/src/commands/doctor.ts +203 -0
- package/src/commands/env.ts +411 -0
- package/src/commands/heartbeat-run.ts +344 -0
- package/src/commands/onboard.ts +692 -0
- package/src/commands/routines.ts +352 -0
- package/src/commands/run.ts +216 -0
- package/src/commands/worktree-lib.ts +279 -0
- package/src/commands/worktree-merge-history-lib.ts +764 -0
- package/src/commands/worktree.ts +2876 -0
- package/src/config/data-dir.ts +48 -0
- package/src/config/env.ts +125 -0
- package/src/config/home.ts +80 -0
- package/src/config/hostnames.ts +26 -0
- package/src/config/schema.ts +30 -0
- package/src/config/secrets-key.ts +48 -0
- package/src/config/server-bind.ts +183 -0
- package/src/config/store.ts +120 -0
- package/src/index.ts +182 -0
- package/src/prompts/database.ts +157 -0
- package/src/prompts/llm.ts +43 -0
- package/src/prompts/logging.ts +37 -0
- package/src/prompts/secrets.ts +99 -0
- package/src/prompts/server.ts +221 -0
- package/src/prompts/storage.ts +146 -0
- package/src/telemetry.ts +49 -0
- package/src/utils/banner.ts +24 -0
- package/src/utils/net.ts +18 -0
- package/src/utils/path-resolver.ts +25 -0
- package/src/version.ts +10 -0
- package/lib/downloader.js +0 -112
- package/postinstall.js +0 -23
|
@@ -0,0 +1,2876 @@
|
|
|
1
|
+
import {
|
|
2
|
+
chmodSync,
|
|
3
|
+
copyFileSync,
|
|
4
|
+
existsSync,
|
|
5
|
+
mkdirSync,
|
|
6
|
+
promises as fsPromises,
|
|
7
|
+
readdirSync,
|
|
8
|
+
readFileSync,
|
|
9
|
+
readlinkSync,
|
|
10
|
+
rmSync,
|
|
11
|
+
statSync,
|
|
12
|
+
symlinkSync,
|
|
13
|
+
writeFileSync,
|
|
14
|
+
} from "node:fs";
|
|
15
|
+
import os from "node:os";
|
|
16
|
+
import path from "node:path";
|
|
17
|
+
import { execFileSync } from "node:child_process";
|
|
18
|
+
import { createServer } from "node:net";
|
|
19
|
+
import { Readable } from "node:stream";
|
|
20
|
+
import * as p from "@clack/prompts";
|
|
21
|
+
import pc from "picocolors";
|
|
22
|
+
import { and, eq, inArray, sql } from "drizzle-orm";
|
|
23
|
+
import {
|
|
24
|
+
applyPendingMigrations,
|
|
25
|
+
agents,
|
|
26
|
+
assets,
|
|
27
|
+
workspaces,
|
|
28
|
+
createDb,
|
|
29
|
+
documentRevisions,
|
|
30
|
+
documents,
|
|
31
|
+
ensurePostgresDatabase,
|
|
32
|
+
formatDatabaseBackupResult,
|
|
33
|
+
goals,
|
|
34
|
+
heartbeatRuns,
|
|
35
|
+
inspectMigrations,
|
|
36
|
+
issueAttachments,
|
|
37
|
+
issueComments,
|
|
38
|
+
issueDocuments,
|
|
39
|
+
issues,
|
|
40
|
+
projectWorkspaces,
|
|
41
|
+
projects,
|
|
42
|
+
routines,
|
|
43
|
+
routineTriggers,
|
|
44
|
+
runDatabaseBackup,
|
|
45
|
+
runDatabaseRestore,
|
|
46
|
+
createEmbeddedPostgresLogBuffer,
|
|
47
|
+
formatEmbeddedPostgresError,
|
|
48
|
+
} from "@paperclipai/db";
|
|
49
|
+
import type { Command } from "commander";
|
|
50
|
+
import { ensureAgentJwtSecret, loadPaperclipEnvFile, mergePaperclipEnvEntries, readPaperclipEnvEntries, resolvePaperclipEnvFile } from "../config/env.js";
|
|
51
|
+
import { expandHomePrefix } from "../config/home.js";
|
|
52
|
+
import type { PaperclipConfig } from "../config/schema.js";
|
|
53
|
+
import { readConfig, resolveConfigPath, writeConfig } from "../config/store.js";
|
|
54
|
+
import { printPaperclipCliBanner } from "../utils/banner.js";
|
|
55
|
+
import { resolveRuntimeLikePath } from "../utils/path-resolver.js";
|
|
56
|
+
import {
|
|
57
|
+
buildWorktreeConfig,
|
|
58
|
+
buildWorktreeEnvEntries,
|
|
59
|
+
DEFAULT_WORKTREE_HOME,
|
|
60
|
+
formatShellExports,
|
|
61
|
+
generateWorktreeColor,
|
|
62
|
+
isWorktreeSeedMode,
|
|
63
|
+
resolveSuggestedWorktreeName,
|
|
64
|
+
resolveWorktreeSeedPlan,
|
|
65
|
+
resolveWorktreeLocalPaths,
|
|
66
|
+
sanitizeWorktreeInstanceId,
|
|
67
|
+
type WorktreeSeedMode,
|
|
68
|
+
type WorktreeLocalPaths,
|
|
69
|
+
} from "./worktree-lib.js";
|
|
70
|
+
import {
|
|
71
|
+
buildWorktreeMergePlan,
|
|
72
|
+
parseWorktreeMergeScopes,
|
|
73
|
+
type IssueAttachmentRow,
|
|
74
|
+
type IssueDocumentRow,
|
|
75
|
+
type DocumentRevisionRow,
|
|
76
|
+
type PlannedAttachmentInsert,
|
|
77
|
+
type PlannedCommentInsert,
|
|
78
|
+
type PlannedIssueDocumentInsert,
|
|
79
|
+
type PlannedIssueDocumentMerge,
|
|
80
|
+
type PlannedIssueInsert,
|
|
81
|
+
} from "./worktree-merge-history-lib.js";
|
|
82
|
+
|
|
83
|
+
// Options shared by the worktree init/make flows; field names mirror the CLI flags.
type WorktreeInitOptions = {
  name?: string;
  color?: string;
  instance?: string;
  home?: string;
  fromConfig?: string;
  fromDataDir?: string;
  fromInstance?: string;
  sourceConfigPathOverride?: string;
  serverPort?: number;
  dbPort?: number;
  seed?: boolean;
  seedMode?: string;
  force?: boolean;
};

// `worktree make` additionally accepts a git start point for the new branch.
type WorktreeMakeOptions = WorktreeInitOptions & {
  startPoint?: string;
};

// Options for the `worktree env` subcommand.
type WorktreeEnvOptions = {
  config?: string;
  json?: boolean;
};

// Options for the `worktree list` subcommand.
type WorktreeListOptions = {
  json?: boolean;
};

// Options for `worktree merge-history`.
type WorktreeMergeHistoryOptions = {
  from?: string;
  to?: string;
  company?: string;
  scope?: string;
  apply?: boolean;
  dry?: boolean;
  yes?: boolean;
};

// Options for `worktree reseed`. `from` is mutually exclusive with the
// fromConfig/fromDataDir/fromInstance trio (enforced in resolveWorktreeReseedSource).
type WorktreeReseedOptions = {
  from?: string;
  to?: string;
  fromConfig?: string;
  fromDataDir?: string;
  fromInstance?: string;
  seedMode?: string;
  yes?: boolean;
  allowLiveTarget?: boolean;
};
|
|
132
|
+
|
|
133
|
+
// Lifecycle surface used for the embedded Postgres server.
type EmbeddedPostgresInstance = {
  initialise(): Promise<void>;
  start(): Promise<void>;
  stop(): Promise<void>;
};

// Constructor shape for the embedded Postgres class.
type EmbeddedPostgresCtor = new (opts: {
  databaseDir: string;
  user: string;
  password: string;
  port: number;
  persistent: boolean;
  initdbFlags?: string[];
  onLog?: (message: unknown) => void;
  onError?: (message: unknown) => void;
}) => EmbeddedPostgresInstance;

// Handle to a running embedded Postgres; `startedByThisProcess` distinguishes
// a server this process launched from one that was already up.
type EmbeddedPostgresHandle = {
  port: number;
  startedByThisProcess: boolean;
  stop: () => Promise<void>;
};
|
|
155
|
+
|
|
156
|
+
// Absolute paths describing a git worktree layout, resolved via `git rev-parse`
// (see detectGitWorkspaceInfo).
type GitWorkspaceInfo = {
  root: string;      // worktree top-level (--show-toplevel)
  commonDir: string; // shared git dir (--git-common-dir)
  gitDir: string;    // this worktree's own git dir (--git-dir)
  hooksPath: string; // active hooks directory (--git-path hooks)
};

// Result of copyGitHooksToWorktreeGitDir.
type CopiedGitHooksResult = {
  sourceHooksPath: string;
  targetHooksPath: string;
  copied: boolean; // false when source and target coincide or nothing was copied
};

// Outcome of seeding a worktree database: the backup summary plus any project
// workspaces whose cwd was re-pointed at the new worktree checkout.
type SeedWorktreeDatabaseResult = {
  backupSummary: string;
  reboundWorkspaces: Array<{
    name: string;
    fromCwd: string;
    toCwd: string;
  }>;
};
|
|
177
|
+
|
|
178
|
+
function nonEmpty(value: string | null | undefined): string | null {
|
|
179
|
+
return typeof value === "string" && value.trim().length > 0 ? value.trim() : null;
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
function isCurrentSourceConfigPath(sourceConfigPath: string): boolean {
|
|
183
|
+
const currentConfigPath = process.env.PAPERCLIP_CONFIG;
|
|
184
|
+
if (!currentConfigPath || currentConfigPath.trim().length === 0) {
|
|
185
|
+
return false;
|
|
186
|
+
}
|
|
187
|
+
return path.resolve(currentConfigPath) === path.resolve(sourceConfigPath);
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
const WORKTREE_NAME_PREFIX = "paperclip-";
|
|
191
|
+
|
|
192
|
+
function resolveWorktreeMakeName(name: string): string {
|
|
193
|
+
const value = nonEmpty(name);
|
|
194
|
+
if (!value) {
|
|
195
|
+
throw new Error("Worktree name is required.");
|
|
196
|
+
}
|
|
197
|
+
if (!/^[A-Za-z0-9._-]+$/.test(value)) {
|
|
198
|
+
throw new Error(
|
|
199
|
+
"Worktree name must contain only letters, numbers, dots, underscores, or dashes.",
|
|
200
|
+
);
|
|
201
|
+
}
|
|
202
|
+
return value.startsWith(WORKTREE_NAME_PREFIX) ? value : `${WORKTREE_NAME_PREFIX}${value}`;
|
|
203
|
+
}
|
|
204
|
+
|
|
205
|
+
function resolveWorktreeHome(explicit?: string): string {
|
|
206
|
+
return explicit ?? process.env.PAPERCLIP_WORKTREES_DIR ?? DEFAULT_WORKTREE_HOME;
|
|
207
|
+
}
|
|
208
|
+
|
|
209
|
+
function resolveWorktreeStartPoint(explicit?: string): string | undefined {
|
|
210
|
+
return explicit ?? nonEmpty(process.env.PAPERCLIP_WORKTREE_START_POINT) ?? undefined;
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
// Minimal storage client over either local disk or S3. Object keys are always
// validated to stay under "<workspaceId>/" (see assertStorageCompanyPrefix).
type ConfiguredStorage = {
  getObject(workspaceId: string, objectKey: string): Promise<Buffer>;
  putObject(workspaceId: string, objectKey: string, body: Buffer, contentType: string): Promise<void>;
};
|
|
217
|
+
|
|
218
|
+
function assertStorageCompanyPrefix(workspaceId: string, objectKey: string): void {
|
|
219
|
+
if (!objectKey.startsWith(`${workspaceId}/`) || objectKey.includes("..")) {
|
|
220
|
+
throw new Error(`Invalid object key for company ${workspaceId}.`);
|
|
221
|
+
}
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
function normalizeStorageObjectKey(objectKey: string): string {
|
|
225
|
+
const normalized = objectKey.replace(/\\/g, "/").trim();
|
|
226
|
+
if (!normalized || normalized.startsWith("/")) {
|
|
227
|
+
throw new Error("Invalid object key.");
|
|
228
|
+
}
|
|
229
|
+
const parts = normalized.split("/").filter((part) => part.length > 0);
|
|
230
|
+
if (parts.length === 0 || parts.some((part) => part === "." || part === "..")) {
|
|
231
|
+
throw new Error("Invalid object key.");
|
|
232
|
+
}
|
|
233
|
+
return parts.join("/");
|
|
234
|
+
}
|
|
235
|
+
|
|
236
|
+
function resolveLocalStoragePath(baseDir: string, objectKey: string): string {
|
|
237
|
+
const resolved = path.resolve(baseDir, normalizeStorageObjectKey(objectKey));
|
|
238
|
+
const root = path.resolve(baseDir);
|
|
239
|
+
if (resolved !== root && !resolved.startsWith(`${root}${path.sep}`)) {
|
|
240
|
+
throw new Error("Invalid object key path.");
|
|
241
|
+
}
|
|
242
|
+
return resolved;
|
|
243
|
+
}
|
|
244
|
+
|
|
245
|
+
async function s3BodyToBuffer(body: unknown): Promise<Buffer> {
|
|
246
|
+
if (!body) {
|
|
247
|
+
throw new Error("Object not found.");
|
|
248
|
+
}
|
|
249
|
+
if (Buffer.isBuffer(body)) {
|
|
250
|
+
return body;
|
|
251
|
+
}
|
|
252
|
+
if (body instanceof Readable) {
|
|
253
|
+
return await streamToBuffer(body);
|
|
254
|
+
}
|
|
255
|
+
|
|
256
|
+
const candidate = body as {
|
|
257
|
+
transformToWebStream?: () => ReadableStream<Uint8Array>;
|
|
258
|
+
arrayBuffer?: () => Promise<ArrayBuffer>;
|
|
259
|
+
};
|
|
260
|
+
if (typeof candidate.transformToWebStream === "function") {
|
|
261
|
+
const webStream = candidate.transformToWebStream();
|
|
262
|
+
const reader = webStream.getReader();
|
|
263
|
+
const chunks: Uint8Array[] = [];
|
|
264
|
+
while (true) {
|
|
265
|
+
const { done, value } = await reader.read();
|
|
266
|
+
if (done) break;
|
|
267
|
+
if (value) chunks.push(value);
|
|
268
|
+
}
|
|
269
|
+
return Buffer.concat(chunks.map((chunk) => Buffer.from(chunk)));
|
|
270
|
+
}
|
|
271
|
+
if (typeof candidate.arrayBuffer === "function") {
|
|
272
|
+
return Buffer.from(await candidate.arrayBuffer());
|
|
273
|
+
}
|
|
274
|
+
|
|
275
|
+
throw new Error("Unsupported storage response body.");
|
|
276
|
+
}
|
|
277
|
+
|
|
278
|
+
function normalizeS3Prefix(prefix: string | undefined): string {
|
|
279
|
+
if (!prefix) return "";
|
|
280
|
+
return prefix.trim().replace(/^\/+/, "").replace(/\/+$/, "");
|
|
281
|
+
}
|
|
282
|
+
|
|
283
|
+
function buildS3ObjectKey(prefix: string, objectKey: string): string {
|
|
284
|
+
return prefix ? `${prefix}/${objectKey}` : objectKey;
|
|
285
|
+
}
|
|
286
|
+
|
|
287
|
+
// Indirect dynamic import built via `new Function` so transpilers/bundlers
// (e.g. TS emitting CJS) cannot rewrite `import()` into `require()`.
const dynamicImport = new Function("specifier", "return import(specifier);") as (specifier: string) => Promise<any>;
|
|
288
|
+
|
|
289
|
+
/**
 * Builds a ConfiguredStorage client from a Paperclip config.
 *
 * local_disk provider: reads/writes files under the (home-expanded) baseDir,
 * creating parent directories on write. The contentType argument is accepted
 * but unused on disk.
 *
 * Otherwise: an S3-backed client. The AWS SDK is loaded lazily via
 * dynamicImport on first use and the client is memoized in a closure.
 */
function createConfiguredStorageFromPaperclipConfig(config: PaperclipConfig): ConfiguredStorage {
  if (config.storage.provider === "local_disk") {
    const baseDir = expandHomePrefix(config.storage.localDisk.baseDir);
    return {
      async getObject(workspaceId: string, objectKey: string) {
        // Every access is validated against the "<workspaceId>/" prefix.
        assertStorageCompanyPrefix(workspaceId, objectKey);
        return await fsPromises.readFile(resolveLocalStoragePath(baseDir, objectKey));
      },
      async putObject(workspaceId: string, objectKey: string, body: Buffer) {
        assertStorageCompanyPrefix(workspaceId, objectKey);
        const filePath = resolveLocalStoragePath(baseDir, objectKey);
        await fsPromises.mkdir(path.dirname(filePath), { recursive: true });
        await fsPromises.writeFile(filePath, body);
      },
    };
  }

  const prefix = normalizeS3Prefix(config.storage.s3.prefix);
  // Lazily import and construct the S3 client once; subsequent calls reuse the promise.
  let s3ClientPromise: Promise<any> | null = null;
  async function getS3Client() {
    if (!s3ClientPromise) {
      s3ClientPromise = (async () => {
        const sdk = await dynamicImport("@aws-sdk/client-s3");
        return {
          sdk,
          client: new sdk.S3Client({
            region: config.storage.s3.region,
            endpoint: config.storage.s3.endpoint,
            forcePathStyle: config.storage.s3.forcePathStyle,
          }),
        };
      })();
    }
    return await s3ClientPromise;
  }
  const bucket = config.storage.s3.bucket;
  return {
    async getObject(workspaceId: string, objectKey: string) {
      assertStorageCompanyPrefix(workspaceId, objectKey);
      const { sdk, client } = await getS3Client();
      const response = await client.send(
        new sdk.GetObjectCommand({
          Bucket: bucket,
          Key: buildS3ObjectKey(prefix, objectKey),
        }),
      );
      // The SDK body can be a stream or Blob-like; normalize to a Buffer.
      return await s3BodyToBuffer(response.Body);
    },
    async putObject(workspaceId: string, objectKey: string, body: Buffer, contentType: string) {
      assertStorageCompanyPrefix(workspaceId, objectKey);
      const { sdk, client } = await getS3Client();
      await client.send(
        new sdk.PutObjectCommand({
          Bucket: bucket,
          Key: buildS3ObjectKey(prefix, objectKey),
          Body: body,
          ContentType: contentType,
          ContentLength: body.length,
        }),
      );
    },
  };
}
|
|
352
|
+
|
|
353
|
+
function openConfiguredStorage(configPath: string): ConfiguredStorage {
|
|
354
|
+
const config = readConfig(configPath);
|
|
355
|
+
if (!config) {
|
|
356
|
+
throw new Error(`Config not found at ${configPath}.`);
|
|
357
|
+
}
|
|
358
|
+
return createConfiguredStorageFromPaperclipConfig(config);
|
|
359
|
+
}
|
|
360
|
+
|
|
361
|
+
async function streamToBuffer(stream: NodeJS.ReadableStream): Promise<Buffer> {
|
|
362
|
+
const chunks: Buffer[] = [];
|
|
363
|
+
for await (const chunk of stream) {
|
|
364
|
+
chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
|
|
365
|
+
}
|
|
366
|
+
return Buffer.concat(chunks);
|
|
367
|
+
}
|
|
368
|
+
|
|
369
|
+
export function isMissingStorageObjectError(error: unknown): boolean {
|
|
370
|
+
if (!error || typeof error !== "object") return false;
|
|
371
|
+
const candidate = error as { code?: unknown; status?: unknown; name?: unknown; message?: unknown };
|
|
372
|
+
return candidate.code === "ENOENT"
|
|
373
|
+
|| candidate.status === 404
|
|
374
|
+
|| candidate.name === "NoSuchKey"
|
|
375
|
+
|| candidate.name === "NotFound"
|
|
376
|
+
|| candidate.message === "Object not found.";
|
|
377
|
+
}
|
|
378
|
+
|
|
379
|
+
export async function readSourceAttachmentBody(
|
|
380
|
+
sourceStorages: Array<Pick<ConfiguredStorage, "getObject">>,
|
|
381
|
+
workspaceId: string,
|
|
382
|
+
objectKey: string,
|
|
383
|
+
): Promise<Buffer | null> {
|
|
384
|
+
for (const sourceStorage of sourceStorages) {
|
|
385
|
+
try {
|
|
386
|
+
return await sourceStorage.getObject(workspaceId, objectKey);
|
|
387
|
+
} catch (error) {
|
|
388
|
+
if (isMissingStorageObjectError(error)) {
|
|
389
|
+
continue;
|
|
390
|
+
}
|
|
391
|
+
throw error;
|
|
392
|
+
}
|
|
393
|
+
}
|
|
394
|
+
return null;
|
|
395
|
+
}
|
|
396
|
+
|
|
397
|
+
export function resolveWorktreeMakeTargetPath(name: string): string {
|
|
398
|
+
return path.resolve(os.homedir(), resolveWorktreeMakeName(name));
|
|
399
|
+
}
|
|
400
|
+
|
|
401
|
+
function extractExecSyncErrorMessage(error: unknown): string | null {
|
|
402
|
+
if (!error || typeof error !== "object") {
|
|
403
|
+
return error instanceof Error ? error.message : null;
|
|
404
|
+
}
|
|
405
|
+
|
|
406
|
+
const stderr = "stderr" in error ? error.stderr : null;
|
|
407
|
+
if (typeof stderr === "string") {
|
|
408
|
+
return nonEmpty(stderr);
|
|
409
|
+
}
|
|
410
|
+
if (stderr instanceof Buffer) {
|
|
411
|
+
return nonEmpty(stderr.toString("utf8"));
|
|
412
|
+
}
|
|
413
|
+
|
|
414
|
+
return error instanceof Error ? nonEmpty(error.message) : null;
|
|
415
|
+
}
|
|
416
|
+
|
|
417
|
+
function localBranchExists(cwd: string, branchName: string): boolean {
|
|
418
|
+
try {
|
|
419
|
+
execFileSync("git", ["show-ref", "--verify", "--quiet", `refs/heads/${branchName}`], {
|
|
420
|
+
cwd,
|
|
421
|
+
stdio: "ignore",
|
|
422
|
+
});
|
|
423
|
+
return true;
|
|
424
|
+
} catch {
|
|
425
|
+
return false;
|
|
426
|
+
}
|
|
427
|
+
}
|
|
428
|
+
|
|
429
|
+
export function resolveGitWorktreeAddArgs(input: {
|
|
430
|
+
branchName: string;
|
|
431
|
+
targetPath: string;
|
|
432
|
+
branchExists: boolean;
|
|
433
|
+
startPoint?: string;
|
|
434
|
+
}): string[] {
|
|
435
|
+
if (input.branchExists && !input.startPoint) {
|
|
436
|
+
return ["worktree", "add", input.targetPath, input.branchName];
|
|
437
|
+
}
|
|
438
|
+
const commitish = input.startPoint ?? "HEAD";
|
|
439
|
+
return ["worktree", "add", "-b", input.branchName, input.targetPath, commitish];
|
|
440
|
+
}
|
|
441
|
+
|
|
442
|
+
function readPidFilePort(postmasterPidFile: string): number | null {
|
|
443
|
+
if (!existsSync(postmasterPidFile)) return null;
|
|
444
|
+
try {
|
|
445
|
+
const lines = readFileSync(postmasterPidFile, "utf8").split("\n");
|
|
446
|
+
const port = Number(lines[3]?.trim());
|
|
447
|
+
return Number.isInteger(port) && port > 0 ? port : null;
|
|
448
|
+
} catch {
|
|
449
|
+
return null;
|
|
450
|
+
}
|
|
451
|
+
}
|
|
452
|
+
|
|
453
|
+
function readRunningPostmasterPid(postmasterPidFile: string): number | null {
|
|
454
|
+
if (!existsSync(postmasterPidFile)) return null;
|
|
455
|
+
try {
|
|
456
|
+
const pid = Number(readFileSync(postmasterPidFile, "utf8").split("\n")[0]?.trim());
|
|
457
|
+
if (!Number.isInteger(pid) || pid <= 0) return null;
|
|
458
|
+
process.kill(pid, 0);
|
|
459
|
+
return pid;
|
|
460
|
+
} catch {
|
|
461
|
+
return null;
|
|
462
|
+
}
|
|
463
|
+
}
|
|
464
|
+
|
|
465
|
+
async function isPortAvailable(port: number): Promise<boolean> {
|
|
466
|
+
return await new Promise<boolean>((resolve) => {
|
|
467
|
+
const server = createServer();
|
|
468
|
+
server.unref();
|
|
469
|
+
server.once("error", () => resolve(false));
|
|
470
|
+
server.listen(port, "127.0.0.1", () => {
|
|
471
|
+
server.close(() => resolve(true));
|
|
472
|
+
});
|
|
473
|
+
});
|
|
474
|
+
}
|
|
475
|
+
|
|
476
|
+
async function findAvailablePort(preferredPort: number, reserved = new Set<number>()): Promise<number> {
|
|
477
|
+
let port = Math.max(1, Math.trunc(preferredPort));
|
|
478
|
+
while (reserved.has(port) || !(await isPortAvailable(port))) {
|
|
479
|
+
port += 1;
|
|
480
|
+
}
|
|
481
|
+
return port;
|
|
482
|
+
}
|
|
483
|
+
|
|
484
|
+
function resolveRepoManagedWorktreesRoot(cwd: string): string | null {
|
|
485
|
+
const normalized = path.resolve(cwd);
|
|
486
|
+
const marker = `${path.sep}.paperclip${path.sep}worktrees${path.sep}`;
|
|
487
|
+
const index = normalized.indexOf(marker);
|
|
488
|
+
if (index === -1) return null;
|
|
489
|
+
const repoRoot = normalized.slice(0, index);
|
|
490
|
+
return path.resolve(repoRoot, ".paperclip", "worktrees");
|
|
491
|
+
}
|
|
492
|
+
|
|
493
|
+
/**
 * Gathers the server and embedded-postgres ports already claimed by sibling
 * worktree instances so a new worktree can avoid them. Sibling configs come
 * from two places: <homeDir>/instances/<id>/config.json (skipping the current
 * instance) and, when cwd is inside a repo-managed worktree, each
 * <repo>/.paperclip/worktrees/<name>/.paperclip/config.json.
 */
function collectClaimedWorktreePorts(homeDir: string, currentInstanceId: string, cwd: string): {
  serverPorts: Set<number>;
  databasePorts: Set<number>;
} {
  const serverPorts = new Set<number>();
  const databasePorts = new Set<number>();
  const configPaths = new Set<string>();
  const instancesDir = path.resolve(homeDir, "instances");
  if (existsSync(instancesDir)) {
    for (const entry of readdirSync(instancesDir, { withFileTypes: true })) {
      // The current instance's own ports are not "claimed" for this purpose.
      if (!entry.isDirectory() || entry.name === currentInstanceId) continue;

      const configPath = path.resolve(instancesDir, entry.name, "config.json");
      if (existsSync(configPath)) {
        configPaths.add(configPath);
      }
    }
  }

  const repoManagedWorktreesRoot = resolveRepoManagedWorktreesRoot(cwd);
  if (repoManagedWorktreesRoot && existsSync(repoManagedWorktreesRoot)) {
    for (const entry of readdirSync(repoManagedWorktreesRoot, { withFileTypes: true })) {
      if (!entry.isDirectory()) continue;
      const configPath = path.resolve(repoManagedWorktreesRoot, entry.name, ".paperclip", "config.json");
      if (existsSync(configPath)) {
        configPaths.add(configPath);
      }
    }
  }

  for (const configPath of configPaths) {
    try {
      const config = readConfig(configPath);
      if (config?.server.port) {
        serverPorts.add(config.server.port);
      }
      // Only embedded-postgres instances pin a local database port.
      if (config?.database.mode === "embedded-postgres") {
        databasePorts.add(config.database.embeddedPostgresPort);
      }
    } catch {
      // Ignore malformed sibling configs.
    }
  }

  return { serverPorts, databasePorts };
}
|
|
539
|
+
|
|
540
|
+
function detectGitBranchName(cwd: string): string | null {
|
|
541
|
+
try {
|
|
542
|
+
const value = execFileSync("git", ["branch", "--show-current"], {
|
|
543
|
+
cwd,
|
|
544
|
+
encoding: "utf8",
|
|
545
|
+
stdio: ["ignore", "pipe", "ignore"],
|
|
546
|
+
}).trim();
|
|
547
|
+
return nonEmpty(value);
|
|
548
|
+
} catch {
|
|
549
|
+
return null;
|
|
550
|
+
}
|
|
551
|
+
}
|
|
552
|
+
|
|
553
|
+
function detectGitWorkspaceInfo(cwd: string): GitWorkspaceInfo | null {
|
|
554
|
+
try {
|
|
555
|
+
const root = execFileSync("git", ["rev-parse", "--show-toplevel"], {
|
|
556
|
+
cwd,
|
|
557
|
+
encoding: "utf8",
|
|
558
|
+
stdio: ["ignore", "pipe", "ignore"],
|
|
559
|
+
}).trim();
|
|
560
|
+
const commonDirRaw = execFileSync("git", ["rev-parse", "--git-common-dir"], {
|
|
561
|
+
cwd: root,
|
|
562
|
+
encoding: "utf8",
|
|
563
|
+
stdio: ["ignore", "pipe", "ignore"],
|
|
564
|
+
}).trim();
|
|
565
|
+
const gitDirRaw = execFileSync("git", ["rev-parse", "--git-dir"], {
|
|
566
|
+
cwd: root,
|
|
567
|
+
encoding: "utf8",
|
|
568
|
+
stdio: ["ignore", "pipe", "ignore"],
|
|
569
|
+
}).trim();
|
|
570
|
+
const hooksPathRaw = execFileSync("git", ["rev-parse", "--git-path", "hooks"], {
|
|
571
|
+
cwd: root,
|
|
572
|
+
encoding: "utf8",
|
|
573
|
+
stdio: ["ignore", "pipe", "ignore"],
|
|
574
|
+
}).trim();
|
|
575
|
+
return {
|
|
576
|
+
root: path.resolve(root),
|
|
577
|
+
commonDir: path.resolve(root, commonDirRaw),
|
|
578
|
+
gitDir: path.resolve(root, gitDirRaw),
|
|
579
|
+
hooksPath: path.resolve(root, hooksPathRaw),
|
|
580
|
+
};
|
|
581
|
+
} catch {
|
|
582
|
+
return null;
|
|
583
|
+
}
|
|
584
|
+
}
|
|
585
|
+
|
|
586
|
+
function copyDirectoryContents(sourceDir: string, targetDir: string): boolean {
|
|
587
|
+
if (!existsSync(sourceDir)) return false;
|
|
588
|
+
|
|
589
|
+
const entries = readdirSync(sourceDir, { withFileTypes: true });
|
|
590
|
+
if (entries.length === 0) return false;
|
|
591
|
+
|
|
592
|
+
mkdirSync(targetDir, { recursive: true });
|
|
593
|
+
|
|
594
|
+
let copied = false;
|
|
595
|
+
for (const entry of entries) {
|
|
596
|
+
const sourcePath = path.resolve(sourceDir, entry.name);
|
|
597
|
+
const targetPath = path.resolve(targetDir, entry.name);
|
|
598
|
+
|
|
599
|
+
if (entry.isDirectory()) {
|
|
600
|
+
mkdirSync(targetPath, { recursive: true });
|
|
601
|
+
copyDirectoryContents(sourcePath, targetPath);
|
|
602
|
+
copied = true;
|
|
603
|
+
continue;
|
|
604
|
+
}
|
|
605
|
+
|
|
606
|
+
if (entry.isSymbolicLink()) {
|
|
607
|
+
rmSync(targetPath, { recursive: true, force: true });
|
|
608
|
+
symlinkSync(readlinkSync(sourcePath), targetPath);
|
|
609
|
+
copied = true;
|
|
610
|
+
continue;
|
|
611
|
+
}
|
|
612
|
+
|
|
613
|
+
copyFileSync(sourcePath, targetPath);
|
|
614
|
+
try {
|
|
615
|
+
chmodSync(targetPath, statSync(sourcePath).mode & 0o777);
|
|
616
|
+
} catch {
|
|
617
|
+
// best effort
|
|
618
|
+
}
|
|
619
|
+
copied = true;
|
|
620
|
+
}
|
|
621
|
+
|
|
622
|
+
return copied;
|
|
623
|
+
}
|
|
624
|
+
|
|
625
|
+
export function copyGitHooksToWorktreeGitDir(cwd: string): CopiedGitHooksResult | null {
|
|
626
|
+
const workspace = detectGitWorkspaceInfo(cwd);
|
|
627
|
+
if (!workspace) return null;
|
|
628
|
+
|
|
629
|
+
const sourceHooksPath = workspace.hooksPath;
|
|
630
|
+
const targetHooksPath = path.resolve(workspace.gitDir, "hooks");
|
|
631
|
+
|
|
632
|
+
if (sourceHooksPath === targetHooksPath) {
|
|
633
|
+
return {
|
|
634
|
+
sourceHooksPath,
|
|
635
|
+
targetHooksPath,
|
|
636
|
+
copied: false,
|
|
637
|
+
};
|
|
638
|
+
}
|
|
639
|
+
|
|
640
|
+
return {
|
|
641
|
+
sourceHooksPath,
|
|
642
|
+
targetHooksPath,
|
|
643
|
+
copied: copyDirectoryContents(sourceHooksPath, targetHooksPath),
|
|
644
|
+
};
|
|
645
|
+
}
|
|
646
|
+
|
|
647
|
+
export function rebindWorkspaceCwd(input: {
|
|
648
|
+
sourceRepoRoot: string;
|
|
649
|
+
targetRepoRoot: string;
|
|
650
|
+
workspaceCwd: string;
|
|
651
|
+
}): string | null {
|
|
652
|
+
const sourceRepoRoot = path.resolve(input.sourceRepoRoot);
|
|
653
|
+
const targetRepoRoot = path.resolve(input.targetRepoRoot);
|
|
654
|
+
const workspaceCwd = path.resolve(input.workspaceCwd);
|
|
655
|
+
const relative = path.relative(sourceRepoRoot, workspaceCwd);
|
|
656
|
+
if (!relative || relative === "") {
|
|
657
|
+
return targetRepoRoot;
|
|
658
|
+
}
|
|
659
|
+
if (relative.startsWith("..") || path.isAbsolute(relative)) {
|
|
660
|
+
return null;
|
|
661
|
+
}
|
|
662
|
+
return path.resolve(targetRepoRoot, relative);
|
|
663
|
+
}
|
|
664
|
+
|
|
665
|
+
/**
 * After seeding a worktree database, re-points project workspace cwd rows at
 * the new checkout. A row is rebound only when its cwd is inside a git
 * worktree that shares the same common dir as the current checkout (i.e. the
 * same underlying repository), the re-rooted path differs from the current
 * one, and that path actually exists on disk. Returns the rows it rewrote.
 */
async function rebindSeededProjectWorkspaces(input: {
  targetConnectionString: string;
  currentCwd: string;
}): Promise<SeedWorktreeDatabaseResult["reboundWorkspaces"]> {
  const targetRepo = detectGitWorkspaceInfo(input.currentCwd);
  if (!targetRepo) return [];

  const db = createDb(input.targetConnectionString);
  // The drizzle type does not expose the underlying client; widen it so the
  // connection can be closed in the finally block below.
  const closableDb = db as typeof db & {
    $client?: { end?: (opts?: { timeout?: number }) => Promise<void> };
  };

  try {
    const rows = await db
      .select({
        id: projectWorkspaces.id,
        name: projectWorkspaces.name,
        cwd: projectWorkspaces.cwd,
      })
      .from(projectWorkspaces);

    const rebound: SeedWorktreeDatabaseResult["reboundWorkspaces"] = [];
    for (const row of rows) {
      const workspaceCwd = nonEmpty(row.cwd);
      if (!workspaceCwd) continue;

      const sourceRepo = detectGitWorkspaceInfo(workspaceCwd);
      if (!sourceRepo) continue;
      // Only rebind workspaces that belong to the same underlying repository.
      if (sourceRepo.commonDir !== targetRepo.commonDir) continue;

      const reboundCwd = rebindWorkspaceCwd({
        sourceRepoRoot: sourceRepo.root,
        targetRepoRoot: targetRepo.root,
        workspaceCwd,
      });
      if (!reboundCwd) continue;

      const normalizedCurrent = path.resolve(workspaceCwd);
      // Skip no-op rebinds and paths that do not exist in the new checkout.
      if (reboundCwd === normalizedCurrent) continue;
      if (!existsSync(reboundCwd)) continue;

      await db
        .update(projectWorkspaces)
        .set({
          cwd: reboundCwd,
          updatedAt: new Date(),
        })
        .where(eq(projectWorkspaces.id, row.id));

      rebound.push({
        name: row.name,
        fromCwd: normalizedCurrent,
        toCwd: reboundCwd,
      });
    }

    return rebound;
  } finally {
    // Best-effort close; swallow shutdown errors so the result still returns.
    await closableDb.$client?.end?.({ timeout: 5 }).catch(() => undefined);
  }
}
|
|
726
|
+
|
|
727
|
+
export function resolveSourceConfigPath(opts: WorktreeInitOptions): string {
|
|
728
|
+
if (opts.sourceConfigPathOverride) return path.resolve(opts.sourceConfigPathOverride);
|
|
729
|
+
if (opts.fromConfig) return path.resolve(opts.fromConfig);
|
|
730
|
+
if (!opts.fromDataDir && !opts.fromInstance) {
|
|
731
|
+
return resolveConfigPath();
|
|
732
|
+
}
|
|
733
|
+
const sourceHome = path.resolve(expandHomePrefix(opts.fromDataDir ?? "~/.paperclip"));
|
|
734
|
+
const sourceInstanceId = sanitizeWorktreeInstanceId(opts.fromInstance ?? "default");
|
|
735
|
+
return path.resolve(sourceHome, "instances", sourceInstanceId, "config.json");
|
|
736
|
+
}
|
|
737
|
+
|
|
738
|
+
export function resolveWorktreeReseedSource(input: WorktreeReseedOptions): ResolvedWorktreeReseedSource {
|
|
739
|
+
const fromSelector = nonEmpty(input.from);
|
|
740
|
+
const fromConfig = nonEmpty(input.fromConfig);
|
|
741
|
+
const fromDataDir = nonEmpty(input.fromDataDir);
|
|
742
|
+
const fromInstance = nonEmpty(input.fromInstance);
|
|
743
|
+
const hasExplicitConfigSource = Boolean(fromConfig || fromDataDir || fromInstance);
|
|
744
|
+
|
|
745
|
+
if (fromSelector && hasExplicitConfigSource) {
|
|
746
|
+
throw new Error(
|
|
747
|
+
"Use either --from <worktree> or --from-config/--from-data-dir/--from-instance, not both.",
|
|
748
|
+
);
|
|
749
|
+
}
|
|
750
|
+
|
|
751
|
+
if (fromSelector) {
|
|
752
|
+
const endpoint = resolveWorktreeEndpointFromSelector(fromSelector, { allowCurrent: true });
|
|
753
|
+
return {
|
|
754
|
+
configPath: endpoint.configPath,
|
|
755
|
+
label: endpoint.label,
|
|
756
|
+
};
|
|
757
|
+
}
|
|
758
|
+
|
|
759
|
+
if (hasExplicitConfigSource) {
|
|
760
|
+
const configPath = resolveSourceConfigPath({
|
|
761
|
+
fromConfig: fromConfig ?? undefined,
|
|
762
|
+
fromDataDir: fromDataDir ?? undefined,
|
|
763
|
+
fromInstance: fromInstance ?? undefined,
|
|
764
|
+
});
|
|
765
|
+
return {
|
|
766
|
+
configPath,
|
|
767
|
+
label: configPath,
|
|
768
|
+
};
|
|
769
|
+
}
|
|
770
|
+
|
|
771
|
+
throw new Error(
|
|
772
|
+
"Pass --from <worktree> or --from-config/--from-instance explicitly so the reseed source is unambiguous.",
|
|
773
|
+
);
|
|
774
|
+
}
|
|
775
|
+
|
|
776
|
+
export function resolveWorktreeReseedTargetPaths(input: {
|
|
777
|
+
configPath: string;
|
|
778
|
+
rootPath: string;
|
|
779
|
+
}): WorktreeLocalPaths {
|
|
780
|
+
const envEntries = readPaperclipEnvEntries(resolvePaperclipEnvFile(input.configPath));
|
|
781
|
+
const homeDir = nonEmpty(envEntries.PAPERCLIP_HOME);
|
|
782
|
+
const instanceId = nonEmpty(envEntries.PAPERCLIP_INSTANCE_ID);
|
|
783
|
+
|
|
784
|
+
if (!homeDir || !instanceId) {
|
|
785
|
+
throw new Error(
|
|
786
|
+
`Target config ${input.configPath} does not look like a worktree-local Paperclip instance. Expected PAPERCLIP_HOME and PAPERCLIP_INSTANCE_ID in the adjacent .env.`,
|
|
787
|
+
);
|
|
788
|
+
}
|
|
789
|
+
|
|
790
|
+
return resolveWorktreeLocalPaths({
|
|
791
|
+
cwd: input.rootPath,
|
|
792
|
+
homeDir,
|
|
793
|
+
instanceId,
|
|
794
|
+
});
|
|
795
|
+
}
|
|
796
|
+
|
|
797
|
+
function resolveSourceConnectionString(config: PaperclipConfig, envEntries: Record<string, string>, portOverride?: number): string {
|
|
798
|
+
if (config.database.mode === "postgres") {
|
|
799
|
+
const connectionString = nonEmpty(envEntries.DATABASE_URL) ?? nonEmpty(config.database.connectionString);
|
|
800
|
+
if (!connectionString) {
|
|
801
|
+
throw new Error(
|
|
802
|
+
"Source instance uses postgres mode but has no connection string in config or adjacent .env.",
|
|
803
|
+
);
|
|
804
|
+
}
|
|
805
|
+
return connectionString;
|
|
806
|
+
}
|
|
807
|
+
|
|
808
|
+
const port = portOverride ?? config.database.embeddedPostgresPort;
|
|
809
|
+
return `postgres://paperclip:paperclip@127.0.0.1:${port}/paperclip`;
|
|
810
|
+
}
|
|
811
|
+
|
|
812
|
+
/**
 * Mirror the source instance's `local_encrypted` secrets master key into the
 * new worktree instance so seeded secrets stay decryptable.
 *
 * Resolution order: inline key from the source .env (or, only when the source
 * is this process's own config, from process.env), otherwise the key file
 * referenced by env override or the source config. No-op for other secret
 * providers.
 *
 * @throws when a key file is required but missing on disk.
 */
export function copySeededSecretsKey(input: {
  sourceConfigPath: string;
  sourceConfig: PaperclipConfig;
  sourceEnvEntries: Record<string, string>;
  targetKeyFilePath: string;
}): void {
  // Only the local_encrypted provider keeps a key on disk; others need nothing.
  if (input.sourceConfig.secrets.provider !== "local_encrypted") {
    return;
  }

  mkdirSync(path.dirname(input.targetKeyFilePath), { recursive: true });

  // process.env fallbacks are only trusted when the source config is the one
  // this process itself is running against.
  const allowProcessEnvFallback = isCurrentSourceConfigPath(input.sourceConfigPath);
  const sourceInlineMasterKey =
    nonEmpty(input.sourceEnvEntries.PAPERCLIP_SECRETS_MASTER_KEY) ??
    (allowProcessEnvFallback ? nonEmpty(process.env.PAPERCLIP_SECRETS_MASTER_KEY) : null);
  if (sourceInlineMasterKey) {
    // Inline key wins: persist it with owner-only permissions.
    writeFileSync(input.targetKeyFilePath, sourceInlineMasterKey, {
      encoding: "utf8",
      mode: 0o600,
    });
    try {
      // Re-apply perms in case the file pre-existed (mode only applies on create).
      chmodSync(input.targetKeyFilePath, 0o600);
    } catch {
      // best effort
    }
    return;
  }

  // No inline key: copy the key file referenced by env override or config.
  const sourceKeyFileOverride =
    nonEmpty(input.sourceEnvEntries.PAPERCLIP_SECRETS_MASTER_KEY_FILE) ??
    (allowProcessEnvFallback ? nonEmpty(process.env.PAPERCLIP_SECRETS_MASTER_KEY_FILE) : null);
  const sourceConfiguredKeyPath = sourceKeyFileOverride ?? input.sourceConfig.secrets.localEncrypted.keyFilePath;
  const sourceKeyFilePath = resolveRuntimeLikePath(sourceConfiguredKeyPath, input.sourceConfigPath);

  if (!existsSync(sourceKeyFilePath)) {
    throw new Error(
      `Cannot seed worktree database because source local_encrypted secrets key was not found at ${sourceKeyFilePath}.`,
    );
  }

  copyFileSync(sourceKeyFilePath, input.targetKeyFilePath);
  try {
    chmodSync(input.targetKeyFilePath, 0o600);
  } catch {
    // best effort
  }
}
|
|
860
|
+
|
|
861
|
+
/**
 * Ensure an embedded PostgreSQL cluster for `dataDir` is running and return a
 * handle to it.
 *
 * If a postmaster.pid indicates an already-running server, that server is
 * reused (its stop() is a no-op so we never kill a cluster we did not start).
 * Otherwise the cluster is initialised on first use, any stale pid file is
 * removed, and the server is started on an available port near
 * `preferredPort`.
 *
 * @throws when the optional `embedded-postgres` dependency is missing, or
 *         when initialise/start fail (error is enriched with recent logs).
 */
async function ensureEmbeddedPostgres(dataDir: string, preferredPort: number): Promise<EmbeddedPostgresHandle> {
  // Dynamic import keeps `embedded-postgres` an optional dependency.
  const moduleName = "embedded-postgres";
  let EmbeddedPostgres: EmbeddedPostgresCtor;
  try {
    const mod = await import(moduleName);
    EmbeddedPostgres = mod.default as EmbeddedPostgresCtor;
  } catch {
    throw new Error(
      "Embedded PostgreSQL support requires dependency `embedded-postgres`. Reinstall dependencies and try again.",
    );
  }

  const postmasterPidFile = path.resolve(dataDir, "postmaster.pid");
  const runningPid = readRunningPostmasterPid(postmasterPidFile);
  if (runningPid) {
    // Reuse the already-running cluster; report its actual port when readable.
    return {
      port: readPidFilePort(postmasterPidFile) ?? preferredPort,
      startedByThisProcess: false,
      stop: async () => {},
    };
  }

  const port = await findAvailablePort(preferredPort);
  const logBuffer = createEmbeddedPostgresLogBuffer();
  const instance = new EmbeddedPostgres({
    databaseDir: dataDir,
    user: "paperclip",
    password: "paperclip",
    port,
    persistent: true,
    initdbFlags: ["--encoding=UTF8", "--locale=C", "--lc-messages=C"],
    onLog: logBuffer.append,
    onError: logBuffer.append,
  });

  // PG_VERSION marks an initialised cluster; only initdb on first use.
  if (!existsSync(path.resolve(dataDir, "PG_VERSION"))) {
    try {
      await instance.initialise();
    } catch (error) {
      throw formatEmbeddedPostgresError(error, {
        fallbackMessage: `Failed to initialize embedded PostgreSQL cluster in ${dataDir} on port ${port}`,
        recentLogs: logBuffer.getRecentLogs(),
      });
    }
  }
  // Stale pid file (no live postmaster, checked above) would block startup.
  if (existsSync(postmasterPidFile)) {
    rmSync(postmasterPidFile, { force: true });
  }
  try {
    await instance.start();
  } catch (error) {
    throw formatEmbeddedPostgresError(error, {
      fallbackMessage: `Failed to start embedded PostgreSQL on port ${port}`,
      recentLogs: logBuffer.getRecentLogs(),
    });
  }

  return {
    port,
    startedByThisProcess: true,
    stop: async () => {
      await instance.stop();
    },
  };
}
|
|
926
|
+
|
|
927
|
+
/**
 * Pause every routine in the seeded database that has an enabled schedule
 * trigger, so a freshly-seeded worktree instance does not start running the
 * source instance's scheduled jobs.
 *
 * Routines already `paused` or `archived` are left untouched.
 *
 * @returns the number of routines whose status was changed to `paused`.
 */
export async function pauseSeededScheduledRoutines(connectionString: string): Promise<number> {
  const db = createDb(connectionString);
  try {
    const scheduledRoutineIds = await db
      .selectDistinct({ routineId: routineTriggers.routineId })
      .from(routineTriggers)
      .where(and(eq(routineTriggers.kind, "schedule"), eq(routineTriggers.enabled, true)));
    // Drop null/empty routine ids before building the IN clause.
    const idsToPause = scheduledRoutineIds
      .map((row) => row.routineId)
      .filter((value): value is string => Boolean(value));

    if (idsToPause.length === 0) {
      return 0;
    }

    const paused = await db
      .update(routines)
      .set({
        status: "paused",
        updatedAt: new Date(),
      })
      .where(and(inArray(routines.id, idsToPause), sql`${routines.status} <> 'paused'`, sql`${routines.status} <> 'archived'`))
      .returning({ id: routines.id });

    return paused.length;
  } finally {
    // Best-effort close of the pooled client so the CLI can exit promptly.
    await db.$client?.end?.({ timeout: 5 }).catch(() => undefined);
  }
}
|
|
956
|
+
|
|
957
|
+
/**
 * Seed the target worktree's embedded database from the source instance.
 *
 * Steps, in order: copy the secrets master key; (if the source itself is
 * embedded) ensure its cluster is up; take a filtered backup per the seed
 * plan; ensure the target cluster/database exist; restore the backup; apply
 * pending migrations; pause scheduled routines; rebind project workspace
 * cwds to the new worktree. Any cluster this call started is stopped again
 * in the finally block, even on failure.
 *
 * @returns a human-readable backup summary plus the rebound workspace list.
 */
async function seedWorktreeDatabase(input: {
  sourceConfigPath: string;
  sourceConfig: PaperclipConfig;
  targetConfig: PaperclipConfig;
  targetPaths: WorktreeLocalPaths;
  instanceId: string;
  seedMode: WorktreeSeedMode;
}): Promise<SeedWorktreeDatabaseResult> {
  // The seed plan decides which tables are excluded / columns nullified.
  const seedPlan = resolveWorktreeSeedPlan(input.seedMode);
  const sourceEnvFile = resolvePaperclipEnvFile(input.sourceConfigPath);
  const sourceEnvEntries = readPaperclipEnvEntries(sourceEnvFile);
  copySeededSecretsKey({
    sourceConfigPath: input.sourceConfigPath,
    sourceConfig: input.sourceConfig,
    sourceEnvEntries,
    targetKeyFilePath: input.targetPaths.secretsKeyFilePath,
  });
  let sourceHandle: EmbeddedPostgresHandle | null = null;
  let targetHandle: EmbeddedPostgresHandle | null = null;

  try {
    if (input.sourceConfig.database.mode === "embedded-postgres") {
      sourceHandle = await ensureEmbeddedPostgres(
        input.sourceConfig.database.embeddedPostgresDataDir,
        input.sourceConfig.database.embeddedPostgresPort,
      );
      const sourceAdminConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${sourceHandle.port}/postgres`;
      await ensurePostgresDatabase(sourceAdminConnectionString, "paperclip");
    }
    const sourceConnectionString = resolveSourceConnectionString(
      input.sourceConfig,
      sourceEnvEntries,
      sourceHandle?.port,
    );
    // Snapshot the source with the seed plan's exclusions applied.
    const backup = await runDatabaseBackup({
      connectionString: sourceConnectionString,
      backupDir: path.resolve(input.targetPaths.backupDir, "seed"),
      retention: { dailyDays: 7, weeklyWeeks: 4, monthlyMonths: 1 },
      filenamePrefix: `${input.instanceId}-seed`,
      includeMigrationJournal: true,
      excludeTables: seedPlan.excludedTables,
      nullifyColumns: seedPlan.nullifyColumns,
    });

    targetHandle = await ensureEmbeddedPostgres(
      input.targetConfig.database.embeddedPostgresDataDir,
      input.targetConfig.database.embeddedPostgresPort,
    );

    // Create the `paperclip` database via the admin db, then restore into it.
    const adminConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${targetHandle.port}/postgres`;
    await ensurePostgresDatabase(adminConnectionString, "paperclip");
    const targetConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${targetHandle.port}/paperclip`;
    await runDatabaseRestore({
      connectionString: targetConnectionString,
      backupFile: backup.backupFile,
    });
    await applyPendingMigrations(targetConnectionString);
    // Prevent the seeded copy from running the source's scheduled routines.
    await pauseSeededScheduledRoutines(targetConnectionString);
    const reboundWorkspaces = await rebindSeededProjectWorkspaces({
      targetConnectionString,
      currentCwd: input.targetPaths.cwd,
    });

    return {
      backupSummary: formatDatabaseBackupResult(backup),
      reboundWorkspaces,
    };
  } finally {
    // Only stop clusters this call started; reused ones keep running.
    if (targetHandle?.startedByThisProcess) {
      await targetHandle.stop();
    }
    if (sourceHandle?.startedByThisProcess) {
      await sourceHandle.stop();
    }
  }
}
|
|
1033
|
+
|
|
1034
|
+
/**
 * Initialise a worktree-local Paperclip instance in the current directory.
 *
 * Resolves a worktree name/instance id, allocates non-conflicting server and
 * DB ports, writes the worktree config and .env, ensures an agent JWT secret,
 * mirrors git hooks, optionally seeds the database from the source instance,
 * and prints a summary. Throws if config/instance data already exist and
 * `--force` was not given.
 */
async function runWorktreeInit(opts: WorktreeInitOptions): Promise<void> {
  const cwd = process.cwd();
  // Prefer an explicit name, then the current git branch, then a suggestion.
  const worktreeName = resolveSuggestedWorktreeName(
    cwd,
    opts.name ?? detectGitBranchName(cwd) ?? undefined,
  );
  const seedMode = opts.seedMode ?? "minimal";
  if (!isWorktreeSeedMode(seedMode)) {
    throw new Error(`Unsupported seed mode "${seedMode}". Expected one of: minimal, full.`);
  }
  const instanceId = sanitizeWorktreeInstanceId(opts.instance ?? worktreeName);
  const paths = resolveWorktreeLocalPaths({
    cwd,
    homeDir: resolveWorktreeHome(opts.home),
    instanceId,
  });
  // Badge shown in the UI to tell worktree instances apart.
  const branding = {
    name: opts.name ?? worktreeName,
    color: opts.color ?? generateWorktreeColor(),
  };
  const sourceConfigPath = resolveSourceConfigPath(opts);
  const sourceConfig = existsSync(sourceConfigPath) ? readConfig(sourceConfigPath) : null;

  if ((existsSync(paths.configPath) || existsSync(paths.instanceRoot)) && !opts.force) {
    throw new Error(
      `Worktree config already exists at ${paths.configPath} or instance data exists at ${paths.instanceRoot}. Re-run with --force to replace it.`,
    );
  }

  if (opts.force) {
    // --force wipes both the repo-local config dir and the instance data.
    rmSync(paths.repoConfigDir, { recursive: true, force: true });
    rmSync(paths.instanceRoot, { recursive: true, force: true });
  }

  // Allocate ports that collide neither with other worktrees' claims nor
  // with each other; preference is source port + 1 unless overridden.
  const claimedPorts = collectClaimedWorktreePorts(paths.homeDir, paths.instanceId, paths.cwd);
  const preferredServerPort = opts.serverPort ?? ((sourceConfig?.server.port ?? 3100) + 1);
  const serverPort = await findAvailablePort(preferredServerPort, claimedPorts.serverPorts);
  const preferredDbPort = opts.dbPort ?? ((sourceConfig?.database.embeddedPostgresPort ?? 54329) + 1);
  const databasePort = await findAvailablePort(
    preferredDbPort,
    new Set([...claimedPorts.databasePorts, serverPort]),
  );
  const targetConfig = buildWorktreeConfig({
    sourceConfig,
    paths,
    serverPort,
    databasePort,
  });

  writeConfig(targetConfig, paths.configPath);
  const sourceEnvEntries = readPaperclipEnvEntries(resolvePaperclipEnvFile(sourceConfigPath));
  // Reuse the source's agent JWT secret when available so existing agent
  // tokens keep working; otherwise ensureAgentJwtSecret generates one below.
  const existingAgentJwtSecret =
    nonEmpty(sourceEnvEntries.PAPERCLIP_AGENT_JWT_SECRET) ??
    nonEmpty(process.env.PAPERCLIP_AGENT_JWT_SECRET);
  mergePaperclipEnvEntries(
    {
      ...buildWorktreeEnvEntries(paths, branding),
      ...(existingAgentJwtSecret ? { PAPERCLIP_AGENT_JWT_SECRET: existingAgentJwtSecret } : {}),
    },
    paths.envPath,
  );
  ensureAgentJwtSecret(paths.configPath);
  loadPaperclipEnvFile(paths.configPath);
  const copiedGitHooks = copyGitHooksToWorktreeGitDir(cwd);

  let seedSummary: string | null = null;
  let reboundWorkspaceSummary: SeedWorktreeDatabaseResult["reboundWorkspaces"] = [];
  if (opts.seed !== false) {
    if (!sourceConfig) {
      throw new Error(
        `Cannot seed worktree database because source config was not found at ${sourceConfigPath}. Use --no-seed or provide --from-config.`,
      );
    }
    const spinner = p.spinner();
    spinner.start(`Seeding isolated worktree database from source instance (${seedMode})...`);
    try {
      const seeded = await seedWorktreeDatabase({
        sourceConfigPath,
        sourceConfig,
        targetConfig,
        targetPaths: paths,
        instanceId,
        seedMode,
      });
      seedSummary = seeded.backupSummary;
      reboundWorkspaceSummary = seeded.reboundWorkspaces;
      spinner.stop(`Seeded isolated worktree database (${seedMode}).`);
    } catch (error) {
      spinner.stop(pc.red("Failed to seed worktree database."));
      throw error;
    }
  }

  // Final summary for the user.
  p.log.message(pc.dim(`Repo config: ${paths.configPath}`));
  p.log.message(pc.dim(`Repo env: ${paths.envPath}`));
  p.log.message(pc.dim(`Isolated home: ${paths.homeDir}`));
  p.log.message(pc.dim(`Instance: ${paths.instanceId}`));
  p.log.message(pc.dim(`Worktree badge: ${branding.name} (${branding.color})`));
  p.log.message(pc.dim(`Server port: ${serverPort} | DB port: ${databasePort}`));
  if (copiedGitHooks?.copied) {
    p.log.message(
      pc.dim(`Mirrored git hooks: ${copiedGitHooks.sourceHooksPath} -> ${copiedGitHooks.targetHooksPath}`),
    );
  }
  if (seedSummary) {
    p.log.message(pc.dim(`Seed mode: ${seedMode}`));
    p.log.message(pc.dim(`Seed snapshot: ${seedSummary}`));
    for (const rebound of reboundWorkspaceSummary) {
      p.log.message(
        pc.dim(`Rebound workspace ${rebound.name}: ${rebound.fromCwd} -> ${rebound.toCwd}`),
      );
    }
  }
  p.outro(
    pc.green(
      `Worktree ready. Run Paperclip inside this repo and the CLI/server will use ${paths.instanceId} automatically.`,
    ),
  );
}
|
|
1153
|
+
|
|
1154
|
+
/**
 * CLI entry point for `paperclipai worktree init`: prints the banner and
 * intro, then delegates to runWorktreeInit in the current directory.
 */
export async function worktreeInitCommand(opts: WorktreeInitOptions): Promise<void> {
  printPaperclipCliBanner();
  p.intro(pc.bgCyan(pc.black(" paperclipai worktree init ")));
  await runWorktreeInit(opts);
}
|
|
1159
|
+
|
|
1160
|
+
export async function worktreeMakeCommand(nameArg: string, opts: WorktreeMakeOptions): Promise<void> {
|
|
1161
|
+
printPaperclipCliBanner();
|
|
1162
|
+
p.intro(pc.bgCyan(pc.black(" paperclipai worktree:make ")));
|
|
1163
|
+
|
|
1164
|
+
const name = resolveWorktreeMakeName(nameArg);
|
|
1165
|
+
const startPoint = resolveWorktreeStartPoint(opts.startPoint);
|
|
1166
|
+
const sourceCwd = process.cwd();
|
|
1167
|
+
const sourceConfigPath = resolveSourceConfigPath(opts);
|
|
1168
|
+
const targetPath = resolveWorktreeMakeTargetPath(name);
|
|
1169
|
+
if (existsSync(targetPath)) {
|
|
1170
|
+
throw new Error(`Target path already exists: ${targetPath}`);
|
|
1171
|
+
}
|
|
1172
|
+
|
|
1173
|
+
mkdirSync(path.dirname(targetPath), { recursive: true });
|
|
1174
|
+
if (startPoint) {
|
|
1175
|
+
const [remote] = startPoint.split("/", 1);
|
|
1176
|
+
try {
|
|
1177
|
+
execFileSync("git", ["fetch", remote], {
|
|
1178
|
+
cwd: sourceCwd,
|
|
1179
|
+
stdio: ["ignore", "pipe", "pipe"],
|
|
1180
|
+
});
|
|
1181
|
+
} catch (error) {
|
|
1182
|
+
throw new Error(
|
|
1183
|
+
`Failed to fetch from remote "${remote}": ${extractExecSyncErrorMessage(error) ?? String(error)}`,
|
|
1184
|
+
);
|
|
1185
|
+
}
|
|
1186
|
+
}
|
|
1187
|
+
|
|
1188
|
+
const worktreeArgs = resolveGitWorktreeAddArgs({
|
|
1189
|
+
branchName: name,
|
|
1190
|
+
targetPath,
|
|
1191
|
+
branchExists: !startPoint && localBranchExists(sourceCwd, name),
|
|
1192
|
+
startPoint,
|
|
1193
|
+
});
|
|
1194
|
+
|
|
1195
|
+
const spinner = p.spinner();
|
|
1196
|
+
spinner.start(`Creating git worktree at ${targetPath}...`);
|
|
1197
|
+
try {
|
|
1198
|
+
execFileSync("git", worktreeArgs, {
|
|
1199
|
+
cwd: sourceCwd,
|
|
1200
|
+
stdio: ["ignore", "pipe", "pipe"],
|
|
1201
|
+
});
|
|
1202
|
+
spinner.stop(`Created git worktree at ${targetPath}.`);
|
|
1203
|
+
} catch (error) {
|
|
1204
|
+
spinner.stop(pc.red("Failed to create git worktree."));
|
|
1205
|
+
throw new Error(extractExecSyncErrorMessage(error) ?? String(error));
|
|
1206
|
+
}
|
|
1207
|
+
|
|
1208
|
+
const installSpinner = p.spinner();
|
|
1209
|
+
installSpinner.start("Installing dependencies...");
|
|
1210
|
+
try {
|
|
1211
|
+
execFileSync("pnpm", ["install"], {
|
|
1212
|
+
cwd: targetPath,
|
|
1213
|
+
stdio: ["ignore", "pipe", "pipe"],
|
|
1214
|
+
});
|
|
1215
|
+
installSpinner.stop("Installed dependencies.");
|
|
1216
|
+
} catch (error) {
|
|
1217
|
+
installSpinner.stop(pc.yellow("Failed to install dependencies (continuing anyway)."));
|
|
1218
|
+
p.log.warning(extractExecSyncErrorMessage(error) ?? String(error));
|
|
1219
|
+
}
|
|
1220
|
+
|
|
1221
|
+
const originalCwd = process.cwd();
|
|
1222
|
+
try {
|
|
1223
|
+
process.chdir(targetPath);
|
|
1224
|
+
await runWorktreeInit({
|
|
1225
|
+
...opts,
|
|
1226
|
+
name,
|
|
1227
|
+
sourceConfigPathOverride: sourceConfigPath,
|
|
1228
|
+
});
|
|
1229
|
+
} catch (error) {
|
|
1230
|
+
throw error;
|
|
1231
|
+
} finally {
|
|
1232
|
+
process.chdir(originalCwd);
|
|
1233
|
+
}
|
|
1234
|
+
}
|
|
1235
|
+
|
|
1236
|
+
/** CLI flags accepted by `paperclipai worktree:cleanup`. */
type WorktreeCleanupOptions = {
  instance?: string;
  home?: string;
  // When true, overrides safety checks and forces worktree/branch removal.
  force?: boolean;
};

/** One record parsed from `git worktree list --porcelain` output. */
type GitWorktreeListEntry = {
  worktree: string;
  // Full ref (e.g. "refs/heads/main"); null when the worktree is detached.
  branch: string | null;
  bare: boolean;
  detached: boolean;
};

/** A candidate worktree offered as a merge source. */
type MergeSourceChoice = {
  worktree: string;
  branch: string | null;
  // Short branch name for display, or "(detached)".
  branchLabel: string;
  // True when <worktree>/.paperclip/config.json exists.
  hasPaperclipConfig: boolean;
  isCurrent: boolean;
};

/** A resolved worktree endpoint: its root directory and config file. */
type ResolvedWorktreeEndpoint = {
  rootPath: string;
  configPath: string;
  label: string;
  isCurrent: boolean;
};

/** The resolved reseed source: which config to read and how to label it. */
type ResolvedWorktreeReseedSource = {
  configPath: string;
  label: string;
};
|
|
1268
|
+
|
|
1269
|
+
function parseGitWorktreeList(cwd: string): GitWorktreeListEntry[] {
|
|
1270
|
+
const raw = execFileSync("git", ["worktree", "list", "--porcelain"], {
|
|
1271
|
+
cwd,
|
|
1272
|
+
encoding: "utf8",
|
|
1273
|
+
stdio: ["ignore", "pipe", "pipe"],
|
|
1274
|
+
});
|
|
1275
|
+
const entries: GitWorktreeListEntry[] = [];
|
|
1276
|
+
let current: Partial<GitWorktreeListEntry> = {};
|
|
1277
|
+
for (const line of raw.split("\n")) {
|
|
1278
|
+
if (line.startsWith("worktree ")) {
|
|
1279
|
+
current = { worktree: line.slice("worktree ".length) };
|
|
1280
|
+
} else if (line.startsWith("branch ")) {
|
|
1281
|
+
current.branch = line.slice("branch ".length);
|
|
1282
|
+
} else if (line === "bare") {
|
|
1283
|
+
current.bare = true;
|
|
1284
|
+
} else if (line === "detached") {
|
|
1285
|
+
current.detached = true;
|
|
1286
|
+
} else if (line === "" && current.worktree) {
|
|
1287
|
+
entries.push({
|
|
1288
|
+
worktree: current.worktree,
|
|
1289
|
+
branch: current.branch ?? null,
|
|
1290
|
+
bare: current.bare ?? false,
|
|
1291
|
+
detached: current.detached ?? false,
|
|
1292
|
+
});
|
|
1293
|
+
current = {};
|
|
1294
|
+
}
|
|
1295
|
+
}
|
|
1296
|
+
if (current.worktree) {
|
|
1297
|
+
entries.push({
|
|
1298
|
+
worktree: current.worktree,
|
|
1299
|
+
branch: current.branch ?? null,
|
|
1300
|
+
bare: current.bare ?? false,
|
|
1301
|
+
detached: current.detached ?? false,
|
|
1302
|
+
});
|
|
1303
|
+
}
|
|
1304
|
+
return entries;
|
|
1305
|
+
}
|
|
1306
|
+
|
|
1307
|
+
function toMergeSourceChoices(cwd: string): MergeSourceChoice[] {
|
|
1308
|
+
const currentCwd = path.resolve(cwd);
|
|
1309
|
+
return parseGitWorktreeList(cwd).map((entry) => {
|
|
1310
|
+
const branchLabel = entry.branch?.replace(/^refs\/heads\//, "") ?? "(detached)";
|
|
1311
|
+
const worktreePath = path.resolve(entry.worktree);
|
|
1312
|
+
return {
|
|
1313
|
+
worktree: worktreePath,
|
|
1314
|
+
branch: entry.branch,
|
|
1315
|
+
branchLabel,
|
|
1316
|
+
hasPaperclipConfig: existsSync(path.resolve(worktreePath, ".paperclip", "config.json")),
|
|
1317
|
+
isCurrent: worktreePath === currentCwd,
|
|
1318
|
+
};
|
|
1319
|
+
});
|
|
1320
|
+
}
|
|
1321
|
+
|
|
1322
|
+
function branchHasUniqueCommits(cwd: string, branchName: string): boolean {
|
|
1323
|
+
try {
|
|
1324
|
+
const output = execFileSync(
|
|
1325
|
+
"git",
|
|
1326
|
+
["log", "--oneline", branchName, "--not", "--remotes", "--exclude", `refs/heads/${branchName}`, "--branches"],
|
|
1327
|
+
{ cwd, encoding: "utf8", stdio: ["ignore", "pipe", "pipe"] },
|
|
1328
|
+
).trim();
|
|
1329
|
+
return output.length > 0;
|
|
1330
|
+
} catch {
|
|
1331
|
+
return false;
|
|
1332
|
+
}
|
|
1333
|
+
}
|
|
1334
|
+
|
|
1335
|
+
function branchExistsOnAnyRemote(cwd: string, branchName: string): boolean {
|
|
1336
|
+
try {
|
|
1337
|
+
const output = execFileSync(
|
|
1338
|
+
"git",
|
|
1339
|
+
["branch", "-r", "--list", `*/${branchName}`],
|
|
1340
|
+
{ cwd, encoding: "utf8", stdio: ["ignore", "pipe", "pipe"] },
|
|
1341
|
+
).trim();
|
|
1342
|
+
return output.length > 0;
|
|
1343
|
+
} catch {
|
|
1344
|
+
return false;
|
|
1345
|
+
}
|
|
1346
|
+
}
|
|
1347
|
+
|
|
1348
|
+
function worktreePathHasUncommittedChanges(worktreePath: string): boolean {
|
|
1349
|
+
try {
|
|
1350
|
+
const output = execFileSync(
|
|
1351
|
+
"git",
|
|
1352
|
+
["status", "--porcelain"],
|
|
1353
|
+
{ cwd: worktreePath, encoding: "utf8", stdio: ["ignore", "pipe", "pipe"] },
|
|
1354
|
+
).trim();
|
|
1355
|
+
return output.length > 0;
|
|
1356
|
+
} catch {
|
|
1357
|
+
return false;
|
|
1358
|
+
}
|
|
1359
|
+
}
|
|
1360
|
+
|
|
1361
|
+
/**
 * CLI entry point for `paperclipai worktree:cleanup`: tear down everything a
 * worktree:make created for `nameArg` — the git worktree registration, the
 * worktree directory, the local branch, and the instance data directory.
 *
 * Safety checks refuse to delete a branch with commits found nowhere else,
 * or a worktree with uncommitted changes, unless `--force` is given. Each
 * cleanup step is idempotent, so the command can be re-run after a partial
 * failure.
 */
export async function worktreeCleanupCommand(nameArg: string, opts: WorktreeCleanupOptions): Promise<void> {
  printPaperclipCliBanner();
  p.intro(pc.bgCyan(pc.black(" paperclipai worktree:cleanup ")));

  const name = resolveWorktreeMakeName(nameArg);
  const sourceCwd = process.cwd();
  const targetPath = resolveWorktreeMakeTargetPath(name);
  const instanceId = sanitizeWorktreeInstanceId(opts.instance ?? name);
  const homeDir = path.resolve(expandHomePrefix(resolveWorktreeHome(opts.home)));
  const instanceRoot = path.resolve(homeDir, "instances", instanceId);

  // ── 1. Assess current state ──────────────────────────────────────────

  const hasBranch = localBranchExists(sourceCwd, name);
  const hasTargetDir = existsSync(targetPath);
  const hasInstanceData = existsSync(instanceRoot);

  // Match the registration by branch ref or by path, so a renamed branch
  // or a moved checkout is still found.
  const worktrees = parseGitWorktreeList(sourceCwd);
  const linkedWorktree = worktrees.find(
    (wt) => wt.branch === `refs/heads/${name}` || path.resolve(wt.worktree) === path.resolve(targetPath),
  );

  if (!hasBranch && !hasTargetDir && !hasInstanceData && !linkedWorktree) {
    p.log.info("Nothing to clean up — no branch, worktree directory, or instance data found.");
    p.outro(pc.green("Already clean."));
    return;
  }

  // ── 2. Safety checks ────────────────────────────────────────────────

  const problems: string[] = [];

  if (hasBranch && branchHasUniqueCommits(sourceCwd, name)) {
    const onRemote = branchExistsOnAnyRemote(sourceCwd, name);
    if (onRemote) {
      // Unique locally but pushed somewhere — deletion cannot lose work.
      p.log.info(
        `Branch "${name}" has unique local commits, but the branch also exists on a remote — safe to delete locally.`,
      );
    } else {
      problems.push(
        `Branch "${name}" has commits not found on any other branch or remote. ` +
          `Deleting it will lose work. Push it first, or use --force.`,
      );
    }
  }

  if (hasTargetDir && worktreePathHasUncommittedChanges(targetPath)) {
    problems.push(
      `Worktree directory ${targetPath} has uncommitted changes. Commit or stash first, or use --force.`,
    );
  }

  if (problems.length > 0 && !opts.force) {
    for (const problem of problems) {
      p.log.error(problem);
    }
    throw new Error("Safety checks failed. Resolve the issues above or re-run with --force.");
  }
  if (problems.length > 0 && opts.force) {
    for (const problem of problems) {
      p.log.warning(`Overridden by --force: ${problem}`);
    }
  }

  // ── 3. Clean up (idempotent steps) ──────────────────────────────────

  // 3a. Remove the git worktree registration
  if (linkedWorktree) {
    const worktreeDirExists = existsSync(linkedWorktree.worktree);
    const spinner = p.spinner();
    if (worktreeDirExists) {
      spinner.start(`Removing git worktree at ${linkedWorktree.worktree}...`);
      try {
        const removeArgs = ["worktree", "remove", linkedWorktree.worktree];
        if (opts.force) removeArgs.push("--force");
        execFileSync("git", removeArgs, {
          cwd: sourceCwd,
          stdio: ["ignore", "pipe", "pipe"],
        });
        spinner.stop(`Removed git worktree at ${linkedWorktree.worktree}.`);
      } catch (error) {
        // Non-fatal: 3b's rmSync plus a later prune covers the remainder.
        spinner.stop(pc.yellow(`Could not remove worktree cleanly, will prune instead.`));
        p.log.warning(extractExecSyncErrorMessage(error) ?? String(error));
      }
    } else {
      spinner.start("Pruning stale worktree entry...");
      execFileSync("git", ["worktree", "prune"], {
        cwd: sourceCwd,
        stdio: ["ignore", "pipe", "pipe"],
      });
      spinner.stop("Pruned stale worktree entry.");
    }
  } else {
    // Even without a linked worktree, prune to clean up any orphaned entries
    execFileSync("git", ["worktree", "prune"], {
      cwd: sourceCwd,
      stdio: ["ignore", "pipe", "pipe"],
    });
  }

  // 3b. Remove the worktree directory if it still exists (e.g. partial creation)
  if (existsSync(targetPath)) {
    const spinner = p.spinner();
    spinner.start(`Removing worktree directory ${targetPath}...`);
    rmSync(targetPath, { recursive: true, force: true });
    spinner.stop(`Removed worktree directory ${targetPath}.`);
  }

  // 3c. Delete the local branch (now safe — worktree is gone)
  if (localBranchExists(sourceCwd, name)) {
    const spinner = p.spinner();
    spinner.start(`Deleting local branch "${name}"...`);
    try {
      const deleteFlag = opts.force ? "-D" : "-d";
      execFileSync("git", ["branch", deleteFlag, name], {
        cwd: sourceCwd,
        stdio: ["ignore", "pipe", "pipe"],
      });
      spinner.stop(`Deleted local branch "${name}".`);
    } catch (error) {
      spinner.stop(pc.yellow(`Could not delete branch "${name}".`));
      p.log.warning(extractExecSyncErrorMessage(error) ?? String(error));
    }
  }

  // 3d. Remove instance data
  if (existsSync(instanceRoot)) {
    const spinner = p.spinner();
    spinner.start(`Removing instance data at ${instanceRoot}...`);
    rmSync(instanceRoot, { recursive: true, force: true });
    spinner.stop(`Removed instance data at ${instanceRoot}.`);
  }

  p.outro(pc.green("Cleanup complete."));
}
|
|
1496
|
+
|
|
1497
|
+
export async function worktreeEnvCommand(opts: WorktreeEnvOptions): Promise<void> {
|
|
1498
|
+
const configPath = resolveConfigPath(opts.config);
|
|
1499
|
+
const envPath = resolvePaperclipEnvFile(configPath);
|
|
1500
|
+
const envEntries = readPaperclipEnvEntries(envPath);
|
|
1501
|
+
const out = {
|
|
1502
|
+
PAPERCLIP_CONFIG: configPath,
|
|
1503
|
+
...(envEntries.PAPERCLIP_HOME ? { PAPERCLIP_HOME: envEntries.PAPERCLIP_HOME } : {}),
|
|
1504
|
+
...(envEntries.PAPERCLIP_INSTANCE_ID ? { PAPERCLIP_INSTANCE_ID: envEntries.PAPERCLIP_INSTANCE_ID } : {}),
|
|
1505
|
+
...(envEntries.PAPERCLIP_CONTEXT ? { PAPERCLIP_CONTEXT: envEntries.PAPERCLIP_CONTEXT } : {}),
|
|
1506
|
+
...envEntries,
|
|
1507
|
+
};
|
|
1508
|
+
|
|
1509
|
+
if (opts.json) {
|
|
1510
|
+
console.log(JSON.stringify(out, null, 2));
|
|
1511
|
+
return;
|
|
1512
|
+
}
|
|
1513
|
+
|
|
1514
|
+
console.log(formatShellExports(out));
|
|
1515
|
+
}
|
|
1516
|
+
|
|
1517
|
+
// Database handle returned by createDb, widened with the optional postgres.js
// `$client.end()` shutdown hook so callers can close the connection pool.
type ClosableDb = ReturnType<typeof createDb> & {
  $client?: { end?: (opts?: { timeout?: number }) => Promise<void> };
};

// A live database plus a `stop` callback that releases everything this
// process opened for it (connection, and embedded Postgres if we started it).
type OpenDbHandle = {
  db: ClosableDb;
  stop: () => Promise<void>;
};

// Minimal company identity shared by source and target databases during a
// worktree merge.
type ResolvedMergeCompany = {
  id: string;
  name: string;
  issuePrefix: string;
};
|
|
1531
|
+
|
|
1532
|
+
async function closeDb(db: ClosableDb): Promise<void> {
|
|
1533
|
+
await db.$client?.end?.({ timeout: 5 }).catch(() => undefined);
|
|
1534
|
+
}
|
|
1535
|
+
|
|
1536
|
+
function resolveCurrentEndpoint(): ResolvedWorktreeEndpoint {
|
|
1537
|
+
return {
|
|
1538
|
+
rootPath: path.resolve(process.cwd()),
|
|
1539
|
+
configPath: resolveConfigPath(),
|
|
1540
|
+
label: "current",
|
|
1541
|
+
isCurrent: true,
|
|
1542
|
+
};
|
|
1543
|
+
}
|
|
1544
|
+
|
|
1545
|
+
function resolveAttachmentLookupStorages(input: {
|
|
1546
|
+
sourceEndpoint: ResolvedWorktreeEndpoint;
|
|
1547
|
+
targetEndpoint: ResolvedWorktreeEndpoint;
|
|
1548
|
+
}): ConfiguredStorage[] {
|
|
1549
|
+
const orderedConfigPaths = [
|
|
1550
|
+
input.sourceEndpoint.configPath,
|
|
1551
|
+
resolveCurrentEndpoint().configPath,
|
|
1552
|
+
input.targetEndpoint.configPath,
|
|
1553
|
+
...toMergeSourceChoices(process.cwd())
|
|
1554
|
+
.filter((choice) => choice.hasPaperclipConfig)
|
|
1555
|
+
.map((choice) => path.resolve(choice.worktree, ".paperclip", "config.json")),
|
|
1556
|
+
];
|
|
1557
|
+
const seen = new Set<string>();
|
|
1558
|
+
const storages: ConfiguredStorage[] = [];
|
|
1559
|
+
for (const configPath of orderedConfigPaths) {
|
|
1560
|
+
const resolved = path.resolve(configPath);
|
|
1561
|
+
if (seen.has(resolved) || !existsSync(resolved)) continue;
|
|
1562
|
+
seen.add(resolved);
|
|
1563
|
+
storages.push(openConfiguredStorage(resolved));
|
|
1564
|
+
}
|
|
1565
|
+
return storages;
|
|
1566
|
+
}
|
|
1567
|
+
|
|
1568
|
+
/**
 * Opens the database described by the config at `configPath` and returns it
 * with a `stop` callback that undoes everything this call set up.
 *
 * When the config uses embedded Postgres, the server is started (or reused if
 * already running) before connecting. The migration state is verified and an
 * error is thrown if the schema is behind — the merge code assumes an
 * up-to-date schema on both sides.
 *
 * Throws when the config file is missing or migrations are pending. On any
 * failure after starting embedded Postgres, the server is stopped again
 * (only if this process started it) before the error is rethrown.
 */
async function openConfiguredDb(configPath: string): Promise<OpenDbHandle> {
  const config = readConfig(configPath);
  if (!config) {
    throw new Error(`Config not found at ${configPath}.`);
  }
  const envEntries = readPaperclipEnvEntries(resolvePaperclipEnvFile(configPath));
  let embeddedHandle: EmbeddedPostgresHandle | null = null;

  try {
    if (config.database.mode === "embedded-postgres") {
      embeddedHandle = await ensureEmbeddedPostgres(
        config.database.embeddedPostgresDataDir,
        config.database.embeddedPostgresPort,
      );
    }
    // embeddedHandle?.port lets an already-running embedded server override
    // the configured port.
    const connectionString = resolveSourceConnectionString(config, envEntries, embeddedHandle?.port);
    const migrationState = await inspectMigrations(connectionString);
    if (migrationState.status !== "upToDate") {
      const pending =
        migrationState.reason === "pending-migrations"
          ? ` Pending migrations: ${migrationState.pendingMigrations.join(", ")}.`
          : "";
      throw new Error(
        `Database for ${configPath} is not up to date.${pending} Run \`pnpm db:migrate\` (or start Paperclip once) before using worktree merge history.`,
      );
    }
    const db = createDb(connectionString) as ClosableDb;
    return {
      db,
      // Close the connection first, then shut down embedded Postgres — but
      // only if we were the ones who started it.
      stop: async () => {
        await closeDb(db);
        if (embeddedHandle?.startedByThisProcess) {
          await embeddedHandle.stop();
        }
      },
    };
  } catch (error) {
    // Mirror of the stop() cleanup for the failure path; stop errors are
    // swallowed so the original error is what propagates.
    if (embeddedHandle?.startedByThisProcess) {
      await embeddedHandle.stop().catch(() => undefined);
    }
    throw error;
  }
}
|
|
1611
|
+
|
|
1612
|
+
/**
 * Resolves the single company to merge. The company must exist with the SAME
 * id in both the source and target databases.
 *
 * Resolution order:
 *   1. Explicit selector: match shared companies by exact id, or by issue
 *      prefix (case-insensitive); throw if nothing matches.
 *   2. Exactly one shared company -> use it.
 *   3. No shared company -> throw (databases must be aligned first).
 *   4. Several shared companies -> throw, listing --company options.
 *
 * NOTE(review): the selects read `workspaces.*` columns while querying
 * `.from(companies)` — presumably `companies` aliases the same table after a
 * rename; confirm against the schema module.
 */
async function resolveMergeCompany(input: {
  sourceDb: ClosableDb;
  targetDb: ClosableDb;
  selector?: string;
}): Promise<ResolvedMergeCompany> {
  // Load candidate companies from both sides in parallel.
  const [sourceCompanies, targetCompanies] = await Promise.all([
    input.sourceDb
      .select({
        id: workspaces.id,
        name: workspaces.name,
        issuePrefix: workspaces.issuePrefix,
      })
      .from(companies),
    input.targetDb
      .select({
        id: workspaces.id,
        name: workspaces.name,
        issuePrefix: workspaces.issuePrefix,
      })
      .from(companies),
  ]);

  // Only companies present on both sides (keyed by id) are merge candidates.
  const targetById = new Map(targetCompanies.map((company) => [company.id, company]));
  const shared = sourceCompanies.filter((company) => targetById.has(company.id));
  const selector = nonEmpty(input.selector);
  if (selector) {
    const matched = shared.find(
      (company) => company.id === selector || company.issuePrefix.toLowerCase() === selector.toLowerCase(),
    );
    if (!matched) {
      throw new Error(`Could not resolve company "${selector}" in both source and target databases.`);
    }
    return matched;
  }

  if (shared.length === 1) {
    return shared[0];
  }

  if (shared.length === 0) {
    throw new Error("Source and target databases do not share a company id. Pass --company explicitly once both sides match.");
  }

  // Ambiguous: require the caller to disambiguate via --company.
  const options = shared
    .map((company) => `${company.issuePrefix} (${company.name})`)
    .join(", ");
  throw new Error(`Multiple shared companies found. Re-run with --company <id-or-prefix>. Options: ${options}`);
}
|
|
1660
|
+
|
|
1661
|
+
/**
 * Renders a merge plan preview as a multi-line string for terminal display.
 *
 * Sections: header, project/issue counters, optional per-project and
 * per-issue listings, comment counters (only when the "comments" scope is
 * active), document/attachment counters, adjustment tallies, and a trailing
 * note that preview identifiers are provisional.
 *
 * Issue lines are truncated to the terminal width (min 60 columns, default
 * 100 when stdout has no column info).
 */
function renderMergePlan(plan: Awaited<ReturnType<typeof collectMergePlan>>["plan"], extras: {
  sourcePath: string;
  targetPath: string;
  unsupportedRunCount: number;
}): string {
  const terminalWidth = Math.max(60, process.stdout.columns ?? 100);
  // Collapse internal whitespace/newlines so a title stays on one line.
  const oneLine = (value: string) => value.replace(/\s+/g, " ").trim();
  // Truncate with a trailing ellipsis; returns "" for widths too small to
  // show anything meaningful.
  const truncateToWidth = (value: string, maxWidth: number) => {
    if (maxWidth <= 1) return "";
    if (value.length <= maxWidth) return value;
    return `${value.slice(0, Math.max(0, maxWidth - 1)).trimEnd()}…`;
  };
  const lines = [
    `Mode: preview`,
    `Source: ${extras.sourcePath}`,
    `Target: ${extras.targetPath}`,
    `Company: ${plan.companyName} (${plan.issuePrefix})`,
    "",
    "Projects",
    `- import: ${plan.counts.projectsToImport}`,
    "",
    "Issues",
    `- insert: ${plan.counts.issuesToInsert}`,
    `- already present: ${plan.counts.issuesExisting}`,
    `- shared/imported issues with drift: ${plan.counts.issueDrift}`,
  ];

  if (plan.projectImports.length > 0) {
    lines.push("");
    lines.push("Planned project imports");
    for (const project of plan.projectImports) {
      lines.push(
        `- ${project.source.name} (${project.workspaces.length} workspace${project.workspaces.length === 1 ? "" : "s"})`,
      );
    }
  }

  const issueInserts = plan.issuePlans.filter((item): item is PlannedIssueInsert => item.action === "insert");
  if (issueInserts.length > 0) {
    lines.push("");
    lines.push("Planned issue imports");
    for (const issue of issueInserts) {
      // Annotate how the issue's project reference was resolved (mapped to an
      // existing project, or imported alongside the issue).
      const projectNote =
        (issue.projectResolution === "mapped" || issue.projectResolution === "imported")
          && issue.mappedProjectName
          ? ` project->${issue.projectResolution === "imported" ? "import:" : ""}${issue.mappedProjectName}`
          : "";
      const adjustments = issue.adjustments.length > 0 ? ` [${issue.adjustments.join(", ")}]` : "";
      const prefix = `- ${issue.source.identifier ?? issue.source.id} -> ${issue.previewIdentifier} (${issue.targetStatus}${projectNote})`;
      const title = oneLine(issue.source.title);
      const suffix = `${adjustments}${title ? ` ${title}` : ""}`;
      // Keep the prefix intact; only the suffix (adjustments + title) is
      // truncated to fit the remaining width (at least 8 columns).
      lines.push(
        `${prefix}${truncateToWidth(suffix, Math.max(8, terminalWidth - prefix.length))}`,
      );
    }
  }

  if (plan.scopes.includes("comments")) {
    lines.push("");
    lines.push("Comments");
    lines.push(`- insert: ${plan.counts.commentsToInsert}`);
    lines.push(`- already present: ${plan.counts.commentsExisting}`);
    lines.push(`- skipped (missing parent): ${plan.counts.commentsMissingParent}`);
  }

  lines.push("");
  lines.push("Documents");
  lines.push(`- insert: ${plan.counts.documentsToInsert}`);
  lines.push(`- merge existing: ${plan.counts.documentsToMerge}`);
  lines.push(`- already present: ${plan.counts.documentsExisting}`);
  lines.push(`- skipped (conflicting key): ${plan.counts.documentsConflictingKey}`);
  lines.push(`- skipped (missing parent): ${plan.counts.documentsMissingParent}`);
  lines.push(`- revisions insert: ${plan.counts.documentRevisionsToInsert}`);

  lines.push("");
  lines.push("Attachments");
  lines.push(`- insert: ${plan.counts.attachmentsToInsert}`);
  lines.push(`- already present: ${plan.counts.attachmentsExisting}`);
  lines.push(`- skipped (missing parent): ${plan.counts.attachmentsMissingParent}`);

  lines.push("");
  lines.push("Adjustments");
  lines.push(`- cleared assignee agents: ${plan.adjustments.clear_assignee_agent}`);
  lines.push(`- cleared projects: ${plan.adjustments.clear_project}`);
  lines.push(`- cleared project workspaces: ${plan.adjustments.clear_project_workspace}`);
  lines.push(`- cleared goals: ${plan.adjustments.clear_goal}`);
  lines.push(`- cleared comment author agents: ${plan.adjustments.clear_author_agent}`);
  lines.push(`- cleared document agents: ${plan.adjustments.clear_document_agent}`);
  lines.push(`- cleared document revision agents: ${plan.adjustments.clear_document_revision_agent}`);
  lines.push(`- cleared attachment author agents: ${plan.adjustments.clear_attachment_agent}`);
  lines.push(`- coerced in_progress to todo: ${plan.adjustments.coerce_in_progress_to_todo}`);

  lines.push("");
  lines.push("Not imported in this phase");
  lines.push(`- heartbeat runs: ${extras.unsupportedRunCount}`);
  lines.push("");
  lines.push("Identifiers shown above are provisional preview values. `--apply` reserves fresh issue numbers at write time.");

  return lines.join("\n");
}
|
|
1761
|
+
|
|
1762
|
+
function resolveRunningEmbeddedPostgresPid(config: PaperclipConfig): number | null {
|
|
1763
|
+
if (config.database.mode !== "embedded-postgres") {
|
|
1764
|
+
return null;
|
|
1765
|
+
}
|
|
1766
|
+
return readRunningPostmasterPid(path.resolve(config.database.embeddedPostgresDataDir, "postmaster.pid"));
|
|
1767
|
+
}
|
|
1768
|
+
|
|
1769
|
+
/**
 * Loads every row set a worktree merge needs from both databases and feeds
 * them into buildWorktreeMergePlan.
 *
 * All reads happen in one Promise.all; the destructuring order below MUST
 * stay in sync with the query array. Source comments are only fetched when
 * the "comments" scope is enabled (target comments are always fetched so the
 * planner can detect duplicates).
 *
 * NOTE(review): the company query reads `workspaces.issueCounter` while
 * selecting `.from(companies)` — presumably the two names refer to the same
 * table after a rename; confirm against the schema module.
 *
 * Throws when the company id is missing from the target database.
 */
async function collectMergePlan(input: {
  sourceDb: ClosableDb;
  targetDb: ClosableDb;
  company: ResolvedMergeCompany;
  scopes: ReturnType<typeof parseWorktreeMergeScopes>;
  importProjectIds?: Iterable<string>;
  projectIdOverrides?: Record<string, string | null | undefined>;
}) {
  const workspaceId = input.company.id;
  // Keep this destructuring order aligned with the query array below.
  const [
    targetCompanyRow,
    sourceIssuesRows,
    targetIssuesRows,
    sourceCommentsRows,
    targetCommentsRows,
    sourceIssueDocumentsRows,
    targetIssueDocumentsRows,
    sourceDocumentRevisionRows,
    targetDocumentRevisionRows,
    sourceAttachmentRows,
    targetAttachmentRows,
    sourceProjectsRows,
    sourceProjectWorkspaceRows,
    targetProjectsRows,
    targetAgentsRows,
    targetProjectWorkspaceRows,
    targetGoalsRows,
    runCountRows,
  ] = await Promise.all([
    // Target company row — supplies the issue counter used for preview
    // identifiers.
    input.targetDb
      .select({
        issueCounter: workspaces.issueCounter,
      })
      .from(companies)
      .where(eq(workspaces.id, workspaceId))
      .then((rows) => rows[0] ?? null),
    input.sourceDb
      .select()
      .from(issues)
      .where(eq(issues.workspaceId, workspaceId)),
    input.targetDb
      .select()
      .from(issues)
      .where(eq(issues.workspaceId, workspaceId)),
    // Source comments only when the scope is requested; resolve to [] so the
    // destructured position is still an array.
    input.scopes.includes("comments")
      ? input.sourceDb
          .select()
          .from(issueComments)
          .where(eq(issueComments.workspaceId, workspaceId))
      : Promise.resolve([]),
    input.targetDb
      .select()
      .from(issueComments)
      .where(eq(issueComments.workspaceId, workspaceId)),
    // Source issue documents joined with their document bodies.
    input.sourceDb
      .select({
        id: issueDocuments.id,
        workspaceId: issueDocuments.workspaceId,
        issueId: issueDocuments.issueId,
        documentId: issueDocuments.documentId,
        key: issueDocuments.key,
        linkCreatedAt: issueDocuments.createdAt,
        linkUpdatedAt: issueDocuments.updatedAt,
        title: documents.title,
        format: documents.format,
        latestBody: documents.latestBody,
        latestRevisionId: documents.latestRevisionId,
        latestRevisionNumber: documents.latestRevisionNumber,
        createdByAgentId: documents.createdByAgentId,
        createdByUserId: documents.createdByUserId,
        updatedByAgentId: documents.updatedByAgentId,
        updatedByUserId: documents.updatedByUserId,
        documentCreatedAt: documents.createdAt,
        documentUpdatedAt: documents.updatedAt,
      })
      .from(issueDocuments)
      .innerJoin(documents, eq(issueDocuments.documentId, documents.id))
      .innerJoin(issues, eq(issueDocuments.issueId, issues.id))
      .where(eq(issues.workspaceId, workspaceId)),
    // Target issue documents — same projection as the source query above.
    input.targetDb
      .select({
        id: issueDocuments.id,
        workspaceId: issueDocuments.workspaceId,
        issueId: issueDocuments.issueId,
        documentId: issueDocuments.documentId,
        key: issueDocuments.key,
        linkCreatedAt: issueDocuments.createdAt,
        linkUpdatedAt: issueDocuments.updatedAt,
        title: documents.title,
        format: documents.format,
        latestBody: documents.latestBody,
        latestRevisionId: documents.latestRevisionId,
        latestRevisionNumber: documents.latestRevisionNumber,
        createdByAgentId: documents.createdByAgentId,
        createdByUserId: documents.createdByUserId,
        updatedByAgentId: documents.updatedByAgentId,
        updatedByUserId: documents.updatedByUserId,
        documentCreatedAt: documents.createdAt,
        documentUpdatedAt: documents.updatedAt,
      })
      .from(issueDocuments)
      .innerJoin(documents, eq(issueDocuments.documentId, documents.id))
      .innerJoin(issues, eq(issueDocuments.issueId, issues.id))
      .where(eq(issues.workspaceId, workspaceId)),
    // Source document revisions for documents linked to this company's
    // issues.
    input.sourceDb
      .select({
        id: documentRevisions.id,
        workspaceId: documentRevisions.workspaceId,
        documentId: documentRevisions.documentId,
        revisionNumber: documentRevisions.revisionNumber,
        body: documentRevisions.body,
        changeSummary: documentRevisions.changeSummary,
        createdByAgentId: documentRevisions.createdByAgentId,
        createdByUserId: documentRevisions.createdByUserId,
        createdAt: documentRevisions.createdAt,
      })
      .from(documentRevisions)
      .innerJoin(issueDocuments, eq(documentRevisions.documentId, issueDocuments.documentId))
      .innerJoin(issues, eq(issueDocuments.issueId, issues.id))
      .where(eq(issues.workspaceId, workspaceId)),
    // Target document revisions — same projection as the source query above.
    input.targetDb
      .select({
        id: documentRevisions.id,
        workspaceId: documentRevisions.workspaceId,
        documentId: documentRevisions.documentId,
        revisionNumber: documentRevisions.revisionNumber,
        body: documentRevisions.body,
        changeSummary: documentRevisions.changeSummary,
        createdByAgentId: documentRevisions.createdByAgentId,
        createdByUserId: documentRevisions.createdByUserId,
        createdAt: documentRevisions.createdAt,
      })
      .from(documentRevisions)
      .innerJoin(issueDocuments, eq(documentRevisions.documentId, issueDocuments.documentId))
      .innerJoin(issues, eq(issueDocuments.issueId, issues.id))
      .where(eq(issues.workspaceId, workspaceId)),
    // Source attachments joined with their backing assets.
    input.sourceDb
      .select({
        id: issueAttachments.id,
        workspaceId: issueAttachments.workspaceId,
        issueId: issueAttachments.issueId,
        issueCommentId: issueAttachments.issueCommentId,
        assetId: issueAttachments.assetId,
        provider: assets.provider,
        objectKey: assets.objectKey,
        contentType: assets.contentType,
        byteSize: assets.byteSize,
        sha256: assets.sha256,
        originalFilename: assets.originalFilename,
        createdByAgentId: assets.createdByAgentId,
        createdByUserId: assets.createdByUserId,
        assetCreatedAt: assets.createdAt,
        assetUpdatedAt: assets.updatedAt,
        attachmentCreatedAt: issueAttachments.createdAt,
        attachmentUpdatedAt: issueAttachments.updatedAt,
      })
      .from(issueAttachments)
      .innerJoin(assets, eq(issueAttachments.assetId, assets.id))
      .innerJoin(issues, eq(issueAttachments.issueId, issues.id))
      .where(eq(issues.workspaceId, workspaceId)),
    // Target attachments — same projection as the source query above.
    input.targetDb
      .select({
        id: issueAttachments.id,
        workspaceId: issueAttachments.workspaceId,
        issueId: issueAttachments.issueId,
        issueCommentId: issueAttachments.issueCommentId,
        assetId: issueAttachments.assetId,
        provider: assets.provider,
        objectKey: assets.objectKey,
        contentType: assets.contentType,
        byteSize: assets.byteSize,
        sha256: assets.sha256,
        originalFilename: assets.originalFilename,
        createdByAgentId: assets.createdByAgentId,
        createdByUserId: assets.createdByUserId,
        assetCreatedAt: assets.createdAt,
        assetUpdatedAt: assets.updatedAt,
        attachmentCreatedAt: issueAttachments.createdAt,
        attachmentUpdatedAt: issueAttachments.updatedAt,
      })
      .from(issueAttachments)
      .innerJoin(assets, eq(issueAttachments.assetId, assets.id))
      .innerJoin(issues, eq(issueAttachments.issueId, issues.id))
      .where(eq(issues.workspaceId, workspaceId)),
    input.sourceDb
      .select()
      .from(projects)
      .where(eq(projects.workspaceId, workspaceId)),
    input.sourceDb
      .select()
      .from(projectWorkspaces)
      .where(eq(projectWorkspaces.workspaceId, workspaceId)),
    input.targetDb
      .select()
      .from(projects)
      .where(eq(projects.workspaceId, workspaceId)),
    input.targetDb
      .select()
      .from(agents)
      .where(eq(agents.workspaceId, workspaceId)),
    input.targetDb
      .select()
      .from(projectWorkspaces)
      .where(eq(projectWorkspaces.workspaceId, workspaceId)),
    input.targetDb
      .select()
      .from(goals)
      .where(eq(goals.workspaceId, workspaceId)),
    // Count of heartbeat runs — reported as "not imported in this phase".
    input.sourceDb
      .select({ count: sql<number>`count(*)::int` })
      .from(heartbeatRuns)
      .where(eq(heartbeatRuns.workspaceId, workspaceId)),
  ]);

  if (!targetCompanyRow) {
    throw new Error(`Target company ${workspaceId} was not found.`);
  }

  const plan = buildWorktreeMergePlan({
    workspaceId,
    companyName: input.company.name,
    issuePrefix: input.company.issuePrefix,
    previewIssueCounterStart: targetCompanyRow.issueCounter,
    scopes: input.scopes,
    sourceIssues: sourceIssuesRows,
    targetIssues: targetIssuesRows,
    sourceComments: sourceCommentsRows,
    targetComments: targetCommentsRows,
    sourceProjects: sourceProjectsRows,
    sourceProjectWorkspaces: sourceProjectWorkspaceRows,
    sourceDocuments: sourceIssueDocumentsRows as IssueDocumentRow[],
    targetDocuments: targetIssueDocumentsRows as IssueDocumentRow[],
    sourceDocumentRevisions: sourceDocumentRevisionRows as DocumentRevisionRow[],
    targetDocumentRevisions: targetDocumentRevisionRows as DocumentRevisionRow[],
    sourceAttachments: sourceAttachmentRows as IssueAttachmentRow[],
    targetAttachments: targetAttachmentRows as IssueAttachmentRow[],
    targetAgents: targetAgentsRows,
    targetProjects: targetProjectsRows,
    targetProjectWorkspaces: targetProjectWorkspaceRows,
    targetGoals: targetGoalsRows,
    importProjectIds: input.importProjectIds,
    projectIdOverrides: input.projectIdOverrides,
  });

  return {
    plan,
    sourceProjects: sourceProjectsRows,
    targetProjects: targetProjectsRows,
    unsupportedRunCount: runCountRows[0]?.count ?? 0,
  };
}
|
|
2020
|
+
|
|
2021
|
+
// Result of the interactive project-mapping prompts: source project ids to
// import into the target, plus per-source-project overrides (a target project
// id, or null to leave imported issues without a project).
type ProjectMappingSelections = {
  importProjectIds: string[];
  projectIdOverrides: Record<string, string | null>;
};
|
|
2025
|
+
|
|
2026
|
+
/**
 * Interactively asks the user what to do with each source project that the
 * plan could not map into the target (issues whose project would otherwise be
 * "cleared"). For each missing project the user may import it, map it to an
 * existing target project, or leave the issues project-less.
 *
 * Returns empty selections when nothing is missing. Throws when the user
 * cancels a prompt.
 */
async function promptForProjectMappings(input: {
  plan: Awaited<ReturnType<typeof collectMergePlan>>["plan"];
  sourceProjects: Awaited<ReturnType<typeof collectMergePlan>>["sourceProjects"];
  targetProjects: Awaited<ReturnType<typeof collectMergePlan>>["targetProjects"];
}): Promise<ProjectMappingSelections> {
  // Source project ids referenced by planned inserts whose project reference
  // the planner had to clear (i.e. no target counterpart), deduplicated.
  const missingProjectIds = [
    ...new Set(
      input.plan.issuePlans
        .filter((plan): plan is PlannedIssueInsert => plan.action === "insert")
        .filter((plan) => !!plan.source.projectId && plan.projectResolution === "cleared")
        .map((plan) => plan.source.projectId as string),
    ),
  ];
  if (missingProjectIds.length === 0) {
    return {
      importProjectIds: [],
      projectIdOverrides: {},
    };
  }

  const sourceProjectsById = new Map(input.sourceProjects.map((project) => [project.id, project]));
  // All target projects as selectable options, sorted by name.
  const targetChoices = [...input.targetProjects]
    .sort((left, right) => left.name.localeCompare(right.name))
    .map((project) => ({
      value: project.id,
      label: project.name,
      hint: project.status,
    }));

  const mappings: Record<string, string | null> = {};
  const importProjectIds = new Set<string>();
  for (const sourceProjectId of missingProjectIds) {
    const sourceProject = sourceProjectsById.get(sourceProjectId);
    if (!sourceProject) continue;
    // A target project with the exact same (trimmed, case-insensitive) name
    // is offered as the recommended mapping and used as the initial value.
    const nameMatch = input.targetProjects.find(
      (project) => project.name.trim().toLowerCase() === sourceProject.name.trim().toLowerCase(),
    );
    // Sentinel value distinguishing "import this project" from a target id.
    const importSelectionValue = `__import__:${sourceProjectId}`;
    const selection = await p.select<string | null>({
      message: `Project "${sourceProject.name}" is missing in target. How should ${input.plan.issuePrefix} imports handle it?`,
      options: [
        {
          value: importSelectionValue,
          label: `Import ${sourceProject.name}`,
          hint: "Create the project and copy its workspace settings",
        },
        ...(nameMatch
          ? [{
              value: nameMatch.id,
              label: `Map to ${nameMatch.name}`,
              hint: "Recommended: exact name match",
            }]
          : []),
        {
          value: null,
          label: "Leave unset",
          hint: "Keep imported issues without a project",
        },
        // Remaining target projects, excluding the one already offered as
        // the recommended name match.
        ...targetChoices.filter((choice) => choice.value !== nameMatch?.id),
      ],
      initialValue: nameMatch?.id ?? null,
    });
    if (p.isCancel(selection)) {
      throw new Error("Project mapping cancelled.");
    }
    if (selection === importSelectionValue) {
      importProjectIds.add(sourceProjectId);
      continue;
    }
    mappings[sourceProjectId] = selection;
  }

  return {
    importProjectIds: [...importProjectIds],
    projectIdOverrides: mappings,
  };
}
|
|
2103
|
+
|
|
2104
|
+
export async function worktreeListCommand(opts: WorktreeListOptions): Promise<void> {
|
|
2105
|
+
const choices = toMergeSourceChoices(process.cwd());
|
|
2106
|
+
if (opts.json) {
|
|
2107
|
+
console.log(JSON.stringify(choices, null, 2));
|
|
2108
|
+
return;
|
|
2109
|
+
}
|
|
2110
|
+
|
|
2111
|
+
for (const choice of choices) {
|
|
2112
|
+
const flags = [
|
|
2113
|
+
choice.isCurrent ? "current" : null,
|
|
2114
|
+
choice.hasPaperclipConfig ? "paperclip" : "no-paperclip-config",
|
|
2115
|
+
].filter((value): value is string => value !== null);
|
|
2116
|
+
p.log.message(`${choice.branchLabel} ${choice.worktree} [${flags.join(", ")}]`);
|
|
2117
|
+
}
|
|
2118
|
+
}
|
|
2119
|
+
|
|
2120
|
+
function resolveEndpointFromChoice(choice: MergeSourceChoice): ResolvedWorktreeEndpoint {
|
|
2121
|
+
if (choice.isCurrent) {
|
|
2122
|
+
return resolveCurrentEndpoint();
|
|
2123
|
+
}
|
|
2124
|
+
return {
|
|
2125
|
+
rootPath: choice.worktree,
|
|
2126
|
+
configPath: path.resolve(choice.worktree, ".paperclip", "config.json"),
|
|
2127
|
+
label: choice.branchLabel,
|
|
2128
|
+
isCurrent: false,
|
|
2129
|
+
};
|
|
2130
|
+
}
|
|
2131
|
+
|
|
2132
|
+
/**
 * Resolves a user-supplied worktree selector to an endpoint.
 *
 * Accepted selector forms, in precedence order:
 *   1. The literal "current" (when allowCurrent, the default).
 *   2. An existing filesystem path — must contain .paperclip/config.json
 *      unless it is the current worktree itself.
 *   3. A listed worktree: matched by resolved path, directory basename, or
 *      branch label against toMergeSourceChoices.
 *
 * Throws on an empty selector, an unresolvable selector, a path without a
 * Paperclip config, or a matched worktree that lacks one.
 */
function resolveWorktreeEndpointFromSelector(
  selector: string,
  opts?: { allowCurrent?: boolean },
): ResolvedWorktreeEndpoint {
  const trimmed = selector.trim();
  // allowCurrent defaults to true; only an explicit false disables it.
  const allowCurrent = opts?.allowCurrent !== false;
  if (trimmed.length === 0) {
    throw new Error("Worktree selector cannot be empty.");
  }

  const currentEndpoint = resolveCurrentEndpoint();
  if (allowCurrent && trimmed === "current") {
    return currentEndpoint;
  }

  const choices = toMergeSourceChoices(process.cwd());
  const directPath = path.resolve(trimmed);
  if (existsSync(directPath)) {
    // A path selector pointing at the current worktree still resolves to the
    // canonical current endpoint.
    if (allowCurrent && directPath === currentEndpoint.rootPath) {
      return currentEndpoint;
    }
    const configPath = path.resolve(directPath, ".paperclip", "config.json");
    if (!existsSync(configPath)) {
      throw new Error(`Resolved worktree path ${directPath} does not contain .paperclip/config.json.`);
    }
    return {
      rootPath: directPath,
      configPath,
      label: path.basename(directPath),
      isCurrent: false,
    };
  }

  // Fall back to matching against the listed worktrees by path, directory
  // basename, or branch label.
  const matched = choices.find((choice) =>
    (allowCurrent || !choice.isCurrent)
    && (choice.worktree === directPath
      || path.basename(choice.worktree) === trimmed
      || choice.branchLabel === trimmed),
  );
  if (!matched) {
    throw new Error(
      `Could not resolve worktree "${selector}". Use a path, a listed worktree directory name, branch name, or "current".`,
    );
  }
  if (!matched.hasPaperclipConfig && !matched.isCurrent) {
    throw new Error(`Resolved worktree "${selector}" does not look like a Paperclip worktree.`);
  }
  return resolveEndpointFromChoice(matched);
}
|
|
2181
|
+
|
|
2182
|
+
async function promptForSourceEndpoint(excludeWorktreePath?: string): Promise<ResolvedWorktreeEndpoint> {
|
|
2183
|
+
const excluded = excludeWorktreePath ? path.resolve(excludeWorktreePath) : null;
|
|
2184
|
+
const currentEndpoint = resolveCurrentEndpoint();
|
|
2185
|
+
const choices = toMergeSourceChoices(process.cwd())
|
|
2186
|
+
.filter((choice) => choice.hasPaperclipConfig || choice.isCurrent)
|
|
2187
|
+
.filter((choice) => path.resolve(choice.worktree) !== excluded)
|
|
2188
|
+
.map((choice) => ({
|
|
2189
|
+
value: choice.isCurrent ? "__current__" : choice.worktree,
|
|
2190
|
+
label: choice.branchLabel,
|
|
2191
|
+
hint: `${choice.worktree}${choice.isCurrent ? " (current)" : ""}`,
|
|
2192
|
+
}));
|
|
2193
|
+
if (choices.length === 0) {
|
|
2194
|
+
throw new Error("No Paperclip worktrees were found. Run `paperclipai worktree:list` to inspect the repo worktrees.");
|
|
2195
|
+
}
|
|
2196
|
+
const selection = await p.select<string>({
|
|
2197
|
+
message: "Choose the source worktree to import from",
|
|
2198
|
+
options: choices,
|
|
2199
|
+
});
|
|
2200
|
+
if (p.isCancel(selection)) {
|
|
2201
|
+
throw new Error("Source worktree selection cancelled.");
|
|
2202
|
+
}
|
|
2203
|
+
if (selection === "__current__") {
|
|
2204
|
+
return currentEndpoint;
|
|
2205
|
+
}
|
|
2206
|
+
return resolveWorktreeEndpointFromSelector(selection, { allowCurrent: true });
|
|
2207
|
+
}
|
|
2208
|
+
|
|
2209
|
+
async function applyMergePlan(input: {
|
|
2210
|
+
sourceStorages: ConfiguredStorage[];
|
|
2211
|
+
targetStorage: ConfiguredStorage;
|
|
2212
|
+
targetDb: ClosableDb;
|
|
2213
|
+
company: ResolvedMergeCompany;
|
|
2214
|
+
plan: Awaited<ReturnType<typeof collectMergePlan>>["plan"];
|
|
2215
|
+
}) {
|
|
2216
|
+
const workspaceId = input.company.id;
|
|
2217
|
+
|
|
2218
|
+
return await input.targetDb.transaction(async (tx) => {
|
|
2219
|
+
const importedProjectIds = input.plan.projectImports.map((project) => project.source.id);
|
|
2220
|
+
const existingImportedProjectIds = importedProjectIds.length > 0
|
|
2221
|
+
? new Set(
|
|
2222
|
+
(await tx
|
|
2223
|
+
.select({ id: projects.id })
|
|
2224
|
+
.from(projects)
|
|
2225
|
+
.where(inArray(projects.id, importedProjectIds)))
|
|
2226
|
+
.map((row) => row.id),
|
|
2227
|
+
)
|
|
2228
|
+
: new Set<string>();
|
|
2229
|
+
const projectImports = input.plan.projectImports.filter((project) => !existingImportedProjectIds.has(project.source.id));
|
|
2230
|
+
const importedWorkspaceIds = projectImports.flatMap((project) => project.workspaces.map((workspace) => workspace.id));
|
|
2231
|
+
const existingImportedWorkspaceIds = importedWorkspaceIds.length > 0
|
|
2232
|
+
? new Set(
|
|
2233
|
+
(await tx
|
|
2234
|
+
.select({ id: projectWorkspaces.id })
|
|
2235
|
+
.from(projectWorkspaces)
|
|
2236
|
+
.where(inArray(projectWorkspaces.id, importedWorkspaceIds)))
|
|
2237
|
+
.map((row) => row.id),
|
|
2238
|
+
)
|
|
2239
|
+
: new Set<string>();
|
|
2240
|
+
|
|
2241
|
+
let insertedProjects = 0;
|
|
2242
|
+
let insertedProjectWorkspaces = 0;
|
|
2243
|
+
for (const project of projectImports) {
|
|
2244
|
+
await tx.insert(projects).values({
|
|
2245
|
+
id: project.source.id,
|
|
2246
|
+
workspaceId,
|
|
2247
|
+
goalId: project.targetGoalId,
|
|
2248
|
+
name: project.source.name,
|
|
2249
|
+
description: project.source.description,
|
|
2250
|
+
status: project.source.status,
|
|
2251
|
+
leadAgentId: project.targetLeadAgentId,
|
|
2252
|
+
targetDate: project.source.targetDate,
|
|
2253
|
+
color: project.source.color,
|
|
2254
|
+
pauseReason: project.source.pauseReason,
|
|
2255
|
+
pausedAt: project.source.pausedAt,
|
|
2256
|
+
executionWorkspacePolicy: project.source.executionWorkspacePolicy,
|
|
2257
|
+
archivedAt: project.source.archivedAt,
|
|
2258
|
+
createdAt: project.source.createdAt,
|
|
2259
|
+
updatedAt: project.source.updatedAt,
|
|
2260
|
+
});
|
|
2261
|
+
insertedProjects += 1;
|
|
2262
|
+
|
|
2263
|
+
for (const workspace of project.workspaces) {
|
|
2264
|
+
if (existingImportedWorkspaceIds.has(workspace.id)) continue;
|
|
2265
|
+
await tx.insert(projectWorkspaces).values({
|
|
2266
|
+
id: workspace.id,
|
|
2267
|
+
workspaceId,
|
|
2268
|
+
projectId: project.source.id,
|
|
2269
|
+
name: workspace.name,
|
|
2270
|
+
sourceType: workspace.sourceType,
|
|
2271
|
+
cwd: workspace.cwd,
|
|
2272
|
+
repoUrl: workspace.repoUrl,
|
|
2273
|
+
repoRef: workspace.repoRef,
|
|
2274
|
+
defaultRef: workspace.defaultRef,
|
|
2275
|
+
visibility: workspace.visibility,
|
|
2276
|
+
setupCommand: workspace.setupCommand,
|
|
2277
|
+
cleanupCommand: workspace.cleanupCommand,
|
|
2278
|
+
remoteProvider: workspace.remoteProvider,
|
|
2279
|
+
remoteWorkspaceRef: workspace.remoteWorkspaceRef,
|
|
2280
|
+
sharedWorkspaceKey: workspace.sharedWorkspaceKey,
|
|
2281
|
+
metadata: workspace.metadata,
|
|
2282
|
+
isPrimary: workspace.isPrimary,
|
|
2283
|
+
createdAt: workspace.createdAt,
|
|
2284
|
+
updatedAt: workspace.updatedAt,
|
|
2285
|
+
});
|
|
2286
|
+
insertedProjectWorkspaces += 1;
|
|
2287
|
+
}
|
|
2288
|
+
}
|
|
2289
|
+
|
|
2290
|
+
const issueCandidates = input.plan.issuePlans.filter(
|
|
2291
|
+
(plan): plan is PlannedIssueInsert => plan.action === "insert",
|
|
2292
|
+
);
|
|
2293
|
+
const issueCandidateIds = issueCandidates.map((issue) => issue.source.id);
|
|
2294
|
+
const existingIssueIds = issueCandidateIds.length > 0
|
|
2295
|
+
? new Set(
|
|
2296
|
+
(await tx
|
|
2297
|
+
.select({ id: issues.id })
|
|
2298
|
+
.from(issues)
|
|
2299
|
+
.where(inArray(issues.id, issueCandidateIds)))
|
|
2300
|
+
.map((row) => row.id),
|
|
2301
|
+
)
|
|
2302
|
+
: new Set<string>();
|
|
2303
|
+
const issueInserts = issueCandidates.filter((issue) => !existingIssueIds.has(issue.source.id));
|
|
2304
|
+
|
|
2305
|
+
let nextIssueNumber = 0;
|
|
2306
|
+
if (issueInserts.length > 0) {
|
|
2307
|
+
const [companyRow] = await tx
|
|
2308
|
+
.update(companies)
|
|
2309
|
+
.set({ issueCounter: sql`${workspaces.issueCounter} + ${issueInserts.length}` })
|
|
2310
|
+
.where(eq(workspaces.id, workspaceId))
|
|
2311
|
+
.returning({ issueCounter: workspaces.issueCounter });
|
|
2312
|
+
nextIssueNumber = companyRow.issueCounter - issueInserts.length + 1;
|
|
2313
|
+
}
|
|
2314
|
+
|
|
2315
|
+
const insertedIssueIdentifiers = new Map<string, string>();
|
|
2316
|
+
let insertedIssues = 0;
|
|
2317
|
+
for (const issue of issueInserts) {
|
|
2318
|
+
const issueNumber = nextIssueNumber;
|
|
2319
|
+
nextIssueNumber += 1;
|
|
2320
|
+
const identifier = `${input.company.issuePrefix}-${issueNumber}`;
|
|
2321
|
+
insertedIssueIdentifiers.set(issue.source.id, identifier);
|
|
2322
|
+
await tx.insert(issues).values({
|
|
2323
|
+
id: issue.source.id,
|
|
2324
|
+
workspaceId,
|
|
2325
|
+
projectId: issue.targetProjectId,
|
|
2326
|
+
projectWorkspaceId: issue.targetProjectWorkspaceId,
|
|
2327
|
+
goalId: issue.targetGoalId,
|
|
2328
|
+
parentId: issue.source.parentId,
|
|
2329
|
+
title: issue.source.title,
|
|
2330
|
+
description: issue.source.description,
|
|
2331
|
+
status: issue.targetStatus,
|
|
2332
|
+
priority: issue.source.priority,
|
|
2333
|
+
assigneeAgentId: issue.targetAssigneeAgentId,
|
|
2334
|
+
assigneeUserId: issue.source.assigneeUserId,
|
|
2335
|
+
checkoutRunId: null,
|
|
2336
|
+
executionRunId: null,
|
|
2337
|
+
executionAgentNameKey: null,
|
|
2338
|
+
executionLockedAt: null,
|
|
2339
|
+
createdByAgentId: issue.targetCreatedByAgentId,
|
|
2340
|
+
createdByUserId: issue.source.createdByUserId,
|
|
2341
|
+
issueNumber,
|
|
2342
|
+
identifier,
|
|
2343
|
+
requestDepth: issue.source.requestDepth,
|
|
2344
|
+
billingCode: issue.source.billingCode,
|
|
2345
|
+
assigneeAdapterOverrides: issue.targetAssigneeAgentId ? issue.source.assigneeAdapterOverrides : null,
|
|
2346
|
+
executionWorkspaceId: null,
|
|
2347
|
+
executionWorkspacePreference: null,
|
|
2348
|
+
executionWorkspaceSettings: null,
|
|
2349
|
+
startedAt: issue.source.startedAt,
|
|
2350
|
+
completedAt: issue.source.completedAt,
|
|
2351
|
+
cancelledAt: issue.source.cancelledAt,
|
|
2352
|
+
hiddenAt: issue.source.hiddenAt,
|
|
2353
|
+
createdAt: issue.source.createdAt,
|
|
2354
|
+
updatedAt: issue.source.updatedAt,
|
|
2355
|
+
});
|
|
2356
|
+
insertedIssues += 1;
|
|
2357
|
+
}
|
|
2358
|
+
|
|
2359
|
+
const commentCandidates = input.plan.commentPlans.filter(
|
|
2360
|
+
(plan): plan is PlannedCommentInsert => plan.action === "insert",
|
|
2361
|
+
);
|
|
2362
|
+
const commentCandidateIds = commentCandidates.map((comment) => comment.source.id);
|
|
2363
|
+
const existingCommentIds = commentCandidateIds.length > 0
|
|
2364
|
+
? new Set(
|
|
2365
|
+
(await tx
|
|
2366
|
+
.select({ id: issueComments.id })
|
|
2367
|
+
.from(issueComments)
|
|
2368
|
+
.where(inArray(issueComments.id, commentCandidateIds)))
|
|
2369
|
+
.map((row) => row.id),
|
|
2370
|
+
)
|
|
2371
|
+
: new Set<string>();
|
|
2372
|
+
|
|
2373
|
+
let insertedComments = 0;
|
|
2374
|
+
for (const comment of commentCandidates) {
|
|
2375
|
+
if (existingCommentIds.has(comment.source.id)) continue;
|
|
2376
|
+
const parentExists = await tx
|
|
2377
|
+
.select({ id: issues.id })
|
|
2378
|
+
.from(issues)
|
|
2379
|
+
.where(and(eq(issues.id, comment.source.issueId), eq(issues.workspaceId, workspaceId)))
|
|
2380
|
+
.then((rows) => rows[0] ?? null);
|
|
2381
|
+
if (!parentExists) continue;
|
|
2382
|
+
await tx.insert(issueComments).values({
|
|
2383
|
+
id: comment.source.id,
|
|
2384
|
+
workspaceId,
|
|
2385
|
+
issueId: comment.source.issueId,
|
|
2386
|
+
authorAgentId: comment.targetAuthorAgentId,
|
|
2387
|
+
authorUserId: comment.source.authorUserId,
|
|
2388
|
+
body: comment.source.body,
|
|
2389
|
+
createdAt: comment.source.createdAt,
|
|
2390
|
+
updatedAt: comment.source.updatedAt,
|
|
2391
|
+
});
|
|
2392
|
+
insertedComments += 1;
|
|
2393
|
+
}
|
|
2394
|
+
|
|
2395
|
+
const documentCandidates = input.plan.documentPlans.filter(
|
|
2396
|
+
(plan): plan is PlannedIssueDocumentInsert | PlannedIssueDocumentMerge =>
|
|
2397
|
+
plan.action === "insert" || plan.action === "merge_existing",
|
|
2398
|
+
);
|
|
2399
|
+
let insertedDocuments = 0;
|
|
2400
|
+
let mergedDocuments = 0;
|
|
2401
|
+
let insertedDocumentRevisions = 0;
|
|
2402
|
+
for (const documentPlan of documentCandidates) {
|
|
2403
|
+
const parentExists = await tx
|
|
2404
|
+
.select({ id: issues.id })
|
|
2405
|
+
.from(issues)
|
|
2406
|
+
.where(and(eq(issues.id, documentPlan.source.issueId), eq(issues.workspaceId, workspaceId)))
|
|
2407
|
+
.then((rows) => rows[0] ?? null);
|
|
2408
|
+
if (!parentExists) continue;
|
|
2409
|
+
|
|
2410
|
+
const conflictingKeyDocument = await tx
|
|
2411
|
+
.select({ documentId: issueDocuments.documentId })
|
|
2412
|
+
.from(issueDocuments)
|
|
2413
|
+
.where(and(eq(issueDocuments.issueId, documentPlan.source.issueId), eq(issueDocuments.key, documentPlan.source.key)))
|
|
2414
|
+
.then((rows) => rows[0] ?? null);
|
|
2415
|
+
if (
|
|
2416
|
+
conflictingKeyDocument
|
|
2417
|
+
&& conflictingKeyDocument.documentId !== documentPlan.source.documentId
|
|
2418
|
+
) {
|
|
2419
|
+
continue;
|
|
2420
|
+
}
|
|
2421
|
+
|
|
2422
|
+
const existingDocument = await tx
|
|
2423
|
+
.select({ id: documents.id })
|
|
2424
|
+
.from(documents)
|
|
2425
|
+
.where(eq(documents.id, documentPlan.source.documentId))
|
|
2426
|
+
.then((rows) => rows[0] ?? null);
|
|
2427
|
+
|
|
2428
|
+
if (!existingDocument) {
|
|
2429
|
+
await tx.insert(documents).values({
|
|
2430
|
+
id: documentPlan.source.documentId,
|
|
2431
|
+
workspaceId,
|
|
2432
|
+
title: documentPlan.source.title,
|
|
2433
|
+
format: documentPlan.source.format,
|
|
2434
|
+
latestBody: documentPlan.source.latestBody,
|
|
2435
|
+
latestRevisionId: documentPlan.latestRevisionId,
|
|
2436
|
+
latestRevisionNumber: documentPlan.latestRevisionNumber,
|
|
2437
|
+
createdByAgentId: documentPlan.targetCreatedByAgentId,
|
|
2438
|
+
createdByUserId: documentPlan.source.createdByUserId,
|
|
2439
|
+
updatedByAgentId: documentPlan.targetUpdatedByAgentId,
|
|
2440
|
+
updatedByUserId: documentPlan.source.updatedByUserId,
|
|
2441
|
+
createdAt: documentPlan.source.documentCreatedAt,
|
|
2442
|
+
updatedAt: documentPlan.source.documentUpdatedAt,
|
|
2443
|
+
});
|
|
2444
|
+
await tx.insert(issueDocuments).values({
|
|
2445
|
+
id: documentPlan.source.id,
|
|
2446
|
+
workspaceId,
|
|
2447
|
+
issueId: documentPlan.source.issueId,
|
|
2448
|
+
documentId: documentPlan.source.documentId,
|
|
2449
|
+
key: documentPlan.source.key,
|
|
2450
|
+
createdAt: documentPlan.source.linkCreatedAt,
|
|
2451
|
+
updatedAt: documentPlan.source.linkUpdatedAt,
|
|
2452
|
+
});
|
|
2453
|
+
insertedDocuments += 1;
|
|
2454
|
+
} else {
|
|
2455
|
+
const existingLink = await tx
|
|
2456
|
+
.select({ id: issueDocuments.id })
|
|
2457
|
+
.from(issueDocuments)
|
|
2458
|
+
.where(eq(issueDocuments.documentId, documentPlan.source.documentId))
|
|
2459
|
+
.then((rows) => rows[0] ?? null);
|
|
2460
|
+
if (!existingLink) {
|
|
2461
|
+
await tx.insert(issueDocuments).values({
|
|
2462
|
+
id: documentPlan.source.id,
|
|
2463
|
+
workspaceId,
|
|
2464
|
+
issueId: documentPlan.source.issueId,
|
|
2465
|
+
documentId: documentPlan.source.documentId,
|
|
2466
|
+
key: documentPlan.source.key,
|
|
2467
|
+
createdAt: documentPlan.source.linkCreatedAt,
|
|
2468
|
+
updatedAt: documentPlan.source.linkUpdatedAt,
|
|
2469
|
+
});
|
|
2470
|
+
} else {
|
|
2471
|
+
await tx
|
|
2472
|
+
.update(issueDocuments)
|
|
2473
|
+
.set({
|
|
2474
|
+
issueId: documentPlan.source.issueId,
|
|
2475
|
+
key: documentPlan.source.key,
|
|
2476
|
+
updatedAt: documentPlan.source.linkUpdatedAt,
|
|
2477
|
+
})
|
|
2478
|
+
.where(eq(issueDocuments.documentId, documentPlan.source.documentId));
|
|
2479
|
+
}
|
|
2480
|
+
|
|
2481
|
+
await tx
|
|
2482
|
+
.update(documents)
|
|
2483
|
+
.set({
|
|
2484
|
+
title: documentPlan.source.title,
|
|
2485
|
+
format: documentPlan.source.format,
|
|
2486
|
+
latestBody: documentPlan.source.latestBody,
|
|
2487
|
+
latestRevisionId: documentPlan.latestRevisionId,
|
|
2488
|
+
latestRevisionNumber: documentPlan.latestRevisionNumber,
|
|
2489
|
+
updatedByAgentId: documentPlan.targetUpdatedByAgentId,
|
|
2490
|
+
updatedByUserId: documentPlan.source.updatedByUserId,
|
|
2491
|
+
updatedAt: documentPlan.source.documentUpdatedAt,
|
|
2492
|
+
})
|
|
2493
|
+
.where(eq(documents.id, documentPlan.source.documentId));
|
|
2494
|
+
mergedDocuments += 1;
|
|
2495
|
+
}
|
|
2496
|
+
|
|
2497
|
+
const existingRevisionIds = new Set(
|
|
2498
|
+
(
|
|
2499
|
+
await tx
|
|
2500
|
+
.select({ id: documentRevisions.id })
|
|
2501
|
+
.from(documentRevisions)
|
|
2502
|
+
.where(eq(documentRevisions.documentId, documentPlan.source.documentId))
|
|
2503
|
+
).map((row) => row.id),
|
|
2504
|
+
);
|
|
2505
|
+
for (const revisionPlan of documentPlan.revisionsToInsert) {
|
|
2506
|
+
if (existingRevisionIds.has(revisionPlan.source.id)) continue;
|
|
2507
|
+
await tx.insert(documentRevisions).values({
|
|
2508
|
+
id: revisionPlan.source.id,
|
|
2509
|
+
workspaceId,
|
|
2510
|
+
documentId: documentPlan.source.documentId,
|
|
2511
|
+
revisionNumber: revisionPlan.targetRevisionNumber,
|
|
2512
|
+
body: revisionPlan.source.body,
|
|
2513
|
+
changeSummary: revisionPlan.source.changeSummary,
|
|
2514
|
+
createdByAgentId: revisionPlan.targetCreatedByAgentId,
|
|
2515
|
+
createdByUserId: revisionPlan.source.createdByUserId,
|
|
2516
|
+
createdAt: revisionPlan.source.createdAt,
|
|
2517
|
+
});
|
|
2518
|
+
insertedDocumentRevisions += 1;
|
|
2519
|
+
}
|
|
2520
|
+
}
|
|
2521
|
+
|
|
2522
|
+
const attachmentCandidates = input.plan.attachmentPlans.filter(
|
|
2523
|
+
(plan): plan is PlannedAttachmentInsert => plan.action === "insert",
|
|
2524
|
+
);
|
|
2525
|
+
const existingAttachmentIds = new Set(
|
|
2526
|
+
(
|
|
2527
|
+
await tx
|
|
2528
|
+
.select({ id: issueAttachments.id })
|
|
2529
|
+
.from(issueAttachments)
|
|
2530
|
+
.where(eq(issueAttachments.workspaceId, workspaceId))
|
|
2531
|
+
).map((row) => row.id),
|
|
2532
|
+
);
|
|
2533
|
+
let insertedAttachments = 0;
|
|
2534
|
+
let skippedMissingAttachmentObjects = 0;
|
|
2535
|
+
for (const attachment of attachmentCandidates) {
|
|
2536
|
+
if (existingAttachmentIds.has(attachment.source.id)) continue;
|
|
2537
|
+
const parentExists = await tx
|
|
2538
|
+
.select({ id: issues.id })
|
|
2539
|
+
.from(issues)
|
|
2540
|
+
.where(and(eq(issues.id, attachment.source.issueId), eq(issues.workspaceId, workspaceId)))
|
|
2541
|
+
.then((rows) => rows[0] ?? null);
|
|
2542
|
+
if (!parentExists) continue;
|
|
2543
|
+
|
|
2544
|
+
const body = await readSourceAttachmentBody(
|
|
2545
|
+
input.sourceStorages,
|
|
2546
|
+
workspaceId,
|
|
2547
|
+
attachment.source.objectKey,
|
|
2548
|
+
);
|
|
2549
|
+
if (!body) {
|
|
2550
|
+
skippedMissingAttachmentObjects += 1;
|
|
2551
|
+
continue;
|
|
2552
|
+
}
|
|
2553
|
+
await input.targetStorage.putObject(
|
|
2554
|
+
workspaceId,
|
|
2555
|
+
attachment.source.objectKey,
|
|
2556
|
+
body,
|
|
2557
|
+
attachment.source.contentType,
|
|
2558
|
+
);
|
|
2559
|
+
|
|
2560
|
+
await tx.insert(assets).values({
|
|
2561
|
+
id: attachment.source.assetId,
|
|
2562
|
+
workspaceId,
|
|
2563
|
+
provider: attachment.source.provider,
|
|
2564
|
+
objectKey: attachment.source.objectKey,
|
|
2565
|
+
contentType: attachment.source.contentType,
|
|
2566
|
+
byteSize: attachment.source.byteSize,
|
|
2567
|
+
sha256: attachment.source.sha256,
|
|
2568
|
+
originalFilename: attachment.source.originalFilename,
|
|
2569
|
+
createdByAgentId: attachment.targetCreatedByAgentId,
|
|
2570
|
+
createdByUserId: attachment.source.createdByUserId,
|
|
2571
|
+
createdAt: attachment.source.assetCreatedAt,
|
|
2572
|
+
updatedAt: attachment.source.assetUpdatedAt,
|
|
2573
|
+
});
|
|
2574
|
+
|
|
2575
|
+
await tx.insert(issueAttachments).values({
|
|
2576
|
+
id: attachment.source.id,
|
|
2577
|
+
workspaceId,
|
|
2578
|
+
issueId: attachment.source.issueId,
|
|
2579
|
+
assetId: attachment.source.assetId,
|
|
2580
|
+
issueCommentId: attachment.targetIssueCommentId,
|
|
2581
|
+
createdAt: attachment.source.attachmentCreatedAt,
|
|
2582
|
+
updatedAt: attachment.source.attachmentUpdatedAt,
|
|
2583
|
+
});
|
|
2584
|
+
insertedAttachments += 1;
|
|
2585
|
+
}
|
|
2586
|
+
|
|
2587
|
+
return {
|
|
2588
|
+
insertedProjects,
|
|
2589
|
+
insertedProjectWorkspaces,
|
|
2590
|
+
insertedIssues,
|
|
2591
|
+
insertedComments,
|
|
2592
|
+
insertedDocuments,
|
|
2593
|
+
mergedDocuments,
|
|
2594
|
+
insertedDocumentRevisions,
|
|
2595
|
+
insertedAttachments,
|
|
2596
|
+
skippedMissingAttachmentObjects,
|
|
2597
|
+
insertedIssueIdentifiers,
|
|
2598
|
+
};
|
|
2599
|
+
});
|
|
2600
|
+
}
|
|
2601
|
+
|
|
2602
|
+
/**
 * CLI entry point for merging issue history from one worktree into another.
 *
 * Flow: validate flags -> resolve source/target endpoints (prompting for the
 * source when neither the positional argument nor --from is given) -> open
 * both databases and storages -> build a merge plan (optionally refined via
 * interactive project mapping) -> print the plan -> and, only with --apply
 * and confirmation, write it via applyMergePlan.
 *
 * @param sourceArg - optional positional source-worktree selector; mutually
 *   exclusive with opts.from.
 * @param opts - command flags (from/to selectors, scope, company, apply/dry/yes).
 * @throws Error on conflicting flags, identical source/target configs, or
 *   resolution failures from the helpers it calls.
 */
export async function worktreeMergeHistoryCommand(sourceArg: string | undefined, opts: WorktreeMergeHistoryOptions): Promise<void> {
  // --apply and --dry are opposite modes; refuse ambiguous invocations.
  if (opts.apply && opts.dry) {
    throw new Error("Use either --apply or --dry, not both.");
  }

  if (sourceArg && opts.from) {
    throw new Error("Use either the positional source argument or --from, not both.");
  }

  // Target defaults to the current worktree; source falls back to an
  // interactive prompt that excludes the already-chosen target.
  const targetEndpoint = opts.to
    ? resolveWorktreeEndpointFromSelector(opts.to, { allowCurrent: true })
    : resolveCurrentEndpoint();
  const sourceEndpoint = opts.from
    ? resolveWorktreeEndpointFromSelector(opts.from, { allowCurrent: true })
    : sourceArg
      ? resolveWorktreeEndpointFromSelector(sourceArg, { allowCurrent: true })
      : await promptForSourceEndpoint(targetEndpoint.rootPath);

  // Comparing resolved config paths guards against importing a worktree
  // into itself via two different selectors.
  if (path.resolve(sourceEndpoint.configPath) === path.resolve(targetEndpoint.configPath)) {
    throw new Error("Source and target Paperclip configs are the same. Choose different --from/--to worktrees.");
  }

  const scopes = parseWorktreeMergeScopes(opts.scope);
  const sourceHandle = await openConfiguredDb(sourceEndpoint.configPath);
  const targetHandle = await openConfiguredDb(targetEndpoint.configPath);
  const sourceStorages = resolveAttachmentLookupStorages({
    sourceEndpoint,
    targetEndpoint,
  });
  const targetStorage = openConfiguredStorage(targetEndpoint.configPath);

  try {
    const company = await resolveMergeCompany({
      sourceDb: sourceHandle.db,
      targetDb: targetHandle.db,
      selector: opts.company,
    });
    let collected = await collectMergePlan({
      sourceDb: sourceHandle.db,
      targetDb: targetHandle.db,
      company,
      scopes,
    });
    // Without --yes, let the user pick project imports/mappings, then
    // re-collect the plan so it reflects those choices.
    if (!opts.yes) {
      const projectSelections = await promptForProjectMappings({
        plan: collected.plan,
        sourceProjects: collected.sourceProjects,
        targetProjects: collected.targetProjects,
      });
      if (
        projectSelections.importProjectIds.length > 0
        || Object.keys(projectSelections.projectIdOverrides).length > 0
      ) {
        collected = await collectMergePlan({
          sourceDb: sourceHandle.db,
          targetDb: targetHandle.db,
          company,
          scopes,
          importProjectIds: projectSelections.importProjectIds,
          projectIdOverrides: projectSelections.projectIdOverrides,
        });
      }
    }

    // Always render the plan, whether this is a dry run or a real apply.
    console.log(renderMergePlan(collected.plan, {
      sourcePath: `${sourceEndpoint.label} (${sourceEndpoint.rootPath})`,
      targetPath: `${targetEndpoint.label} (${targetEndpoint.rootPath})`,
      unsupportedRunCount: collected.unsupportedRunCount,
    }));

    // Default behavior (no --apply) is preview-only.
    if (!opts.apply) {
      return;
    }

    // Final confirmation before mutating the target; --yes skips the prompt.
    const confirmed = opts.yes
      ? true
      : await p.confirm({
        message: `Import ${collected.plan.counts.issuesToInsert} issues and ${collected.plan.counts.commentsToInsert} comments from ${sourceEndpoint.label} into ${targetEndpoint.label}?`,
        initialValue: false,
      });
    if (p.isCancel(confirmed) || !confirmed) {
      p.log.warn("Import cancelled.");
      return;
    }

    const applied = await applyMergePlan({
      sourceStorages,
      targetStorage,
      targetDb: targetHandle.db,
      company,
      plan: collected.plan,
    });
    // Missing attachment blobs are non-fatal; surface them as a warning.
    if (applied.skippedMissingAttachmentObjects > 0) {
      p.log.warn(
        `Skipped ${applied.skippedMissingAttachmentObjects} attachments whose source files were missing from storage.`,
      );
    }
    p.outro(
      pc.green(
        `Imported ${applied.insertedProjects} projects (${applied.insertedProjectWorkspaces} workspaces), ${applied.insertedIssues} issues, ${applied.insertedComments} comments, ${applied.insertedDocuments} documents (${applied.insertedDocumentRevisions} revisions, ${applied.mergedDocuments} merged), and ${applied.insertedAttachments} attachments into ${company.issuePrefix}.`,
      ),
    );
  } finally {
    // Both database handles are stopped even when planning or applying throws.
    await targetHandle.stop();
    await sourceHandle.stop();
  }
}
|
|
2709
|
+
|
|
2710
|
+
export async function worktreeReseedCommand(opts: WorktreeReseedOptions): Promise<void> {
|
|
2711
|
+
printPaperclipCliBanner();
|
|
2712
|
+
p.intro(pc.bgCyan(pc.black(" paperclipai worktree reseed ")));
|
|
2713
|
+
|
|
2714
|
+
const seedMode = opts.seedMode ?? "full";
|
|
2715
|
+
if (!isWorktreeSeedMode(seedMode)) {
|
|
2716
|
+
throw new Error(`Unsupported seed mode "${seedMode}". Expected one of: minimal, full.`);
|
|
2717
|
+
}
|
|
2718
|
+
|
|
2719
|
+
const targetEndpoint = opts.to
|
|
2720
|
+
? resolveWorktreeEndpointFromSelector(opts.to, { allowCurrent: true })
|
|
2721
|
+
: resolveCurrentEndpoint();
|
|
2722
|
+
const source = resolveWorktreeReseedSource(opts);
|
|
2723
|
+
|
|
2724
|
+
if (path.resolve(source.configPath) === path.resolve(targetEndpoint.configPath)) {
|
|
2725
|
+
throw new Error("Source and target Paperclip configs are the same. Choose different --from/--to values.");
|
|
2726
|
+
}
|
|
2727
|
+
if (!existsSync(source.configPath)) {
|
|
2728
|
+
throw new Error(`Source config not found at ${source.configPath}.`);
|
|
2729
|
+
}
|
|
2730
|
+
|
|
2731
|
+
const targetConfig = readConfig(targetEndpoint.configPath);
|
|
2732
|
+
if (!targetConfig) {
|
|
2733
|
+
throw new Error(`Target config not found at ${targetEndpoint.configPath}.`);
|
|
2734
|
+
}
|
|
2735
|
+
const sourceConfig = readConfig(source.configPath);
|
|
2736
|
+
if (!sourceConfig) {
|
|
2737
|
+
throw new Error(`Source config not found at ${source.configPath}.`);
|
|
2738
|
+
}
|
|
2739
|
+
|
|
2740
|
+
const targetPaths = resolveWorktreeReseedTargetPaths({
|
|
2741
|
+
configPath: targetEndpoint.configPath,
|
|
2742
|
+
rootPath: targetEndpoint.rootPath,
|
|
2743
|
+
});
|
|
2744
|
+
const runningTargetPid = resolveRunningEmbeddedPostgresPid(targetConfig);
|
|
2745
|
+
if (runningTargetPid && !opts.allowLiveTarget) {
|
|
2746
|
+
throw new Error(
|
|
2747
|
+
`Target worktree database appears to be running (pid ${runningTargetPid}). Stop Paperclip in ${targetEndpoint.rootPath} before reseeding, or re-run with --allow-live-target if you want to override this guard.`,
|
|
2748
|
+
);
|
|
2749
|
+
}
|
|
2750
|
+
|
|
2751
|
+
const confirmed = opts.yes
|
|
2752
|
+
? true
|
|
2753
|
+
: await p.confirm({
|
|
2754
|
+
message: `Overwrite the isolated Paperclip DB for ${targetEndpoint.label} from ${source.label} using ${seedMode} seed mode?`,
|
|
2755
|
+
initialValue: false,
|
|
2756
|
+
});
|
|
2757
|
+
if (p.isCancel(confirmed) || !confirmed) {
|
|
2758
|
+
p.log.warn("Reseed cancelled.");
|
|
2759
|
+
return;
|
|
2760
|
+
}
|
|
2761
|
+
|
|
2762
|
+
if (runningTargetPid && opts.allowLiveTarget) {
|
|
2763
|
+
p.log.warning(`Proceeding even though the target embedded PostgreSQL appears to be running (pid ${runningTargetPid}).`);
|
|
2764
|
+
}
|
|
2765
|
+
|
|
2766
|
+
const spinner = p.spinner();
|
|
2767
|
+
spinner.start(`Reseeding ${targetEndpoint.label} from ${source.label} (${seedMode})...`);
|
|
2768
|
+
try {
|
|
2769
|
+
const seeded = await seedWorktreeDatabase({
|
|
2770
|
+
sourceConfigPath: source.configPath,
|
|
2771
|
+
sourceConfig,
|
|
2772
|
+
targetConfig,
|
|
2773
|
+
targetPaths,
|
|
2774
|
+
instanceId: targetPaths.instanceId,
|
|
2775
|
+
seedMode,
|
|
2776
|
+
});
|
|
2777
|
+
spinner.stop(`Reseeded ${targetEndpoint.label} (${seedMode}).`);
|
|
2778
|
+
p.log.message(pc.dim(`Source: ${source.configPath}`));
|
|
2779
|
+
p.log.message(pc.dim(`Target: ${targetEndpoint.configPath}`));
|
|
2780
|
+
p.log.message(pc.dim(`Seed snapshot: ${seeded.backupSummary}`));
|
|
2781
|
+
for (const rebound of seeded.reboundWorkspaces) {
|
|
2782
|
+
p.log.message(
|
|
2783
|
+
pc.dim(`Rebound workspace ${rebound.name}: ${rebound.fromCwd} -> ${rebound.toCwd}`),
|
|
2784
|
+
);
|
|
2785
|
+
}
|
|
2786
|
+
p.outro(pc.green(`Reseed complete for ${targetEndpoint.label}.`));
|
|
2787
|
+
} catch (error) {
|
|
2788
|
+
spinner.stop(pc.red("Failed to reseed worktree database."));
|
|
2789
|
+
throw error;
|
|
2790
|
+
}
|
|
2791
|
+
}
|
|
2792
|
+
|
|
2793
|
+
export function registerWorktreeCommands(program: Command): void {
|
|
2794
|
+
const worktree = program.command("worktree").description("Worktree-local Paperclip instance helpers");
|
|
2795
|
+
|
|
2796
|
+
program
|
|
2797
|
+
.command("worktree:make")
|
|
2798
|
+
.description("Create ~/NAME as a git worktree, then initialize an isolated Paperclip instance inside it")
|
|
2799
|
+
.argument("<name>", "Worktree name — auto-prefixed with paperclip- if needed (created at ~/paperclip-NAME)")
|
|
2800
|
+
.option("--start-point <ref>", "Remote ref to base the new branch on (env: PAPERCLIP_WORKTREE_START_POINT)")
|
|
2801
|
+
.option("--instance <id>", "Explicit isolated instance id")
|
|
2802
|
+
.option("--home <path>", `Home root for worktree instances (env: PAPERCLIP_WORKTREES_DIR, default: ${DEFAULT_WORKTREE_HOME})`)
|
|
2803
|
+
.option("--from-config <path>", "Source config.json to seed from")
|
|
2804
|
+
.option("--from-data-dir <path>", "Source PAPERCLIP_HOME used when deriving the source config")
|
|
2805
|
+
.option("--from-instance <id>", "Source instance id when deriving the source config", "default")
|
|
2806
|
+
.option("--server-port <port>", "Preferred server port", (value) => Number(value))
|
|
2807
|
+
.option("--db-port <port>", "Preferred embedded Postgres port", (value) => Number(value))
|
|
2808
|
+
.option("--seed-mode <mode>", "Seed profile: minimal or full (default: minimal)", "minimal")
|
|
2809
|
+
.option("--no-seed", "Skip database seeding from the source instance")
|
|
2810
|
+
.option("--force", "Replace existing repo-local config and isolated instance data", false)
|
|
2811
|
+
.action(worktreeMakeCommand);
|
|
2812
|
+
|
|
2813
|
+
worktree
|
|
2814
|
+
.command("init")
|
|
2815
|
+
.description("Create repo-local config/env and an isolated instance for this worktree")
|
|
2816
|
+
.option("--name <name>", "Display name used to derive the instance id")
|
|
2817
|
+
.option("--instance <id>", "Explicit isolated instance id")
|
|
2818
|
+
.option("--home <path>", `Home root for worktree instances (env: PAPERCLIP_WORKTREES_DIR, default: ${DEFAULT_WORKTREE_HOME})`)
|
|
2819
|
+
.option("--from-config <path>", "Source config.json to seed from")
|
|
2820
|
+
.option("--from-data-dir <path>", "Source PAPERCLIP_HOME used when deriving the source config")
|
|
2821
|
+
.option("--from-instance <id>", "Source instance id when deriving the source config", "default")
|
|
2822
|
+
.option("--server-port <port>", "Preferred server port", (value) => Number(value))
|
|
2823
|
+
.option("--db-port <port>", "Preferred embedded Postgres port", (value) => Number(value))
|
|
2824
|
+
.option("--seed-mode <mode>", "Seed profile: minimal or full (default: minimal)", "minimal")
|
|
2825
|
+
.option("--no-seed", "Skip database seeding from the source instance")
|
|
2826
|
+
.option("--force", "Replace existing repo-local config and isolated instance data", false)
|
|
2827
|
+
.action(worktreeInitCommand);
|
|
2828
|
+
|
|
2829
|
+
worktree
|
|
2830
|
+
.command("env")
|
|
2831
|
+
.description("Print shell exports for the current worktree-local Paperclip instance")
|
|
2832
|
+
.option("-c, --config <path>", "Path to config file")
|
|
2833
|
+
.option("--json", "Print JSON instead of shell exports")
|
|
2834
|
+
.action(worktreeEnvCommand);
|
|
2835
|
+
|
|
2836
|
+
program
|
|
2837
|
+
.command("worktree:list")
|
|
2838
|
+
.description("List git worktrees visible from this repo and whether they look like Paperclip worktrees")
|
|
2839
|
+
.option("--json", "Print JSON instead of text output")
|
|
2840
|
+
.action(worktreeListCommand);
|
|
2841
|
+
|
|
2842
|
+
program
|
|
2843
|
+
.command("worktree:merge-history")
|
|
2844
|
+
.description("Preview or import issue/comment history from another worktree into the current instance")
|
|
2845
|
+
.argument("[source]", "Optional source worktree path, directory name, or branch name (back-compat alias for --from)")
|
|
2846
|
+
.option("--from <worktree>", "Source worktree path, directory name, branch name, or current")
|
|
2847
|
+
.option("--to <worktree>", "Target worktree path, directory name, branch name, or current (defaults to current)")
|
|
2848
|
+
.option("--company <id-or-prefix>", "Shared company id or issue prefix inside the chosen source/target instances")
|
|
2849
|
+
.option("--scope <items>", "Comma-separated scopes to import (issues, comments)", "issues,comments")
|
|
2850
|
+
.option("--apply", "Apply the import after previewing the plan", false)
|
|
2851
|
+
.option("--dry", "Preview only and do not import anything", false)
|
|
2852
|
+
.option("--yes", "Skip the interactive confirmation prompt when applying", false)
|
|
2853
|
+
.action(worktreeMergeHistoryCommand);
|
|
2854
|
+
|
|
2855
|
+
worktree
|
|
2856
|
+
.command("reseed")
|
|
2857
|
+
.description("Re-seed an existing worktree-local instance from another Paperclip instance or worktree")
|
|
2858
|
+
.option("--from <worktree>", "Source worktree path, directory name, branch name, or current")
|
|
2859
|
+
.option("--to <worktree>", "Target worktree path, directory name, branch name, or current (defaults to current)")
|
|
2860
|
+
.option("--from-config <path>", "Source config.json to seed from")
|
|
2861
|
+
.option("--from-data-dir <path>", "Source PAPERCLIP_HOME used when deriving the source config")
|
|
2862
|
+
.option("--from-instance <id>", "Source instance id when deriving the source config")
|
|
2863
|
+
.option("--seed-mode <mode>", "Seed profile: minimal or full (default: full)", "full")
|
|
2864
|
+
.option("--yes", "Skip the destructive confirmation prompt", false)
|
|
2865
|
+
.option("--allow-live-target", "Override the guard that requires the target worktree DB to be stopped first", false)
|
|
2866
|
+
.action(worktreeReseedCommand);
|
|
2867
|
+
|
|
2868
|
+
program
|
|
2869
|
+
.command("worktree:cleanup")
|
|
2870
|
+
.description("Safely remove a worktree, its branch, and its isolated instance data")
|
|
2871
|
+
.argument("<name>", "Worktree name — auto-prefixed with paperclip- if needed")
|
|
2872
|
+
.option("--instance <id>", "Explicit instance id (if different from the worktree name)")
|
|
2873
|
+
.option("--home <path>", `Home root for worktree instances (env: PAPERCLIP_WORKTREES_DIR, default: ${DEFAULT_WORKTREE_HOME})`)
|
|
2874
|
+
.option("--force", "Bypass safety checks (uncommitted changes, unique commits)", false)
|
|
2875
|
+
.action(worktreeCleanupCommand);
|
|
2876
|
+
}
|