opencode-swarm-plugin 0.25.3 → 0.26.1

This diff shows the changes between two publicly released versions of the package, as they appear in its public registry. It is provided for informational purposes only.
@@ -1,9 +1,9 @@
  $ bun build ./src/index.ts --outdir ./dist --target node --external @electric-sql/pglite --external swarm-mail && bun build ./src/plugin.ts --outfile ./dist/plugin.js --target node --external @electric-sql/pglite --external swarm-mail && tsc
- Bundled 197 modules in 32ms
+ Bundled 197 modules in 35ms

  index.js 1.16 MB (entry point)

- Bundled 198 modules in 32ms
+ Bundled 198 modules in 33ms

  plugin.js 1.13 MB (entry point)

package/CHANGELOG.md CHANGED
@@ -1,5 +1,47 @@
  # opencode-swarm-plugin

+ ## 0.26.1
+
+ ### Patch Changes
+
+ - [`b2d4a84`](https://github.com/joelhooks/swarm-tools/commit/b2d4a84748cdef4b9dbca7666dd3d313b6cd2b24) Thanks [@joelhooks](https://github.com/joelhooks)! - Add automatic JSONL migration for beads on first use
+
+ - Auto-migrate from `.beads/issues.jsonl` when database is empty
+ - Fix import to handle missing dependencies/labels/comments arrays
+ - Fix closed bead import to satisfy check constraint (status + closed_at)
+ - Migrates 500+ historical beads seamlessly on first adapter initialization
+
+ - Updated dependencies [[`b2d4a84`](https://github.com/joelhooks/swarm-tools/commit/b2d4a84748cdef4b9dbca7666dd3d313b6cd2b24)]:
+ - swarm-mail@0.2.1
+
+ ## 0.26.0
+
+ ### Minor Changes
+
+ - [`1a7b02f`](https://github.com/joelhooks/swarm-tools/commit/1a7b02f707a1490f14465467c6024331d5064878) Thanks [@joelhooks](https://github.com/joelhooks)! - Add PGLite socket server adapter with hybrid daemon management and move streams storage to $TMPDIR.
+
+ **Socket Server Adapter:**
+
+ - New `createSocketAdapter()` wrapping postgres.js for DatabaseAdapter interface
+ - Daemon lifecycle: `startDaemon()`, `stopDaemon()`, `isDaemonRunning()`, `healthCheck()`
+ - Auto-start daemon on first use with `SWARM_MAIL_SOCKET=true` env var
+ - Graceful fallback to embedded PGLite on failure
+ - CLI: `swarm-mail-daemon start|stop|status`
+
+ **$TMPDIR Storage (BREAKING):**
+
+ - Streams now stored in `$TMPDIR/opencode-<project-name>-<hash>/streams`
+ - Eliminates git pollution from `.opencode/streams/`
+ - Auto-cleaned on reboot (ephemeral coordination state)
+ - New exports: `getProjectTempDirName()`, `hashProjectPath()`
+
+ This fixes the multi-agent PGLite corruption issue by having all agents connect to a single pglite-server daemon via PostgreSQL wire protocol.
+
+ ### Patch Changes
+
+ - Updated dependencies [[`1a7b02f`](https://github.com/joelhooks/swarm-tools/commit/1a7b02f707a1490f14465467c6024331d5064878)]:
+ - swarm-mail@0.2.0
+
  ## 0.25.3

  ### Patch Changes
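The 0.26.0 entry above names the new socket-server API, but the rest of this diff only shows bundled output, so here is a minimal usage sketch of the hybrid daemon flow. It assumes the listed swarm-mail exports (`createSocketAdapter`, `startDaemon`, `isDaemonRunning`, `healthCheck`) take no required arguments; their exact signatures and return shapes are not shown in this diff and are assumptions.

```ts
// Hedged sketch of the hybrid daemon flow described in the 0.26.0 changelog entry.
// Signatures and return shapes below are assumptions, not confirmed by this diff.
import {
  createSocketAdapter,
  startDaemon,
  isDaemonRunning,
  healthCheck,
} from "swarm-mail";

async function getAdapterViaDaemon() {
  // Opt in to the socket-server path (per the changelog's SWARM_MAIL_SOCKET flag).
  process.env.SWARM_MAIL_SOCKET = "true";

  try {
    if (!(await isDaemonRunning())) {
      await startDaemon(); // changelog: daemon auto-starts on first use
    }
    await healthCheck();
    // The adapter talks to the single pglite-server daemon over the PostgreSQL wire protocol.
    return createSocketAdapter();
  } catch (error) {
    // The changelog documents a graceful fallback to embedded PGLite on failure;
    // how the embedded adapter is obtained is not shown in this diff.
    console.warn("socket daemon unavailable, falling back to embedded PGLite", error);
    return null;
  }
}
```

The same lifecycle can also be driven from the shell with `swarm-mail-daemon start|stop|status`, per the CLI line in the changelog.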
package/dist/beads.d.ts CHANGED
@@ -31,6 +31,9 @@ export declare class BeadValidationError extends Error {
  /**
   * Get or create a BeadsAdapter instance for a project
   * Exported for testing - allows tests to verify state directly
+ *
+ * On first initialization, checks for .beads/issues.jsonl and imports
+ * historical beads if the database is empty.
   */
  export declare function getBeadsAdapter(projectKey: string): Promise<BeadsAdapter>;
  /**
@@ -1 +1 @@
- {"version":3,"file":"beads.d.ts","sourceRoot":"","sources":["../src/beads.ts"],"names":[],"mappings":"AAgBA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,EAGL,KAAK,YAAY,EAGlB,MAAM,YAAY,CAAC;AAapB;;;;;GAKG;AACH,wBAAgB,wBAAwB,CAAC,SAAS,EAAE,MAAM,GAAG,IAAI,CAEhE;AAED;;;GAGG;AACH,wBAAgB,wBAAwB,IAAI,MAAM,CAEjD;AAuCD;;GAEG;AACH,qBAAa,SAAU,SAAQ,KAAK;aAGhB,OAAO,EAAE,MAAM;aACf,QAAQ,CAAC,EAAE,MAAM;aACjB,MAAM,CAAC,EAAE,MAAM;gBAH/B,OAAO,EAAE,MAAM,EACC,OAAO,EAAE,MAAM,EACf,QAAQ,CAAC,EAAE,MAAM,YAAA,EACjB,MAAM,CAAC,EAAE,MAAM,YAAA;CAKlC;AAED;;GAEG;AACH,qBAAa,mBAAoB,SAAQ,KAAK;aAG1B,QAAQ,EAAE,CAAC,CAAC,QAAQ;gBADpC,OAAO,EAAE,MAAM,EACC,QAAQ,EAAE,CAAC,CAAC,QAAQ;CAKvC;AAYD;;;GAGG;AACH,wBAAsB,eAAe,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,YAAY,CAAC,CAc/E;AA8BD;;GAEG;AACH,eAAO,MAAM,YAAY;;;;;;;;;;;;;;;;;;;;;;CA+CvB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,iBAAiB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAgJ5B,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;;;;;;;;;;;;;;;;;;CAiDtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,YAAY;;;;;;;;;;;;;;;;;;;CA+DvB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;;;CA6BtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;CA4BtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;CAwBtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,UAAU;;;;;;;;CAqIrB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,iBAAiB;;;;;;;;;;CA8C5B,CAAC;AAMH,eAAO,MAAM,UAAU;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAUtB,CAAC"}
+ {"version":3,"file":"beads.d.ts","sourceRoot":"","sources":["../src/beads.ts"],"names":[],"mappings":"AAgBA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,EAIL,KAAK,YAAY,EAGlB,MAAM,YAAY,CAAC;AAepB;;;;;GAKG;AACH,wBAAgB,wBAAwB,CAAC,SAAS,EAAE,MAAM,GAAG,IAAI,CAEhE;AAED;;;GAGG;AACH,wBAAgB,wBAAwB,IAAI,MAAM,CAEjD;AAuCD;;GAEG;AACH,qBAAa,SAAU,SAAQ,KAAK;aAGhB,OAAO,EAAE,MAAM;aACf,QAAQ,CAAC,EAAE,MAAM;aACjB,MAAM,CAAC,EAAE,MAAM;gBAH/B,OAAO,EAAE,MAAM,EACC,OAAO,EAAE,MAAM,EACf,QAAQ,CAAC,EAAE,MAAM,YAAA,EACjB,MAAM,CAAC,EAAE,MAAM,YAAA;CAKlC;AAED;;GAEG;AACH,qBAAa,mBAAoB,SAAQ,KAAK;aAG1B,QAAQ,EAAE,CAAC,CAAC,QAAQ;gBADpC,OAAO,EAAE,MAAM,EACC,QAAQ,EAAE,CAAC,CAAC,QAAQ;CAKvC;AAYD;;;;;;GAMG;AACH,wBAAsB,eAAe,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,YAAY,CAAC,CAiB/E;AA+ED;;GAEG;AACH,eAAO,MAAM,YAAY;;;;;;;;;;;;;;;;;;;;;;CA+CvB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,iBAAiB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAgJ5B,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;;;;;;;;;;;;;;;;;;CAiDtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,YAAY;;;;;;;;;;;;;;;;;;;CA+DvB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;;;CA6BtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;CA4BtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;CAwBtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,UAAU;;;;;;;;CAqIrB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,iBAAiB;;;;;;;;;;CA8C5B,CAAC;AAMH,eAAO,MAAM,UAAU;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAUtB,CAAC"}
package/dist/index.js CHANGED
@@ -26166,7 +26166,7 @@ __export(exports_skills, {
26166
26166
  });
26167
26167
  import { readdir, readFile, stat, mkdir, writeFile, rm } from "fs/promises";
26168
26168
  import {
26169
- join as join4,
26169
+ join as join5,
26170
26170
  basename,
26171
26171
  dirname as dirname2,
26172
26172
  resolve,
@@ -26214,19 +26214,19 @@ function validateSkillMetadata(raw, filePath) {
26214
26214
  }
26215
26215
  function getGlobalSkillsDir() {
26216
26216
  const home = process.env.HOME || process.env.USERPROFILE || "~";
26217
- return join4(home, ".config", "opencode", "skills");
26217
+ return join5(home, ".config", "opencode", "skills");
26218
26218
  }
26219
26219
  function getClaudeGlobalSkillsDir() {
26220
26220
  const home = process.env.HOME || process.env.USERPROFILE || "~";
26221
- return join4(home, ".claude", "skills");
26221
+ return join5(home, ".claude", "skills");
26222
26222
  }
26223
26223
  function getPackageSkillsDir() {
26224
26224
  try {
26225
26225
  const currentFilePath = fileURLToPath(import.meta.url);
26226
- return join4(dirname2(currentFilePath), "..", "global-skills");
26226
+ return join5(dirname2(currentFilePath), "..", "global-skills");
26227
26227
  } catch {
26228
26228
  const currentDir = decodeURIComponent(new URL(".", import.meta.url).pathname);
26229
- return join4(currentDir, "..", "global-skills");
26229
+ return join5(currentDir, "..", "global-skills");
26230
26230
  }
26231
26231
  }
26232
26232
  async function findSkillFiles(baseDir) {
@@ -26235,7 +26235,7 @@ async function findSkillFiles(baseDir) {
26235
26235
  const entries = await readdir(baseDir, { withFileTypes: true });
26236
26236
  for (const entry of entries) {
26237
26237
  if (entry.isDirectory()) {
26238
- const skillPath = join4(baseDir, entry.name, "SKILL.md");
26238
+ const skillPath = join5(baseDir, entry.name, "SKILL.md");
26239
26239
  try {
26240
26240
  const s = await stat(skillPath);
26241
26241
  if (s.isFile()) {
@@ -26249,7 +26249,7 @@ async function findSkillFiles(baseDir) {
26249
26249
  }
26250
26250
  async function findSkillScripts(skillDir) {
26251
26251
  const scripts = [];
26252
- const scriptsDir = join4(skillDir, "scripts");
26252
+ const scriptsDir = join5(skillDir, "scripts");
26253
26253
  try {
26254
26254
  const entries = await readdir(scriptsDir, { withFileTypes: true });
26255
26255
  for (const entry of entries) {
@@ -26297,7 +26297,7 @@ async function discoverSkills(projectDir) {
26297
26297
  }
26298
26298
  }
26299
26299
  for (const relPath of PROJECT_SKILL_DIRECTORIES) {
26300
- await loadSkillsFromDir(join4(dir, relPath));
26300
+ await loadSkillsFromDir(join5(dir, relPath));
26301
26301
  }
26302
26302
  await loadSkillsFromDir(getGlobalSkillsDir());
26303
26303
  await loadSkillsFromDir(getClaudeGlobalSkillsDir());
@@ -26653,7 +26653,7 @@ Scripts run in the skill's directory with the project directory as an argument.`
26653
26653
  if (!skill.scripts.includes(args.script)) {
26654
26654
  return `Script '${args.script}' not found in skill '${args.skill}'. Available: ${skill.scripts.join(", ") || "none"}`;
26655
26655
  }
26656
- const scriptPath = join4(skill.directory, "scripts", args.script);
26656
+ const scriptPath = join5(skill.directory, "scripts", args.script);
26657
26657
  const scriptArgs = args.args || [];
26658
26658
  try {
26659
26659
  const TIMEOUT_MS = 60000;
@@ -26761,14 +26761,14 @@ Good skills have:
26761
26761
  const csoWarnings = validateCSOCompliance(args.name, args.description);
26762
26762
  let skillDir;
26763
26763
  if (args.directory === "global") {
26764
- skillDir = join4(getGlobalSkillsDir(), args.name);
26764
+ skillDir = join5(getGlobalSkillsDir(), args.name);
26765
26765
  } else if (args.directory === "global-claude") {
26766
- skillDir = join4(getClaudeGlobalSkillsDir(), args.name);
26766
+ skillDir = join5(getClaudeGlobalSkillsDir(), args.name);
26767
26767
  } else {
26768
26768
  const baseDir = args.directory || DEFAULT_SKILLS_DIR;
26769
- skillDir = join4(skillsProjectDirectory, baseDir, args.name);
26769
+ skillDir = join5(skillsProjectDirectory, baseDir, args.name);
26770
26770
  }
26771
- const skillPath = join4(skillDir, "SKILL.md");
26771
+ const skillPath = join5(skillDir, "SKILL.md");
26772
26772
  try {
26773
26773
  await mkdir(skillDir, { recursive: true });
26774
26774
  const content = generateSkillContent(args.name, args.description, args.body, { tags: args.tags, tools: args.tools });
@@ -26919,8 +26919,8 @@ executed with skills_execute. Use for:
26919
26919
  if (isAbsolute(args.script_name) || args.script_name.includes("..") || args.script_name.includes("/") || args.script_name.includes("\\") || basename(args.script_name) !== args.script_name) {
26920
26920
  return "Invalid script name. Use simple filenames without paths.";
26921
26921
  }
26922
- const scriptsDir = join4(skill.directory, "scripts");
26923
- const scriptPath = join4(scriptsDir, args.script_name);
26922
+ const scriptsDir = join5(skill.directory, "scripts");
26923
+ const scriptPath = join5(scriptsDir, args.script_name);
26924
26924
  try {
26925
26925
  await mkdir(scriptsDir, { recursive: true });
26926
26926
  await writeFile(scriptPath, args.content, {
@@ -26969,20 +26969,20 @@ Perfect for learning to create effective skills.`,
26969
26969
  }
26970
26970
  let skillDir;
26971
26971
  if (args.directory === "global") {
26972
- skillDir = join4(getGlobalSkillsDir(), args.name);
26972
+ skillDir = join5(getGlobalSkillsDir(), args.name);
26973
26973
  } else {
26974
26974
  const baseDir = args.directory || DEFAULT_SKILLS_DIR;
26975
- skillDir = join4(skillsProjectDirectory, baseDir, args.name);
26975
+ skillDir = join5(skillsProjectDirectory, baseDir, args.name);
26976
26976
  }
26977
26977
  const createdFiles = [];
26978
26978
  try {
26979
26979
  await mkdir(skillDir, { recursive: true });
26980
- const skillPath = join4(skillDir, "SKILL.md");
26980
+ const skillPath = join5(skillDir, "SKILL.md");
26981
26981
  const skillContent = generateSkillTemplate(args.name, args.description);
26982
26982
  await writeFile(skillPath, skillContent, "utf-8");
26983
26983
  createdFiles.push("SKILL.md");
26984
26984
  if (args.include_example_script !== false) {
26985
- const scriptsDir = join4(skillDir, "scripts");
26985
+ const scriptsDir = join5(skillDir, "scripts");
26986
26986
  await mkdir(scriptsDir, { recursive: true });
26987
26987
  const exampleScript = `#!/usr/bin/env bash
26988
26988
  # Example helper script for ${args.name}
@@ -26996,15 +26996,15 @@ echo "Project directory: $1"
26996
26996
 
26997
26997
  # TODO: Add actual script logic
26998
26998
  `;
26999
- const scriptPath = join4(scriptsDir, "example.sh");
26999
+ const scriptPath = join5(scriptsDir, "example.sh");
27000
27000
  await writeFile(scriptPath, exampleScript, { mode: 493 });
27001
27001
  createdFiles.push("scripts/example.sh");
27002
27002
  }
27003
27003
  if (args.include_reference !== false) {
27004
- const refsDir = join4(skillDir, "references");
27004
+ const refsDir = join5(skillDir, "references");
27005
27005
  await mkdir(refsDir, { recursive: true });
27006
27006
  const refContent = generateReferenceTemplate(args.name);
27007
- const refPath = join4(refsDir, "guide.md");
27007
+ const refPath = join5(refsDir, "guide.md");
27008
27008
  await writeFile(refPath, refContent, "utf-8");
27009
27009
  createdFiles.push("references/guide.md");
27010
27010
  }
@@ -27056,8 +27056,11 @@ init_dist();
27056
27056
  import {
27057
27057
  createBeadsAdapter,
27058
27058
  FlushManager,
27059
+ importFromJSONL,
27059
27060
  getSwarmMail
27060
27061
  } from "swarm-mail";
27062
+ import { existsSync, readFileSync } from "node:fs";
27063
+ import { join } from "node:path";
27061
27064
 
27062
27065
  // src/schemas/bead.ts
27063
27066
  init_zod();
@@ -27710,9 +27713,34 @@ async function getBeadsAdapter(projectKey) {
27710
27713
  const db = await swarmMail.getDatabase();
27711
27714
  const adapter = createBeadsAdapter(db, projectKey);
27712
27715
  await adapter.runMigrations();
27716
+ await autoMigrateFromJSONL(adapter, projectKey);
27713
27717
  adapterCache.set(projectKey, adapter);
27714
27718
  return adapter;
27715
27719
  }
27720
+ async function autoMigrateFromJSONL(adapter, projectKey) {
27721
+ const jsonlPath = join(projectKey, ".beads", "issues.jsonl");
27722
+ if (!existsSync(jsonlPath)) {
27723
+ return;
27724
+ }
27725
+ const existingBeads = await adapter.queryBeads(projectKey, { limit: 1 });
27726
+ if (existingBeads.length > 0) {
27727
+ return;
27728
+ }
27729
+ try {
27730
+ const jsonlContent = readFileSync(jsonlPath, "utf-8");
27731
+ const result = await importFromJSONL(adapter, projectKey, jsonlContent, {
27732
+ skipExisting: true
27733
+ });
27734
+ if (result.created > 0 || result.updated > 0) {
27735
+ console.log(`[beads] Auto-migrated ${result.created} beads from ${jsonlPath} (${result.skipped} skipped, ${result.errors.length} errors)`);
27736
+ }
27737
+ if (result.errors.length > 0) {
27738
+ console.warn(`[beads] Migration errors:`, result.errors.slice(0, 5).map((e) => `${e.beadId}: ${e.error}`));
27739
+ }
27740
+ } catch (error45) {
27741
+ console.warn(`[beads] Failed to auto-migrate from ${jsonlPath}:`, error45 instanceof Error ? error45.message : String(error45));
27742
+ }
27743
+ }
27716
27744
  function formatBeadForOutput(adapterBead) {
27717
27745
  return {
27718
27746
  id: adapterBead.id,
@@ -28355,8 +28383,8 @@ function formatToolAvailability(availability) {
28355
28383
 
28356
28384
  // src/rate-limiter.ts
28357
28385
  var import_ioredis = __toESM(require_built3(), 1);
28358
- import { mkdirSync, existsSync } from "node:fs";
28359
- import { dirname, join } from "node:path";
28386
+ import { mkdirSync, existsSync as existsSync2 } from "node:fs";
28387
+ import { dirname, join as join2 } from "node:path";
28360
28388
  import { homedir } from "node:os";
28361
28389
  var sqliteAvailable = false;
28362
28390
  var createDatabase = null;
@@ -28472,7 +28500,7 @@ class SqliteRateLimiter {
28472
28500
  throw new Error("SQLite is not available in this runtime (requires Bun)");
28473
28501
  }
28474
28502
  const dir = dirname(dbPath);
28475
- if (!existsSync(dir)) {
28503
+ if (!existsSync2(dir)) {
28476
28504
  mkdirSync(dir, { recursive: true });
28477
28505
  }
28478
28506
  this.db = createDatabase(dbPath);
@@ -28617,7 +28645,7 @@ async function createRateLimiter(options2) {
28617
28645
  const {
28618
28646
  backend,
28619
28647
  redisUrl = process.env.OPENCODE_RATE_LIMIT_REDIS_URL || "redis://localhost:6379",
28620
- sqlitePath = process.env.OPENCODE_RATE_LIMIT_SQLITE_PATH || join(homedir(), ".config", "opencode", "rate-limits.db")
28648
+ sqlitePath = process.env.OPENCODE_RATE_LIMIT_SQLITE_PATH || join2(homedir(), ".config", "opencode", "rate-limits.db")
28621
28649
  } = options2 || {};
28622
28650
  if (backend === "memory") {
28623
28651
  return new InMemoryRateLimiter;
@@ -28677,13 +28705,13 @@ async function getRateLimiter() {
28677
28705
 
28678
28706
  // src/agent-mail.ts
28679
28707
  import {
28680
- existsSync as existsSync2,
28708
+ existsSync as existsSync3,
28681
28709
  mkdirSync as mkdirSync2,
28682
- readFileSync,
28710
+ readFileSync as readFileSync2,
28683
28711
  writeFileSync,
28684
28712
  unlinkSync
28685
28713
  } from "fs";
28686
- import { join as join2 } from "path";
28714
+ import { join as join3 } from "path";
28687
28715
  import { tmpdir } from "os";
28688
28716
  var AGENT_MAIL_URL = "http://127.0.0.1:8765";
28689
28717
  var DEFAULT_TTL_SECONDS = 3600;
@@ -28707,16 +28735,16 @@ var RECOVERY_CONFIG = {
28707
28735
  restartCooldownMs: 1e4,
28708
28736
  enabled: process.env.OPENCODE_AGENT_MAIL_AUTO_RESTART !== "false"
28709
28737
  };
28710
- var SESSION_STATE_DIR = process.env.SWARM_STATE_DIR || join2(tmpdir(), "swarm-sessions");
28738
+ var SESSION_STATE_DIR = process.env.SWARM_STATE_DIR || join3(tmpdir(), "swarm-sessions");
28711
28739
  function getSessionStatePath(sessionID) {
28712
28740
  const safeID = sessionID.replace(/[^a-zA-Z0-9_-]/g, "_");
28713
- return join2(SESSION_STATE_DIR, `${safeID}.json`);
28741
+ return join3(SESSION_STATE_DIR, `${safeID}.json`);
28714
28742
  }
28715
28743
  function loadSessionState(sessionID) {
28716
28744
  const path = getSessionStatePath(sessionID);
28717
28745
  try {
28718
- if (existsSync2(path)) {
28719
- const data = readFileSync(path, "utf-8");
28746
+ if (existsSync3(path)) {
28747
+ const data = readFileSync2(path, "utf-8");
28720
28748
  return JSON.parse(data);
28721
28749
  }
28722
28750
  } catch (error45) {
@@ -28726,7 +28754,7 @@ function loadSessionState(sessionID) {
28726
28754
  }
28727
28755
  function saveSessionState(sessionID, state) {
28728
28756
  try {
28729
- if (!existsSync2(SESSION_STATE_DIR)) {
28757
+ if (!existsSync3(SESSION_STATE_DIR)) {
28730
28758
  mkdirSync2(SESSION_STATE_DIR, { recursive: true });
28731
28759
  }
28732
28760
  const path = getSessionStatePath(sessionID);
@@ -29507,13 +29535,13 @@ import {
29507
29535
  getActiveReservations
29508
29536
  } from "swarm-mail";
29509
29537
  import {
29510
- existsSync as existsSync3,
29538
+ existsSync as existsSync4,
29511
29539
  mkdirSync as mkdirSync3,
29512
- readFileSync as readFileSync2,
29540
+ readFileSync as readFileSync3,
29513
29541
  writeFileSync as writeFileSync2,
29514
29542
  unlinkSync as unlinkSync2
29515
29543
  } from "node:fs";
29516
- import { join as join3 } from "node:path";
29544
+ import { join as join4 } from "node:path";
29517
29545
  import { tmpdir as tmpdir2 } from "node:os";
29518
29546
  var MAX_INBOX_LIMIT2 = 5;
29519
29547
  var swarmMailProjectDirectory = null;
@@ -29523,16 +29551,16 @@ function setSwarmMailProjectDirectory(directory) {
29523
29551
  function getSwarmMailProjectDirectory() {
29524
29552
  return swarmMailProjectDirectory ?? undefined;
29525
29553
  }
29526
- var SESSION_STATE_DIR2 = process.env.SWARM_STATE_DIR || join3(tmpdir2(), "swarm-sessions");
29554
+ var SESSION_STATE_DIR2 = process.env.SWARM_STATE_DIR || join4(tmpdir2(), "swarm-sessions");
29527
29555
  function getSessionStatePath2(sessionID) {
29528
29556
  const safeID = sessionID.replace(/[^a-zA-Z0-9_-]/g, "_");
29529
- return join3(SESSION_STATE_DIR2, `${safeID}.json`);
29557
+ return join4(SESSION_STATE_DIR2, `${safeID}.json`);
29530
29558
  }
29531
29559
  function loadSessionState2(sessionID) {
29532
29560
  const path = getSessionStatePath2(sessionID);
29533
29561
  try {
29534
- if (existsSync3(path)) {
29535
- const data = readFileSync2(path, "utf-8");
29562
+ if (existsSync4(path)) {
29563
+ const data = readFileSync3(path, "utf-8");
29536
29564
  return JSON.parse(data);
29537
29565
  }
29538
29566
  } catch (error45) {
@@ -29542,7 +29570,7 @@ function loadSessionState2(sessionID) {
29542
29570
  }
29543
29571
  function saveSessionState2(sessionID, state) {
29544
29572
  try {
29545
- if (!existsSync3(SESSION_STATE_DIR2)) {
29573
+ if (!existsSync4(SESSION_STATE_DIR2)) {
29546
29574
  mkdirSync3(SESSION_STATE_DIR2, { recursive: true });
29547
29575
  }
29548
29576
  const path = getSessionStatePath2(sessionID);
@@ -29556,7 +29584,7 @@ function saveSessionState2(sessionID, state) {
29556
29584
  function clearSessionState(sessionID) {
29557
29585
  const path = getSessionStatePath2(sessionID);
29558
29586
  try {
29559
- if (existsSync3(path)) {
29587
+ if (existsSync4(path)) {
29560
29588
  unlinkSync2(path);
29561
29589
  }
29562
29590
  } catch {}
@@ -32974,7 +33002,7 @@ ${args.files_context.map((f) => `- \`${f}\``).join(`
32974
33002
  *Learned from swarm execution on ${new Date().toISOString().split("T")[0]}*`;
32975
33003
  const { getSkill: getSkill2, invalidateSkillsCache: invalidateSkillsCache2 } = await Promise.resolve().then(() => (init_skills(), exports_skills));
32976
33004
  const { mkdir: mkdir2, writeFile: writeFile2 } = await import("node:fs/promises");
32977
- const { join: join5 } = await import("node:path");
33005
+ const { join: join6 } = await import("node:path");
32978
33006
  const existing = await getSkill2(args.skill_name);
32979
33007
  if (existing) {
32980
33008
  return JSON.stringify({
@@ -32985,8 +33013,8 @@ ${args.files_context.map((f) => `- \`${f}\``).join(`
32985
33013
  suggestion: "Use skills_update to add to existing skill, or choose a different name"
32986
33014
  }, null, 2);
32987
33015
  }
32988
- const skillDir = join5(process.cwd(), ".opencode", "skills", args.skill_name);
32989
- const skillPath = join5(skillDir, "SKILL.md");
33016
+ const skillDir = join6(process.cwd(), ".opencode", "skills", args.skill_name);
33017
+ const skillPath = join6(skillDir, "SKILL.md");
32990
33018
  const frontmatter = [
32991
33019
  "---",
32992
33020
  `name: ${args.skill_name}`,
package/dist/plugin.js CHANGED
@@ -26166,7 +26166,7 @@ __export(exports_skills, {
26166
26166
  });
26167
26167
  import { readdir, readFile, stat, mkdir, writeFile, rm } from "fs/promises";
26168
26168
  import {
26169
- join as join4,
26169
+ join as join5,
26170
26170
  basename,
26171
26171
  dirname as dirname2,
26172
26172
  resolve,
@@ -26214,19 +26214,19 @@ function validateSkillMetadata(raw, filePath) {
26214
26214
  }
26215
26215
  function getGlobalSkillsDir() {
26216
26216
  const home = process.env.HOME || process.env.USERPROFILE || "~";
26217
- return join4(home, ".config", "opencode", "skills");
26217
+ return join5(home, ".config", "opencode", "skills");
26218
26218
  }
26219
26219
  function getClaudeGlobalSkillsDir() {
26220
26220
  const home = process.env.HOME || process.env.USERPROFILE || "~";
26221
- return join4(home, ".claude", "skills");
26221
+ return join5(home, ".claude", "skills");
26222
26222
  }
26223
26223
  function getPackageSkillsDir() {
26224
26224
  try {
26225
26225
  const currentFilePath = fileURLToPath(import.meta.url);
26226
- return join4(dirname2(currentFilePath), "..", "global-skills");
26226
+ return join5(dirname2(currentFilePath), "..", "global-skills");
26227
26227
  } catch {
26228
26228
  const currentDir = decodeURIComponent(new URL(".", import.meta.url).pathname);
26229
- return join4(currentDir, "..", "global-skills");
26229
+ return join5(currentDir, "..", "global-skills");
26230
26230
  }
26231
26231
  }
26232
26232
  async function findSkillFiles(baseDir) {
@@ -26235,7 +26235,7 @@ async function findSkillFiles(baseDir) {
26235
26235
  const entries = await readdir(baseDir, { withFileTypes: true });
26236
26236
  for (const entry of entries) {
26237
26237
  if (entry.isDirectory()) {
26238
- const skillPath = join4(baseDir, entry.name, "SKILL.md");
26238
+ const skillPath = join5(baseDir, entry.name, "SKILL.md");
26239
26239
  try {
26240
26240
  const s = await stat(skillPath);
26241
26241
  if (s.isFile()) {
@@ -26249,7 +26249,7 @@ async function findSkillFiles(baseDir) {
26249
26249
  }
26250
26250
  async function findSkillScripts(skillDir) {
26251
26251
  const scripts = [];
26252
- const scriptsDir = join4(skillDir, "scripts");
26252
+ const scriptsDir = join5(skillDir, "scripts");
26253
26253
  try {
26254
26254
  const entries = await readdir(scriptsDir, { withFileTypes: true });
26255
26255
  for (const entry of entries) {
@@ -26297,7 +26297,7 @@ async function discoverSkills(projectDir) {
26297
26297
  }
26298
26298
  }
26299
26299
  for (const relPath of PROJECT_SKILL_DIRECTORIES) {
26300
- await loadSkillsFromDir(join4(dir, relPath));
26300
+ await loadSkillsFromDir(join5(dir, relPath));
26301
26301
  }
26302
26302
  await loadSkillsFromDir(getGlobalSkillsDir());
26303
26303
  await loadSkillsFromDir(getClaudeGlobalSkillsDir());
@@ -26653,7 +26653,7 @@ Scripts run in the skill's directory with the project directory as an argument.`
26653
26653
  if (!skill.scripts.includes(args.script)) {
26654
26654
  return `Script '${args.script}' not found in skill '${args.skill}'. Available: ${skill.scripts.join(", ") || "none"}`;
26655
26655
  }
26656
- const scriptPath = join4(skill.directory, "scripts", args.script);
26656
+ const scriptPath = join5(skill.directory, "scripts", args.script);
26657
26657
  const scriptArgs = args.args || [];
26658
26658
  try {
26659
26659
  const TIMEOUT_MS = 60000;
@@ -26761,14 +26761,14 @@ Good skills have:
26761
26761
  const csoWarnings = validateCSOCompliance(args.name, args.description);
26762
26762
  let skillDir;
26763
26763
  if (args.directory === "global") {
26764
- skillDir = join4(getGlobalSkillsDir(), args.name);
26764
+ skillDir = join5(getGlobalSkillsDir(), args.name);
26765
26765
  } else if (args.directory === "global-claude") {
26766
- skillDir = join4(getClaudeGlobalSkillsDir(), args.name);
26766
+ skillDir = join5(getClaudeGlobalSkillsDir(), args.name);
26767
26767
  } else {
26768
26768
  const baseDir = args.directory || DEFAULT_SKILLS_DIR;
26769
- skillDir = join4(skillsProjectDirectory, baseDir, args.name);
26769
+ skillDir = join5(skillsProjectDirectory, baseDir, args.name);
26770
26770
  }
26771
- const skillPath = join4(skillDir, "SKILL.md");
26771
+ const skillPath = join5(skillDir, "SKILL.md");
26772
26772
  try {
26773
26773
  await mkdir(skillDir, { recursive: true });
26774
26774
  const content = generateSkillContent(args.name, args.description, args.body, { tags: args.tags, tools: args.tools });
@@ -26919,8 +26919,8 @@ executed with skills_execute. Use for:
26919
26919
  if (isAbsolute(args.script_name) || args.script_name.includes("..") || args.script_name.includes("/") || args.script_name.includes("\\") || basename(args.script_name) !== args.script_name) {
26920
26920
  return "Invalid script name. Use simple filenames without paths.";
26921
26921
  }
26922
- const scriptsDir = join4(skill.directory, "scripts");
26923
- const scriptPath = join4(scriptsDir, args.script_name);
26922
+ const scriptsDir = join5(skill.directory, "scripts");
26923
+ const scriptPath = join5(scriptsDir, args.script_name);
26924
26924
  try {
26925
26925
  await mkdir(scriptsDir, { recursive: true });
26926
26926
  await writeFile(scriptPath, args.content, {
@@ -26969,20 +26969,20 @@ Perfect for learning to create effective skills.`,
26969
26969
  }
26970
26970
  let skillDir;
26971
26971
  if (args.directory === "global") {
26972
- skillDir = join4(getGlobalSkillsDir(), args.name);
26972
+ skillDir = join5(getGlobalSkillsDir(), args.name);
26973
26973
  } else {
26974
26974
  const baseDir = args.directory || DEFAULT_SKILLS_DIR;
26975
- skillDir = join4(skillsProjectDirectory, baseDir, args.name);
26975
+ skillDir = join5(skillsProjectDirectory, baseDir, args.name);
26976
26976
  }
26977
26977
  const createdFiles = [];
26978
26978
  try {
26979
26979
  await mkdir(skillDir, { recursive: true });
26980
- const skillPath = join4(skillDir, "SKILL.md");
26980
+ const skillPath = join5(skillDir, "SKILL.md");
26981
26981
  const skillContent = generateSkillTemplate(args.name, args.description);
26982
26982
  await writeFile(skillPath, skillContent, "utf-8");
26983
26983
  createdFiles.push("SKILL.md");
26984
26984
  if (args.include_example_script !== false) {
26985
- const scriptsDir = join4(skillDir, "scripts");
26985
+ const scriptsDir = join5(skillDir, "scripts");
26986
26986
  await mkdir(scriptsDir, { recursive: true });
26987
26987
  const exampleScript = `#!/usr/bin/env bash
26988
26988
  # Example helper script for ${args.name}
@@ -26996,15 +26996,15 @@ echo "Project directory: $1"
26996
26996
 
26997
26997
  # TODO: Add actual script logic
26998
26998
  `;
26999
- const scriptPath = join4(scriptsDir, "example.sh");
26999
+ const scriptPath = join5(scriptsDir, "example.sh");
27000
27000
  await writeFile(scriptPath, exampleScript, { mode: 493 });
27001
27001
  createdFiles.push("scripts/example.sh");
27002
27002
  }
27003
27003
  if (args.include_reference !== false) {
27004
- const refsDir = join4(skillDir, "references");
27004
+ const refsDir = join5(skillDir, "references");
27005
27005
  await mkdir(refsDir, { recursive: true });
27006
27006
  const refContent = generateReferenceTemplate(args.name);
27007
- const refPath = join4(refsDir, "guide.md");
27007
+ const refPath = join5(refsDir, "guide.md");
27008
27008
  await writeFile(refPath, refContent, "utf-8");
27009
27009
  createdFiles.push("references/guide.md");
27010
27010
  }
@@ -27056,8 +27056,11 @@ init_dist();
27056
27056
  import {
27057
27057
  createBeadsAdapter,
27058
27058
  FlushManager,
27059
+ importFromJSONL,
27059
27060
  getSwarmMail
27060
27061
  } from "swarm-mail";
27062
+ import { existsSync, readFileSync } from "node:fs";
27063
+ import { join } from "node:path";
27061
27064
 
27062
27065
  // src/schemas/bead.ts
27063
27066
  init_zod();
@@ -27638,9 +27641,34 @@ async function getBeadsAdapter(projectKey) {
27638
27641
  const db = await swarmMail.getDatabase();
27639
27642
  const adapter = createBeadsAdapter(db, projectKey);
27640
27643
  await adapter.runMigrations();
27644
+ await autoMigrateFromJSONL(adapter, projectKey);
27641
27645
  adapterCache.set(projectKey, adapter);
27642
27646
  return adapter;
27643
27647
  }
27648
+ async function autoMigrateFromJSONL(adapter, projectKey) {
27649
+ const jsonlPath = join(projectKey, ".beads", "issues.jsonl");
27650
+ if (!existsSync(jsonlPath)) {
27651
+ return;
27652
+ }
27653
+ const existingBeads = await adapter.queryBeads(projectKey, { limit: 1 });
27654
+ if (existingBeads.length > 0) {
27655
+ return;
27656
+ }
27657
+ try {
27658
+ const jsonlContent = readFileSync(jsonlPath, "utf-8");
27659
+ const result = await importFromJSONL(adapter, projectKey, jsonlContent, {
27660
+ skipExisting: true
27661
+ });
27662
+ if (result.created > 0 || result.updated > 0) {
27663
+ console.log(`[beads] Auto-migrated ${result.created} beads from ${jsonlPath} (${result.skipped} skipped, ${result.errors.length} errors)`);
27664
+ }
27665
+ if (result.errors.length > 0) {
27666
+ console.warn(`[beads] Migration errors:`, result.errors.slice(0, 5).map((e) => `${e.beadId}: ${e.error}`));
27667
+ }
27668
+ } catch (error45) {
27669
+ console.warn(`[beads] Failed to auto-migrate from ${jsonlPath}:`, error45 instanceof Error ? error45.message : String(error45));
27670
+ }
27671
+ }
27644
27672
  function formatBeadForOutput(adapterBead) {
27645
27673
  return {
27646
27674
  id: adapterBead.id,
@@ -28257,8 +28285,8 @@ function formatToolAvailability(availability) {
28257
28285
 
28258
28286
  // src/rate-limiter.ts
28259
28287
  var import_ioredis = __toESM(require_built3(), 1);
28260
- import { mkdirSync, existsSync } from "node:fs";
28261
- import { dirname, join } from "node:path";
28288
+ import { mkdirSync, existsSync as existsSync2 } from "node:fs";
28289
+ import { dirname, join as join2 } from "node:path";
28262
28290
  import { homedir } from "node:os";
28263
28291
  var sqliteAvailable = false;
28264
28292
  var createDatabase = null;
@@ -28374,7 +28402,7 @@ class SqliteRateLimiter {
28374
28402
  throw new Error("SQLite is not available in this runtime (requires Bun)");
28375
28403
  }
28376
28404
  const dir = dirname(dbPath);
28377
- if (!existsSync(dir)) {
28405
+ if (!existsSync2(dir)) {
28378
28406
  mkdirSync(dir, { recursive: true });
28379
28407
  }
28380
28408
  this.db = createDatabase(dbPath);
@@ -28519,7 +28547,7 @@ async function createRateLimiter(options2) {
28519
28547
  const {
28520
28548
  backend,
28521
28549
  redisUrl = process.env.OPENCODE_RATE_LIMIT_REDIS_URL || "redis://localhost:6379",
28522
- sqlitePath = process.env.OPENCODE_RATE_LIMIT_SQLITE_PATH || join(homedir(), ".config", "opencode", "rate-limits.db")
28550
+ sqlitePath = process.env.OPENCODE_RATE_LIMIT_SQLITE_PATH || join2(homedir(), ".config", "opencode", "rate-limits.db")
28523
28551
  } = options2 || {};
28524
28552
  if (backend === "memory") {
28525
28553
  return new InMemoryRateLimiter;
@@ -28579,13 +28607,13 @@ async function getRateLimiter() {
28579
28607
 
28580
28608
  // src/agent-mail.ts
28581
28609
  import {
28582
- existsSync as existsSync2,
28610
+ existsSync as existsSync3,
28583
28611
  mkdirSync as mkdirSync2,
28584
- readFileSync,
28612
+ readFileSync as readFileSync2,
28585
28613
  writeFileSync,
28586
28614
  unlinkSync
28587
28615
  } from "fs";
28588
- import { join as join2 } from "path";
28616
+ import { join as join3 } from "path";
28589
28617
  import { tmpdir } from "os";
28590
28618
  var AGENT_MAIL_URL = "http://127.0.0.1:8765";
28591
28619
  var DEFAULT_TTL_SECONDS = 3600;
@@ -28609,16 +28637,16 @@ var RECOVERY_CONFIG = {
28609
28637
  restartCooldownMs: 1e4,
28610
28638
  enabled: process.env.OPENCODE_AGENT_MAIL_AUTO_RESTART !== "false"
28611
28639
  };
28612
- var SESSION_STATE_DIR = process.env.SWARM_STATE_DIR || join2(tmpdir(), "swarm-sessions");
28640
+ var SESSION_STATE_DIR = process.env.SWARM_STATE_DIR || join3(tmpdir(), "swarm-sessions");
28613
28641
  function getSessionStatePath(sessionID) {
28614
28642
  const safeID = sessionID.replace(/[^a-zA-Z0-9_-]/g, "_");
28615
- return join2(SESSION_STATE_DIR, `${safeID}.json`);
28643
+ return join3(SESSION_STATE_DIR, `${safeID}.json`);
28616
28644
  }
28617
28645
  function loadSessionState(sessionID) {
28618
28646
  const path = getSessionStatePath(sessionID);
28619
28647
  try {
28620
- if (existsSync2(path)) {
28621
- const data = readFileSync(path, "utf-8");
28648
+ if (existsSync3(path)) {
28649
+ const data = readFileSync2(path, "utf-8");
28622
28650
  return JSON.parse(data);
28623
28651
  }
28624
28652
  } catch (error45) {
@@ -28628,7 +28656,7 @@ function loadSessionState(sessionID) {
28628
28656
  }
28629
28657
  function saveSessionState(sessionID, state) {
28630
28658
  try {
28631
- if (!existsSync2(SESSION_STATE_DIR)) {
28659
+ if (!existsSync3(SESSION_STATE_DIR)) {
28632
28660
  mkdirSync2(SESSION_STATE_DIR, { recursive: true });
28633
28661
  }
28634
28662
  const path = getSessionStatePath(sessionID);
@@ -29316,13 +29344,13 @@ import {
29316
29344
  getActiveReservations
29317
29345
  } from "swarm-mail";
29318
29346
  import {
29319
- existsSync as existsSync3,
29347
+ existsSync as existsSync4,
29320
29348
  mkdirSync as mkdirSync3,
29321
- readFileSync as readFileSync2,
29349
+ readFileSync as readFileSync3,
29322
29350
  writeFileSync as writeFileSync2,
29323
29351
  unlinkSync as unlinkSync2
29324
29352
  } from "node:fs";
29325
- import { join as join3 } from "node:path";
29353
+ import { join as join4 } from "node:path";
29326
29354
  import { tmpdir as tmpdir2 } from "node:os";
29327
29355
  var MAX_INBOX_LIMIT2 = 5;
29328
29356
  var swarmMailProjectDirectory = null;
@@ -29332,16 +29360,16 @@ function setSwarmMailProjectDirectory(directory) {
29332
29360
  function getSwarmMailProjectDirectory() {
29333
29361
  return swarmMailProjectDirectory ?? undefined;
29334
29362
  }
29335
- var SESSION_STATE_DIR2 = process.env.SWARM_STATE_DIR || join3(tmpdir2(), "swarm-sessions");
29363
+ var SESSION_STATE_DIR2 = process.env.SWARM_STATE_DIR || join4(tmpdir2(), "swarm-sessions");
29336
29364
  function getSessionStatePath2(sessionID) {
29337
29365
  const safeID = sessionID.replace(/[^a-zA-Z0-9_-]/g, "_");
29338
- return join3(SESSION_STATE_DIR2, `${safeID}.json`);
29366
+ return join4(SESSION_STATE_DIR2, `${safeID}.json`);
29339
29367
  }
29340
29368
  function loadSessionState2(sessionID) {
29341
29369
  const path = getSessionStatePath2(sessionID);
29342
29370
  try {
29343
- if (existsSync3(path)) {
29344
- const data = readFileSync2(path, "utf-8");
29371
+ if (existsSync4(path)) {
29372
+ const data = readFileSync3(path, "utf-8");
29345
29373
  return JSON.parse(data);
29346
29374
  }
29347
29375
  } catch (error45) {
@@ -29351,7 +29379,7 @@ function loadSessionState2(sessionID) {
29351
29379
  }
29352
29380
  function saveSessionState2(sessionID, state) {
29353
29381
  try {
29354
- if (!existsSync3(SESSION_STATE_DIR2)) {
29382
+ if (!existsSync4(SESSION_STATE_DIR2)) {
29355
29383
  mkdirSync3(SESSION_STATE_DIR2, { recursive: true });
29356
29384
  }
29357
29385
  const path = getSessionStatePath2(sessionID);
@@ -32756,7 +32784,7 @@ ${args.files_context.map((f) => `- \`${f}\``).join(`
32756
32784
  *Learned from swarm execution on ${new Date().toISOString().split("T")[0]}*`;
32757
32785
  const { getSkill: getSkill2, invalidateSkillsCache: invalidateSkillsCache2 } = await Promise.resolve().then(() => (init_skills(), exports_skills));
32758
32786
  const { mkdir: mkdir2, writeFile: writeFile2 } = await import("node:fs/promises");
32759
- const { join: join5 } = await import("node:path");
32787
+ const { join: join6 } = await import("node:path");
32760
32788
  const existing = await getSkill2(args.skill_name);
32761
32789
  if (existing) {
32762
32790
  return JSON.stringify({
@@ -32767,8 +32795,8 @@ ${args.files_context.map((f) => `- \`${f}\``).join(`
32767
32795
  suggestion: "Use skills_update to add to existing skill, or choose a different name"
32768
32796
  }, null, 2);
32769
32797
  }
32770
- const skillDir = join5(process.cwd(), ".opencode", "skills", args.skill_name);
32771
- const skillPath = join5(skillDir, "SKILL.md");
32798
+ const skillDir = join6(process.cwd(), ".opencode", "skills", args.skill_name);
32799
+ const skillPath = join6(skillDir, "SKILL.md");
32772
32800
  const frontmatter = [
32773
32801
  "---",
32774
32802
  `name: ${args.skill_name}`,
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "opencode-swarm-plugin",
- "version": "0.25.3",
+ "version": "0.26.1",
  "description": "Multi-agent swarm coordination for OpenCode with learning capabilities, beads integration, and Agent Mail",
  "type": "module",
  "main": "./dist/index.js",
@@ -33,7 +33,7 @@
  "@opencode-ai/plugin": "^1.0.134",
  "gray-matter": "^4.0.3",
  "ioredis": "^5.4.1",
- "swarm-mail": "0.1.4",
+ "swarm-mail": "0.2.1",
  "zod": "4.1.8"
  },
  "devDependencies": {
package/src/beads.ts CHANGED
@@ -18,10 +18,13 @@ import { z } from "zod";
  import {
  createBeadsAdapter,
  FlushManager,
+ importFromJSONL,
  type BeadsAdapter,
  type Bead as AdapterBead,
  getSwarmMail,
  } from "swarm-mail";
+ import { existsSync, readFileSync } from "node:fs";
+ import { join } from "node:path";

  // ============================================================================
  // Working Directory Configuration
@@ -130,6 +133,9 @@ const adapterCache = new Map<string, BeadsAdapter>();
  /**
   * Get or create a BeadsAdapter instance for a project
   * Exported for testing - allows tests to verify state directly
+ *
+ * On first initialization, checks for .beads/issues.jsonl and imports
+ * historical beads if the database is empty.
   */
  export async function getBeadsAdapter(projectKey: string): Promise<BeadsAdapter> {
  if (adapterCache.has(projectKey)) {
@@ -143,10 +149,62 @@ export async function getBeadsAdapter(projectKey: string): Promise<BeadsAdapter>
  // Run migrations to ensure schema exists
  await adapter.runMigrations();

+ // Auto-migrate from JSONL if database is empty and file exists
+ await autoMigrateFromJSONL(adapter, projectKey);
+
  adapterCache.set(projectKey, adapter);
  return adapter;
  }

+ /**
+ * Auto-migrate beads from .beads/issues.jsonl if:
+ * 1. The JSONL file exists
+ * 2. The database has no beads for this project
+ *
+ * This enables seamless migration from the old bd CLI to the new PGLite-based system.
+ */
+ async function autoMigrateFromJSONL(adapter: BeadsAdapter, projectKey: string): Promise<void> {
+ const jsonlPath = join(projectKey, ".beads", "issues.jsonl");
+
+ // Check if JSONL file exists
+ if (!existsSync(jsonlPath)) {
+ return;
+ }
+
+ // Check if database already has beads
+ const existingBeads = await adapter.queryBeads(projectKey, { limit: 1 });
+ if (existingBeads.length > 0) {
+ return; // Already have beads, skip migration
+ }
+
+ // Read and import JSONL
+ try {
+ const jsonlContent = readFileSync(jsonlPath, "utf-8");
+ const result = await importFromJSONL(adapter, projectKey, jsonlContent, {
+ skipExisting: true, // Safety: don't overwrite if somehow beads exist
+ });
+
+ if (result.created > 0 || result.updated > 0) {
+ console.log(
+ `[beads] Auto-migrated ${result.created} beads from ${jsonlPath} (${result.skipped} skipped, ${result.errors.length} errors)`
+ );
+ }
+
+ if (result.errors.length > 0) {
+ console.warn(
+ `[beads] Migration errors:`,
+ result.errors.slice(0, 5).map((e) => `${e.beadId}: ${e.error}`)
+ );
+ }
+ } catch (error) {
+ // Non-fatal - log and continue
+ console.warn(
+ `[beads] Failed to auto-migrate from ${jsonlPath}:`,
+ error instanceof Error ? error.message : String(error)
+ );
+ }
+ }
+
  /**
   * Format adapter bead for output (map field names)
   * Adapter uses: type, created_at/updated_at (timestamps)
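The source above shows the full auto-migration path. As a rough way to exercise it end to end, the sketch below writes a minimal `.beads/issues.jsonl` into a temp directory and initializes the adapter against it. The JSONL record fields are illustrative assumptions (the real shape is defined by swarm-mail's `importFromJSONL`), and the relative import path is hypothetical; the `getBeadsAdapter` and `queryBeads` calls themselves are taken from the diff above.

```ts
// Sketch of exercising getBeadsAdapter's auto-migration, e.g. from a bun test.
import { mkdtempSync, mkdirSync, writeFileSync } from "node:fs";
import { join } from "node:path";
import { tmpdir } from "node:os";
import { getBeadsAdapter } from "./beads"; // hypothetical path relative to src/

const projectDir = mkdtempSync(join(tmpdir(), "beads-migration-"));
mkdirSync(join(projectDir, ".beads"), { recursive: true });

// Illustrative record only — field names are an assumption, not taken from this diff.
writeFileSync(
  join(projectDir, ".beads", "issues.jsonl"),
  JSON.stringify({ id: "bead-1", title: "Example bead", status: "open" }) + "\n",
);

// First initialization: runMigrations(), then autoMigrateFromJSONL() imports the
// file because queryBeads() finds no existing beads for this project key.
const adapter = await getBeadsAdapter(projectDir);
const beads = await adapter.queryBeads(projectDir, { limit: 10 });
console.log(`beads after auto-migration: ${beads.length}`);
```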