pikakit 1.0.25 → 1.0.26
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/lib/commands/install.js +108 -30
- package/bin/lib/config.js +9 -0
- package/package.json +1 -1
package/bin/lib/commands/install.js
CHANGED
@@ -12,7 +12,7 @@ import boxen from "boxen";
 import { parseSkillSpec, merkleHash } from "../helpers.js";
 import { parseSkillMdFrontmatter } from "../skills.js";
 import { step, activeStep, stepLine, S, c, fatal, spinner, multiselect, select, confirm, isCancel, cancel } from "../ui.js";
-import { WORKSPACE, GLOBAL_DIR, OFFLINE, GLOBAL } from "../config.js";
+import { WORKSPACE, GLOBAL_DIR, OFFLINE, GLOBAL, REPO_CACHE_DIR, CACHE_TTL_MS, FORCE_REFRESH } from "../config.js";
 import { installSkill } from "../installer.js";
 
 /**
@@ -47,31 +47,93 @@ export async function run(spec) {
   step("Source: " + c.cyan(url));
 
   const s = spinner();
-  s.start("Cloning repository");
-
   const tmp = fs.mkdtempSync(path.join(os.tmpdir(), "add-skill-"));
 
-  //
-  const
+  // --- Repository Caching Logic ---
+  const cacheDir = path.join(REPO_CACHE_DIR, org, repo);
+  const cacheMetaFile = path.join(cacheDir, ".cache-meta.json");
+  let useCache = false;
+  let cacheHit = false;
+
+  // Check if cache exists and is valid
+  if (!FORCE_REFRESH && fs.existsSync(cacheDir) && fs.existsSync(cacheMetaFile)) {
+    try {
+      const meta = JSON.parse(fs.readFileSync(cacheMetaFile, "utf-8"));
+      const cacheAge = Date.now() - new Date(meta.timestamp).getTime();
+
+      if (cacheAge < CACHE_TTL_MS) {
+        useCache = true;
+        cacheHit = true;
+        s.start("Checking cache...");
+      }
+    } catch {
+      // Invalid cache, will refresh
+    }
+  }
+
   let lastError = null;
+  const MAX_RETRIES = 3;
 
-
+  if (useCache) {
+    // Fast path: Update cache with git fetch (much faster than clone)
     try {
-
-
-
-
+      s.message("Updating from cache...");
+      await execAsync(`git -C "${cacheDir}" fetch --depth=1 origin HEAD`, { timeout: 30000 });
+      await execAsync(`git -C "${cacheDir}" reset --hard FETCH_HEAD`, { timeout: 10000 });
+
+      // Copy from cache to tmp
+      await fs.promises.cp(cacheDir, tmp, { recursive: true });
+
+      // Update cache metadata
+      fs.writeFileSync(cacheMetaFile, JSON.stringify({
+        timestamp: new Date().toISOString(),
+        org, repo, ref: ref || "HEAD"
+      }));
+
+      const savedTime = Math.round((CACHE_TTL_MS - (Date.now() - new Date(JSON.parse(fs.readFileSync(cacheMetaFile, "utf-8")).timestamp).getTime())) / 1000 / 60);
+      s.stop("Cache hit! " + c.dim(`(saved ~20s)`));
     } catch (err) {
-
+      // Cache update failed, fall back to fresh clone
+      s.message("Cache outdated, cloning fresh...");
+      useCache = false;
+      cacheHit = false;
+    }
+  }
 
-
-
-
-      await new Promise(r => setTimeout(r, delay));
+  if (!useCache) {
+    // Fresh clone with retry logic
+    s.start(FORCE_REFRESH ? "Force refreshing repository..." : "Cloning repository");
 
-
-
-
+    for (let attempt = 1; attempt <= MAX_RETRIES; attempt++) {
+      try {
+        await execAsync(`git clone --depth=1 ${url} "${tmp}"`, { timeout: 60000 });
+        if (ref) await execAsync(`git -C "${tmp}" checkout ${ref}`, { timeout: 30000 });
+        lastError = null;
+
+        // Save to cache for next time
+        try {
+          fs.mkdirSync(path.dirname(cacheDir), { recursive: true });
+          if (fs.existsSync(cacheDir)) fs.rmSync(cacheDir, { recursive: true, force: true });
+          await fs.promises.cp(tmp, cacheDir, { recursive: true });
+          fs.writeFileSync(path.join(cacheDir, ".cache-meta.json"), JSON.stringify({
+            timestamp: new Date().toISOString(),
+            org, repo, ref: ref || "HEAD"
+          }));
+        } catch { /* Cache write failed, non-fatal */ }
+
+        break;
+      } catch (err) {
+        lastError = err;
+
+        if (attempt < MAX_RETRIES) {
+          const delay = Math.pow(2, attempt) * 1000; // 2s, 4s
+          s.message(`Retry ${attempt}/${MAX_RETRIES} in ${delay / 1000}s...`);
+          await new Promise(r => setTimeout(r, delay));
+
+          // Clean up failed attempt
+          try { fs.rmSync(tmp, { recursive: true, force: true }); } catch { }
+          fs.mkdirSync(tmp, { recursive: true });
+        }
       }
     }
   }
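The hunk above introduces two small mechanisms: a cache validity check (the cached clone under REPO_CACHE_DIR is reused only while the timestamp in .cache-meta.json is younger than CACHE_TTL_MS, and never when FORCE_REFRESH is set) and a clone retry loop with exponential backoff (2 s, then 4 s, for up to MAX_RETRIES attempts). A minimal standalone sketch of both decisions, assuming the same constants as config.js; isCacheFresh and withRetries are illustrative helper names, not pikakit APIs:

import fs from "node:fs";
import path from "node:path";

// Assumed to mirror the constants in the diff, not imported from pikakit itself.
const CACHE_TTL_MS = 24 * 60 * 60 * 1000; // 24 hours
const MAX_RETRIES = 3;

// True when a cached clone exists and its metadata is fresh enough to reuse.
function isCacheFresh(cacheDir, now = Date.now()) {
  const metaFile = path.join(cacheDir, ".cache-meta.json");
  if (!fs.existsSync(cacheDir) || !fs.existsSync(metaFile)) return false;
  try {
    const meta = JSON.parse(fs.readFileSync(metaFile, "utf-8"));
    return now - new Date(meta.timestamp).getTime() < CACHE_TTL_MS;
  } catch {
    return false; // unreadable metadata counts as a cache miss
  }
}

// Retries an async operation with the same 2s/4s backoff the diff uses.
async function withRetries(fn) {
  let lastError = null;
  for (let attempt = 1; attempt <= MAX_RETRIES; attempt++) {
    try {
      return await fn(attempt);
    } catch (err) {
      lastError = err;
      if (attempt < MAX_RETRIES) {
        await new Promise((r) => setTimeout(r, 2 ** attempt * 1000));
      }
    }
  }
  throw lastError;
}

With a 24-hour TTL, the first install of a given org/repo pays for a shallow clone; later installs within the window only pay for a git fetch plus a local copy, which is where the "(saved ~20s)" message comes from.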
@@ -99,7 +161,9 @@ export async function run(spec) {
     return;
   }
 
-
+  if (!cacheHit) {
+    s.stop("Repository cloned");
+  }
 
   // Find skills in repo - check multiple possible locations
   const skillsInRepo = [];
@@ -399,12 +463,13 @@ export async function run(spec) {
 
   const installResults = { success: [], failed: [] };
 
-
-
-
-  const is = spinner();
-  is.start(`Installing ${sn} to ${selectedAgents.length} agents`);
+  // --- Parallel Skill Installation (Batch processing) ---
+  const CONCURRENCY_LIMIT = 5; // Process 5 skills at a time
+  const totalSkills = selectedSkills.length;
 
+  // Create installation function
+  async function installSingleSkill(sn) {
+    const src = skillPathMap[sn] || path.join(skillsDir || tmp, sn);
     const result = await installSkillForAgents(src, sn, selectedAgents, {
       method: installMethod,
       scope: isGlobal ? "global" : "project",
@@ -413,17 +478,30 @@ export async function run(spec) {
         ref: ref || null
       }
     });
+    return { skill: sn, ...result };
+  }
 
-
-
+  // Progress spinner for batch installation
+  const batchSpinner = spinner();
+  batchSpinner.start(`Installing ${totalSkills} skills to ${selectedAgents.length} agents (parallel)...`);
 
-
-
-
-
+  // Process in batches
+  let completed = 0;
+  for (let i = 0; i < selectedSkills.length; i += CONCURRENCY_LIMIT) {
+    const batch = selectedSkills.slice(i, i + CONCURRENCY_LIMIT);
+    const batchResults = await Promise.all(batch.map(installSingleSkill));
+
+    for (const r of batchResults) {
+      installResults.success.push(...r.success);
+      installResults.failed.push(...r.failed);
+      completed++;
     }
+
+    batchSpinner.message(`Installing skills... ${completed}/${totalSkills}`);
   }
 
+  batchSpinner.stop(`Installed ${completed} skills (${installResults.success.length} agents, ${installResults.failed.length} failed)`);
+
 
   // Derive base .agent directory from skillsDir
   // If skillsDir is .../skills, then baseAgentDir is parent (.agent)
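The other change in install.js replaces the per-skill spinner with batched parallel installs: selectedSkills is sliced into chunks of CONCURRENCY_LIMIT (5), and each chunk is awaited with Promise.all before the next one starts. A generic sketch of that batching pattern under the same 5-at-a-time assumption; mapInBatches and the fake worker are illustrative names, not part of pikakit:

// Runs `worker` over `items` with at most `limit` calls in flight per batch.
// Mirrors the install.js batching: each chunk is fully awaited before the next starts.
async function mapInBatches(items, worker, limit = 5) {
  const results = [];
  for (let i = 0; i < items.length; i += limit) {
    const batch = items.slice(i, i + limit);
    results.push(...(await Promise.all(batch.map(worker))));
  }
  return results;
}

// Usage: six fake "installs"; the first five run together, the sixth waits for them.
async function demo() {
  const skills = ["a", "b", "c", "d", "e", "f"];
  const results = await mapInBatches(skills, async (name) => {
    await new Promise((r) => setTimeout(r, 100)); // stand-in for installSkillForAgents
    return { skill: name, success: [name], failed: [] };
  });
  console.log(results.length); // 6
}

demo();

Awaiting whole batches is simpler than a sliding-window worker pool, at the cost of idle slots whenever one install in a batch is slow.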
package/bin/lib/config.js
CHANGED
@@ -26,6 +26,12 @@ export const REGISTRIES_FILE = path.join(CACHE_ROOT, "registries.json");
 /** Backup directory */
 export const BACKUP_DIR = path.join(CACHE_ROOT, "backups");
 
+/** Repository cache directory for git clone caching */
+export const REPO_CACHE_DIR = path.join(CACHE_ROOT, "repos");
+
+/** Cache TTL in milliseconds (24 hours) */
+export const CACHE_TTL_MS = 24 * 60 * 60 * 1000;
+
 // --- Argument Parsing ---
 
 const args = process.argv.slice(2);
@@ -68,6 +74,9 @@ export const LOCKED = flags.has("--locked");
 /** @type {boolean} Offline mode (skip network operations) */
 export const OFFLINE = flags.has("--offline");
 
+/** @type {boolean} Force refresh cache (re-clone from remote) */
+export const FORCE_REFRESH = flags.has("--force-refresh") || flags.has("--no-cache");
+
 // --- Package Info ---
 
 import { createRequire } from "module";
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "pikakit",
-  "version": "1.0.25",
+  "version": "1.0.26",
   "description": "Enterprise-grade Agent Skill Manager with Antigravity Skills support, Progressive Disclosure detection, and semantic routing validation",
   "license": "MIT",
   "author": "pikakit <pikakit@gmail.com>",