@kisuke/cli 1.1.12-dev.21.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +14 -0
- package/bin/kisuke.js +36 -0
- package/config.example.json +5 -0
- package/config.json +5 -0
- package/package.json +27 -0
- package/scripts/download.mjs +159 -0
- package/scripts/platform.mjs +35 -0
package/README.md
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
# @kisuke/cli
|
|
2
|
+
|
|
3
|
+
Staging npm installer for the Kisuke CLI. This package downloads a prebuilt CLI bundle from the public R2 bucket during install and exposes the `kisuke` command.
|
|
4
|
+
|
|
5
|
+
## Install (dev)
|
|
6
|
+
|
|
7
|
+
```
|
|
8
|
+
npm install -g @kisuke/cli@dev
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## Notes
|
|
12
|
+
|
|
13
|
+
- Artifacts are pulled from the staging R2 prefix.
|
|
14
|
+
- The binary itself remains named `kisuke`.
|
package/bin/kisuke.js
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import fs from 'node:fs';
|
|
3
|
+
import path from 'node:path';
|
|
4
|
+
import { spawn } from 'node:child_process';
|
|
5
|
+
import { fileURLToPath } from 'node:url';
|
|
6
|
+
|
|
7
|
+
import { getBaseVersion, getPlatformTag } from '../scripts/platform.mjs';
|
|
8
|
+
|
|
9
|
+
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
|
10
|
+
const pkgRoot = path.resolve(__dirname, '..');
|
|
11
|
+
const pkgJson = JSON.parse(fs.readFileSync(path.join(pkgRoot, 'package.json'), 'utf8'));
|
|
12
|
+
|
|
13
|
+
const baseVersion = getBaseVersion(pkgJson.version);
|
|
14
|
+
const platformTag = getPlatformTag();
|
|
15
|
+
const bundleName = `kisuke-cli-${baseVersion}-${platformTag}`;
|
|
16
|
+
const bundleDir = path.join(pkgRoot, '.vendor', baseVersion, platformTag, bundleName);
|
|
17
|
+
|
|
18
|
+
const executable = process.platform === 'win32'
|
|
19
|
+
? path.join(bundleDir, 'kisuke.cmd')
|
|
20
|
+
: path.join(bundleDir, 'kisuke');
|
|
21
|
+
|
|
22
|
+
if (!fs.existsSync(executable)) {
|
|
23
|
+
console.error('Kisuke CLI bundle is not installed. Try reinstalling @kisuke/cli.');
|
|
24
|
+
process.exit(1);
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
const child = spawn(executable, process.argv.slice(2), {
|
|
28
|
+
stdio: 'inherit',
|
|
29
|
+
shell: process.platform === 'win32',
|
|
30
|
+
});
|
|
31
|
+
|
|
32
|
+
child.on('exit', (code) => process.exit(code ?? 0));
|
|
33
|
+
child.on('error', (err) => {
|
|
34
|
+
console.error(err?.message || err);
|
|
35
|
+
process.exit(1);
|
|
36
|
+
});
|
package/config.json
ADDED
package/package.json
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@kisuke/cli",
|
|
3
|
+
"version": "1.1.12-dev.21.2",
|
|
4
|
+
"private": false,
|
|
5
|
+
"description": "Kisuke CLI (dev) installer",
|
|
6
|
+
"type": "module",
|
|
7
|
+
"bin": {
|
|
8
|
+
"kisuke": "./bin/kisuke.js"
|
|
9
|
+
},
|
|
10
|
+
"files": [
|
|
11
|
+
"bin",
|
|
12
|
+
"scripts",
|
|
13
|
+
"config.json",
|
|
14
|
+
"config.example.json",
|
|
15
|
+
"README.md",
|
|
16
|
+
"package.json"
|
|
17
|
+
],
|
|
18
|
+
"scripts": {
|
|
19
|
+
"postinstall": "node scripts/download.mjs"
|
|
20
|
+
},
|
|
21
|
+
"engines": {
|
|
22
|
+
"node": ">=18"
|
|
23
|
+
},
|
|
24
|
+
"publishConfig": {
|
|
25
|
+
"access": "public"
|
|
26
|
+
}
|
|
27
|
+
}
|
|
@@ -0,0 +1,159 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import os from 'node:os';
|
|
4
|
+
import { fileURLToPath } from 'node:url';
|
|
5
|
+
import { createHash } from 'node:crypto';
|
|
6
|
+
import { spawnSync } from 'node:child_process';
|
|
7
|
+
import https from 'node:https';
|
|
8
|
+
|
|
9
|
+
import { getArchiveExt, getBaseVersion, getPlatformTag } from './platform.mjs';
|
|
10
|
+
|
|
11
|
+
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
|
12
|
+
const pkgRoot = path.resolve(__dirname, '..');
|
|
13
|
+
|
|
14
|
+
const workspaceMarker = path.resolve(pkgRoot, '..', '..', 'pnpm-workspace.yaml');
|
|
15
|
+
if (fs.existsSync(workspaceMarker)) {
|
|
16
|
+
process.exit(0);
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
const pkgJson = JSON.parse(fs.readFileSync(path.join(pkgRoot, 'package.json'), 'utf8'));
|
|
20
|
+
|
|
21
|
+
const baseVersion = getBaseVersion(pkgJson.version);
|
|
22
|
+
const platformTag = getPlatformTag();
|
|
23
|
+
const archiveExt = getArchiveExt(platformTag);
|
|
24
|
+
const bundleName = `kisuke-cli-${baseVersion}-${platformTag}`;
|
|
25
|
+
const archiveName = `${bundleName}.${archiveExt}`;
|
|
26
|
+
|
|
27
|
+
const configPath = path.join(pkgRoot, 'config.json');
|
|
28
|
+
const config = fs.existsSync(configPath)
|
|
29
|
+
? JSON.parse(fs.readFileSync(configPath, 'utf8'))
|
|
30
|
+
: {};
|
|
31
|
+
|
|
32
|
+
const accountId = config.accountId || process.env.R2_ACCOUNT_ID;
|
|
33
|
+
const bucket = config.bucket || process.env.R2_BUCKET;
|
|
34
|
+
let prefix = config.prefix || process.env.R2_PREFIX || 'connect/';
|
|
35
|
+
|
|
36
|
+
if (!accountId || !bucket) {
|
|
37
|
+
throw new Error('Missing R2 configuration. Provide config.json or R2_ACCOUNT_ID/R2_BUCKET env vars.');
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
if (prefix && !prefix.endsWith('/')) prefix = `${prefix}/`;
|
|
41
|
+
|
|
42
|
+
const baseUrl = `https://${accountId}.r2.cloudflarestorage.com/${bucket}/${prefix}`;
|
|
43
|
+
const folderUrl = `${baseUrl}${baseVersion}/cli/${platformTag}/`;
|
|
44
|
+
const archiveUrl = `${folderUrl}${archiveName}`;
|
|
45
|
+
const sumsUrl = `${folderUrl}SHA256SUMS`;
|
|
46
|
+
|
|
47
|
+
const vendorRoot = path.join(pkgRoot, '.vendor', baseVersion, platformTag);
|
|
48
|
+
const bundleDir = path.join(vendorRoot, bundleName);
|
|
49
|
+
|
|
50
|
+
if (fs.existsSync(bundleDir)) {
|
|
51
|
+
process.exit(0);
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
fs.mkdirSync(vendorRoot, { recursive: true });
|
|
55
|
+
|
|
56
|
+
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'kisuke-cli-'));
|
|
57
|
+
const archivePath = path.join(tmpDir, archiveName);
|
|
58
|
+
const sumsPath = path.join(tmpDir, 'SHA256SUMS');
|
|
59
|
+
|
|
60
|
+
await downloadFile(sumsUrl, sumsPath);
|
|
61
|
+
await downloadFile(archiveUrl, archivePath);
|
|
62
|
+
|
|
63
|
+
const expectedSha = findSha256(sumsPath, archiveName);
|
|
64
|
+
if (!expectedSha) {
|
|
65
|
+
throw new Error(`SHA256SUMS missing entry for ${archiveName}`);
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
const actualSha = sha256File(archivePath);
|
|
69
|
+
if (actualSha !== expectedSha) {
|
|
70
|
+
throw new Error(`SHA256 mismatch for ${archiveName}: expected ${expectedSha}, got ${actualSha}`);
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
extractArchive(archivePath, vendorRoot, archiveExt);
|
|
74
|
+
|
|
75
|
+
if (!fs.existsSync(bundleDir)) {
|
|
76
|
+
throw new Error(`Extraction failed: ${bundleDir} not found`);
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
cleanup(tmpDir);
|
|
80
|
+
|
|
81
|
+
/**
 * Download `url` to local file `dest`, following HTTP redirects, with simple
 * linear-backoff retries (1s, 2s, ... between attempts).
 *
 * @param {string} url - HTTPS URL to fetch.
 * @param {string} dest - Local path to write the response body to.
 * @param {number} [retries=3] - Total attempts remaining.
 * @returns {Promise<void>} resolves once the file is fully flushed to disk.
 */
function downloadFile(url, dest, retries = 3) {
  return new Promise((resolve, reject) => {
    const request = https.get(url, (res) => {
      if (res.statusCode && res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
        // FIX: drain the redirect response so its socket is released.
        res.resume();
        return downloadFile(res.headers.location, dest, retries).then(resolve).catch(reject);
      }
      if (res.statusCode !== 200) {
        // FIX: consume the error body; an undrained response leaks the socket.
        res.resume();
        reject(new Error(`Failed to download ${url} (status ${res.statusCode})`));
        return;
      }
      const file = fs.createWriteStream(dest);
      res.pipe(file);
      // FIX: stream errors were previously never surfaced, so a disk-full or
      // dropped-connection download would hang the install forever.
      res.on('error', reject);
      file.on('error', reject);
      file.on('finish', () => file.close(resolve));
    });
    request.on('error', reject);
  }).catch((err) => {
    if (retries <= 1) throw err;
    const delay = (4 - retries) * 1000;
    console.warn(`Download failed (${err.message}), retrying in ${delay / 1000}s... (${retries - 1} left)`);
    return new Promise((r) => setTimeout(r, delay)).then(() => downloadFile(url, dest, retries - 1));
  });
}
/**
 * Compute the hex-encoded SHA-256 digest of a file's contents.
 *
 * @param {string} filePath - Path of the file to hash.
 * @returns {string} 64-character lowercase hex digest.
 */
function sha256File(filePath) {
  const contents = fs.readFileSync(filePath);
  return createHash('sha256').update(contents).digest('hex');
}
/**
 * Look up the SHA-256 digest for `filename` in a GNU `sha256sum`-style
 * manifest ("<64-hex-digest><whitespace><name>" per line). Blank and
 * malformed lines are skipped.
 *
 * @param {string} sumsFile - Path to the SHA256SUMS file.
 * @param {string} filename - Exact entry name to look up.
 * @returns {string|null} the hex digest as written, or null when absent.
 */
function findSha256(sumsFile, filename) {
  const lines = fs.readFileSync(sumsFile, 'utf8').split('\n');
  for (const line of lines) {
    const trimmed = line.trim();
    if (!trimmed) continue;
    // FIX: accept binary-mode entries ("<sha>  *<name>") by allowing an
    // optional '*' before the filename, per sha256sum's output format;
    // previously the '*' was captured into the name and never matched.
    const match = trimmed.match(/^([a-f0-9]{64})\s+\*?(.+)$/i);
    if (!match) continue;
    if (match[2] === filename) return match[1];
  }
  return null;
}
/**
 * Extract `archivePath` into `destDir` using platform tooling: `tar` for
 * tar.gz archives, PowerShell's Expand-Archive on Windows, `unzip` elsewhere
 * for zip archives.
 *
 * @param {string} archivePath - Archive file to extract.
 * @param {string} destDir - Existing directory to extract into.
 * @param {string} ext - Archive type: 'tar.gz' or 'zip'.
 * @throws {Error} when the extractor is missing or fails, or `ext` is unknown.
 */
function extractArchive(archivePath, destDir, ext) {
  if (ext === 'tar.gz') {
    const result = spawnSync('tar', ['-xzf', archivePath, '-C', destDir], { stdio: 'inherit' });
    // FIX: result.error (e.g. ENOENT when tar is not installed) was silently
    // dropped, producing an unhelpful generic failure; attach it as the cause.
    if (result.error) throw new Error('tar extraction failed', { cause: result.error });
    if (result.status !== 0) throw new Error('tar extraction failed');
    return;
  }

  if (ext === 'zip') {
    if (process.platform === 'win32') {
      const result = spawnSync(
        'powershell',
        [
          '-NoProfile',
          '-Command',
          `Expand-Archive -LiteralPath \"${archivePath}\" -DestinationPath \"${destDir}\" -Force`,
        ],
        { stdio: 'inherit' },
      );
      if (result.error) throw new Error('zip extraction failed', { cause: result.error });
      if (result.status !== 0) throw new Error('zip extraction failed');
      return;
    }

    const result = spawnSync('unzip', ['-o', archivePath, '-d', destDir], { stdio: 'inherit' });
    if (result.error) throw new Error('unzip extraction failed', { cause: result.error });
    if (result.status !== 0) throw new Error('unzip extraction failed');
    return;
  }

  throw new Error(`Unsupported archive type: ${ext}`);
}
/**
 * Best-effort recursive removal of a directory; failures are ignored so a
 * stray temp dir can never break the install itself.
 *
 * @param {string} dir - Directory to delete.
 */
function cleanup(dir) {
  try {
    fs.rmSync(dir, { force: true, recursive: true });
  } catch (err) {
    // Intentionally swallowed: leftover temp files are harmless.
  }
}
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { arch, platform, report } from 'node:process';
|
|
2
|
+
|
|
3
|
+
/**
 * Strip a trailing `-dev.<n>...` prerelease suffix from a version string,
 * yielding the base release version (e.g. "1.1.12-dev.21.2" -> "1.1.12").
 * Versions without a dev suffix are returned unchanged.
 *
 * @param {string} version - Full package version.
 * @returns {string} the version without its dev suffix.
 */
export function getBaseVersion(version) {
  const DEV_SUFFIX = /-dev\..+$/;
  return version.replace(DEV_SUFFIX, '');
}
/**
 * Heuristic musl-libc detection: a Node build linked against glibc exposes
 * `glibcVersionRuntime` in its process report header; its absence on Linux
 * is taken to mean musl (e.g. Alpine).
 *
 * @returns {boolean} true when running on musl-based Linux.
 */
function isMusl() {
  if (platform !== 'linux') return false;
  try {
    return !report?.getReport?.()?.header?.glibcVersionRuntime;
  } catch {
    // Report API unavailable: assume musl, the safer static-build choice.
    return true;
  }
}

/**
 * Map the current platform/arch pair to the artifact tag used in bundle
 * names (darwin-arm64, darwin-x64, linux-arm64, linux-x64, linux-musl-x64,
 * windows-x64).
 *
 * @returns {string} the platform tag for this machine.
 * @throws {Error} for any unsupported platform/arch combination.
 */
export function getPlatformTag() {
  switch (platform) {
    case 'darwin':
      if (arch === 'arm64') return 'darwin-arm64';
      if (arch === 'x64') return 'darwin-x64';
      break;
    case 'linux':
      if (arch === 'arm64') return 'linux-arm64';
      if (arch === 'x64') return isMusl() ? 'linux-musl-x64' : 'linux-x64';
      break;
    case 'win32':
      if (arch === 'x64') return 'windows-x64';
      break;
  }

  throw new Error(`Unsupported platform/arch: ${platform}/${arch}`);
}
/**
 * Archive format for a given platform tag: Windows bundles ship as zip,
 * every other platform as gzipped tar.
 *
 * @param {string} platformTag - Tag produced by getPlatformTag().
 * @returns {string} 'zip' or 'tar.gz'.
 */
export function getArchiveExt(platformTag) {
  if (platformTag.startsWith('windows-')) {
    return 'zip';
  }
  return 'tar.gz';
}