s3-storage-cli 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Muneeb
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,62 @@
1
+ # s3-storage-cli
2
+
3
+ Minimal Bun CLI for tracked S3 uploads.
4
+
5
+ Install:
6
+
7
+ ```bash
8
+ npm install -g s3-storage-cli
9
+ ```
10
+
11
+ Runtime requirement:
12
+
13
+ - `bun` must be installed because the published CLI executes with Bun
14
+
15
+ Commands:
16
+
17
+ - `status` verifies env, SQLite, and S3 connectivity
18
+ - `list` shows only objects tracked by this CLI
19
+ - `upload` uploads one or more files or directories
20
+ - `delete` removes tracked objects
21
+ - `share` returns a direct public URL or a signed private URL
22
+
23
+ Required env:
24
+
25
+ - `S3_ENDPOINT`
26
+ - `S3_REGION`
27
+ - `S3_ACCESS_KEY_ID`
28
+ - `S3_SECRET_ACCESS_KEY`
29
+ - `S3_BUCKET`
30
+
31
+ Required for full readiness and public sharing:
32
+
33
+ - `S3_PUBLIC_BASE_URL`
34
+
35
+ Optional env:
36
+
37
+ - `S3_CLI_DB_PATH`
38
+ - `S3_SHARE_TTL_SECONDS`
39
+ - `S3_SESSION_TOKEN`
40
+ - `S3_VIRTUAL_HOSTED_STYLE`
41
+
42
+ Run:
43
+
44
+ ```bash
45
+ bun run index.ts status
46
+ ```
47
+
48
+ After global install:
49
+
50
+ ```bash
51
+ s3-storage status
52
+ ```
53
+
54
+ Examples:
55
+
56
+ ```bash
57
+ bun run index.ts upload ./file.txt
58
+ bun run index.ts upload ./assets --public --prefix site
59
+ bun run index.ts list
60
+ bun run index.ts share site/assets/logo.png
61
+ bun run index.ts delete site/assets/logo.png
62
+ ```
package/SKILL.md ADDED
@@ -0,0 +1,49 @@
1
+ ---
2
+ name: s3-storage-cli
3
+ description: Use this skill when you need to manage tracked uploads in this repository's S3 CLI. It covers status checks, tracked listing, uploads, deletes, and sharing links with token-efficient output.
4
+ ---
5
+
6
+ # S3 Storage CLI
7
+
8
+ Use this repo's CLI when the task is uploading files to the configured S3-compatible bucket, listing only files previously uploaded through this CLI, deleting tracked files, checking readiness, or generating share links.
9
+
10
+ ## Required env
11
+
12
+ - `S3_ENDPOINT`
13
+ - `S3_REGION`
14
+ - `S3_ACCESS_KEY_ID`
15
+ - `S3_SECRET_ACCESS_KEY`
16
+ - `S3_BUCKET`
17
+
18
+ `S3_PUBLIC_BASE_URL` is required for `status` to report fully ready and for public object sharing.
19
+
20
+ Optional env:
21
+
22
+ - `S3_CLI_DB_PATH`
23
+ - `S3_SHARE_TTL_SECONDS`
24
+ - `S3_SESSION_TOKEN`
25
+ - `S3_VIRTUAL_HOSTED_STYLE`
26
+
27
+ ## Commands
28
+
29
+ - `bun run index.ts status`
30
+ - `bun run index.ts list [prefix]`
31
+ - `bun run index.ts upload <paths...> [--public|--private] [--prefix <remote-prefix>]`
32
+ - `bun run index.ts delete <keys...>`
33
+ - `bun run index.ts share <key> [--expires <seconds>]`
34
+
35
+ Short aliases are available: `ls`, `up`, `rm`, `sh`, `st`.
36
+
37
+ ## Behavior
38
+
39
+ - `list` reads the local SQLite catalog only. It does not list the whole bucket.
40
+ - `delete` only deletes tracked keys that are still active in the catalog.
41
+ - `upload` preserves relative paths for directory uploads. Single-file uploads use the file basename.
42
+ - `share` returns a direct URL for tracked public objects and a presigned URL for tracked private objects.
43
+ - Default output is compact plain text. Add `--json` for machine-readable output.
44
+
45
+ ## Output contract
46
+
47
+ - Success output is terse and stable.
48
+ - Errors are one-line stderr records in the form `error<TAB>code<TAB>message`.
49
+ - `share` plain output is the URL only.
package/index.ts ADDED
@@ -0,0 +1,6 @@
1
+ #!/usr/bin/env bun
2
+
3
+ import { runCli } from "./src/cli";
4
+
5
+ const exitCode = await runCli(process.argv.slice(2));
6
+ process.exit(exitCode);
package/package.json ADDED
@@ -0,0 +1,48 @@
1
+ {
2
+ "name": "s3-storage-cli",
3
+ "version": "0.1.0",
4
+ "description": "Minimal tracked S3 CLI for Bun with list, upload, delete, share, and status commands.",
5
+ "module": "index.ts",
6
+ "type": "module",
7
+ "license": "MIT",
8
+ "bin": {
9
+ "s3-storage": "./index.ts"
10
+ },
11
+ "files": [
12
+ "index.ts",
13
+ "src/catalog.ts",
14
+ "src/cli.ts",
15
+ "src/config.ts",
16
+ "src/files.ts",
17
+ "src/output.ts",
18
+ "src/storage.ts",
19
+ "README.md",
20
+ "SKILL.md",
21
+ "LICENSE"
22
+ ],
23
+ "scripts": {
24
+ "start": "bun run index.ts",
25
+ "test": "bun test"
26
+ },
27
+ "publishConfig": {
28
+ "access": "public"
29
+ },
30
+ "repository": {
31
+ "type": "git",
32
+ "url": "git+https://github.com/muneebhashone/s3-storage-cli.git"
33
+ },
34
+ "homepage": "https://github.com/muneebhashone/s3-storage-cli#readme",
35
+ "bugs": {
36
+ "url": "https://github.com/muneebhashone/s3-storage-cli/issues"
37
+ },
38
+ "keywords": [
39
+ "s3",
40
+ "cli",
41
+ "bun",
42
+ "sqlite",
43
+ "upload"
44
+ ],
45
+ "devDependencies": {
46
+ "@types/bun": "latest"
47
+ }
48
+ }
package/src/catalog.ts ADDED
@@ -0,0 +1,164 @@
1
+ import { mkdirSync } from "node:fs";
2
+ import { dirname } from "node:path";
3
+ import { Database } from "bun:sqlite";
4
+ import type { Visibility } from "./config";
5
+
6
+ export interface CatalogRecord {
7
+ bucket: string;
8
+ contentType: string | null;
9
+ etag: string | null;
10
+ key: string;
11
+ size: number;
12
+ sourcePath: string;
13
+ uploadedAt: string;
14
+ visibility: Visibility;
15
+ }
16
+
17
+ export interface ListedObject {
18
+ contentType: string | null;
19
+ etag: string | null;
20
+ key: string;
21
+ size: number;
22
+ sourcePath: string;
23
+ uploadedAt: string;
24
+ visibility: Visibility;
25
+ }
26
+
27
+ export class Catalog {
28
+ private readonly db: Database;
29
+
30
+ constructor(databasePath: string) {
31
+ mkdirSync(dirname(databasePath), { recursive: true });
32
+ this.db = new Database(databasePath, { create: true, strict: true });
33
+ this.db.run("PRAGMA journal_mode = WAL;");
34
+ this.db.run("PRAGMA synchronous = NORMAL;");
35
+ this.init();
36
+ }
37
+
38
+ close(): void {
39
+ this.db.close();
40
+ }
41
+
42
+ ping(): void {
43
+ this.db.query("SELECT 1 AS ok").get();
44
+ }
45
+
46
+ upsertObject(record: CatalogRecord): void {
47
+ this.db.run(
48
+ `
49
+ INSERT INTO objects (
50
+ bucket,
51
+ key,
52
+ visibility,
53
+ size,
54
+ etag,
55
+ content_type,
56
+ source_path,
57
+ uploaded_at,
58
+ deleted_at
59
+ )
60
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, NULL)
61
+ ON CONFLICT(bucket, key)
62
+ DO UPDATE SET
63
+ visibility = excluded.visibility,
64
+ size = excluded.size,
65
+ etag = excluded.etag,
66
+ content_type = excluded.content_type,
67
+ source_path = excluded.source_path,
68
+ uploaded_at = excluded.uploaded_at,
69
+ deleted_at = NULL
70
+ `,
71
+ [
72
+ record.bucket,
73
+ record.key,
74
+ record.visibility,
75
+ record.size,
76
+ record.etag,
77
+ record.contentType,
78
+ record.sourcePath,
79
+ record.uploadedAt,
80
+ ],
81
+ );
82
+ }
83
+
84
+ listObjects(bucket: string, prefix?: string): ListedObject[] {
85
+ return this.db
86
+ .query<ListedObject, { bucket: string; prefix: string | null }>(
87
+ `
88
+ SELECT
89
+ key,
90
+ visibility,
91
+ size,
92
+ etag,
93
+ content_type AS contentType,
94
+ source_path AS sourcePath,
95
+ uploaded_at AS uploadedAt
96
+ FROM objects
97
+ WHERE bucket = $bucket
98
+ AND deleted_at IS NULL
99
+ AND ($prefix IS NULL OR key LIKE $prefix || '%')
100
+ ORDER BY key ASC
101
+ `,
102
+ )
103
+ .all({ bucket, prefix: prefix ?? null });
104
+ }
105
+
106
+ getObject(bucket: string, key: string): ListedObject | null {
107
+ return (
108
+ this.db
109
+ .query<ListedObject, { bucket: string; key: string }>(
110
+ `
111
+ SELECT
112
+ key,
113
+ visibility,
114
+ size,
115
+ etag,
116
+ content_type AS contentType,
117
+ source_path AS sourcePath,
118
+ uploaded_at AS uploadedAt
119
+ FROM objects
120
+ WHERE bucket = $bucket
121
+ AND key = $key
122
+ AND deleted_at IS NULL
123
+ LIMIT 1
124
+ `,
125
+ )
126
+ .get({ bucket, key }) ?? null
127
+ );
128
+ }
129
+
130
+ softDeleteObject(bucket: string, key: string, deletedAt: string): void {
131
+ this.db.run(
132
+ `
133
+ UPDATE objects
134
+ SET deleted_at = ?
135
+ WHERE bucket = ?
136
+ AND key = ?
137
+ AND deleted_at IS NULL
138
+ `,
139
+ [deletedAt, bucket, key],
140
+ );
141
+ }
142
+
143
+ private init(): void {
144
+ this.db.run(`
145
+ CREATE TABLE IF NOT EXISTS objects (
146
+ bucket TEXT NOT NULL,
147
+ key TEXT NOT NULL,
148
+ visibility TEXT NOT NULL CHECK (visibility IN ('private', 'public')),
149
+ size INTEGER NOT NULL,
150
+ etag TEXT,
151
+ content_type TEXT,
152
+ source_path TEXT NOT NULL,
153
+ uploaded_at TEXT NOT NULL,
154
+ deleted_at TEXT,
155
+ PRIMARY KEY (bucket, key)
156
+ )
157
+ `);
158
+
159
+ this.db.run(`
160
+ CREATE INDEX IF NOT EXISTS idx_objects_bucket_deleted_key
161
+ ON objects (bucket, deleted_at, key)
162
+ `);
163
+ }
164
+ }
package/src/cli.ts ADDED
@@ -0,0 +1,523 @@
1
+ import { dirname } from "node:path";
2
+ import { mkdir } from "node:fs/promises";
3
+ import { Catalog } from "./catalog";
4
+ import {
5
+ ensurePublicBaseUrl,
6
+ getReadyEnvKeys,
7
+ inspectEnv,
8
+ requireCoreConfig,
9
+ resolveCatalogPath,
10
+ type AppConfig,
11
+ type EnvMap,
12
+ type Visibility,
13
+ } from "./config";
14
+ import { resolveUploadTargets } from "./files";
15
+ import { createDefaultIo, emitEnvCheck, emitError, emitJson, formatTimestamp, type CliIo } from "./output";
16
+ import { BunStorageClient, type StorageClient } from "./storage";
17
+
18
+ interface ParsedArgs {
19
+ command: string | null;
20
+ flags: Record<string, string | boolean>;
21
+ positionals: string[];
22
+ }
23
+
24
+ interface RunCliOptions {
25
+ catalogPath?: string;
26
+ cwd?: string;
27
+ env?: EnvMap;
28
+ homeDir?: string;
29
+ io?: CliIo;
30
+ now?: () => Date;
31
+ storageFactory?: (config: AppConfig) => StorageClient;
32
+ }
33
+
34
+ interface StatusCheckResult {
35
+ message?: string;
36
+ name: string;
37
+ ok: boolean;
38
+ }
39
+
40
+ export async function runCli(argv: string[], options: RunCliOptions = {}): Promise<number> {
41
+ const io = options.io ?? createDefaultIo();
42
+ const env = options.env ?? process.env;
43
+ const cwd = options.cwd ?? process.cwd();
44
+ const now = options.now ?? (() => new Date());
45
+ const storageFactory = options.storageFactory ?? ((config: AppConfig) => new BunStorageClient(config));
46
+
47
+ try {
48
+ const parsed = parseArgs(argv);
49
+ if (!parsed.command || parsed.flags.help) {
50
+ printHelp(io);
51
+ return 0;
52
+ }
53
+
54
+ switch (parsed.command) {
55
+ case "list":
56
+ case "ls":
57
+ return await runList(parsed, { catalogPath: options.catalogPath, env, homeDir: options.homeDir, io });
58
+ case "upload":
59
+ case "up":
60
+ return await runUpload(parsed, {
61
+ catalogPath: options.catalogPath,
62
+ cwd,
63
+ env,
64
+ homeDir: options.homeDir,
65
+ io,
66
+ now,
67
+ storageFactory,
68
+ });
69
+ case "delete":
70
+ case "rm":
71
+ return await runDelete(parsed, {
72
+ catalogPath: options.catalogPath,
73
+ env,
74
+ homeDir: options.homeDir,
75
+ io,
76
+ now,
77
+ storageFactory,
78
+ });
79
+ case "share":
80
+ case "sh":
81
+ return await runShare(parsed, {
82
+ catalogPath: options.catalogPath,
83
+ env,
84
+ homeDir: options.homeDir,
85
+ io,
86
+ storageFactory,
87
+ });
88
+ case "status":
89
+ case "st":
90
+ return await runStatus(parsed, {
91
+ catalogPath: options.catalogPath,
92
+ env,
93
+ homeDir: options.homeDir,
94
+ io,
95
+ storageFactory,
96
+ });
97
+ default:
98
+ throw new Error(`unknown command ${parsed.command}`);
99
+ }
100
+ } catch (error) {
101
+ emitError(io, "cli", getErrorMessage(error));
102
+ return 1;
103
+ }
104
+ }
105
+
106
+ async function runList(
107
+ parsed: ParsedArgs,
108
+ options: { catalogPath?: string; env: EnvMap; homeDir?: string; io: CliIo },
109
+ ): Promise<number> {
110
+ if (parsed.positionals.length > 1) {
111
+ throw new Error("list accepts at most one prefix");
112
+ }
113
+
114
+ const config = requireCoreConfig(options.env, {
115
+ catalogPath: options.catalogPath,
116
+ homeDir: options.homeDir,
117
+ });
118
+
119
+ const catalog = new Catalog(config.catalogPath);
120
+ try {
121
+ const objects = catalog.listObjects(config.bucket, parsed.positionals[0]);
122
+ if (parsed.flags.json) {
123
+ emitJson(options.io, { bucket: config.bucket, objects });
124
+ return 0;
125
+ }
126
+
127
+ for (const object of objects) {
128
+ options.io.stdout(
129
+ `${object.key}\t${object.visibility}\t${object.size}\t${formatTimestamp(object.uploadedAt)}`,
130
+ );
131
+ }
132
+ return 0;
133
+ } finally {
134
+ catalog.close();
135
+ }
136
+ }
137
+
138
/**
 * `upload` command: resolves local files (files or whole directories) into
 * remote keys, uploads each, and records it in the SQLite catalog.
 *
 * Output: one `uploaded\t<key>\t<visibility>\t<size>` line per object, or a
 * single JSON document with `--json`. Errors propagate to runCli's handler.
 */
async function runUpload(
  parsed: ParsedArgs,
  options: {
    catalogPath?: string;
    cwd: string;
    env: EnvMap;
    homeDir?: string;
    io: CliIo;
    now: () => Date;
    storageFactory: (config: AppConfig) => StorageClient;
  },
): Promise<number> {
  const visibility = resolveVisibility(parsed.flags);
  const prefix = readOptionalStringFlag(parsed.flags, "prefix");
  const config = requireCoreConfig(options.env, {
    catalogPath: options.catalogPath,
    homeDir: options.homeDir,
  });

  // Fail fast: public uploads need a base URL before any bytes are sent.
  if (visibility === "public") {
    ensurePublicBaseUrl(config);
  }

  // Validates paths and computes remote keys up front (throws on missing
  // paths or duplicate keys) so a bad argument aborts before any upload.
  const uploadTargets = await resolveUploadTargets(parsed.positionals, {
    cwd: options.cwd,
    prefix,
  });

  // Ensure the catalog's directory exists before opening SQLite.
  await mkdir(dirname(config.catalogPath), { recursive: true });
  const storage = options.storageFactory(config);
  const catalog = new Catalog(config.catalogPath);
  const uploaded: Array<{ key: string; size: number; visibility: Visibility }> = [];

  try {
    for (const target of uploadTargets) {
      // Upload first, then record: the catalog only ever references objects
      // that actually reached the bucket.
      const remoteMeta = await storage.uploadFile(target.absolutePath, target.key, visibility);
      const uploadedAt = options.now().toISOString();
      catalog.upsertObject({
        bucket: config.bucket,
        contentType: remoteMeta.contentType,
        etag: remoteMeta.etag,
        key: target.key,
        size: remoteMeta.size,
        sourcePath: target.sourcePath,
        uploadedAt,
        visibility,
      });
      uploaded.push({
        key: target.key,
        size: remoteMeta.size,
        visibility,
      });
    }

    if (parsed.flags.json) {
      emitJson(options.io, { bucket: config.bucket, uploaded });
      return 0;
    }

    for (const item of uploaded) {
      options.io.stdout(`uploaded\t${item.key}\t${item.visibility}\t${item.size}`);
    }
    return 0;
  } finally {
    catalog.close();
  }
}
205
+
206
/**
 * `delete` command: removes tracked keys from the bucket and soft-deletes
 * them in the catalog. Untracked keys are refused (this CLI only manages
 * objects it uploaded).
 *
 * Best-effort: a failure on one key is reported and the loop continues.
 * Exit code is 0 only when every requested key was deleted.
 */
async function runDelete(
  parsed: ParsedArgs,
  options: {
    catalogPath?: string;
    env: EnvMap;
    homeDir?: string;
    io: CliIo;
    now: () => Date;
    storageFactory: (config: AppConfig) => StorageClient;
  },
): Promise<number> {
  if (parsed.positionals.length === 0) {
    throw new Error("delete requires at least one key");
  }

  const config = requireCoreConfig(options.env, {
    catalogPath: options.catalogPath,
    homeDir: options.homeDir,
  });
  const storage = options.storageFactory(config);
  const catalog = new Catalog(config.catalogPath);
  const deleted: string[] = [];
  const errors: Array<{ key: string; message: string }> = [];

  try {
    for (const key of parsed.positionals) {
      // Only keys still active in the catalog may be deleted.
      const tracked = catalog.getObject(config.bucket, key);
      if (!tracked) {
        const message = `untracked key ${key}`;
        errors.push({ key, message });
        // Plain mode streams errors immediately; JSON mode buffers them.
        if (!parsed.flags.json) {
          emitError(options.io, "delete", message);
        }
        continue;
      }

      try {
        // Remote delete first; the catalog row is only tombstoned on success.
        await storage.deleteObject(key);
        catalog.softDeleteObject(config.bucket, key, options.now().toISOString());
        deleted.push(key);
      } catch (error) {
        const message = `${key} ${getErrorMessage(error)}`;
        errors.push({ key, message });
        if (!parsed.flags.json) {
          emitError(options.io, "delete", message);
        }
      }
    }

    if (parsed.flags.json) {
      emitJson(options.io, { bucket: config.bucket, deleted, errors });
    } else {
      for (const key of deleted) {
        options.io.stdout(`deleted\t${key}`);
      }
    }

    return errors.length === 0 ? 0 : 1;
  } finally {
    catalog.close();
  }
}
268
+
269
+ async function runShare(
270
+ parsed: ParsedArgs,
271
+ options: {
272
+ catalogPath?: string;
273
+ env: EnvMap;
274
+ homeDir?: string;
275
+ io: CliIo;
276
+ storageFactory: (config: AppConfig) => StorageClient;
277
+ },
278
+ ): Promise<number> {
279
+ if (parsed.positionals.length !== 1) {
280
+ throw new Error("share requires exactly one key");
281
+ }
282
+
283
+ const config = requireCoreConfig(options.env, {
284
+ catalogPath: options.catalogPath,
285
+ homeDir: options.homeDir,
286
+ });
287
+ const catalog = new Catalog(config.catalogPath);
288
+ try {
289
+ const key = parsed.positionals[0]!;
290
+ const tracked = catalog.getObject(config.bucket, key);
291
+ if (!tracked) {
292
+ throw new Error(`untracked key ${key}`);
293
+ }
294
+
295
+ if (tracked.visibility === "public") {
296
+ ensurePublicBaseUrl(config);
297
+ }
298
+
299
+ const expiresIn = readOptionalPositiveIntegerFlag(parsed.flags, "expires") ?? config.shareTtlSeconds;
300
+ const storage = options.storageFactory(config);
301
+ const url = await storage.getShareUrl(key, tracked.visibility, expiresIn);
302
+
303
+ if (parsed.flags.json) {
304
+ emitJson(options.io, {
305
+ expiresIn: tracked.visibility === "private" ? expiresIn : null,
306
+ key,
307
+ url,
308
+ visibility: tracked.visibility,
309
+ });
310
+ return 0;
311
+ }
312
+
313
+ options.io.stdout(url);
314
+ return 0;
315
+ } finally {
316
+ catalog.close();
317
+ }
318
+ }
319
+
320
/**
 * `status` command: runs three groups of readiness checks — env vars, local
 * SQLite catalog, and S3 connectivity — and exits 0 only when all pass.
 *
 * Plain mode streams one line per check as it runs; --json buffers every
 * check into a single document emitted at the end.
 */
async function runStatus(
  parsed: ParsedArgs,
  options: {
    catalogPath?: string;
    env: EnvMap;
    homeDir?: string;
    io: CliIo;
    storageFactory: (config: AppConfig) => StorageClient;
  },
): Promise<number> {
  if (parsed.positionals.length > 0) {
    throw new Error("status does not accept positional arguments");
  }

  const checks: StatusCheckResult[] = [];
  const envChecks = inspectEnv(options.env);
  const catalogPath = options.catalogPath ?? resolveCatalogPath(options.env, options.homeDir);

  // Check 1: required env vars (core keys plus S3_PUBLIC_BASE_URL).
  if (parsed.flags.json) {
    for (const check of envChecks) {
      checks.push({ message: check.message, name: check.key, ok: check.ok });
    }
  } else {
    for (const check of envChecks) {
      emitEnvCheck(options.io, check);
    }
  }

  // Check 2: the SQLite catalog can be opened and queried.
  let dbOk = false;
  try {
    const catalog = new Catalog(catalogPath);
    catalog.ping();
    catalog.close();
    dbOk = true;
    if (parsed.flags.json) {
      checks.push({ message: catalogPath, name: "db", ok: true });
    } else {
      options.io.stdout(`ok\tdb\t${catalogPath}`);
    }
  } catch (error) {
    const message = getErrorMessage(error);
    if (parsed.flags.json) {
      checks.push({ message, name: "db", ok: false });
    } else {
      emitError(options.io, "db", message);
    }
  }

  // Without core config the S3 probe cannot run: report not_ready and stop.
  // (The env check above already told the user which keys are missing.)
  let config: AppConfig | null = null;
  try {
    config = requireCoreConfig(options.env, {
      catalogPath: options.catalogPath,
      homeDir: options.homeDir,
    });
  } catch (error) {
    if (parsed.flags.json) {
      checks.push({ message: getErrorMessage(error), name: "core_config", ok: false });
      emitJson(options.io, {
        checks,
        ready: false,
        requiredEnv: getReadyEnvKeys(),
      });
      return 1;
    }

    options.io.stdout("not_ready");
    return 1;
  }

  // Check 3: S3 connectivity via the storage client's probe call.
  let s3Ok = false;
  try {
    const storage = options.storageFactory(config);
    await storage.probe();
    s3Ok = true;
    if (parsed.flags.json) {
      checks.push({ message: config.bucket, name: "s3", ok: true });
    } else {
      options.io.stdout(`ok\ts3\t${config.bucket}`);
    }
  } catch (error) {
    const message = getErrorMessage(error);
    if (parsed.flags.json) {
      checks.push({ message, name: "s3", ok: false });
    } else {
      emitError(options.io, "s3", message);
    }
  }

  // Fully ready = every env var present AND both db and s3 checks passed.
  const ready = envChecks.every((check) => check.ok) && dbOk && s3Ok;
  if (parsed.flags.json) {
    emitJson(options.io, {
      checks,
      ready,
      requiredEnv: getReadyEnvKeys(),
    });
  } else {
    options.io.stdout(ready ? "ready" : "not_ready");
  }

  return ready ? 0 : 1;
}
421
+
422
+ function parseArgs(argv: string[]): ParsedArgs {
423
+ const flags: Record<string, string | boolean> = {};
424
+ const positionals: string[] = [];
425
+ let command: string | null = null;
426
+
427
+ for (let index = 0; index < argv.length; index += 1) {
428
+ const token = argv[index];
429
+ if (token === undefined) {
430
+ continue;
431
+ }
432
+
433
+ if (!command && !token.startsWith("-")) {
434
+ command = token;
435
+ continue;
436
+ }
437
+
438
+ if (token === "--json") {
439
+ flags.json = true;
440
+ continue;
441
+ }
442
+
443
+ if (token === "--help" || token === "-h") {
444
+ flags.help = true;
445
+ continue;
446
+ }
447
+
448
+ if (token === "--public") {
449
+ flags.public = true;
450
+ continue;
451
+ }
452
+
453
+ if (token === "--private") {
454
+ flags.private = true;
455
+ continue;
456
+ }
457
+
458
+ if (token === "--prefix" || token === "--expires") {
459
+ const nextValue = argv[index + 1];
460
+ if (nextValue === undefined || nextValue.startsWith("-")) {
461
+ throw new Error(`missing value for ${token}`);
462
+ }
463
+ flags[token.slice(2)] = nextValue;
464
+ index += 1;
465
+ continue;
466
+ }
467
+
468
+ if (token.startsWith("--")) {
469
+ throw new Error(`unknown flag ${token}`);
470
+ }
471
+
472
+ positionals.push(token);
473
+ }
474
+
475
+ return { command, flags, positionals };
476
+ }
477
+
478
+ function resolveVisibility(flags: Record<string, string | boolean>): Visibility {
479
+ if (flags.public && flags.private) {
480
+ throw new Error("choose either --public or --private");
481
+ }
482
+
483
+ return flags.public ? "public" : "private";
484
+ }
485
+
486
+ function readOptionalStringFlag(flags: Record<string, string | boolean>, key: string): string | undefined {
487
+ const value = flags[key];
488
+ return typeof value === "string" ? value : undefined;
489
+ }
490
+
491
+ function readOptionalPositiveIntegerFlag(
492
+ flags: Record<string, string | boolean>,
493
+ key: string,
494
+ ): number | undefined {
495
+ const value = readOptionalStringFlag(flags, key);
496
+ if (!value) {
497
+ return undefined;
498
+ }
499
+
500
+ const parsed = Number.parseInt(value, 10);
501
+ if (!Number.isFinite(parsed) || parsed <= 0) {
502
+ throw new Error(`invalid ${key} ${value}`);
503
+ }
504
+
505
+ return parsed;
506
+ }
507
+
508
+ function printHelp(io: CliIo): void {
509
+ io.stdout("s3-storage-cli");
510
+ io.stdout("status [--json]");
511
+ io.stdout("list|ls [prefix] [--json]");
512
+ io.stdout("upload|up <paths...> [--public|--private] [--prefix <remote-prefix>] [--json]");
513
+ io.stdout("delete|rm <keys...> [--json]");
514
+ io.stdout("share|sh <key> [--expires <seconds>] [--json]");
515
+ }
516
+
517
+ function getErrorMessage(error: unknown): string {
518
+ if (error instanceof Error) {
519
+ return error.message;
520
+ }
521
+
522
+ return String(error);
523
+ }
package/src/config.ts ADDED
@@ -0,0 +1,126 @@
1
+ import { homedir } from "node:os";
2
+ import { join } from "node:path";
3
+
4
+ export type EnvMap = Record<string, string | undefined>;
5
+
6
+ export type Visibility = "private" | "public";
7
+
8
+ export interface AppConfig {
9
+ accessKeyId: string;
10
+ bucket: string;
11
+ catalogPath: string;
12
+ endpoint: string;
13
+ publicBaseUrl?: string;
14
+ region: string;
15
+ secretAccessKey: string;
16
+ sessionToken?: string;
17
+ shareTtlSeconds: number;
18
+ virtualHostedStyle: boolean;
19
+ }
20
+
21
+ export interface EnvCheck {
22
+ key: string;
23
+ ok: boolean;
24
+ requiredForReady: boolean;
25
+ message?: string;
26
+ }
27
+
28
+ const CORE_ENV_KEYS = [
29
+ "S3_ENDPOINT",
30
+ "S3_REGION",
31
+ "S3_ACCESS_KEY_ID",
32
+ "S3_SECRET_ACCESS_KEY",
33
+ "S3_BUCKET",
34
+ ] as const;
35
+
36
+ const READY_ENV_KEYS = [...CORE_ENV_KEYS, "S3_PUBLIC_BASE_URL"] as const;
37
+
38
+ export function getReadyEnvKeys(): readonly string[] {
39
+ return READY_ENV_KEYS;
40
+ }
41
+
42
+ export function inspectEnv(env: EnvMap): EnvCheck[] {
43
+ return READY_ENV_KEYS.map((key) => {
44
+ const value = env[key];
45
+ return {
46
+ key,
47
+ ok: Boolean(value && value.trim()),
48
+ requiredForReady: true,
49
+ message: value && value.trim() ? undefined : `missing ${key}`,
50
+ };
51
+ });
52
+ }
53
+
54
+ export function getMissingCoreEnvKeys(env: EnvMap): string[] {
55
+ return CORE_ENV_KEYS.filter((key) => !env[key]?.trim());
56
+ }
57
+
58
+ export function requireCoreConfig(
59
+ env: EnvMap,
60
+ options: { homeDir?: string; catalogPath?: string } = {},
61
+ ): AppConfig {
62
+ const missing = getMissingCoreEnvKeys(env);
63
+ if (missing.length > 0) {
64
+ throw new Error(`missing env ${missing.join(",")}`);
65
+ }
66
+
67
+ return {
68
+ accessKeyId: env.S3_ACCESS_KEY_ID!.trim(),
69
+ bucket: env.S3_BUCKET!.trim(),
70
+ catalogPath: options.catalogPath ?? resolveCatalogPath(env, options.homeDir),
71
+ endpoint: env.S3_ENDPOINT!.trim(),
72
+ publicBaseUrl: env.S3_PUBLIC_BASE_URL?.trim() || undefined,
73
+ region: env.S3_REGION!.trim(),
74
+ secretAccessKey: env.S3_SECRET_ACCESS_KEY!.trim(),
75
+ sessionToken: env.S3_SESSION_TOKEN?.trim() || undefined,
76
+ shareTtlSeconds: parsePositiveInteger(env.S3_SHARE_TTL_SECONDS, 3600, "S3_SHARE_TTL_SECONDS"),
77
+ virtualHostedStyle: parseBoolean(env.S3_VIRTUAL_HOSTED_STYLE, false),
78
+ };
79
+ }
80
+
81
+ export function resolveCatalogPath(env: EnvMap, customHomeDir?: string): string {
82
+ const override = env.S3_CLI_DB_PATH?.trim();
83
+ if (override) {
84
+ return override;
85
+ }
86
+
87
+ return join(customHomeDir ?? homedir(), ".s3-storage-cli", "catalog.sqlite");
88
+ }
89
+
90
+ export function ensurePublicBaseUrl(config: AppConfig): string {
91
+ if (!config.publicBaseUrl) {
92
+ throw new Error("missing env S3_PUBLIC_BASE_URL");
93
+ }
94
+
95
+ return config.publicBaseUrl;
96
+ }
97
+
98
+ function parsePositiveInteger(raw: string | undefined, fallback: number, key: string): number {
99
+ if (!raw?.trim()) {
100
+ return fallback;
101
+ }
102
+
103
+ const parsed = Number.parseInt(raw, 10);
104
+ if (!Number.isFinite(parsed) || parsed <= 0) {
105
+ throw new Error(`invalid env ${key}`);
106
+ }
107
+
108
+ return parsed;
109
+ }
110
+
111
+ function parseBoolean(raw: string | undefined, fallback: boolean): boolean {
112
+ if (!raw?.trim()) {
113
+ return fallback;
114
+ }
115
+
116
+ const normalized = raw.trim().toLowerCase();
117
+ if (["1", "true", "yes", "on"].includes(normalized)) {
118
+ return true;
119
+ }
120
+
121
+ if (["0", "false", "no", "off"].includes(normalized)) {
122
+ return false;
123
+ }
124
+
125
+ return fallback;
126
+ }
package/src/files.ts ADDED
@@ -0,0 +1,101 @@
1
+ import { lstat, readdir } from "node:fs/promises";
2
+ import { basename, posix, relative, resolve } from "node:path";
3
+
4
+ export interface UploadTarget {
5
+ absolutePath: string;
6
+ key: string;
7
+ sourcePath: string;
8
+ }
9
+
10
+ export async function resolveUploadTargets(
11
+ inputPaths: string[],
12
+ options: { cwd: string; prefix?: string },
13
+ ): Promise<UploadTarget[]> {
14
+ if (inputPaths.length === 0) {
15
+ throw new Error("upload requires at least one path");
16
+ }
17
+
18
+ const prefix = normalizePrefix(options.prefix);
19
+ const targets: UploadTarget[] = [];
20
+
21
+ for (const inputPath of inputPaths) {
22
+ const absoluteInputPath = resolve(options.cwd, inputPath);
23
+ const stats = await lstat(absoluteInputPath).catch(() => null);
24
+ if (!stats) {
25
+ throw new Error(`path not found ${inputPath}`);
26
+ }
27
+
28
+ if (stats.isDirectory()) {
29
+ const directoryName = basename(absoluteInputPath);
30
+ const entries = await walkDirectory(absoluteInputPath);
31
+ for (const filePath of entries) {
32
+ const relativeInsideDirectory = toS3Key(relative(absoluteInputPath, filePath));
33
+ targets.push({
34
+ absolutePath: filePath,
35
+ key: `${prefix}${toS3Key(posix.join(directoryName, relativeInsideDirectory))}`,
36
+ sourcePath: filePath,
37
+ });
38
+ }
39
+ continue;
40
+ }
41
+
42
+ if (!stats.isFile()) {
43
+ throw new Error(`unsupported path ${inputPath}`);
44
+ }
45
+
46
+ targets.push({
47
+ absolutePath: absoluteInputPath,
48
+ key: `${prefix}${toS3Key(basename(absoluteInputPath))}`,
49
+ sourcePath: absoluteInputPath,
50
+ });
51
+ }
52
+
53
+ if (targets.length === 0) {
54
+ throw new Error("no files found");
55
+ }
56
+
57
+ assertNoDuplicateKeys(targets);
58
+ return targets;
59
+ }
60
+
61
+ function normalizePrefix(prefix: string | undefined): string {
62
+ if (!prefix?.trim()) {
63
+ return "";
64
+ }
65
+
66
+ return toS3Key(prefix.trim()).replace(/^\/+/, "").replace(/\/?$/, "/");
67
+ }
68
+
69
+ function toS3Key(value: string): string {
70
+ return value.replaceAll("\\", "/").replace(/^\/+/, "");
71
+ }
72
+
73
+ async function walkDirectory(directoryPath: string): Promise<string[]> {
74
+ const entries = await readdir(directoryPath, { withFileTypes: true });
75
+ const files: string[] = [];
76
+
77
+ for (const entry of entries) {
78
+ const entryPath = resolve(directoryPath, entry.name);
79
+ if (entry.isDirectory()) {
80
+ files.push(...(await walkDirectory(entryPath)));
81
+ continue;
82
+ }
83
+
84
+ if (entry.isFile()) {
85
+ files.push(entryPath);
86
+ }
87
+ }
88
+
89
+ return files.sort((left, right) => left.localeCompare(right));
90
+ }
91
+
92
+ function assertNoDuplicateKeys(targets: UploadTarget[]): void {
93
+ const seen = new Map<string, string>();
94
+ for (const target of targets) {
95
+ const previous = seen.get(target.key);
96
+ if (previous) {
97
+ throw new Error(`duplicate key ${target.key} from ${previous} and ${target.sourcePath}`);
98
+ }
99
+ seen.set(target.key, target.sourcePath);
100
+ }
101
+ }
package/src/output.ts ADDED
@@ -0,0 +1,34 @@
1
+ import type { EnvCheck } from "./config";
2
+
3
+ export interface CliIo {
4
+ stderr: (line: string) => void;
5
+ stdout: (line: string) => void;
6
+ }
7
+
8
+ export function createDefaultIo(): CliIo {
9
+ return {
10
+ stderr: (line) => process.stderr.write(`${line}\n`),
11
+ stdout: (line) => process.stdout.write(`${line}\n`),
12
+ };
13
+ }
14
+
15
+ export function emitJson(io: CliIo, data: unknown): void {
16
+ io.stdout(JSON.stringify(data));
17
+ }
18
+
19
+ export function emitError(io: CliIo, code: string, message: string): void {
20
+ io.stderr(`error\t${code}\t${message}`);
21
+ }
22
+
23
+ export function emitEnvCheck(io: CliIo, check: EnvCheck): void {
24
+ if (check.ok) {
25
+ io.stdout(`ok\tenv\t${check.key}`);
26
+ return;
27
+ }
28
+
29
+ emitError(io, "env", check.message ?? `invalid ${check.key}`);
30
+ }
31
+
32
+ export function formatTimestamp(value: string): string {
33
+ return new Date(value).toISOString();
34
+ }
package/src/storage.ts ADDED
@@ -0,0 +1,85 @@
1
+ import { S3Client } from "bun";
2
+ import type { AppConfig, Visibility } from "./config";
3
+
4
+ export interface UploadedRemoteMeta {
5
+ contentType: string | null;
6
+ etag: string | null;
7
+ size: number;
8
+ }
9
+
10
+ export interface StorageClient {
11
+ deleteObject(key: string): Promise<void>;
12
+ getShareUrl(key: string, visibility: Visibility, expiresIn: number): Promise<string>;
13
+ probe(): Promise<void>;
14
+ uploadFile(localPath: string, key: string, visibility: Visibility): Promise<UploadedRemoteMeta>;
15
+ }
16
+
17
/**
 * StorageClient backed by Bun's built-in S3Client.
 * All bucket/credential settings come from the resolved AppConfig.
 */
export class BunStorageClient implements StorageClient {
  private readonly client: S3Client;
  private readonly config: AppConfig;

  constructor(config: AppConfig) {
    this.config = config;
    this.client = new S3Client({
      accessKeyId: config.accessKeyId,
      bucket: config.bucket,
      endpoint: config.endpoint,
      region: config.region,
      secretAccessKey: config.secretAccessKey,
      sessionToken: config.sessionToken,
      virtualHostedStyle: config.virtualHostedStyle,
    });
  }

  async deleteObject(key: string): Promise<void> {
    await this.client.delete(key);
  }

  // Public objects get a stable direct URL; private objects get a time-limited
  // presigned GET URL honoring `expiresIn`.
  async getShareUrl(key: string, visibility: Visibility, expiresIn: number): Promise<string> {
    if (visibility === "public") {
      if (!this.config.publicBaseUrl) {
        throw new Error("missing env S3_PUBLIC_BASE_URL");
      }

      return buildPublicObjectUrl(this.config.publicBaseUrl, key);
    }

    return this.client.presign(key, {
      expiresIn,
      method: "GET",
    });
  }

  // Connectivity check: an existence test on a sentinel key. NOTE(review):
  // presumably exists() resolves false (rather than throwing) for a missing
  // object, so this only fails on auth/endpoint/bucket errors — confirm
  // against Bun's S3 API semantics.
  async probe(): Promise<void> {
    const probeKey = "__s3_storage_cli_status_probe__";
    await this.client.exists(probeKey);
  }

  // Writes the file, then stats the remote object so the catalog records the
  // store's own view of size/etag/content type (preferring the remote type,
  // falling back to the locally detected one).
  async uploadFile(localPath: string, key: string, visibility: Visibility): Promise<UploadedRemoteMeta> {
    const file = Bun.file(localPath);
    const localType = file.type || undefined;
    await this.client.write(key, file, {
      acl: visibility === "public" ? "public-read" : "private",
      type: localType,
    });

    const remote = await this.client.stat(key);
    return {
      contentType: remote.type || localType || null,
      etag: remote.etag || null,
      size: remote.size,
    };
  }
}
74
+
75
+ export function buildPublicObjectUrl(baseUrl: string, key: string): string {
76
+ const url = new URL(baseUrl.endsWith("/") ? baseUrl : `${baseUrl}/`);
77
+ const encodedKey = key
78
+ .split("/")
79
+ .filter(Boolean)
80
+ .map((segment) => encodeURIComponent(segment))
81
+ .join("/");
82
+ const basePath = url.pathname.endsWith("/") ? url.pathname : `${url.pathname}/`;
83
+ url.pathname = `${basePath}${encodedKey}`.replace(/\/+/g, "/");
84
+ return url.toString();
85
+ }