@delma/fylo 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +16 -0
- package/.github/copilot-instructions.md +113 -0
- package/.github/prompts/issue.prompt.md +19 -0
- package/.github/prompts/pr.prompt.md +18 -0
- package/.github/prompts/release.prompt.md +49 -0
- package/.github/prompts/review-pr.prompt.md +19 -0
- package/.github/prompts/sync-main.prompt.md +14 -0
- package/.github/workflows/ci.yml +37 -0
- package/.github/workflows/publish.yml +101 -0
- package/.prettierrc +7 -0
- package/LICENSE +21 -0
- package/README.md +230 -0
- package/eslint.config.js +28 -0
- package/package.json +51 -0
- package/src/CLI +37 -0
- package/src/adapters/cipher.ts +174 -0
- package/src/adapters/redis.ts +71 -0
- package/src/adapters/s3.ts +67 -0
- package/src/core/directory.ts +418 -0
- package/src/core/extensions.ts +19 -0
- package/src/core/format.ts +486 -0
- package/src/core/parser.ts +876 -0
- package/src/core/query.ts +48 -0
- package/src/core/walker.ts +167 -0
- package/src/index.ts +1088 -0
- package/src/types/fylo.d.ts +139 -0
- package/src/types/index.d.ts +3 -0
- package/src/types/query.d.ts +73 -0
- package/tests/collection/truncate.test.ts +56 -0
- package/tests/data.ts +110 -0
- package/tests/index.ts +19 -0
- package/tests/integration/create.test.ts +57 -0
- package/tests/integration/delete.test.ts +147 -0
- package/tests/integration/edge-cases.test.ts +232 -0
- package/tests/integration/encryption.test.ts +176 -0
- package/tests/integration/export.test.ts +61 -0
- package/tests/integration/join-modes.test.ts +221 -0
- package/tests/integration/nested.test.ts +212 -0
- package/tests/integration/operators.test.ts +167 -0
- package/tests/integration/read.test.ts +203 -0
- package/tests/integration/rollback.test.ts +105 -0
- package/tests/integration/update.test.ts +130 -0
- package/tests/mocks/cipher.ts +55 -0
- package/tests/mocks/redis.ts +13 -0
- package/tests/mocks/s3.ts +114 -0
- package/tests/schemas/album.d.ts +5 -0
- package/tests/schemas/album.json +5 -0
- package/tests/schemas/comment.d.ts +7 -0
- package/tests/schemas/comment.json +7 -0
- package/tests/schemas/photo.d.ts +7 -0
- package/tests/schemas/photo.json +7 -0
- package/tests/schemas/post.d.ts +6 -0
- package/tests/schemas/post.json +6 -0
- package/tests/schemas/tip.d.ts +7 -0
- package/tests/schemas/tip.json +7 -0
- package/tests/schemas/todo.d.ts +6 -0
- package/tests/schemas/todo.json +6 -0
- package/tests/schemas/user.d.ts +23 -0
- package/tests/schemas/user.json +23 -0
- package/tsconfig.json +19 -0
package/eslint.config.js
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import tsPlugin from '@typescript-eslint/eslint-plugin'
import tsParser from '@typescript-eslint/parser'
import prettierConfig from 'eslint-config-prettier'

// Project rule set: @typescript-eslint recommended, tightened with explicit
// return types and no-explicit-any as warnings; unused vars are errors but
// underscore-prefixed names are deliberately ignorable.
const typescriptRules = {
    ...tsPlugin.configs['recommended'].rules,
    '@typescript-eslint/no-explicit-any': 'warn',
    '@typescript-eslint/explicit-function-return-type': 'warn',
    '@typescript-eslint/no-unused-vars': ['error', { argsIgnorePattern: '^_', varsIgnorePattern: '^_' }]
}

// Flat ESLint config: type-aware linting for src/ and tests/, prettier
// conflict suppression, and ignores for build output and declaration files.
export default [
    {
        files: ['src/**/*.ts', 'tests/**/*.ts'],
        languageOptions: {
            parser: tsParser,
            parserOptions: {
                project: './tsconfig.json'
            }
        },
        plugins: {
            '@typescript-eslint': tsPlugin
        },
        rules: typescriptRules
    },
    prettierConfig,
    {
        ignores: ['bin/**', 'node_modules/**', '**/*.d.ts']
    }
]
|
package/package.json
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@delma/fylo",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"main": "./dist/index.js",
|
|
5
|
+
"types": "./dist/types/index.d.ts",
|
|
6
|
+
"bin": {
|
|
7
|
+
"fylo.query": "./dist/cli/index.js"
|
|
8
|
+
},
|
|
9
|
+
"scripts": {
|
|
10
|
+
"build": "tsc",
|
|
11
|
+
"test": "bun test",
|
|
12
|
+
"typecheck": "tsc --noEmit",
|
|
13
|
+
"lint": "eslint src tests",
|
|
14
|
+
"format": "prettier --write src tests"
|
|
15
|
+
},
|
|
16
|
+
"devDependencies": {
|
|
17
|
+
"@types/bun": "^1.2.19",
|
|
18
|
+
"@types/node": "^20.19.39",
|
|
19
|
+
"@typescript-eslint/eslint-plugin": "^8.0.0",
|
|
20
|
+
"@typescript-eslint/parser": "^8.0.0",
|
|
21
|
+
"eslint": "^9.0.0",
|
|
22
|
+
"eslint-config-prettier": "^9.0.0",
|
|
23
|
+
"prettier": "^3.0.0"
|
|
24
|
+
},
|
|
25
|
+
"dependencies": {
|
|
26
|
+
"@vyckr/ttid": "1.3.1",
|
|
27
|
+
"@vyckr/chex": "0.3.0"
|
|
28
|
+
},
|
|
29
|
+
"type": "module",
|
|
30
|
+
"peerDependencies": {
|
|
31
|
+
"typescript": "^5.0.0"
|
|
32
|
+
},
|
|
33
|
+
"repository": {
|
|
34
|
+
"type": "git",
|
|
35
|
+
"url": "git+https://github.com/Chidelma/Fylo.git"
|
|
36
|
+
},
|
|
37
|
+
"homepage": "https://fylo.vyckr.com",
|
|
38
|
+
"license": "MIT",
|
|
39
|
+
"keywords": [
|
|
40
|
+
"storage",
|
|
41
|
+
"database",
|
|
42
|
+
"s3",
|
|
43
|
+
"aws",
|
|
44
|
+
"typescript",
|
|
45
|
+
"bun"
|
|
46
|
+
],
|
|
47
|
+
"author": "Chidelma",
|
|
48
|
+
"bugs": {
|
|
49
|
+
"url": "https://github.com/Chidelma/Fylo/issues"
|
|
50
|
+
}
|
|
51
|
+
}
|
package/src/CLI
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
#!/usr/bin/env bun
|
|
2
|
+
/// <reference path="./types/index.d.ts" />
|
|
3
|
+
import Silo from '.'
|
|
4
|
+
|
|
5
|
+
const SQL = process.argv[process.argv.length - 1]
|
|
6
|
+
|
|
7
|
+
const op = SQL.match(/^((?:SELECT|select)|(?:INSERT|insert)|(?:UPDATE|update)|(?:DELETE|delete)|(?:CREATE|create)|(?:DROP|drop))/i)
|
|
8
|
+
|
|
9
|
+
if(!op) throw new Error("Missing SQL Operation")
|
|
10
|
+
|
|
11
|
+
const res = await new Silo().executeSQL(SQL)
|
|
12
|
+
|
|
13
|
+
const cmnd = op.shift()!
|
|
14
|
+
|
|
15
|
+
switch(cmnd.toUpperCase()) {
|
|
16
|
+
case "CREATE":
|
|
17
|
+
console.log("Successfully created schema")
|
|
18
|
+
break
|
|
19
|
+
case "DROP":
|
|
20
|
+
console.log("Successfully dropped schema")
|
|
21
|
+
break
|
|
22
|
+
case "SELECT":
|
|
23
|
+
if(typeof res === 'object' && !Array.isArray(res)) console.format(res)
|
|
24
|
+
else console.log(res)
|
|
25
|
+
break
|
|
26
|
+
case "INSERT":
|
|
27
|
+
console.log(res)
|
|
28
|
+
break
|
|
29
|
+
case "UPDATE":
|
|
30
|
+
console.log(`Successfully updated ${res} document(s)`)
|
|
31
|
+
break
|
|
32
|
+
case "DELETE":
|
|
33
|
+
console.log(`Successfully deleted ${res} document(s)`)
|
|
34
|
+
break
|
|
35
|
+
default:
|
|
36
|
+
throw new Error("Invalid Operation: " + cmnd)
|
|
37
|
+
}
|
|
@@ -0,0 +1,174 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* AES-256-CBC encryption adapter for field-level value encryption.
|
|
3
|
+
*
|
|
4
|
+
* Two modes are supported via the `deterministic` flag on `encrypt()`:
|
|
5
|
+
*
|
|
6
|
+
* - **Random IV (default)**: A cryptographically random IV is generated per
|
|
7
|
+
* encryption operation. Identical plaintexts produce different ciphertexts.
|
|
8
|
+
* Use this for fields that do not need exact-match ($eq/$ne) queries.
|
|
9
|
+
*
|
|
10
|
+
* - **Deterministic IV (opt-in)**: The IV is derived from HMAC-SHA256 of the
|
|
11
|
+
* plaintext, so identical values always produce identical ciphertext. This
|
|
12
|
+
* enables exact-match queries on encrypted fields but leaks equality — an
|
|
13
|
+
* observer can determine which records share field values without decrypting.
|
|
14
|
+
* Use only when $eq/$ne queries on encrypted fields are required.
|
|
15
|
+
*
|
|
16
|
+
* Encrypted fields are declared per-collection in JSON schema files via the
|
|
17
|
+
* `$encrypted` array. The encryption key is sourced from `ENCRYPTION_KEY` env var.
|
|
18
|
+
* Set `CIPHER_SALT` to a unique random value to prevent cross-deployment attacks.
|
|
19
|
+
*/
|
|
20
|
+
|
|
21
|
+
export class Cipher {
|
|
22
|
+
|
|
23
|
+
private static key: CryptoKey | null = null
|
|
24
|
+
private static hmacKey: CryptoKey | null = null
|
|
25
|
+
|
|
26
|
+
/** Per-collection encrypted field sets, loaded from schema `$encrypted` arrays. */
|
|
27
|
+
private static collections: Map<string, Set<string>> = new Map()
|
|
28
|
+
|
|
29
|
+
static isConfigured(): boolean {
|
|
30
|
+
return Cipher.key !== null
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
static hasEncryptedFields(collection: string): boolean {
|
|
34
|
+
const fields = Cipher.collections.get(collection)
|
|
35
|
+
return !!fields && fields.size > 0
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
static isEncryptedField(collection: string, field: string): boolean {
|
|
39
|
+
const fields = Cipher.collections.get(collection)
|
|
40
|
+
if (!fields || fields.size === 0) return false
|
|
41
|
+
|
|
42
|
+
for (const pattern of fields) {
|
|
43
|
+
if (field === pattern) return true
|
|
44
|
+
// Support nested: encrypting "address" encrypts "address/city" etc.
|
|
45
|
+
if (field.startsWith(`${pattern}/`)) return true
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
return false
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
/**
|
|
52
|
+
* Registers encrypted fields for a collection (from schema `$encrypted` array).
|
|
53
|
+
*/
|
|
54
|
+
static registerFields(collection: string, fields: string[]): void {
|
|
55
|
+
if (fields.length > 0) {
|
|
56
|
+
Cipher.collections.set(collection, new Set(fields))
|
|
57
|
+
}
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
/**
|
|
61
|
+
* Derives AES + HMAC keys from a secret string. Called once at startup.
|
|
62
|
+
*/
|
|
63
|
+
static async configure(secret: string): Promise<void> {
|
|
64
|
+
const encoder = new TextEncoder()
|
|
65
|
+
const keyMaterial = await crypto.subtle.importKey(
|
|
66
|
+
'raw',
|
|
67
|
+
encoder.encode(secret),
|
|
68
|
+
'PBKDF2',
|
|
69
|
+
false,
|
|
70
|
+
['deriveBits']
|
|
71
|
+
)
|
|
72
|
+
|
|
73
|
+
const cipherSalt = process.env.CIPHER_SALT
|
|
74
|
+
if (!cipherSalt) {
|
|
75
|
+
console.warn('CIPHER_SALT is not set. Using default salt is insecure for multi-deployment use. Set CIPHER_SALT to a unique random value.')
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
// Derive 48 bytes: 32 for AES key + 16 for HMAC key
|
|
79
|
+
const bits = await crypto.subtle.deriveBits(
|
|
80
|
+
{ name: 'PBKDF2', salt: encoder.encode(cipherSalt ?? 'fylo-cipher'), iterations: 100000, hash: 'SHA-256' },
|
|
81
|
+
keyMaterial,
|
|
82
|
+
384
|
|
83
|
+
)
|
|
84
|
+
|
|
85
|
+
const derived = new Uint8Array(bits)
|
|
86
|
+
|
|
87
|
+
Cipher.key = await crypto.subtle.importKey(
|
|
88
|
+
'raw',
|
|
89
|
+
derived.slice(0, 32),
|
|
90
|
+
{ name: 'AES-CBC' },
|
|
91
|
+
false,
|
|
92
|
+
['encrypt', 'decrypt']
|
|
93
|
+
)
|
|
94
|
+
|
|
95
|
+
Cipher.hmacKey = await crypto.subtle.importKey(
|
|
96
|
+
'raw',
|
|
97
|
+
derived.slice(32),
|
|
98
|
+
{ name: 'HMAC', hash: 'SHA-256' },
|
|
99
|
+
false,
|
|
100
|
+
['sign']
|
|
101
|
+
)
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
static reset(): void {
|
|
105
|
+
Cipher.key = null
|
|
106
|
+
Cipher.hmacKey = null
|
|
107
|
+
Cipher.collections = new Map()
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
/**
|
|
111
|
+
* Deterministic IV from HMAC-SHA256 of plaintext, truncated to 16 bytes.
|
|
112
|
+
*/
|
|
113
|
+
private static async deriveIV(plaintext: string): Promise<Uint8Array> {
|
|
114
|
+
const encoder = new TextEncoder()
|
|
115
|
+
const sig = await crypto.subtle.sign('HMAC', Cipher.hmacKey!, encoder.encode(plaintext))
|
|
116
|
+
return new Uint8Array(sig).slice(0, 16)
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
/**
|
|
120
|
+
* Encrypts a value. Returns a URL-safe base64 string (no slashes).
|
|
121
|
+
*
|
|
122
|
+
* @param value - The plaintext to encrypt.
|
|
123
|
+
* @param deterministic - When true, derives IV from HMAC of plaintext (same
|
|
124
|
+
* input always produces same ciphertext). Required for $eq/$ne queries on
|
|
125
|
+
* encrypted fields. Defaults to false (random IV per operation).
|
|
126
|
+
*/
|
|
127
|
+
static async encrypt(value: string, deterministic = false): Promise<string> {
|
|
128
|
+
if (!Cipher.key) throw new Error('Cipher not configured — set ENCRYPTION_KEY env var')
|
|
129
|
+
|
|
130
|
+
const iv = deterministic
|
|
131
|
+
? await Cipher.deriveIV(value)
|
|
132
|
+
: crypto.getRandomValues(new Uint8Array(16))
|
|
133
|
+
const encoder = new TextEncoder()
|
|
134
|
+
|
|
135
|
+
const encrypted = await crypto.subtle.encrypt(
|
|
136
|
+
{ name: 'AES-CBC', iv },
|
|
137
|
+
Cipher.key,
|
|
138
|
+
encoder.encode(value)
|
|
139
|
+
)
|
|
140
|
+
|
|
141
|
+
// Concatenate IV + ciphertext and encode as URL-safe base64
|
|
142
|
+
const combined = new Uint8Array(iv.length + encrypted.byteLength)
|
|
143
|
+
combined.set(iv)
|
|
144
|
+
combined.set(new Uint8Array(encrypted), iv.length)
|
|
145
|
+
|
|
146
|
+
return btoa(String.fromCharCode(...combined))
|
|
147
|
+
.replace(/\+/g, '-')
|
|
148
|
+
.replace(/\//g, '_')
|
|
149
|
+
.replace(/=+$/, '')
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
/**
|
|
153
|
+
* Decrypts a URL-safe base64 encoded value back to plaintext.
|
|
154
|
+
*/
|
|
155
|
+
static async decrypt(encoded: string): Promise<string> {
|
|
156
|
+
if (!Cipher.key) throw new Error('Cipher not configured — set ENCRYPTION_KEY env var')
|
|
157
|
+
|
|
158
|
+
// Restore standard base64
|
|
159
|
+
const b64 = encoded.replace(/-/g, '+').replace(/_/g, '/')
|
|
160
|
+
const padded = b64 + '='.repeat((4 - b64.length % 4) % 4)
|
|
161
|
+
|
|
162
|
+
const combined = Uint8Array.from(atob(padded), c => c.charCodeAt(0))
|
|
163
|
+
const iv = combined.slice(0, 16)
|
|
164
|
+
const ciphertext = combined.slice(16)
|
|
165
|
+
|
|
166
|
+
const decrypted = await crypto.subtle.decrypt(
|
|
167
|
+
{ name: 'AES-CBC', iv },
|
|
168
|
+
Cipher.key,
|
|
169
|
+
ciphertext
|
|
170
|
+
)
|
|
171
|
+
|
|
172
|
+
return new TextDecoder().decode(decrypted)
|
|
173
|
+
}
|
|
174
|
+
}
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import { RedisClient } from "bun";
|
|
2
|
+
import { S3 } from "./s3";
|
|
3
|
+
|
|
4
|
+
export class Redis {
|
|
5
|
+
|
|
6
|
+
private client: RedisClient
|
|
7
|
+
|
|
8
|
+
private static LOGGING = process.env.LOGGING
|
|
9
|
+
|
|
10
|
+
constructor() {
|
|
11
|
+
|
|
12
|
+
const redisUrl = process.env.REDIS_URL
|
|
13
|
+
if (!redisUrl) throw new Error('REDIS_URL environment variable is required')
|
|
14
|
+
|
|
15
|
+
this.client = new RedisClient(redisUrl, {
|
|
16
|
+
connectionTimeout: process.env.REDIS_CONN_TIMEOUT ? Number(process.env.REDIS_CONN_TIMEOUT) : undefined,
|
|
17
|
+
idleTimeout: process.env.REDIS_IDLE_TIMEOUT ? Number(process.env.REDIS_IDLE_TIMEOUT) : undefined,
|
|
18
|
+
autoReconnect: process.env.REDIS_AUTO_CONNECT ? true : undefined,
|
|
19
|
+
maxRetries: process.env.REDIS_MAX_RETRIES ? Number(process.env.REDIS_MAX_RETRIES) : undefined,
|
|
20
|
+
enableOfflineQueue: process.env.REDIS_ENABLE_OFFLINE_QUEUE ? true : undefined,
|
|
21
|
+
enableAutoPipelining: process.env.REDIS_ENABLE_AUTO_PIPELINING ? true : undefined,
|
|
22
|
+
tls: process.env.REDIS_TLS ? true : undefined
|
|
23
|
+
})
|
|
24
|
+
|
|
25
|
+
this.client.onconnect = () => {
|
|
26
|
+
if(Redis.LOGGING) console.log("Client Connected")
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
this.client.onclose = (err) => console.error("Redis client connection closed", err.message)
|
|
30
|
+
|
|
31
|
+
this.client.connect()
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
async publish(collection: string, action: 'insert' | 'delete', keyId: string | _ttid) {
|
|
35
|
+
|
|
36
|
+
if(this.client.connected) {
|
|
37
|
+
|
|
38
|
+
await this.client.publish(S3.getBucketFormat(collection), JSON.stringify({ action, keyId }))
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
async claimTTID(_id: _ttid, ttlSeconds: number = 10): Promise<boolean> {
|
|
43
|
+
|
|
44
|
+
if(!this.client.connected) return false
|
|
45
|
+
|
|
46
|
+
const result = await this.client.send('SET', [`ttid:${_id}`, '1', 'NX', 'EX', String(ttlSeconds)])
|
|
47
|
+
|
|
48
|
+
return result === 'OK'
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
async *subscribe(collection: string) {
|
|
52
|
+
|
|
53
|
+
if(!this.client.connected) throw new Error('Redis not connected!')
|
|
54
|
+
|
|
55
|
+
const client = this.client
|
|
56
|
+
|
|
57
|
+
const stream = new ReadableStream({
|
|
58
|
+
async start(controller) {
|
|
59
|
+
await client.subscribe(S3.getBucketFormat(collection), (message) => {
|
|
60
|
+
controller.enqueue(message)
|
|
61
|
+
})
|
|
62
|
+
},
|
|
63
|
+
})
|
|
64
|
+
|
|
65
|
+
for await (const chunk of stream) {
|
|
66
|
+
const parsed = JSON.parse(chunk)
|
|
67
|
+
if (typeof parsed !== 'object' || parsed === null || !('action' in parsed) || !('keyId' in parsed)) continue
|
|
68
|
+
yield parsed
|
|
69
|
+
}
|
|
70
|
+
}
|
|
71
|
+
}
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import { $, S3Client } from "bun"
|
|
2
|
+
|
|
3
|
+
export class S3 {
|
|
4
|
+
|
|
5
|
+
static readonly BUCKET_ENV = process.env.BUCKET_PREFIX
|
|
6
|
+
|
|
7
|
+
static readonly CREDS = {
|
|
8
|
+
accessKeyId: process.env.S3_ACCESS_KEY_ID ?? process.env.AWS_ACCESS_KEY_ID,
|
|
9
|
+
secretAccessKey: process.env.S3_SECRET_ACCESS_KEY ?? process.env.AWS_SECRET_ACCESS_KEY,
|
|
10
|
+
region: process.env.S3_REGION ?? process.env.AWS_REGION,
|
|
11
|
+
endpoint: process.env.S3_ENDPOINT ?? process.env.AWS_ENDPOINT
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
private static validateCollection(collection: string): void {
|
|
15
|
+
if (!/^[a-z0-9][a-z0-9\-]*[a-z0-9]$/.test(collection)) {
|
|
16
|
+
throw new Error('Invalid collection name')
|
|
17
|
+
}
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
static getBucketFormat(collection: string) {
|
|
21
|
+
S3.validateCollection(collection)
|
|
22
|
+
return S3.BUCKET_ENV ? `${S3.BUCKET_ENV}-${collection}` : collection
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
static file(collection: string, path: string) {
|
|
26
|
+
|
|
27
|
+
return S3Client.file(path, {
|
|
28
|
+
bucket: S3.getBucketFormat(collection),
|
|
29
|
+
...S3.CREDS
|
|
30
|
+
})
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
static async list(collection: string, options?: Bun.S3ListObjectsOptions) {
|
|
34
|
+
|
|
35
|
+
return await S3Client.list(options, {
|
|
36
|
+
bucket: S3.getBucketFormat(collection),
|
|
37
|
+
...S3.CREDS
|
|
38
|
+
})
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
static async put(collection: string, path: string, data: string) {
|
|
42
|
+
|
|
43
|
+
await S3Client.write(path, data, {
|
|
44
|
+
bucket: S3.getBucketFormat(collection),
|
|
45
|
+
...S3.CREDS
|
|
46
|
+
})
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
static async delete(collection: string, path: string) {
|
|
50
|
+
|
|
51
|
+
await S3Client.delete(path, {
|
|
52
|
+
bucket: S3.getBucketFormat(collection),
|
|
53
|
+
...S3.CREDS
|
|
54
|
+
})
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
static async createBucket(collection: string) {
|
|
58
|
+
const endpoint = S3.CREDS.endpoint
|
|
59
|
+
await $`aws s3 mb s3://${S3.getBucketFormat(collection)} ${endpoint ? `--endpoint-url=${endpoint}` : ""}`.quiet()
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
static async deleteBucket(collection: string) {
|
|
63
|
+
const endpoint = S3.CREDS.endpoint
|
|
64
|
+
await $`aws s3 rm s3://${S3.getBucketFormat(collection)} --recursive ${endpoint ? `--endpoint-url=${endpoint}` : ""}`.quiet()
|
|
65
|
+
await $`aws s3 rb s3://${S3.getBucketFormat(collection)} ${endpoint ? `--endpoint-url=${endpoint}` : ""}`.quiet()
|
|
66
|
+
}
|
|
67
|
+
}
|