@delma/fylo 1.1.1 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74) hide show
  1. package/.github/copilot-instructions.md +1 -1
  2. package/.github/prompts/release.prompt.md +4 -43
  3. package/AGENTS.md +1 -1
  4. package/CLAUDE.md +1 -1
  5. package/README.md +141 -62
  6. package/eslint.config.js +8 -4
  7. package/package.json +9 -7
  8. package/src/CLI +16 -14
  9. package/src/adapters/cipher.ts +12 -6
  10. package/src/adapters/redis.ts +193 -123
  11. package/src/adapters/s3.ts +6 -12
  12. package/src/core/collection.ts +5 -0
  13. package/src/core/directory.ts +120 -151
  14. package/src/core/extensions.ts +4 -2
  15. package/src/core/format.ts +390 -419
  16. package/src/core/parser.ts +167 -142
  17. package/src/core/query.ts +31 -26
  18. package/src/core/walker.ts +68 -61
  19. package/src/core/write-queue.ts +7 -4
  20. package/src/engines/s3-files.ts +888 -0
  21. package/src/engines/types.ts +21 -0
  22. package/src/index.ts +754 -378
  23. package/src/migrate-cli.ts +22 -0
  24. package/src/migrate.ts +74 -0
  25. package/src/types/bun-runtime.d.ts +73 -0
  26. package/src/types/fylo.d.ts +115 -27
  27. package/src/types/node-runtime.d.ts +61 -0
  28. package/src/types/query.d.ts +6 -2
  29. package/src/types/vendor-modules.d.ts +8 -7
  30. package/src/worker.ts +7 -1
  31. package/src/workers/write-worker.ts +25 -24
  32. package/tests/collection/truncate.test.js +35 -0
  33. package/tests/{data.ts → data.js} +8 -21
  34. package/tests/{index.ts → index.js} +4 -9
  35. package/tests/integration/aws-s3-files.canary.test.js +22 -0
  36. package/tests/integration/{create.test.ts → create.test.js} +13 -31
  37. package/tests/integration/delete.test.js +95 -0
  38. package/tests/integration/{edge-cases.test.ts → edge-cases.test.js} +50 -124
  39. package/tests/integration/{encryption.test.ts → encryption.test.js} +20 -65
  40. package/tests/integration/{export.test.ts → export.test.js} +8 -23
  41. package/tests/integration/{join-modes.test.ts → join-modes.test.js} +37 -104
  42. package/tests/integration/migration.test.js +38 -0
  43. package/tests/integration/nested.test.js +142 -0
  44. package/tests/integration/operators.test.js +122 -0
  45. package/tests/integration/{queue.test.ts → queue.test.js} +24 -40
  46. package/tests/integration/read.test.js +119 -0
  47. package/tests/integration/rollback.test.js +60 -0
  48. package/tests/integration/s3-files.test.js +108 -0
  49. package/tests/integration/update.test.js +99 -0
  50. package/tests/mocks/{cipher.ts → cipher.js} +11 -26
  51. package/tests/mocks/redis.js +123 -0
  52. package/tests/mocks/{s3.ts → s3.js} +24 -58
  53. package/tests/schemas/album.json +1 -1
  54. package/tests/schemas/comment.json +1 -1
  55. package/tests/schemas/photo.json +1 -1
  56. package/tests/schemas/post.json +1 -1
  57. package/tests/schemas/tip.json +1 -1
  58. package/tests/schemas/todo.json +1 -1
  59. package/tests/schemas/user.d.ts +12 -12
  60. package/tests/schemas/user.json +1 -1
  61. package/tsconfig.json +4 -2
  62. package/tsconfig.typecheck.json +31 -0
  63. package/.github/prompts/issue.prompt.md +0 -19
  64. package/.github/prompts/pr.prompt.md +0 -18
  65. package/.github/prompts/review-pr.prompt.md +0 -19
  66. package/.github/prompts/sync-main.prompt.md +0 -14
  67. package/tests/collection/truncate.test.ts +0 -56
  68. package/tests/integration/delete.test.ts +0 -147
  69. package/tests/integration/nested.test.ts +0 -212
  70. package/tests/integration/operators.test.ts +0 -167
  71. package/tests/integration/read.test.ts +0 -203
  72. package/tests/integration/rollback.test.ts +0 -105
  73. package/tests/integration/update.test.ts +0 -130
  74. package/tests/mocks/redis.ts +0 -169
@@ -1,3 +1,3 @@
1
- Follow the shared workspace instructions in `../instructions.md` and shared context in `../memory.md`.
1
+ Follow the shared workspace instructions in `../../INSTRUCTIONS.md`, shared context in `../../MEMORY.md`, and shared release process in `../../RELEASE.md`.
2
2
 
3
3
  If a repo-local instruction file adds stricter rules, follow the repo-local rule.
@@ -1,49 +1,10 @@
1
1
  ---
2
- description: "Create a release branch, publish to npm via CI, then merge to main"
2
+ description: "Follow the shared workspace release process"
3
3
  agent: "agent"
4
4
  tools: [runInTerminal]
5
5
  ---
6
- Create a release branch, publish to npm via CI, then merge to main.
6
+ Follow the shared workspace release process in [../../../RELEASE.md](../../../RELEASE.md).
7
7
 
8
- 1. Run `bun test` and stop if any tests fail.
8
+ Use the repo's actual default branch. Do not assume it is `main`.
9
9
 
10
- 2. Determine the new version automatically based on unreleased commits:
11
- `git log $(git describe --tags --abbrev=0 2>/dev/null || git rev-list --max-parents=0 HEAD)..HEAD --oneline`
12
-
13
- Apply these rules to select the bump type:
14
- - **major** — any commit with a `!` breaking-change marker (e.g. `feat!:`, `fix!:`) or a `BREAKING CHANGE` footer.
15
- - **minor** — one or more `feat:` commits and no breaking changes.
16
- - **patch** — only `fix:`, `chore:`, `docs:`, `refactor:`, `test:`, or `perf:` commits.
17
-
18
- Compute the new version by incrementing the corresponding part of the current `"version"` in [package.json](package.json) and resetting lower parts to zero. Show the chosen version and the reasoning to the user before proceeding.
19
-
20
- 3. Update `"version"` in [package.json](package.json) to the new version.
21
-
22
- 4. Fetch the latest main and create a release branch from it:
23
- ```
24
- git fetch origin main
25
- git checkout -b release/<version> origin/main
26
- ```
27
-
28
- 5. Stage all changes and commit:
29
- `git add -A && git commit -m "chore: release v<version>"`
30
-
31
- 6. Push the branch:
32
- `git push -u origin release/<version>`
33
-
34
- 7. Tell the user that the `publish` workflow will now run on GitHub Actions:
35
- - It verifies the branch name matches `package.json` version.
36
- - It runs tests, publishes to npm, creates a git tag, and opens a GitHub release.
37
- - The NPM_TOKEN secret must be set in repo Settings → Secrets → Actions.
38
-
39
- 8. Once the workflow passes (user confirms), create a PR and merge it to main:
40
- ```
41
- gh pr create --title "chore: release v<version>" --body "Release v<version>" --base main --head release/<version>
42
- gh pr merge --merge --delete-branch
43
- ```
44
-
45
- 9. Switch back to main and pull:
46
- ```
47
- git checkout main
48
- git pull
49
- ```
10
+ If repo-local workflows impose stricter checks or branch requirements, follow the repo-local workflow and then update the shared release guide later if that rule becomes the new standard.
package/AGENTS.md CHANGED
@@ -1,3 +1,3 @@
1
- Follow the shared workspace instructions in [../instructions.md](../instructions.md) and shared context in [../memory.md](../memory.md).
1
+ Follow the shared workspace instructions in [../INSTRUCTIONS.md](../INSTRUCTIONS.md), shared context in [../MEMORY.md](../MEMORY.md), and shared release process in [../RELEASE.md](../RELEASE.md).
2
2
 
3
3
  Repo-local rules may add to or override the shared files when explicitly stated.
package/CLAUDE.md CHANGED
@@ -1,3 +1,3 @@
1
- Follow the shared workspace instructions in [../instructions.md](../instructions.md) and shared context in [../memory.md](../memory.md).
1
+ Follow the shared workspace instructions in [../INSTRUCTIONS.md](../INSTRUCTIONS.md), shared context in [../MEMORY.md](../MEMORY.md), and shared release process in [../RELEASE.md](../RELEASE.md).
2
2
 
3
3
  Repo-local rules may add to or override the shared files when explicitly stated.
package/README.md CHANGED
@@ -1,12 +1,19 @@
1
1
  # Fylo
2
2
 
3
- S3-backed NoSQL document store with SQL parsing, Redis-backed write coordination and pub/sub for real-time events, and a CLI.
3
+ NoSQL document store with SQL parsing, real-time listeners, and Bun-first workflows.
4
4
 
5
- Documents are stored as **S3 key paths** — not file contents. Each document produces two keys per field: a **data key** (`{ttid}/{field}/{value}`) for full-doc retrieval and an **index key** (`{field}/{value}/{ttid}`) for query lookups. This enables fast reads and filtered queries without a traditional database engine.
5
+ Fylo `2.0.0` supports two storage engines:
6
+
7
+ - `legacy-s3`: the existing S3 + Redis architecture with queued writes, bucket-per-collection storage, and Redis-backed pub/sub/locks.
8
+ - `s3-files`: a new AWS S3 Files mode that stores canonical documents on a mounted S3 Files filesystem, keeps query indexes in a collection-level SQLite database under `.fylo/index.db`, and uses filesystem locks plus an append-only event journal instead of Redis.
9
+
10
+ The legacy engine still stores documents as **S3 key paths** — not file contents. Each document produces two keys per field: a **data key** (`{ttid}/{field}/{value}`) for full-doc retrieval and an **index key** (`{field}/{value}/{ttid}`) for query lookups. This enables fast reads and filtered queries without a traditional database engine.
6
11
 
7
12
  Built for **serverless** runtimes (AWS Lambda, Cloudflare Workers) — no persistent in-memory state, lazy connections, minimal cold-start overhead.
8
13
 
9
- Writes are coordinated through Redis before they are flushed to S3. By default the high-level CRUD methods wait for the queued write to be processed so existing code can continue to behave synchronously. If you want fire-and-forget semantics, pass `{ wait: false }` and process queued jobs with a worker or `processQueuedWrites()`.
14
+ In `legacy-s3`, writes are coordinated through Redis before they are flushed to S3. By default the high-level CRUD methods wait for the queued write to be processed so existing code can continue to behave synchronously. If you want fire-and-forget semantics, pass `{ wait: false }` and process queued jobs with a worker or `processQueuedWrites()`.
15
+
16
+ In `s3-files`, writes are immediate and synchronous. Queue APIs, worker APIs, and Redis-backed job tracking are intentionally unsupported.
10
17
 
11
18
  ## Install
12
19
 
@@ -14,81 +21,116 @@ Writes are coordinated through Redis before they are flushed to S3. By default t
14
21
  bun add @delma/fylo
15
22
  ```
16
23
 
24
+ ## Engine Selection
25
+
26
+ ```typescript
27
+ import Fylo from '@delma/fylo'
28
+
29
+ const legacy = new Fylo()
30
+
31
+ const s3Files = new Fylo({
32
+ engine: 's3-files',
33
+ s3FilesRoot: '/mnt/fylo'
34
+ })
35
+ ```
36
+
37
+ Static helpers such as `Fylo.createCollection()` and `Fylo.findDocs()` use environment defaults:
38
+
39
+ ```bash
40
+ export FYLO_STORAGE_ENGINE=s3-files
41
+ export FYLO_S3FILES_ROOT=/mnt/fylo
42
+ ```
43
+
17
44
  ## Environment Variables
18
45
 
19
- | Variable | Purpose |
20
- |----------|---------|
21
- | `BUCKET_PREFIX` | S3 bucket name prefix |
22
- | `S3_ACCESS_KEY_ID` / `AWS_ACCESS_KEY_ID` | S3 credentials |
23
- | `S3_SECRET_ACCESS_KEY` / `AWS_SECRET_ACCESS_KEY` | S3 credentials |
24
- | `S3_REGION` / `AWS_REGION` | S3 region |
25
- | `S3_ENDPOINT` / `AWS_ENDPOINT` | S3 endpoint (for LocalStack, MinIO, etc.) |
26
- | `REDIS_URL` | Redis connection URL used for pub/sub, document locks, and queued write coordination |
27
- | `FYLO_WRITE_MAX_ATTEMPTS` | Maximum retry attempts before a queued job is dead-lettered |
28
- | `FYLO_WRITE_RETRY_BASE_MS` | Base retry delay used for exponential backoff between recovery attempts |
29
- | `FYLO_WORKER_ID` | Optional stable identifier for a write worker process |
30
- | `FYLO_WORKER_BATCH_SIZE` | Number of queued jobs a worker pulls per read loop |
31
- | `FYLO_WORKER_BLOCK_MS` | Redis stream block time for waiting on new jobs |
32
- | `FYLO_WORKER_RECOVER_ON_START` | Whether the worker reclaims stale pending jobs on startup |
33
- | `FYLO_WORKER_RECOVER_IDLE_MS` | Minimum idle time before a pending job is reclaimed |
34
- | `FYLO_WORKER_STOP_WHEN_IDLE` | Exit the worker loop when no jobs are available |
35
- | `LOGGING` | Enable debug logging |
36
- | `STRICT` | Enable schema validation via CHEX |
46
+ | Variable | Purpose |
47
+ | ------------------------------------------------ | ------------------------------------------------------------------------------------ |
48
+ | `FYLO_STORAGE_ENGINE` | `legacy-s3` (default) or `s3-files` |
49
+ | `FYLO_S3FILES_ROOT` | Mounted S3 Files root directory used by the `s3-files` engine |
50
+ | `BUCKET_PREFIX` | S3 bucket name prefix |
51
+ | `S3_ACCESS_KEY_ID` / `AWS_ACCESS_KEY_ID` | S3 credentials |
52
+ | `S3_SECRET_ACCESS_KEY` / `AWS_SECRET_ACCESS_KEY` | S3 credentials |
53
+ | `S3_REGION` / `AWS_REGION` | S3 region |
54
+ | `S3_ENDPOINT` / `AWS_ENDPOINT` | S3 endpoint (for LocalStack, MinIO, etc.) |
55
+ | `REDIS_URL` | Redis connection URL used for pub/sub, document locks, and queued write coordination |
56
+ | `FYLO_WRITE_MAX_ATTEMPTS` | Maximum retry attempts before a queued job is dead-lettered |
57
+ | `FYLO_WRITE_RETRY_BASE_MS` | Base retry delay used for exponential backoff between recovery attempts |
58
+ | `FYLO_WORKER_ID` | Optional stable identifier for a write worker process |
59
+ | `FYLO_WORKER_BATCH_SIZE` | Number of queued jobs a worker pulls per read loop |
60
+ | `FYLO_WORKER_BLOCK_MS` | Redis stream block time for waiting on new jobs |
61
+ | `FYLO_WORKER_RECOVER_ON_START` | Whether the worker reclaims stale pending jobs on startup |
62
+ | `FYLO_WORKER_RECOVER_IDLE_MS` | Minimum idle time before a pending job is reclaimed |
63
+ | `FYLO_WORKER_STOP_WHEN_IDLE` | Exit the worker loop when no jobs are available |
64
+ | `LOGGING` | Enable debug logging |
65
+ | `STRICT` | Enable schema validation via CHEX |
66
+
67
+ ### S3 Files requirements
68
+
69
+ When `FYLO_STORAGE_ENGINE=s3-files`, Fylo expects:
70
+
71
+ - an already provisioned AWS S3 Files file system
72
+ - the mounted root directory to be available to the Bun process
73
+ - bucket versioning enabled on the underlying S3 bucket
74
+ - Linux/AWS compute assumptions that match AWS S3 Files mounting requirements
75
+
76
+ Fylo no longer talks to the S3 API directly in this mode, but S3 remains the underlying source of truth because that is how S3 Files works.
37
77
 
38
78
  ## Usage
39
79
 
40
80
  ### CRUD — NoSQL API
41
81
 
42
82
  ```typescript
43
- import Fylo from "@delma/fylo"
83
+ import Fylo from '@delma/fylo'
44
84
 
45
85
  const fylo = new Fylo()
46
86
 
47
87
  // Collections
48
- await Fylo.createCollection("users")
88
+ await Fylo.createCollection('users')
49
89
 
50
90
  // Create
51
- const _id = await fylo.putData<_user>("users", { name: "John Doe", age: 30 })
91
+ const _id = await fylo.putData<_user>('users', { name: 'John Doe', age: 30 })
52
92
 
53
93
  // Read one
54
- const user = await Fylo.getDoc<_user>("users", _id).once()
94
+ const user = await Fylo.getDoc<_user>('users', _id).once()
55
95
 
56
96
  // Read many
57
- for await (const doc of Fylo.findDocs<_user>("users", { $limit: 10 }).collect()) {
97
+ for await (const doc of Fylo.findDocs<_user>('users', { $limit: 10 }).collect()) {
58
98
  console.log(doc)
59
99
  }
60
100
 
61
101
  // Update one
62
- await fylo.patchDoc<_user>("users", { [_id]: { age: 31 } })
102
+ await fylo.patchDoc<_user>('users', { [_id]: { age: 31 } })
63
103
 
64
104
  // Update many
65
- const updated = await fylo.patchDocs<_user>("users", {
105
+ const updated = await fylo.patchDocs<_user>('users', {
66
106
  $where: { $ops: [{ age: { $gte: 30 } }] },
67
107
  $set: { age: 31 }
68
108
  })
69
109
 
70
110
  // Delete one
71
- await fylo.delDoc("users", _id)
111
+ await fylo.delDoc('users', _id)
72
112
 
73
113
  // Delete many
74
- const deleted = await fylo.delDocs<_user>("users", {
75
- $ops: [{ name: { $like: "%Doe%" } }]
114
+ const deleted = await fylo.delDocs<_user>('users', {
115
+ $ops: [{ name: { $like: '%Doe%' } }]
76
116
  })
77
117
 
78
118
  // Drop
79
- await Fylo.dropCollection("users")
119
+ await Fylo.dropCollection('users')
80
120
  ```
81
121
 
82
122
  ### Queued Writes
83
123
 
124
+ `legacy-s3` only.
125
+
84
126
  ```typescript
85
127
  const fylo = new Fylo()
86
128
 
87
129
  // Default behavior waits for the queued write to finish.
88
- const _id = await fylo.putData("users", { name: "John Doe" })
130
+ const _id = await fylo.putData('users', { name: 'John Doe' })
89
131
 
90
132
  // Async mode returns the queued job immediately.
91
- const queued = await fylo.putData("users", { name: "Jane Doe" }, { wait: false })
133
+ const queued = await fylo.putData('users', { name: 'Jane Doe' }, { wait: false })
92
134
 
93
135
  // Poll status if you need to track progress.
94
136
  const status = await fylo.getJobStatus(queued.jobId)
@@ -109,6 +151,8 @@ Operational helpers:
109
151
 
110
152
  ### Worker
111
153
 
154
+ `legacy-s3` only.
155
+
112
156
  Run a dedicated write worker when you want queued writes to be flushed outside the request path:
113
157
 
114
158
  ```bash
@@ -117,6 +161,28 @@ bun run worker
117
161
 
118
162
  The worker entrypoint lives at [worker.ts](./src/worker.ts) and continuously drains the Redis stream, recovers stale pending jobs on startup, and respects the retry/dead-letter settings above.
119
163
 
164
+ If `FYLO_STORAGE_ENGINE=s3-files`, `fylo.worker` exits with an explicit unsupported-engine error.
165
+
166
+ ### Migration
167
+
168
+ Move legacy collections into an S3 Files-backed root with:
169
+
170
+ ```bash
171
+ fylo.migrate users posts
172
+ ```
173
+
174
+ Programmatic usage:
175
+
176
+ ```typescript
177
+ import { migrateLegacyS3ToS3Files } from '@delma/fylo'
178
+
179
+ await migrateLegacyS3ToS3Files({
180
+ collections: ['users', 'posts'],
181
+ s3FilesRoot: '/mnt/fylo',
182
+ verify: true
183
+ })
184
+ ```
185
+
120
186
  ### CRUD — SQL API
121
187
 
122
188
  ```typescript
@@ -139,36 +205,45 @@ await fylo.executeSQL(`DROP TABLE users`)
139
205
 
140
206
  ```typescript
141
207
  // Equality
142
- { $ops: [{ status: { $eq: "active" } }] }
208
+ {
209
+ $ops: [{ status: { $eq: 'active' } }]
210
+ }
143
211
 
144
212
  // Not equal
145
- { $ops: [{ status: { $ne: "archived" } }] }
213
+ {
214
+ $ops: [{ status: { $ne: 'archived' } }]
215
+ }
146
216
 
147
217
  // Numeric range
148
- { $ops: [{ age: { $gte: 18, $lt: 65 } }] }
218
+ {
219
+ $ops: [{ age: { $gte: 18, $lt: 65 } }]
220
+ }
149
221
 
150
222
  // Pattern matching
151
- { $ops: [{ email: { $like: "%@gmail.com" } }] }
223
+ {
224
+ $ops: [{ email: { $like: '%@gmail.com' } }]
225
+ }
152
226
 
153
227
  // Array contains
154
- { $ops: [{ tags: { $contains: "urgent" } }] }
228
+ {
229
+ $ops: [{ tags: { $contains: 'urgent' } }]
230
+ }
155
231
 
156
232
  // Multiple ops use OR semantics — matches if any op is satisfied
157
- { $ops: [
158
- { status: { $eq: "active" } },
159
- { priority: { $gte: 5 } }
160
- ]}
233
+ {
234
+ $ops: [{ status: { $eq: 'active' } }, { priority: { $gte: 5 } }]
235
+ }
161
236
  ```
162
237
 
163
238
  ### Joins
164
239
 
165
240
  ```typescript
166
241
  const results = await Fylo.joinDocs<_post, _user>({
167
- $leftCollection: "posts",
168
- $rightCollection: "users",
169
- $mode: "inner", // "inner" | "left" | "right" | "outer"
170
- $on: { userId: { $eq: "id" } },
171
- $select: ["title", "name"],
242
+ $leftCollection: 'posts',
243
+ $rightCollection: 'users',
244
+ $mode: 'inner', // "inner" | "left" | "right" | "outer"
245
+ $on: { userId: { $eq: 'id' } },
246
+ $select: ['title', 'name'],
172
247
  $limit: 50
173
248
  })
174
249
  ```
@@ -177,17 +252,17 @@ const results = await Fylo.joinDocs<_post, _user>({
177
252
 
178
253
  ```typescript
179
254
  // Stream new/updated documents
180
- for await (const doc of Fylo.findDocs<_user>("users")) {
255
+ for await (const doc of Fylo.findDocs<_user>('users')) {
181
256
  console.log(doc)
182
257
  }
183
258
 
184
259
  // Stream deletions
185
- for await (const _id of Fylo.findDocs<_user>("users").onDelete()) {
186
- console.log("deleted:", _id)
260
+ for await (const _id of Fylo.findDocs<_user>('users').onDelete()) {
261
+ console.log('deleted:', _id)
187
262
  }
188
263
 
189
264
  // Watch a single document
190
- for await (const doc of Fylo.getDoc<_user>("users", _id)) {
265
+ for await (const doc of Fylo.getDoc<_user>('users', _id)) {
191
266
  console.log(doc)
192
267
  }
193
268
  ```
@@ -198,10 +273,14 @@ for await (const doc of Fylo.getDoc<_user>("users", _id)) {
198
273
  const fylo = new Fylo()
199
274
 
200
275
  // Import from JSON array or NDJSON URL
201
- const count = await fylo.importBulkData<_user>("users", new URL("https://example.com/users.json"), 1000)
276
+ const count = await fylo.importBulkData<_user>(
277
+ 'users',
278
+ new URL('https://example.com/users.json'),
279
+ 1000
280
+ )
202
281
 
203
282
  // Export all documents
204
- for await (const doc of Fylo.exportBulkData<_user>("users")) {
283
+ for await (const doc of Fylo.exportBulkData<_user>('users')) {
205
284
  console.log(doc)
206
285
  }
207
286
  ```
@@ -214,7 +293,7 @@ Fylo still keeps best-effort rollback data for writes performed by the current i
214
293
 
215
294
  ```typescript
216
295
  const fylo = new Fylo()
217
- await fylo.putData("users", { name: "test" })
296
+ await fylo.putData('users', { name: 'test' })
218
297
  await fylo.rollback() // undoes all writes in this instance
219
298
  ```
220
299
 
@@ -272,13 +351,13 @@ Fylo is a low-level storage abstraction. The following must be implemented by th
272
351
 
273
352
  ### Secure configuration
274
353
 
275
- | Concern | Guidance |
276
- |---------|----------|
277
- | AWS credentials | Never commit credentials to version control. Use IAM instance roles or inject via CI secrets. Rotate any credentials that have been exposed. |
278
- | `ENCRYPTION_KEY` | Must be at least 32 characters. Use a high-entropy random value. |
279
- | `CIPHER_SALT` | Set a unique random value per deployment to prevent cross-instance precomputation attacks. |
280
- | `REDIS_URL` | Always set explicitly. Use `rediss://` (TLS) in production with authentication credentials in the URL. |
281
- | Collection names | Must match `^[a-z0-9][a-z0-9\-]*[a-z0-9]$`. Names are validated before any shell or S3 operation. |
354
+ | Concern | Guidance |
355
+ | ---------------- | -------------------------------------------------------------------------------------------------------------------------------------------- |
356
+ | AWS credentials | Never commit credentials to version control. Use IAM instance roles or inject via CI secrets. Rotate any credentials that have been exposed. |
357
+ | `ENCRYPTION_KEY` | Must be at least 32 characters. Use a high-entropy random value. |
358
+ | `CIPHER_SALT` | Set a unique random value per deployment to prevent cross-instance precomputation attacks. |
359
+ | `REDIS_URL` | Always set explicitly. Use `rediss://` (TLS) in production with authentication credentials in the URL. |
360
+ | Collection names | Must match `^[a-z0-9][a-z0-9\-]*[a-z0-9]$`. Names are validated before any shell or S3 operation. |
282
361
 
283
362
  ### Encrypted fields
284
363
 
package/eslint.config.js CHANGED
@@ -4,12 +4,10 @@ import prettierConfig from 'eslint-config-prettier'
4
4
 
5
5
  export default [
6
6
  {
7
- files: ['src/**/*.ts', 'tests/**/*.ts'],
7
+ files: ['src/**/*.ts'],
8
8
  languageOptions: {
9
9
  parser: tsParser,
10
- parserOptions: {
11
- project: './tsconfig.json'
12
- }
10
+ parserOptions: {}
13
11
  },
14
12
  plugins: {
15
13
  '@typescript-eslint': tsPlugin
@@ -21,6 +19,12 @@ export default [
21
19
  '@typescript-eslint/no-unused-vars': ['error', { argsIgnorePattern: '^_', varsIgnorePattern: '^_' }]
22
20
  }
23
21
  },
22
+ {
23
+ files: ['tests/**/*.js'],
24
+ rules: {
25
+ 'no-unused-vars': ['error', { argsIgnorePattern: '^_', varsIgnorePattern: '^_' }]
26
+ }
27
+ },
24
28
  prettierConfig,
25
29
  {
26
30
  ignores: ['bin/**', 'node_modules/**', '**/*.d.ts']
package/package.json CHANGED
@@ -1,18 +1,19 @@
1
1
  {
2
2
  "name": "@delma/fylo",
3
- "version": "1.1.1",
3
+ "version": "2.0.0",
4
4
  "main": "./dist/index.js",
5
5
  "types": "./dist/types/index.d.ts",
6
6
  "bin": {
7
7
  "fylo.query": "./dist/cli/index.js",
8
- "fylo.worker": "./dist/worker.js"
8
+ "fylo.worker": "./dist/worker.js",
9
+ "fylo.migrate": "./dist/migrate-cli.js"
9
10
  },
10
11
  "scripts": {
11
12
  "build": "tsc",
12
13
  "test": "bun test",
13
- "typecheck": "tsc --noEmit",
14
+ "typecheck": "tsc -p tsconfig.typecheck.json",
14
15
  "worker": "bun run ./src/worker.ts",
15
- "lint": "eslint src tests",
16
+ "lint": "prettier --check \"src/**/*.{ts,d.ts}\" \"tests/**/*.js\" README.md",
16
17
  "format": "prettier --write src tests"
17
18
  },
18
19
  "devDependencies": {
@@ -22,11 +23,12 @@
22
23
  "@typescript-eslint/parser": "^8.0.0",
23
24
  "eslint": "^9.0.0",
24
25
  "eslint-config-prettier": "^9.0.0",
25
- "prettier": "^3.0.0"
26
+ "prettier": "^3.0.0",
27
+ "typescript": "^5.9.3"
26
28
  },
27
29
  "dependencies": {
28
- "@delma/ttid": "1.3.4",
29
- "@delma/chex": "0.3.3"
30
+ "@delma/chex": "0.3.3",
31
+ "@delma/ttid": "1.3.4"
30
32
  },
31
33
  "type": "module",
32
34
  "peerDependencies": {
package/src/CLI CHANGED
@@ -4,34 +4,36 @@ import Silo from '.'
4
4
 
5
5
  const SQL = process.argv[process.argv.length - 1]
6
6
 
7
- const op = SQL.match(/^((?:SELECT|select)|(?:INSERT|insert)|(?:UPDATE|update)|(?:DELETE|delete)|(?:CREATE|create)|(?:DROP|drop))/i)
7
+ const op = SQL.match(
8
+ /^((?:SELECT|select)|(?:INSERT|insert)|(?:UPDATE|update)|(?:DELETE|delete)|(?:CREATE|create)|(?:DROP|drop))/i
9
+ )
8
10
 
9
- if(!op) throw new Error("Missing SQL Operation")
11
+ if (!op) throw new Error('Missing SQL Operation')
10
12
 
11
13
  const res = await new Silo().executeSQL(SQL)
12
14
 
13
15
  const cmnd = op.shift()!
14
16
 
15
- switch(cmnd.toUpperCase()) {
16
- case "CREATE":
17
- console.log("Successfully created schema")
17
+ switch (cmnd.toUpperCase()) {
18
+ case 'CREATE':
19
+ console.log('Successfully created schema')
18
20
  break
19
- case "DROP":
20
- console.log("Successfully dropped schema")
21
+ case 'DROP':
22
+ console.log('Successfully dropped schema')
21
23
  break
22
- case "SELECT":
23
- if(typeof res === 'object' && !Array.isArray(res)) console.format(res)
24
+ case 'SELECT':
25
+ if (typeof res === 'object' && !Array.isArray(res)) console.format(res)
24
26
  else console.log(res)
25
27
  break
26
- case "INSERT":
28
+ case 'INSERT':
27
29
  console.log(res)
28
30
  break
29
- case "UPDATE":
31
+ case 'UPDATE':
30
32
  console.log(`Successfully updated ${res} document(s)`)
31
33
  break
32
- case "DELETE":
34
+ case 'DELETE':
33
35
  console.log(`Successfully deleted ${res} document(s)`)
34
36
  break
35
37
  default:
36
- throw new Error("Invalid Operation: " + cmnd)
37
- }
38
+ throw new Error('Invalid Operation: ' + cmnd)
39
+ }
@@ -19,7 +19,6 @@
19
19
  */
20
20
 
21
21
  export class Cipher {
22
-
23
22
  private static key: CryptoKey | null = null
24
23
  private static hmacKey: CryptoKey | null = null
25
24
 
@@ -72,12 +71,19 @@ export class Cipher {
72
71
 
73
72
  const cipherSalt = process.env.CIPHER_SALT
74
73
  if (!cipherSalt) {
75
- console.warn('CIPHER_SALT is not set. Using default salt is insecure for multi-deployment use. Set CIPHER_SALT to a unique random value.')
74
+ console.warn(
75
+ 'CIPHER_SALT is not set. Using default salt is insecure for multi-deployment use. Set CIPHER_SALT to a unique random value.'
76
+ )
76
77
  }
77
78
 
78
79
  // Derive 48 bytes: 32 for AES key + 16 for HMAC key
79
80
  const bits = await crypto.subtle.deriveBits(
80
- { name: 'PBKDF2', salt: encoder.encode(cipherSalt ?? 'fylo-cipher'), iterations: 100000, hash: 'SHA-256' },
81
+ {
82
+ name: 'PBKDF2',
83
+ salt: encoder.encode(cipherSalt ?? 'fylo-cipher'),
84
+ iterations: 100000,
85
+ hash: 'SHA-256'
86
+ },
81
87
  keyMaterial,
82
88
  384
83
89
  )
@@ -133,7 +139,7 @@ export class Cipher {
133
139
  const encoder = new TextEncoder()
134
140
 
135
141
  const encrypted = await crypto.subtle.encrypt(
136
- { name: 'AES-CBC', iv },
142
+ { name: 'AES-CBC', iv: iv as any },
137
143
  Cipher.key,
138
144
  encoder.encode(value)
139
145
  )
@@ -157,9 +163,9 @@ export class Cipher {
157
163
 
158
164
  // Restore standard base64
159
165
  const b64 = encoded.replace(/-/g, '+').replace(/_/g, '/')
160
- const padded = b64 + '='.repeat((4 - b64.length % 4) % 4)
166
+ const padded = b64 + '='.repeat((4 - (b64.length % 4)) % 4)
161
167
 
162
- const combined = Uint8Array.from(atob(padded), c => c.charCodeAt(0))
168
+ const combined = Uint8Array.from(atob(padded), (c) => c.charCodeAt(0))
163
169
  const iv = combined.slice(0, 16)
164
170
  const ciphertext = combined.slice(16)
165
171