appwrite-ctl 1.0.1 → 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +98 -40
- package/dist/cli/index.js +130 -22
- package/dist/lib/cli.js +13 -8
- package/dist/lib/config.d.ts +0 -1
- package/dist/lib/config.js +17 -8
- package/dist/lib/diagram.js +66 -15
- package/dist/lib/runner.js +10 -20
- package/dist/lib/security.d.ts +26 -0
- package/dist/lib/security.js +95 -0
- package/dist/types/index.d.ts +19 -5
- package/package.json +1 -1
package/README.md
CHANGED
|
@@ -8,8 +8,9 @@ A Node.js (ESM) package to manage Appwrite infrastructure via Version Snapshots.
|
|
|
8
8
|
- **CLI-based Snapshots**: Uses `appwrite-cli` pull/push for reliable schema synchronization.
|
|
9
9
|
- **Data Migrations**: Execute TypeScript or JavaScript migration scripts (`up` and `down`) using the Node.js SDK.
|
|
10
10
|
- **State Management**: Tracks applied migrations in a dedicated Appwrite collection (`system.migrations`).
|
|
11
|
-
- **Backup Hooks**: Supports executing external backup commands before migration.
|
|
12
11
|
- **Attribute Polling**: Ensures schema attributes are `available` before running data scripts.
|
|
12
|
+
- **Security Rules & Exceptions Ledger**: Define security rules for collections and buckets; document intentional exceptions with author and justification — all stored in `appwrite-ctl.config.json` and surfaced in generated docs.
|
|
13
|
+
- **Schema Documentation**: Auto-generate ER diagrams and detailed collection docs from any snapshot.
|
|
13
14
|
|
|
14
15
|
## Installation
|
|
15
16
|
|
|
@@ -35,7 +36,6 @@ npm install github:bfbechlin/appwrite-ctl
|
|
|
35
36
|
APPWRITE_ENDPOINT=https://cloud.appwrite.io/v1
|
|
36
37
|
APPWRITE_PROJECT_ID=your_project_id
|
|
37
38
|
APPWRITE_API_KEY=your_api_key
|
|
38
|
-
BACKUP_COMMAND="docker exec appwrite-mariadb mysqldump ..." # Optional
|
|
39
39
|
```
|
|
40
40
|
|
|
41
41
|
## Architecture
|
|
@@ -68,8 +68,9 @@ npx appwrite-ctl init
|
|
|
68
68
|
|
|
69
69
|
Creates:
|
|
70
70
|
|
|
71
|
+
- `appwrite/` directory
|
|
71
72
|
- `appwrite/migration/` directory
|
|
72
|
-
- `appwrite/
|
|
73
|
+
- `appwrite/appwrite-ctl.config.json` — unified configuration file (migration settings + security rules)
|
|
73
74
|
|
|
74
75
|
### 2. Setup System Collection
|
|
75
76
|
|
|
@@ -87,23 +88,25 @@ This command:
|
|
|
87
88
|
|
|
88
89
|
1. Creates `appwrite/migration/vN/` (auto-increments version).
|
|
89
90
|
2. Generates an `index.ts` file with a boilerplate migration script.
|
|
90
|
-
3.
|
|
91
|
+
3. Pulls the current `appwrite.config.json` from Appwrite via CLI.
|
|
92
|
+
4. Auto-generates `docs.md` for the new version and updates `appwrite/docs.md`.
|
|
91
93
|
|
|
92
94
|
**Folder Structure:**
|
|
93
95
|
|
|
94
96
|
```
|
|
95
97
|
/appwrite
|
|
96
|
-
|
|
98
|
+
appwrite-ctl.config.json <-- Unified config (migration + security rules/exceptions)
|
|
99
|
+
appwrite.config.json <-- Appwrite CLI snapshot (latest, temporary)
|
|
100
|
+
docs.md <-- Generated by `docs` command
|
|
97
101
|
/migration
|
|
98
|
-
config.json
|
|
99
102
|
/v1
|
|
100
103
|
index.ts <-- Migration logic (SDK)
|
|
101
104
|
appwrite.config.json <-- Schema snapshot (CLI format)
|
|
102
|
-
|
|
105
|
+
docs.md <-- Auto-generated on create/update
|
|
103
106
|
/v2
|
|
104
107
|
index.ts
|
|
105
108
|
appwrite.config.json
|
|
106
|
-
|
|
109
|
+
docs.md
|
|
107
110
|
```
|
|
108
111
|
|
|
109
112
|
### 4. Edit Migration Logic
|
|
@@ -114,7 +117,6 @@ import { Migration } from 'appwrite-ctl';
|
|
|
114
117
|
const migration: Migration = {
|
|
115
118
|
id: 'uuid-generated-id',
|
|
116
119
|
description: 'Update finance schema',
|
|
117
|
-
requiresBackup: true,
|
|
118
120
|
|
|
119
121
|
up: async ({ client, databases, log }) => {
|
|
120
122
|
log('Seeding initial data...');
|
|
@@ -151,11 +153,10 @@ npx appwrite-ctl migrations run
|
|
|
151
153
|
The runner performs these steps for each pending version:
|
|
152
154
|
|
|
153
155
|
1. **Configure CLI**: Sets endpoint, project-id, and API key on appwrite-cli.
|
|
154
|
-
2. **
|
|
155
|
-
3. **
|
|
156
|
-
4. **
|
|
157
|
-
5. **
|
|
158
|
-
6. **Finalization**: Records the migration as applied.
|
|
156
|
+
2. **Schema Push**: Pushes the version's `appwrite.config.json` via CLI (tables, buckets, teams, topics).
|
|
157
|
+
3. **Polling**: Waits for all schema attributes to become `available` (via SDK), with a 2-minute timeout per collection.
|
|
158
|
+
4. **Execution**: Runs the `up` function defined in `index.ts` (via SDK).
|
|
159
|
+
5. **Finalization**: Records the migration as applied.
|
|
159
160
|
|
|
160
161
|
### 7. Check Status
|
|
161
162
|
|
|
@@ -166,11 +167,11 @@ npx appwrite-ctl migrations status
|
|
|
166
167
|
### 8. Generate Schema Docs
|
|
167
168
|
|
|
168
169
|
```bash
|
|
169
|
-
#
|
|
170
|
-
npx appwrite-ctl
|
|
170
|
+
# Pull latest state from Appwrite and generate docs → appwrite/docs.md
|
|
171
|
+
npx appwrite-ctl docs
|
|
171
172
|
|
|
172
|
-
# Generate from a
|
|
173
|
-
npx appwrite-ctl
|
|
173
|
+
# Generate from a stored local snapshot (no Appwrite connection needed)
|
|
174
|
+
npx appwrite-ctl docs v1
|
|
174
175
|
```
|
|
175
176
|
|
|
176
177
|
Generates a Markdown file with:
|
|
@@ -178,18 +179,71 @@ Generates a Markdown file with:
|
|
|
178
179
|
- **ER diagrams** (Mermaid) for each database (system database excluded)
|
|
179
180
|
- **Collection details**: columns, types, defaults, indexes, permissions, relationships
|
|
180
181
|
- **Buckets**: storage configuration summary
|
|
182
|
+
- **Security exception callouts** inline where exceptions have been recorded
|
|
181
183
|
|
|
182
|
-
> **Note:**
|
|
184
|
+
> **Note:** Docs are also auto-generated inside the version folder (`vN/docs.md`) when running `migrations create` or `migrations update`.
|
|
183
185
|
|
|
184
|
-
##
|
|
186
|
+
## Security Exceptions Ledger
|
|
187
|
+
|
|
188
|
+
When a resource intentionally deviates from security best-practices, document it explicitly in the `security.exceptions` block of `appwrite-ctl.config.json` — it persists across all snapshot operations.
|
|
189
|
+
|
|
190
|
+
> [!IMPORTANT]
|
|
191
|
+
> `appwrite-ctl.config.json` should be committed to version control — it is the team's audit trail for security exceptions.
|
|
192
|
+
|
|
193
|
+
### Integration with Docs
|
|
194
|
+
|
|
195
|
+
When `docs` (or `migrations create` / `migrations update`) generates `docs.md`, it reads `security.exceptions` and injects a `> [!WARNING]` callout after each affected collection or bucket.
|
|
196
|
+
|
|
197
|
+
### Adding Exceptions via CLI
|
|
198
|
+
|
|
199
|
+
```bash
|
|
200
|
+
npx appwrite-ctl exceptions add
|
|
201
|
+
```
|
|
202
|
+
|
|
203
|
+
Walk through the prompts — the rule is selected from the configured rules list, and the author is resolved automatically from `git config user.name` or your OS username.
|
|
204
|
+
|
|
205
|
+
### Listing Exceptions
|
|
206
|
+
|
|
207
|
+
```bash
|
|
208
|
+
npx appwrite-ctl exceptions list
|
|
209
|
+
```
|
|
210
|
+
|
|
211
|
+
Prints a formatted table of every recorded exception grouped by type and resource ID.
|
|
212
|
+
|
|
213
|
+
---
|
|
214
|
+
|
|
215
|
+
## Configuration (`appwrite/appwrite-ctl.config.json`)
|
|
216
|
+
|
|
217
|
+
All tool configuration lives in a single file at `appwrite/appwrite-ctl.config.json`. It is created automatically by `appwrite-ctl init`.
|
|
185
218
|
|
|
186
219
|
```json
|
|
187
220
|
{
|
|
188
221
|
"collection": "migrations",
|
|
189
|
-
"database": "system"
|
|
222
|
+
"database": "system",
|
|
223
|
+
"security": {
|
|
224
|
+
"rules": {
|
|
225
|
+
"require-row-security": { "enabled": true, "severity": "error" },
|
|
226
|
+
"forbid-role-all-write": { "enabled": true, "severity": "error" },
|
|
227
|
+
"forbid-role-all-delete": { "enabled": true, "severity": "error" },
|
|
228
|
+
"forbid-role-all-read": { "enabled": true, "severity": "warn" },
|
|
229
|
+
"forbid-role-all-create": { "enabled": true, "severity": "warn" },
|
|
230
|
+
"require-file-security": { "enabled": true, "severity": "warn" }
|
|
231
|
+
},
|
|
232
|
+
"exceptions": {
|
|
233
|
+
"collections": {},
|
|
234
|
+
"buckets": {}
|
|
235
|
+
}
|
|
236
|
+
}
|
|
190
237
|
}
|
|
191
238
|
```
|
|
192
239
|
|
|
240
|
+
| Field | Description |
|
|
241
|
+
| :-------------------- | :-------------------------------------------------------------------------------------- |
|
|
242
|
+
| `collection` | ID of the migrations tracking collection. |
|
|
243
|
+
| `database` | ID of the database where migrations are tracked (default: `system`). |
|
|
244
|
+
| `security.rules` | Map of rule IDs to `{ enabled, severity }`. Severity: `"error"` \| `"warn"` \| `"off"`. |
|
|
245
|
+
| `security.exceptions` | Documented bypasses per resource (see **Security Exceptions Ledger** above). |
|
|
246
|
+
|
|
193
247
|
## CI/CD & Automated Deployment
|
|
194
248
|
|
|
195
249
|
1. Install Appwrite CLI: `npm install -g appwrite-cli`
|
|
@@ -205,31 +259,33 @@ Generates a Markdown file with:
|
|
|
205
259
|
|
|
206
260
|
## CLI Commands
|
|
207
261
|
|
|
208
|
-
| Command | Description
|
|
209
|
-
| :---------------------------- |
|
|
210
|
-
| `init` | Initialize the project folder structure and config.
|
|
211
|
-
| `migrations setup` | Create the `system` database and `migrations` collection.
|
|
212
|
-
| `migrations create` | Create a new migration version pulling the latest snapshot from Appwrite via CLI.
|
|
213
|
-
| `migrations update <version>` | Update a version's snapshot by pulling from Appwrite via CLI.
|
|
214
|
-
| `migrations run` | Execute all pending migrations in order.
|
|
215
|
-
| `migrations status` | List applied and pending migrations.
|
|
216
|
-
| `
|
|
262
|
+
| Command | Description |
|
|
263
|
+
| :---------------------------- | :------------------------------------------------------------------------------------------------------------------------------------------------------------ |
|
|
264
|
+
| `init` | Initialize the project folder structure and config. |
|
|
265
|
+
| `migrations setup` | Create the `system` database and `migrations` collection. |
|
|
266
|
+
| `migrations create` | Create a new migration version pulling the latest snapshot from Appwrite via CLI. |
|
|
267
|
+
| `migrations update <version>` | Update a version's snapshot by pulling from Appwrite via CLI. |
|
|
268
|
+
| `migrations run` | Execute all pending migrations in order. |
|
|
269
|
+
| `migrations status` | List applied and pending migrations. |
|
|
270
|
+
| `docs [version]` | Generate `docs.md`. Without a version, pulls live from Appwrite. With a version (e.g. `v1`), reads the stored local snapshot — no Appwrite connection needed. |
|
|
271
|
+
| `exceptions add` | Interactively add a security exception entry to `appwrite-ctl.config.json`. |
|
|
272
|
+
| `exceptions list` | List all security exceptions recorded in `appwrite-ctl.config.json`. |
|
|
217
273
|
|
|
218
274
|
# AI Rules
|
|
219
275
|
|
|
220
276
|
## Understanding the Data Models Layer
|
|
221
277
|
|
|
222
|
-
📌 `
|
|
278
|
+
📌 `docs.md` — The Source of Truth
|
|
223
279
|
|
|
224
280
|
The most important file for understanding the application's **data model** is:
|
|
225
281
|
|
|
226
282
|
```
|
|
227
|
-
appwrite/
|
|
283
|
+
appwrite/docs.md
|
|
228
284
|
```
|
|
229
285
|
|
|
230
|
-
This is an **auto-generated** Markdown file that documents the **current state** of every database, collection, attribute, relationship, index, and storage bucket in the Appwrite project. It is generated from the latest `appwrite.config.json` snapshot via the `
|
|
286
|
+
This is an **auto-generated** Markdown file that documents the **current state** of every database, collection, attribute, relationship, index, and storage bucket in the Appwrite project. It is generated from the latest `appwrite.config.json` snapshot via the `docs` command.
|
|
231
287
|
|
|
232
|
-
**When you need to understand the data model — always read `appwrite/
|
|
288
|
+
**When you need to understand the data model — always read `appwrite/docs.md` first.**
|
|
233
289
|
|
|
234
290
|
It contains:
|
|
235
291
|
|
|
@@ -240,6 +296,7 @@ It contains:
|
|
|
240
296
|
- Indexes: type (unique, key, fulltext), columns, and sort orders.
|
|
241
297
|
- Permissions: read/write/create/delete access rules.
|
|
242
298
|
- **Buckets** — storage buckets with max file size, extensions, compression, encryption, and antivirus settings.
|
|
299
|
+
- **Security exception callouts** — `[!WARNING]` blocks embedded next to any resource with a recorded bypass.
|
|
243
300
|
|
|
244
301
|
## Migration Commands
|
|
245
302
|
|
|
@@ -251,13 +308,14 @@ This project uses `appwrite-ctl` to manage schema migrations. The available comm
|
|
|
251
308
|
| `appwrite-ctl migrations update <version>` | Pull the current Appwrite state and update a version's snapshot. |
|
|
252
309
|
| `appwrite-ctl migrations run` | Execute all pending migrations in order (push schema → poll attributes → run script). |
|
|
253
310
|
| `appwrite-ctl migrations status` | List applied and pending migrations. |
|
|
254
|
-
| `appwrite-ctl
|
|
311
|
+
| `appwrite-ctl docs` | Pull the current Appwrite state and generate/regenerate `docs.md`. |
|
|
312
|
+
| `appwrite-ctl docs <version>` | Generate `docs.md` from a stored local snapshot (no Appwrite connection needed). |
|
|
255
313
|
|
|
256
314
|
Each migration version lives in `appwrite/migration/vN/` and contains:
|
|
257
315
|
|
|
258
316
|
- **`appwrite.config.json`** — the schema snapshot (Appwrite CLI format).
|
|
259
317
|
- **`index.ts`** — the migration script with `up` (and optional `down`) functions.
|
|
260
|
-
- **`
|
|
318
|
+
- **`docs.md`** — auto-generated docs for that version's snapshot.
|
|
261
319
|
|
|
262
320
|
## How to Handle Data Model Changes
|
|
263
321
|
|
|
@@ -278,11 +336,11 @@ When a change to the data model is needed (e.g. adding a collection, modifying a
|
|
|
278
336
|
4. **Regenerate the schema docs:**
|
|
279
337
|
|
|
280
338
|
```bash
|
|
281
|
-
npx appwrite-ctl
|
|
339
|
+
npx appwrite-ctl docs
|
|
282
340
|
```
|
|
283
341
|
|
|
284
|
-
This updates
|
|
342
|
+
This updates `appwrite/docs.md` from the latest Appwrite state.
|
|
285
343
|
|
|
286
|
-
5. **Verify** the updated `appwrite/
|
|
344
|
+
5. **Verify** the updated `appwrite/docs.md` to confirm the changes are correct.
|
|
287
345
|
|
|
288
|
-
> ⚠️ **Never edit `
|
|
346
|
+
> ⚠️ **Never edit `docs.md` files manually** — they are auto-generated. Always modify the `appwrite.config.json` snapshot and run `docs` to regenerate.
|
package/dist/cli/index.js
CHANGED
|
@@ -9,6 +9,7 @@ import { loadConfig } from '../lib/config.js';
|
|
|
9
9
|
import { createAppwriteClient, ensureMigrationCollection, getAppliedMigrations, } from '../lib/appwrite.js';
|
|
10
10
|
import { configureClient, pullSnapshot, getSnapshotFilename } from '../lib/cli.js';
|
|
11
11
|
import { generateSchemaDoc } from '../lib/diagram.js';
|
|
12
|
+
import { loadSecurityLedger, saveSecurityLedger, resolveAuthor, DEFAULT_RULES, } from '../lib/security.js';
|
|
12
13
|
const program = new Command();
|
|
13
14
|
const generateDocs = (snapshotPath, version, outputDir) => {
|
|
14
15
|
if (!fs.existsSync(snapshotPath))
|
|
@@ -17,9 +18,9 @@ const generateDocs = (snapshotPath, version, outputDir) => {
|
|
|
17
18
|
if (!fs.existsSync(outputDir)) {
|
|
18
19
|
fs.mkdirSync(outputDir, { recursive: true });
|
|
19
20
|
}
|
|
20
|
-
const outputPath = path.join(outputDir, '
|
|
21
|
+
const outputPath = path.join(outputDir, 'docs.md');
|
|
21
22
|
fs.writeFileSync(outputPath, markdown);
|
|
22
|
-
console.log(chalk.green(`
|
|
23
|
+
console.log(chalk.green(`Docs updated at ${outputPath}`));
|
|
23
24
|
};
|
|
24
25
|
program
|
|
25
26
|
.name('appwrite-ctl')
|
|
@@ -32,21 +33,25 @@ program
|
|
|
32
33
|
const rootDir = process.cwd();
|
|
33
34
|
const appwriteDir = path.join(rootDir, 'appwrite');
|
|
34
35
|
const migrationDir = path.join(appwriteDir, 'migration');
|
|
35
|
-
const
|
|
36
|
+
const ctlConfigPath = path.join(appwriteDir, 'appwrite-ctl.config.json');
|
|
36
37
|
if (!fs.existsSync(appwriteDir))
|
|
37
38
|
fs.mkdirSync(appwriteDir);
|
|
38
39
|
if (!fs.existsSync(migrationDir))
|
|
39
40
|
fs.mkdirSync(migrationDir);
|
|
40
|
-
if (!fs.existsSync(
|
|
41
|
+
if (!fs.existsSync(ctlConfigPath)) {
|
|
41
42
|
const config = {
|
|
42
43
|
collection: 'migrations',
|
|
43
44
|
database: 'system',
|
|
45
|
+
security: {
|
|
46
|
+
rules: DEFAULT_RULES,
|
|
47
|
+
exceptions: {},
|
|
48
|
+
},
|
|
44
49
|
};
|
|
45
|
-
fs.writeFileSync(
|
|
46
|
-
console.log(chalk.green('Created appwrite/
|
|
50
|
+
fs.writeFileSync(ctlConfigPath, JSON.stringify(config, null, 2) + '\n');
|
|
51
|
+
console.log(chalk.green('Created appwrite/appwrite-ctl.config.json'));
|
|
47
52
|
}
|
|
48
53
|
else {
|
|
49
|
-
console.log(chalk.yellow('
|
|
54
|
+
console.log(chalk.yellow('appwrite-ctl.config.json already exists — not overwritten.'));
|
|
50
55
|
}
|
|
51
56
|
console.log(chalk.green('Initialization complete.'));
|
|
52
57
|
});
|
|
@@ -91,7 +96,6 @@ migrations
|
|
|
91
96
|
const migration: Migration = {
|
|
92
97
|
id: "${uuidv4()}",
|
|
93
98
|
description: "${name}",
|
|
94
|
-
requiresBackup: false,
|
|
95
99
|
up: async ({ client, databases, log, error }) => {
|
|
96
100
|
log("Executing up migration for ${name}");
|
|
97
101
|
// Write your migration logic here
|
|
@@ -151,7 +155,7 @@ migrations
|
|
|
151
155
|
console.log(chalk.green(`Successfully updated snapshot for ${version}`));
|
|
152
156
|
const snapshotFilename = getSnapshotFilename();
|
|
153
157
|
generateDocs(path.join(versionPath, snapshotFilename), version, versionPath);
|
|
154
|
-
console.log(chalk.green(`Successfully updated
|
|
158
|
+
console.log(chalk.green(`Successfully updated docs.md for ${version}`));
|
|
155
159
|
}
|
|
156
160
|
catch (error) {
|
|
157
161
|
console.error(chalk.red(`Failed to update snapshot: ${error.message}`));
|
|
@@ -210,22 +214,42 @@ migrations
|
|
|
210
214
|
process.exit(1);
|
|
211
215
|
}
|
|
212
216
|
});
|
|
213
|
-
|
|
214
|
-
.command('docs')
|
|
215
|
-
.description('
|
|
216
|
-
|
|
217
|
+
program
|
|
218
|
+
.command('docs [version]')
|
|
219
|
+
.description('Generate schema documentation with ER diagrams. Optionally pass a version (e.g. v1) to ' +
|
|
220
|
+
'generate docs from a stored snapshot instead of pulling from Appwrite.')
|
|
221
|
+
.action(async (version) => {
|
|
217
222
|
try {
|
|
218
223
|
const options = program.opts();
|
|
219
|
-
const config = loadConfig(options.env);
|
|
220
|
-
console.log(chalk.blue(`Pulling latest schema from Appwrite to project root...`));
|
|
221
|
-
await configureClient(config);
|
|
222
|
-
const snapshotPath = await pullSnapshot();
|
|
223
|
-
console.log(chalk.blue('Generating documentation...'));
|
|
224
224
|
const appwriteDir = path.join(process.cwd(), 'appwrite');
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
fs.
|
|
225
|
+
if (version) {
|
|
226
|
+
// Use stored snapshot for the given version without hitting Appwrite.
|
|
227
|
+
const versionPath = path.join(appwriteDir, 'migration', version);
|
|
228
|
+
if (!fs.existsSync(versionPath)) {
|
|
229
|
+
console.error(chalk.red(`Version directory '${version}' not found.`));
|
|
230
|
+
process.exit(1);
|
|
231
|
+
}
|
|
232
|
+
const snapshotFilename = getSnapshotFilename();
|
|
233
|
+
const snapshotPath = path.join(versionPath, snapshotFilename);
|
|
234
|
+
if (!fs.existsSync(snapshotPath)) {
|
|
235
|
+
console.error(chalk.red(`No snapshot found for ${version}.`));
|
|
236
|
+
process.exit(1);
|
|
237
|
+
}
|
|
238
|
+
console.log(chalk.blue(`Generating docs from stored snapshot for ${version}...`));
|
|
239
|
+
generateDocs(snapshotPath, version, appwriteDir);
|
|
240
|
+
generateDocs(snapshotPath, version, versionPath);
|
|
241
|
+
}
|
|
242
|
+
else {
|
|
243
|
+
const config = loadConfig(options.env);
|
|
244
|
+
console.log(chalk.blue(`Pulling latest schema from Appwrite to project root...`));
|
|
245
|
+
await configureClient(config);
|
|
246
|
+
const snapshotPath = await pullSnapshot();
|
|
247
|
+
console.log(chalk.blue('Generating documentation...'));
|
|
248
|
+
generateDocs(snapshotPath, 'latest', appwriteDir);
|
|
249
|
+
// Cleanup the temporary snapshot pulled to root
|
|
250
|
+
if (fs.existsSync(snapshotPath)) {
|
|
251
|
+
fs.unlinkSync(snapshotPath);
|
|
252
|
+
}
|
|
229
253
|
}
|
|
230
254
|
}
|
|
231
255
|
catch (error) {
|
|
@@ -233,4 +257,88 @@ migrations
|
|
|
233
257
|
process.exit(1);
|
|
234
258
|
}
|
|
235
259
|
});
|
|
260
|
+
// Resource categories a security exception can be recorded against.
const RESOURCE_TYPES = ['Collection', 'Bucket'];
// NOTE: the security ledger lives in the unified appwrite-ctl.config.json
// (see `init`), not in a standalone security.json — all user-facing text
// below must reference the unified file.
const exceptions = program
    .command('exceptions')
    .description('Manage security exception entries in appwrite-ctl.config.json');
exceptions
    .command('add')
    .description('Interactively add a new security exception entry to appwrite/appwrite-ctl.config.json')
    .action(async () => {
        // Lazy-load inquirer so the dependency is only paid for interactive use.
        const { default: inquirer } = await import('inquirer');
        const appwriteDir = path.join(process.cwd(), 'appwrite');
        const ledger = loadSecurityLedger(appwriteDir);
        // Author is resolved automatically (git config user.name or OS username).
        const author = resolveAuthor();
        const today = new Date().toISOString().split('T')[0]; // YYYY-MM-DD
        console.log(chalk.blue(`Author resolved as: ${chalk.bold(author)}`));
        const answers = await inquirer.prompt([
            {
                type: 'list',
                name: 'resourceType',
                message: 'Resource type:',
                choices: RESOURCE_TYPES,
            },
            {
                type: 'input',
                name: 'resourceId',
                message: 'Resource ID (collection/bucket ID):',
                validate: (v) => v.trim().length > 0 || 'Resource ID is required.',
            },
            {
                // Use a list picker when rules are configured, otherwise free text
                type: Object.keys(ledger.rules ?? {}).length > 0 ? 'list' : 'input',
                name: 'rule',
                message: 'Rule being bypassed:',
                choices: Object.keys(ledger.rules ?? {}),
                validate: (v) => v.trim().length > 0 || 'Rule is required.',
            },
            {
                type: 'input',
                name: 'justification',
                message: 'Technical justification:',
                validate: (v) => v.trim().length > 0 || 'Justification is required.',
            },
        ]);
        // Exceptions are grouped by resource kind, then by resource ID.
        const type = answers.resourceType === 'Collection' ? 'collections' : 'buckets';
        if (!ledger.exceptions[type])
            ledger.exceptions[type] = {};
        const bucket = ledger.exceptions[type];
        if (!bucket[answers.resourceId])
            bucket[answers.resourceId] = [];
        bucket[answers.resourceId].push({
            rule: answers.rule.trim(),
            justification: answers.justification.trim(),
            author,
            date: today,
        });
        saveSecurityLedger(appwriteDir, ledger);
        console.log(chalk.green(`\n✅ Exception recorded in appwrite/appwrite-ctl.config.json by '${author}' on ${today}.`));
    });
exceptions
    .command('list')
    .description('List all security exceptions recorded in appwrite/appwrite-ctl.config.json')
    .action(() => {
        const appwriteDir = path.join(process.cwd(), 'appwrite');
        const ledger = loadSecurityLedger(appwriteDir);
        const { collections = {}, buckets = {} } = ledger.exceptions;
        // Flatten the nested {resourceId: [exceptions]} maps into one list.
        const allEntries = [];
        for (const [id, exs] of Object.entries(collections)) {
            for (const ex of exs)
                allEntries.push({ type: 'collection', id, ...ex });
        }
        for (const [id, exs] of Object.entries(buckets)) {
            for (const ex of exs)
                allEntries.push({ type: 'bucket', id, ...ex });
        }
        if (allEntries.length === 0) {
            console.log(chalk.yellow('No security exceptions recorded in appwrite-ctl.config.json.'));
            return;
        }
        console.log(chalk.bold.underline('\nSecurity Exceptions\n'));
        for (const entry of allEntries) {
            console.log(`${chalk.cyan(entry.type.padEnd(12))} ${chalk.bold(entry.id.padEnd(28))} ${chalk.yellow(entry.rule.padEnd(30))} ${chalk.gray(`${entry.author}, ${entry.date}`)}`);
            console.log(`  ${chalk.italic(entry.justification)}`);
            console.log();
        }
    });
|
|
236
344
|
program.parse();
|
package/dist/lib/cli.js
CHANGED
|
@@ -1,21 +1,28 @@
|
|
|
1
|
-
import { exec } from 'child_process';
|
|
1
|
+
import { exec, execFile as _execFile } from 'child_process';
|
|
2
2
|
import { promisify } from 'util';
|
|
3
3
|
import fs from 'fs';
|
|
4
4
|
import path from 'path';
|
|
5
5
|
import chalk from 'chalk';
|
|
6
6
|
const execAsync = promisify(exec);
|
|
7
|
+
const execFileAsync = promisify(_execFile);
|
|
7
8
|
const SNAPSHOT_FILENAME = 'appwrite.config.json';
|
|
8
9
|
/**
|
|
9
10
|
* Configure the Appwrite CLI client for non-interactive use via API key.
|
|
10
11
|
*/
|
|
11
12
|
export const configureClient = async (config) => {
|
|
13
|
+
// Use execFile (not exec) to pass each argument separately — prevents command injection
|
|
14
|
+
// if endpoint / projectId / apiKey contain shell-special characters.
|
|
12
15
|
const args = [
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
+
'client',
|
|
17
|
+
'--endpoint',
|
|
18
|
+
config.endpoint,
|
|
19
|
+
'--project-id',
|
|
20
|
+
config.projectId,
|
|
21
|
+
'--key',
|
|
22
|
+
config.apiKey,
|
|
16
23
|
];
|
|
17
24
|
try {
|
|
18
|
-
await
|
|
25
|
+
await execFileAsync('appwrite', args);
|
|
19
26
|
console.log(chalk.green('Appwrite CLI configured successfully.'));
|
|
20
27
|
}
|
|
21
28
|
catch (error) {
|
|
@@ -54,9 +61,7 @@ export const pullSnapshot = async (targetDir) => {
|
|
|
54
61
|
fs.copyFileSync(rootConfig, targetPath);
|
|
55
62
|
console.log(chalk.green(`Snapshot saved to ${targetPath}`));
|
|
56
63
|
// Cleanup: Remove the root appwrite.config.json created by the pull command.
|
|
57
|
-
|
|
58
|
-
fs.unlinkSync(rootConfig);
|
|
59
|
-
}
|
|
64
|
+
fs.unlinkSync(rootConfig);
|
|
60
65
|
return targetPath;
|
|
61
66
|
}
|
|
62
67
|
console.log(chalk.green(`Snapshot saved to ${rootConfig}`));
|
package/dist/lib/config.d.ts
CHANGED
package/dist/lib/config.js
CHANGED
|
@@ -7,16 +7,26 @@ import path from 'path';
|
|
|
7
7
|
export const loadConfig = (envPath = '.env') => {
|
|
8
8
|
// Load environment variables.
|
|
9
9
|
dotenv.config({ path: path.resolve(process.cwd(), envPath), override: true });
|
|
10
|
-
|
|
11
|
-
const
|
|
12
|
-
const
|
|
13
|
-
const
|
|
10
|
+
// Trim values to avoid copy-paste whitespace bugs in .env files.
|
|
11
|
+
const endpoint = process.env.APPWRITE_ENDPOINT?.trim();
|
|
12
|
+
const projectId = process.env.APPWRITE_PROJECT_ID?.trim();
|
|
13
|
+
const apiKey = process.env.APPWRITE_API_KEY?.trim();
|
|
14
14
|
if (!endpoint || !projectId || !apiKey) {
|
|
15
15
|
throw new Error('Missing required environment variables: APPWRITE_ENDPOINT, APPWRITE_PROJECT_ID, APPWRITE_API_KEY');
|
|
16
16
|
}
|
|
17
|
+
// Validate endpoint is a well-formed http(s) URL to prevent SSRF via misconfiguration.
|
|
18
|
+
try {
|
|
19
|
+
const url = new URL(endpoint);
|
|
20
|
+
if (url.protocol !== 'http:' && url.protocol !== 'https:') {
|
|
21
|
+
throw new Error('APPWRITE_ENDPOINT must use http or https protocol.');
|
|
22
|
+
}
|
|
23
|
+
}
|
|
24
|
+
catch {
|
|
25
|
+
throw new Error(`APPWRITE_ENDPOINT is not a valid URL: "${endpoint}"`);
|
|
26
|
+
}
|
|
17
27
|
// Find root directory.
|
|
18
28
|
const rootDir = process.cwd();
|
|
19
|
-
const configPath = path.join(rootDir, 'appwrite', '
|
|
29
|
+
const configPath = path.join(rootDir, 'appwrite', 'appwrite-ctl.config.json');
|
|
20
30
|
let migrationCollectionId = 'migrations';
|
|
21
31
|
let database = 'system';
|
|
22
32
|
if (fs.existsSync(configPath)) {
|
|
@@ -33,8 +43,8 @@ export const loadConfig = (envPath = '.env') => {
|
|
|
33
43
|
database = fileConfig.databaseId;
|
|
34
44
|
}
|
|
35
45
|
}
|
|
36
|
-
catch
|
|
37
|
-
console.warn('Could not parse config.json, using defaults.');
|
|
46
|
+
catch {
|
|
47
|
+
console.warn('Could not parse appwrite-ctl.config.json, using defaults.');
|
|
38
48
|
}
|
|
39
49
|
}
|
|
40
50
|
return {
|
|
@@ -43,6 +53,5 @@ export const loadConfig = (envPath = '.env') => {
|
|
|
43
53
|
apiKey,
|
|
44
54
|
migrationCollectionId,
|
|
45
55
|
database,
|
|
46
|
-
backupCommand,
|
|
47
56
|
};
|
|
48
57
|
};
|
package/dist/lib/diagram.js
CHANGED
|
@@ -1,11 +1,21 @@
|
|
|
1
1
|
import fs from 'fs';
|
|
2
2
|
import path from 'path';
|
|
3
|
+
import { loadSecurityLedger, getExceptions } from './security.js';
|
|
3
4
|
const MERMAID_CARDINALITY = {
|
|
4
5
|
oneToOne: '||--||',
|
|
5
6
|
oneToMany: '||--o{',
|
|
6
7
|
manyToOne: '}o--||',
|
|
7
8
|
manyToMany: '}o--o{',
|
|
8
9
|
};
|
|
10
|
+
/**
 * Sanitize a string so it can be embedded safely inside a Mermaid erDiagram
 * entity or field name. Newline runs, braces, double quotes, and backticks
 * would otherwise break Mermaid's parser.
 */
const sanitizeMermaid = (value) => {
    // Ordered (pattern -> replacement) pairs applied one after another.
    const substitutions = [
        [/[\r\n]+/g, ' '], // collapse any newline run into a single space
        [/[{}]/g, ''],     // brace characters end entity blocks
        [/"/g, "'"],       // double-quote ends Mermaid label strings
        [/`/g, "'"],       // backtick is a Mermaid reserved delimiter
    ];
    let sanitized = value;
    for (const [pattern, replacement] of substitutions) {
        sanitized = sanitized.replace(pattern, replacement);
    }
    return sanitized;
};
|
|
9
19
|
/**
|
|
10
20
|
* Map Appwrite column types to concise display types for the ER diagram.
|
|
11
21
|
*/
|
|
@@ -42,7 +52,7 @@ const buildErDiagram = (tables) => {
|
|
|
42
52
|
const relationships = [];
|
|
43
53
|
const renderedPairs = new Set();
|
|
44
54
|
for (const table of tables) {
|
|
45
|
-
const entityName = table.name;
|
|
55
|
+
const entityName = sanitizeMermaid(table.name);
|
|
46
56
|
lines.push(` ${entityName} {`);
|
|
47
57
|
// Always add implicit id primary key
|
|
48
58
|
lines.push(` string id PK`);
|
|
@@ -50,19 +60,21 @@ const buildErDiagram = (tables) => {
|
|
|
50
60
|
if (col.type === 'relationship') {
|
|
51
61
|
// Only emit from the parent side, and skip if pair already rendered
|
|
52
62
|
if (col.side === 'parent' && col.relatedTable) {
|
|
53
|
-
const
|
|
63
|
+
const relatedName = sanitizeMermaid(col.relatedTable);
|
|
64
|
+
const pairKey = [entityName, relatedName].sort().join(':');
|
|
54
65
|
if (!renderedPairs.has(pairKey)) {
|
|
55
66
|
renderedPairs.add(pairKey);
|
|
56
67
|
const cardinality = MERMAID_CARDINALITY[col.relationType ?? 'oneToMany'] ?? '||--||';
|
|
57
|
-
const label = `"${col.key}"`;
|
|
58
|
-
relationships.push(` ${entityName} ${cardinality} ${
|
|
68
|
+
const label = `"${sanitizeMermaid(col.key)}"`;
|
|
69
|
+
relationships.push(` ${entityName} ${cardinality} ${relatedName} : ${label}`);
|
|
59
70
|
}
|
|
60
71
|
}
|
|
61
72
|
continue;
|
|
62
73
|
}
|
|
63
74
|
const type = mapColumnType(col);
|
|
75
|
+
const colKey = sanitizeMermaid(col.key);
|
|
64
76
|
const comment = col.required ? '"NOT NULL"' : '';
|
|
65
|
-
lines.push(` ${type} ${
|
|
77
|
+
lines.push(` ${type} ${colKey} ${comment}`.trimEnd());
|
|
66
78
|
}
|
|
67
79
|
lines.push(` }`);
|
|
68
80
|
}
|
|
@@ -73,10 +85,23 @@ const buildErDiagram = (tables) => {
|
|
|
73
85
|
lines.push('```');
|
|
74
86
|
return lines.join('\n');
|
|
75
87
|
};
|
|
88
|
+
/**
 * Render recorded security exceptions as a GitHub-style `> [!WARNING]`
 * markdown callout. Returns an empty string when there is nothing to show;
 * otherwise the result begins with a blank line so it can be appended
 * directly after a preceding section.
 */
const buildSecurityCallout = (exceptions) => {
    if (!exceptions.length) {
        return '';
    }
    const entryLines = exceptions.map((ex) => `> **Security Exception Acknowledged:** (\`${ex.rule}\`) — *${ex.justification}* — (Author: ${ex.author}, ${ex.date})`);
    return ['', '> [!WARNING]', ...entryLines].join('\n');
};
|
|
76
101
|
/**
|
|
77
102
|
* Build markdown documentation for a single collection.
|
|
78
103
|
*/
|
|
79
|
-
const buildCollectionDoc = (table) => {
|
|
104
|
+
const buildCollectionDoc = (table, exceptions = []) => {
|
|
80
105
|
const sections = [];
|
|
81
106
|
const status = table.enabled ? '🟢 Enabled' : '🔴 Disabled';
|
|
82
107
|
sections.push(`#### ${table.name} (\`${table.$id}\`)`);
|
|
@@ -146,22 +171,44 @@ const buildCollectionDoc = (table) => {
|
|
|
146
171
|
sections.push(`| \`${idx.key}\` | ${idx.type} | ${idx.columns.join(', ')} | ${idx.orders.join(', ')} |`);
|
|
147
172
|
}
|
|
148
173
|
}
|
|
174
|
+
const callout = buildSecurityCallout(exceptions);
|
|
175
|
+
if (callout)
|
|
176
|
+
sections.push(callout);
|
|
149
177
|
return sections.join('\n');
|
|
150
178
|
};
|
|
151
179
|
/**
|
|
152
180
|
* Build the buckets documentation section.
|
|
153
181
|
*/
|
|
154
|
-
const buildBucketsDoc = (buckets) => {
|
|
182
|
+
const buildBucketsDoc = (buckets, ledger) => {
|
|
155
183
|
if (buckets.length === 0)
|
|
156
184
|
return '';
|
|
157
185
|
const lines = [];
|
|
158
186
|
lines.push('## Buckets');
|
|
159
|
-
lines.push('');
|
|
160
|
-
lines.push('| Name | ID | Max Size | Extensions | Compression | Encryption | Antivirus | Enabled |');
|
|
161
|
-
lines.push('| --- | --- | --- | --- | --- | --- | --- | --- |');
|
|
162
187
|
for (const b of buckets) {
|
|
163
188
|
const extensions = b.allowedFileExtensions.length > 0 ? b.allowedFileExtensions.join(', ') : 'any';
|
|
164
|
-
|
|
189
|
+
const status = b.enabled ? '🟢 Enabled' : '🔴 Disabled';
|
|
190
|
+
lines.push('');
|
|
191
|
+
lines.push(`### ${b.name} (\`${b.$id}\`)`);
|
|
192
|
+
lines.push('');
|
|
193
|
+
lines.push(`- **Status:** ${status}`);
|
|
194
|
+
lines.push('');
|
|
195
|
+
lines.push('| Max Size | Extensions | Compression | Encryption | Antivirus | File Security |');
|
|
196
|
+
lines.push('| --- | --- | --- | --- | --- | --- |');
|
|
197
|
+
lines.push(`| ${formatFileSize(b.maximumFileSize)} | ${extensions} | ${b.compression} | ${b.encryption ? '✅' : '—'} | ${b.antivirus ? '✅' : '—'} | ${b.fileSecurity ? 'Yes' : 'No'} |`);
|
|
198
|
+
if (b.$permissions.length > 0) {
|
|
199
|
+
lines.push('');
|
|
200
|
+
lines.push('**Permissions:**');
|
|
201
|
+
lines.push('');
|
|
202
|
+
lines.push('| Permission |');
|
|
203
|
+
lines.push('| --- |');
|
|
204
|
+
for (const perm of b.$permissions) {
|
|
205
|
+
lines.push(`| \`${perm}\` |`);
|
|
206
|
+
}
|
|
207
|
+
}
|
|
208
|
+
const bucketExceptions = ledger ? getExceptions(ledger, 'buckets', b.$id) : [];
|
|
209
|
+
const callout = buildSecurityCallout(bucketExceptions);
|
|
210
|
+
if (callout)
|
|
211
|
+
lines.push(callout);
|
|
165
212
|
}
|
|
166
213
|
return lines.join('\n');
|
|
167
214
|
};
|
|
@@ -171,8 +218,11 @@ const buildBucketsDoc = (buckets) => {
|
|
|
171
218
|
export const generateSchemaDoc = (snapshotPath, version) => {
|
|
172
219
|
const raw = fs.readFileSync(snapshotPath, 'utf-8');
|
|
173
220
|
const snapshot = JSON.parse(raw);
|
|
174
|
-
// Load
|
|
175
|
-
const
|
|
221
|
+
// Load security ledger from appwrite/ at the project root
|
|
222
|
+
const appwriteDir = path.join(process.cwd(), 'appwrite');
|
|
223
|
+
const ledger = loadSecurityLedger(appwriteDir);
|
|
224
|
+
// Load appwrite-ctl config to discover the system database name
|
|
225
|
+
const configPath = path.join(process.cwd(), 'appwrite', 'appwrite-ctl.config.json');
|
|
176
226
|
let systemDbName = 'system';
|
|
177
227
|
if (fs.existsSync(configPath)) {
|
|
178
228
|
try {
|
|
@@ -210,13 +260,14 @@ export const generateSchemaDoc = (snapshotPath, version) => {
|
|
|
210
260
|
sections.push('### Collections');
|
|
211
261
|
for (const table of dbTables) {
|
|
212
262
|
sections.push('');
|
|
213
|
-
|
|
263
|
+
const collectionExceptions = getExceptions(ledger, 'collections', table.$id);
|
|
264
|
+
sections.push(buildCollectionDoc(table, collectionExceptions));
|
|
214
265
|
}
|
|
215
266
|
}
|
|
216
267
|
// Buckets section
|
|
217
268
|
if (snapshot.buckets.length > 0) {
|
|
218
269
|
sections.push('');
|
|
219
|
-
sections.push(buildBucketsDoc(snapshot.buckets));
|
|
270
|
+
sections.push(buildBucketsDoc(snapshot.buckets, ledger));
|
|
220
271
|
}
|
|
221
272
|
return sections.join('\n') + '\n';
|
|
222
273
|
};
|
package/dist/lib/runner.js
CHANGED
|
@@ -53,8 +53,8 @@ export const runMigrations = async (envPath = '.env') => {
|
|
|
53
53
|
try {
|
|
54
54
|
migrationModule = await jiti.import(validIndexFile);
|
|
55
55
|
}
|
|
56
|
-
catch (
|
|
57
|
-
console.error(`Failed to load migration file ${validIndexFile}:`,
|
|
56
|
+
catch (loadError) {
|
|
57
|
+
console.error(`Failed to load migration file ${validIndexFile}:`, loadError);
|
|
58
58
|
process.exit(1);
|
|
59
59
|
}
|
|
60
60
|
const migration = migrationModule.default;
|
|
@@ -67,24 +67,7 @@ export const runMigrations = async (envPath = '.env') => {
|
|
|
67
67
|
continue;
|
|
68
68
|
}
|
|
69
69
|
console.log(`Applying version ${version} (${migration.id})...`);
|
|
70
|
-
// 3.
|
|
71
|
-
if (migration.requiresBackup && config.backupCommand) {
|
|
72
|
-
console.log('Running backup command...');
|
|
73
|
-
try {
|
|
74
|
-
const { exec } = await import('child_process');
|
|
75
|
-
const { promisify } = await import('util');
|
|
76
|
-
const execAsync = promisify(exec);
|
|
77
|
-
await execAsync(config.backupCommand);
|
|
78
|
-
}
|
|
79
|
-
catch (error) {
|
|
80
|
-
console.error('Backup failed:', error);
|
|
81
|
-
process.exit(1);
|
|
82
|
-
}
|
|
83
|
-
}
|
|
84
|
-
else if (migration.requiresBackup && !config.backupCommand) {
|
|
85
|
-
console.warn('Migration requires backup but BACKUP_COMMAND is not set. Proceeding with caution...');
|
|
86
|
-
}
|
|
87
|
-
// 4. Schema sync via CLI push.
|
|
70
|
+
// 3. Schema sync via CLI push.
|
|
88
71
|
const snapshotPath = path.join(versionPath, snapshotFilename);
|
|
89
72
|
if (fs.existsSync(snapshotPath)) {
|
|
90
73
|
console.log(`Pushing schema snapshot for ${version}...`);
|
|
@@ -130,6 +113,7 @@ export const runMigrations = async (envPath = '.env') => {
|
|
|
130
113
|
};
|
|
131
114
|
async function waitForAttributes(databases, snapshotPath) {
|
|
132
115
|
console.log('Polling attribute status...');
|
|
116
|
+
const MAX_ATTEMPTS = 60; // 60 × 2 s = 2-minute timeout per collection
|
|
133
117
|
let schema;
|
|
134
118
|
try {
|
|
135
119
|
schema = JSON.parse(fs.readFileSync(snapshotPath, 'utf8'));
|
|
@@ -155,7 +139,13 @@ async function waitForAttributes(databases, snapshotPath) {
|
|
|
155
139
|
}
|
|
156
140
|
console.log(`Checking attributes for table ${table.name} (${collectionId})...`);
|
|
157
141
|
let allAvailable = false;
|
|
142
|
+
let attempts = 0;
|
|
158
143
|
while (!allAvailable) {
|
|
144
|
+
if (attempts >= MAX_ATTEMPTS) {
|
|
145
|
+
console.warn(chalk.yellow(` ⚠ Timed out waiting for attributes on ${collectionId} after ${MAX_ATTEMPTS} attempts. Proceeding anyway.`));
|
|
146
|
+
break;
|
|
147
|
+
}
|
|
148
|
+
attempts++;
|
|
159
149
|
try {
|
|
160
150
|
const response = await databases.listAttributes(databaseId, collectionId);
|
|
161
151
|
const attributes = response.attributes;
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import type { SecurityException, SecurityExceptions, SecurityLedger, SecurityRules } from '../types/index.js';
|
|
2
|
+
export type { SecurityException, SecurityExceptions, SecurityLedger, SecurityRules };
|
|
3
|
+
/**
|
|
4
|
+
* Default security rules included in every freshly initialised appwrite-ctl.config.json.
|
|
5
|
+
* Mirrors the validation intent of the future `appwrite-ctl audit` command.
|
|
6
|
+
*/
|
|
7
|
+
export declare const DEFAULT_RULES: SecurityRules;
|
|
8
|
+
/**
|
|
9
|
+
* Load the security ledger from the `security` key inside appwrite-ctl.config.json.
|
|
10
|
+
* Returns an empty ledger if the key or file does not exist.
|
|
11
|
+
*/
|
|
12
|
+
export declare const loadSecurityLedger: (appwriteDir: string) => SecurityLedger;
|
|
13
|
+
/**
|
|
14
|
+
* Persist the security ledger back into the `security` key of appwrite-ctl.config.json,
|
|
15
|
+
* preserving all other top-level keys.
|
|
16
|
+
*/
|
|
17
|
+
export declare const saveSecurityLedger: (appwriteDir: string, ledger: SecurityLedger) => void;
|
|
18
|
+
/**
|
|
19
|
+
* Return the exceptions list for a specific resource type + ID.
|
|
20
|
+
* Returns an empty array if no entry exists.
|
|
21
|
+
*/
|
|
22
|
+
export declare const getExceptions: (ledger: SecurityLedger, type: "collections" | "buckets", id: string) => SecurityException[];
|
|
23
|
+
/**
|
|
24
|
+
* Resolve the current author using `git config user.name` falling back to the OS username.
|
|
25
|
+
*/
|
|
26
|
+
export declare const resolveAuthor: () => string;
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
import os from 'os';
|
|
3
|
+
import path from 'path';
|
|
4
|
+
import { execSync } from 'child_process';
|
|
5
|
+
const CTL_CONFIG_FILENAME = 'appwrite-ctl.config.json';
|
|
6
|
+
/**
|
|
7
|
+
* Default security rules included in every freshly initialised appwrite-ctl.config.json.
|
|
8
|
+
* Mirrors the validation intent of the future `appwrite-ctl audit` command.
|
|
9
|
+
*/
|
|
10
|
+
export const DEFAULT_RULES = {
|
|
11
|
+
'require-row-security': { enabled: true, severity: 'error' },
|
|
12
|
+
'forbid-role-all-write': { enabled: true, severity: 'error' },
|
|
13
|
+
'forbid-role-all-delete': { enabled: true, severity: 'error' },
|
|
14
|
+
'forbid-role-all-read': { enabled: true, severity: 'warn' },
|
|
15
|
+
'forbid-role-all-create': { enabled: true, severity: 'warn' },
|
|
16
|
+
'require-file-security': { enabled: true, severity: 'warn' },
|
|
17
|
+
};
|
|
18
|
+
/**
|
|
19
|
+
* Assert that a resolved file path stays within the expected parent directory.
|
|
20
|
+
* Throws if the path escapes via `..` components.
|
|
21
|
+
*/
|
|
22
|
+
const assertSafePath = (resolvedPath, expectedParent) => {
|
|
23
|
+
const normalizedParent = path.resolve(expectedParent);
|
|
24
|
+
const normalizedTarget = path.resolve(resolvedPath);
|
|
25
|
+
if (!normalizedTarget.startsWith(normalizedParent + path.sep) &&
|
|
26
|
+
normalizedTarget !== normalizedParent) {
|
|
27
|
+
throw new Error(`Path traversal detected: '${resolvedPath}' is outside '${expectedParent}'.`);
|
|
28
|
+
}
|
|
29
|
+
};
|
|
30
|
+
/**
|
|
31
|
+
* Read the raw appwrite-ctl.config.json object from disk.
|
|
32
|
+
* Returns an empty object if the file does not exist or cannot be parsed.
|
|
33
|
+
*/
|
|
34
|
+
const readCtlConfig = (appwriteDir) => {
|
|
35
|
+
const filePath = path.join(appwriteDir, CTL_CONFIG_FILENAME);
|
|
36
|
+
assertSafePath(filePath, process.cwd());
|
|
37
|
+
if (!fs.existsSync(filePath))
|
|
38
|
+
return {};
|
|
39
|
+
try {
|
|
40
|
+
return JSON.parse(fs.readFileSync(filePath, 'utf-8'));
|
|
41
|
+
}
|
|
42
|
+
catch {
|
|
43
|
+
return {};
|
|
44
|
+
}
|
|
45
|
+
};
|
|
46
|
+
/**
|
|
47
|
+
* Write the raw appwrite-ctl.config.json object back to disk.
|
|
48
|
+
*/
|
|
49
|
+
const writeCtlConfig = (appwriteDir, data) => {
|
|
50
|
+
const filePath = path.join(appwriteDir, CTL_CONFIG_FILENAME);
|
|
51
|
+
assertSafePath(filePath, process.cwd());
|
|
52
|
+
fs.writeFileSync(filePath, JSON.stringify(data, null, 2) + '\n');
|
|
53
|
+
};
|
|
54
|
+
/**
|
|
55
|
+
* Load the security ledger from the `security` key inside appwrite-ctl.config.json.
|
|
56
|
+
* Returns an empty ledger if the key or file does not exist.
|
|
57
|
+
*/
|
|
58
|
+
export const loadSecurityLedger = (appwriteDir) => {
|
|
59
|
+
const cfg = readCtlConfig(appwriteDir);
|
|
60
|
+
const raw = cfg.security;
|
|
61
|
+
if (!raw || typeof raw !== 'object') {
|
|
62
|
+
return { exceptions: {} };
|
|
63
|
+
}
|
|
64
|
+
return { rules: raw.rules, exceptions: raw.exceptions ?? {} };
|
|
65
|
+
};
|
|
66
|
+
/**
|
|
67
|
+
* Persist the security ledger back into the `security` key of appwrite-ctl.config.json,
|
|
68
|
+
* preserving all other top-level keys.
|
|
69
|
+
*/
|
|
70
|
+
export const saveSecurityLedger = (appwriteDir, ledger) => {
|
|
71
|
+
const cfg = readCtlConfig(appwriteDir);
|
|
72
|
+
cfg.security = ledger;
|
|
73
|
+
writeCtlConfig(appwriteDir, cfg);
|
|
74
|
+
};
|
|
75
|
+
/**
|
|
76
|
+
* Return the exceptions list for a specific resource type + ID.
|
|
77
|
+
* Returns an empty array if no entry exists.
|
|
78
|
+
*/
|
|
79
|
+
export const getExceptions = (ledger, type, id) => {
|
|
80
|
+
return ledger.exceptions[type]?.[id] ?? [];
|
|
81
|
+
};
|
|
82
|
+
/**
|
|
83
|
+
* Resolve the current author using `git config user.name` falling back to the OS username.
|
|
84
|
+
*/
|
|
85
|
+
export const resolveAuthor = () => {
|
|
86
|
+
try {
|
|
87
|
+
const name = execSync('git config user.name', { encoding: 'utf-8', stdio: 'pipe' }).trim();
|
|
88
|
+
if (name)
|
|
89
|
+
return name;
|
|
90
|
+
}
|
|
91
|
+
catch {
|
|
92
|
+
// Not in a git repo or git not available
|
|
93
|
+
}
|
|
94
|
+
return os.userInfo().username;
|
|
95
|
+
};
|
package/dist/types/index.d.ts
CHANGED
|
@@ -11,7 +11,6 @@ export type MigrationFunction = (context: MigrationContext) => Promise<void>;
|
|
|
11
11
|
export interface Migration {
|
|
12
12
|
id: string;
|
|
13
13
|
description?: string;
|
|
14
|
-
requiresBackup?: boolean;
|
|
15
14
|
up: MigrationFunction;
|
|
16
15
|
down?: MigrationFunction;
|
|
17
16
|
}
|
|
@@ -19,8 +18,23 @@ export interface Config {
|
|
|
19
18
|
collection: string;
|
|
20
19
|
database: string;
|
|
21
20
|
}
|
|
22
|
-
export interface
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
21
|
+
export interface SecurityException {
|
|
22
|
+
rule: string;
|
|
23
|
+
justification: string;
|
|
24
|
+
author: string;
|
|
25
|
+
date: string;
|
|
26
|
+
}
|
|
27
|
+
export type SecurityExceptions = Record<string, SecurityException[]>;
|
|
28
|
+
export type SecurityRuleSeverity = 'error' | 'warn' | 'off';
|
|
29
|
+
export interface SecurityRule {
|
|
30
|
+
enabled: boolean;
|
|
31
|
+
severity: SecurityRuleSeverity;
|
|
32
|
+
}
|
|
33
|
+
export type SecurityRules = Record<string, SecurityRule>;
|
|
34
|
+
export interface SecurityLedger {
|
|
35
|
+
rules?: SecurityRules;
|
|
36
|
+
exceptions: {
|
|
37
|
+
collections?: SecurityExceptions;
|
|
38
|
+
buckets?: SecurityExceptions;
|
|
39
|
+
};
|
|
26
40
|
}
|