appwrite-ctl 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +288 -0
- package/dist/cli/index.d.ts +2 -0
- package/dist/cli/index.js +236 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +5 -0
- package/dist/lib/appwrite.d.ts +21 -0
- package/dist/lib/appwrite.js +72 -0
- package/dist/lib/cli.d.ts +28 -0
- package/dist/lib/cli.js +136 -0
- package/dist/lib/config.d.ts +12 -0
- package/dist/lib/config.js +48 -0
- package/dist/lib/diagram.d.ts +4 -0
- package/dist/lib/diagram.js +222 -0
- package/dist/lib/runner.d.ts +4 -0
- package/dist/lib/runner.js +183 -0
- package/dist/lib/schema.d.ts +6 -0
- package/dist/lib/schema.js +57 -0
- package/dist/types/index.d.ts +26 -0
- package/dist/types/index.js +1 -0
- package/package.json +58 -0
package/dist/lib/cli.js
ADDED
|
@@ -0,0 +1,136 @@
|
|
|
1
|
+
import { exec } from 'child_process';
|
|
2
|
+
import { promisify } from 'util';
|
|
3
|
+
import fs from 'fs';
|
|
4
|
+
import path from 'path';
|
|
5
|
+
import chalk from 'chalk';
|
|
6
|
+
// Promisified child_process.exec so Appwrite CLI invocations can be awaited.
const execAsync = promisify(exec);
// Filename of the Appwrite CLI project snapshot (created/read at the project root).
const SNAPSHOT_FILENAME = 'appwrite.config.json';
|
|
8
|
+
/**
 * Configure the Appwrite CLI client for non-interactive use via API key.
 *
 * @param {{ endpoint: string, projectId: string, apiKey: string }} config
 *   Connection settings written into the CLI's client configuration.
 * @throws {Error} When the `appwrite` CLI is missing or the command fails
 *   (original failure attached as `cause`).
 */
export const configureClient = async (config) => {
    // Shell-quote each value: endpoints contain '://' and API keys may contain
    // characters the shell would otherwise interpret. POSIX double-quote rules:
    // escape ", \, $ and backtick inside double quotes.
    const shellQuote = (value) => '"' + String(value).replace(/(["\\$`])/g, '\\$1') + '"';
    const args = [
        `--endpoint ${shellQuote(config.endpoint)}`,
        `--project-id ${shellQuote(config.projectId)}`,
        `--key ${shellQuote(config.apiKey)}`,
    ];
    try {
        await execAsync(`appwrite client ${args.join(' ')}`);
        console.log(chalk.green('Appwrite CLI configured successfully.'));
    }
    catch (error) {
        throw new Error(`Failed to configure Appwrite CLI: ${error.message}. Ensure 'appwrite-cli' is installed.`, { cause: error });
    }
};
|
|
25
|
+
// Resource types to sync (excludes 'settings' which requires interactive login).
// Shared by pullSnapshot and pushSnapshot below, which run one CLI command per entry.
const RESOURCES = ['tables', 'buckets', 'teams', 'topics'];
|
|
27
|
+
/**
 * Pull a full snapshot from Appwrite into a target directory.
 *
 * Runs one `appwrite pull <resource>` per resource so the operation stays
 * non-interactive. Per-resource pull failures are logged and skipped; the
 * run only fails if no appwrite.config.json exists at the root afterwards.
 *
 * @param {string} [targetDir] - Optional directory to receive the snapshot.
 *   When given (and different from the project root), the root config file
 *   is copied there and the root copy created by the pull is removed.
 * @returns {Promise<string>} Path of the saved snapshot file.
 * @throws {Error} If no appwrite.config.json exists at the root after pulling.
 */
export const pullSnapshot = async (targetDir) => {
    const projectRoot = process.cwd();
    const rootSnapshot = path.join(projectRoot, SNAPSHOT_FILENAME);
    for (const resource of RESOURCES) {
        console.log(chalk.blue(`Pulling ${resource}...`));
        try {
            await execAsync(`appwrite pull ${resource}`, { cwd: projectRoot, timeout: 120_000 });
            console.log(chalk.green(` ✓ ${resource}`));
        }
        catch (error) {
            console.warn(chalk.yellow(` ⚠ Failed to pull ${resource}: ${error.message}`));
        }
    }
    if (!fs.existsSync(rootSnapshot)) {
        throw new Error(`appwrite.config.json not found at project root after pull. ` +
            `Ensure the CLI is configured correctly.`);
    }
    // No distinct target directory: leave the snapshot at the project root.
    if (!targetDir || targetDir === projectRoot) {
        console.log(chalk.green(`Snapshot saved to ${rootSnapshot}`));
        return rootSnapshot;
    }
    // Copy the fresh snapshot into the target version directory, then clean up
    // the root copy created by the pull command.
    const targetPath = path.join(targetDir, SNAPSHOT_FILENAME);
    fs.copyFileSync(rootSnapshot, targetPath);
    console.log(chalk.green(`Snapshot saved to ${targetPath}`));
    if (fs.existsSync(rootSnapshot)) {
        fs.unlinkSync(rootSnapshot);
    }
    return targetPath;
};
|
|
65
|
+
/**
 * Push a snapshot from a version directory to the Appwrite project.
 * Copies the version's appwrite.config.json to the project root,
 * then runs individual `appwrite push <resource> --all --force` commands.
 *
 * The `projectId` in the snapshot is rewritten to match the current config,
 * allowing the same snapshot to be pushed to any environment.
 *
 * `--all` auto-selects all resources, `--force` auto-confirms changes.
 *
 * @param {string} snapshotPath - Path to the version's snapshot file.
 * @param {{ projectId: string }} config - Current environment configuration;
 *   its projectId replaces the one stored in the snapshot before pushing.
 * @throws {Error} If the snapshot file is missing or any resource push fails.
 */
export const pushSnapshot = async (snapshotPath, config) => {
    const rootDir = process.cwd();
    const rootConfig = path.join(rootDir, SNAPSHOT_FILENAME);
    if (!fs.existsSync(snapshotPath)) {
        throw new Error(`Snapshot not found: ${snapshotPath}`);
    }
    // Backup current root config before overwriting.
    const backupPath = rootConfig + '.bak';
    const originalExists = fs.existsSync(rootConfig);
    if (originalExists) {
        fs.copyFileSync(rootConfig, backupPath);
    }
    // Copy snapshot to root and rewrite projectId to match current environment.
    const snapshotData = JSON.parse(fs.readFileSync(snapshotPath, 'utf8'));
    snapshotData.projectId = config.projectId;
    fs.writeFileSync(rootConfig, JSON.stringify(snapshotData, null, 2));
    console.log(chalk.blue(`Snapshot copied to project root (projectId: ${config.projectId}).`));
    try {
        for (const resource of RESOURCES) {
            console.log(chalk.blue(`Pushing ${resource}...`));
            try {
                // Tables get extra CLI retries; other resources use CLI defaults.
                const extraFlags = resource === 'tables' ? '--attempts 60' : '';
                await execAsync(`appwrite push ${resource} --all --force ${extraFlags}`.trim(), {
                    cwd: rootDir,
                    timeout: 300_000,
                });
                console.log(chalk.green(` ✓ ${resource}`));
            }
            catch (error) {
                // A single failed resource aborts the whole push (rethrown below).
                console.error(chalk.red(` ✗ Failed to push ${resource}: ${error.message}`));
                throw error;
            }
        }
    }
    catch (error) {
        // Restore backup if push fails.
        // NOTE(review): the finally block below restores the backup again, so this
        // branch effectively only adds the log line — presumably intentional; confirm.
        if (originalExists && fs.existsSync(backupPath)) {
            fs.copyFileSync(backupPath, rootConfig);
            console.log(chalk.yellow('Root config restored from backup after push failure.'));
        }
        throw error;
    }
    finally {
        // Restore original state.
        if (originalExists) {
            if (fs.existsSync(backupPath)) {
                fs.copyFileSync(backupPath, rootConfig);
                fs.unlinkSync(backupPath);
            }
        }
        else {
            // If it didn't exist before, delete the one we created.
            if (fs.existsSync(rootConfig)) {
                fs.unlinkSync(rootConfig);
            }
        }
    }
};
|
|
133
|
+
/**
 * Get the snapshot filename used for versioned snapshots.
 *
 * @returns {string} The snapshot filename (`appwrite.config.json`).
 */
export const getSnapshotFilename = () => {
    return SNAPSHOT_FILENAME;
};
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
/**
 * Application configuration assembled from environment variables plus the
 * optional appwrite/migration/config.json file.
 */
export interface AppConfig {
    /** Appwrite API endpoint URL (APPWRITE_ENDPOINT). */
    endpoint: string;
    /** Target Appwrite project id (APPWRITE_PROJECT_ID). */
    projectId: string;
    /** API key used for non-interactive authentication (APPWRITE_API_KEY). */
    apiKey: string;
    /** Collection id for migration bookkeeping (config.json `collection`, default 'migrations'). */
    migrationCollectionId: string;
    /** Database id for migration bookkeeping (config.json `database`, default 'system'). */
    database: string;
    /** Optional shell command run before migrations that require a backup (BACKUP_COMMAND). */
    backupCommand?: string;
}
/**
 * Load configuration from environment variables or .env file.
 */
export declare const loadConfig: (envPath?: string) => AppConfig;
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
import dotenv from 'dotenv';
|
|
2
|
+
import fs from 'fs';
|
|
3
|
+
import path from 'path';
|
|
4
|
+
/**
 * Load configuration from environment variables or a .env file.
 *
 * Required env vars: APPWRITE_ENDPOINT, APPWRITE_PROJECT_ID, APPWRITE_API_KEY.
 * Optional: BACKUP_COMMAND. Migration collection/database defaults may be
 * overridden by appwrite/migration/config.json ({ collection, database }).
 *
 * @param {string} [envPath='.env'] - Path to the .env file, relative to cwd.
 * @returns {AppConfig} The resolved configuration object.
 * @throws {Error} If any required environment variable is missing.
 */
export const loadConfig = (envPath = '.env') => {
    // Load environment variables; `override` lets the .env file win over
    // values already present in the process environment.
    dotenv.config({ path: path.resolve(process.cwd(), envPath), override: true });
    const endpoint = process.env.APPWRITE_ENDPOINT;
    const projectId = process.env.APPWRITE_PROJECT_ID;
    const apiKey = process.env.APPWRITE_API_KEY;
    const backupCommand = process.env.BACKUP_COMMAND;
    if (!endpoint || !projectId || !apiKey) {
        throw new Error('Missing required environment variables: APPWRITE_ENDPOINT, APPWRITE_PROJECT_ID, APPWRITE_API_KEY');
    }
    // Optional migration settings file at <root>/appwrite/migration/config.json.
    const rootDir = process.cwd();
    const configPath = path.join(rootDir, 'appwrite', 'migration', 'config.json');
    let migrationCollectionId = 'migrations';
    let database = 'system';
    if (fs.existsSync(configPath)) {
        try {
            const fileConfig = JSON.parse(fs.readFileSync(configPath, 'utf-8'));
            if (fileConfig.collection) {
                migrationCollectionId = fileConfig.collection;
            }
            if (fileConfig.database) {
                database = fileConfig.database;
            }
            else if (fileConfig.databaseId) {
                // Backward compatibility with the older key name.
                database = fileConfig.databaseId;
            }
        }
        catch (error) {
            // Include the parse error so a malformed config.json is debuggable
            // (previously the error detail was silently dropped).
            console.warn(`Could not parse config.json (${error.message}), using defaults.`);
        }
    }
    return {
        endpoint,
        projectId,
        apiKey,
        migrationCollectionId,
        database,
        backupCommand,
    };
};
|
|
@@ -0,0 +1,222 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
// Mermaid erDiagram cardinality markers keyed by Appwrite relationship type.
const MERMAID_CARDINALITY = {
    oneToOne: '||--||',
    oneToMany: '||--o{',
    manyToOne: '}o--||',
    manyToMany: '}o--o{',
};
|
|
9
|
+
/**
 * Map an Appwrite column definition to the display type used in the ER
 * diagram. Enum-formatted strings and relationships get dedicated labels;
 * a few raw types are renamed because they collide with Mermaid reserved
 * words; everything else passes through unchanged.
 */
const mapColumnType = (col) => {
    const { type, format } = col;
    if (type === 'relationship') {
        return 'relationship';
    }
    if (type === 'string' && format === 'enum') {
        return 'enum';
    }
    // Aliases for types that conflict with Mermaid reserved words.
    const aliases = new Map([
        ['text', 'longtext'],
        ['point', 'geopoint'],
        ['polygon', 'geopolygon'],
    ]);
    return aliases.get(type) ?? type;
};
|
|
25
|
+
/**
 * Render a byte count as a short human-readable size using decimal units.
 * GB values keep one decimal place; MB and KB are rounded to whole numbers;
 * anything below 1 KB is shown in bytes.
 */
const formatFileSize = (bytes) => {
    const GB = 1_000_000_000;
    const MB = 1_000_000;
    const KB = 1_000;
    if (bytes >= GB) {
        return `${(bytes / GB).toFixed(1)} GB`;
    }
    if (bytes >= MB) {
        return `${(bytes / MB).toFixed(0)} MB`;
    }
    if (bytes >= KB) {
        return `${(bytes / KB).toFixed(0)} KB`;
    }
    return `${bytes} B`;
};
|
|
37
|
+
/**
 * Render a fenced Mermaid erDiagram block for the given tables.
 * Each table becomes an entity (with an implicit `id` primary key);
 * parent-side relationship columns become edges, deduplicated per table pair.
 */
const buildErDiagram = (tables) => {
    const out = ['```mermaid', 'erDiagram'];
    const edges = [];
    const seenPairs = new Set();
    for (const table of tables) {
        const entity = table.name;
        out.push(`  ${entity} {`);
        // Every table gets an implicit string id primary key row.
        out.push(`    string id PK`);
        for (const column of table.columns) {
            if (column.type !== 'relationship') {
                const displayType = mapColumnType(column);
                const note = column.required ? '"NOT NULL"' : '';
                out.push(`    ${displayType} ${column.key} ${note}`.trimEnd());
                continue;
            }
            // Relationships: emit only from the parent side, once per pair.
            if (column.side !== 'parent' || !column.relatedTable) {
                continue;
            }
            const pairKey = [entity, column.relatedTable].sort().join(':');
            if (seenPairs.has(pairKey)) {
                continue;
            }
            seenPairs.add(pairKey);
            const cardinality = MERMAID_CARDINALITY[column.relationType ?? 'oneToMany'] ?? '||--||';
            edges.push(`  ${entity} ${cardinality} ${column.relatedTable} : "${column.key}"`);
        }
        out.push(`  }`);
    }
    if (edges.length > 0) {
        out.push('', ...edges);
    }
    out.push('```');
    return out.join('\n');
};
|
|
76
|
+
/**
 * Build markdown documentation for a single collection: heading, status
 * bullets, then permissions / data-column / relationship / index tables.
 * Sections with no entries are omitted entirely.
 *
 * @param {object} table - Collection entry from the snapshot (reads name, $id,
 *   enabled, rowSecurity, $permissions, columns, indexes).
 * @returns {string} Markdown fragment for this collection.
 */
const buildCollectionDoc = (table) => {
    const sections = [];
    const status = table.enabled ? '🟢 Enabled' : '🔴 Disabled';
    sections.push(`#### ${table.name} (\`${table.$id}\`)`);
    sections.push('');
    sections.push(`- **Status:** ${status}`);
    sections.push(`- **Row-level Security:** ${table.rowSecurity ? 'Yes' : 'No'}`);
    // Permissions
    if (table.$permissions.length > 0) {
        sections.push('');
        sections.push('**Permissions:**');
        sections.push('');
        sections.push('| Permission |');
        sections.push('| --- |');
        for (const perm of table.$permissions) {
            sections.push(`| \`${perm}\` |`);
        }
    }
    // Columns (non-relationship)
    const dataCols = table.columns.filter((c) => c.type !== 'relationship');
    if (dataCols.length > 0) {
        sections.push('');
        sections.push('**Columns:**');
        sections.push('');
        sections.push('| Key | Type | Required | Default | Details |');
        sections.push('| --- | --- | --- | --- | --- |');
        for (const col of dataCols) {
            const type = mapColumnType(col);
            const required = col.required ? '✅' : '—';
            // Distinguishes "no default" (null/undefined) from falsy defaults like 0/false.
            const def = col.default !== null && col.default !== undefined ? `\`${col.default}\`` : '—';
            // Assemble optional constraint notes for the Details cell.
            const details = [];
            if (col.size)
                details.push(`size: ${col.size}`);
            if (col.min !== undefined && col.min !== null)
                details.push(`min: ${col.min}`);
            if (col.max !== undefined && col.max !== null)
                details.push(`max: ${col.max}`);
            if (col.format === 'enum' && col.elements) {
                details.push(`values: ${col.elements.map((e) => `\`${e}\``).join(', ')}`);
            }
            if (col.array)
                details.push('array');
            if (col.encrypt)
                details.push('encrypted');
            sections.push(`| \`${col.key}\` | ${type} | ${required} | ${def} | ${details.join('; ') || '—'} |`);
        }
    }
    // Relationships
    const relCols = table.columns.filter((c) => c.type === 'relationship');
    if (relCols.length > 0) {
        sections.push('');
        sections.push('**Relationships:**');
        sections.push('');
        sections.push('| Key | Related Collection | Type | Side | On Delete | Two-way |');
        sections.push('| --- | --- | --- | --- | --- | --- |');
        for (const col of relCols) {
            sections.push(`| \`${col.key}\` | ${col.relatedTable} | ${col.relationType} | ${col.side} | ${col.onDelete ?? '—'} | ${col.twoWay ? 'Yes' : 'No'} |`);
        }
    }
    // Indexes
    if (table.indexes.length > 0) {
        sections.push('');
        sections.push('**Indexes:**');
        sections.push('');
        sections.push('| Key | Type | Columns | Orders |');
        sections.push('| --- | --- | --- | --- |');
        for (const idx of table.indexes) {
            sections.push(`| \`${idx.key}\` | ${idx.type} | ${idx.columns.join(', ')} | ${idx.orders.join(', ')} |`);
        }
    }
    return sections.join('\n');
};
|
|
151
|
+
/**
 * Render the "Buckets" markdown section as a single summary table.
 * Returns an empty string when there are no buckets.
 */
const buildBucketsDoc = (buckets) => {
    if (buckets.length === 0) {
        return '';
    }
    const rows = buckets.map((bucket) => {
        const extensions = bucket.allowedFileExtensions.length > 0
            ? bucket.allowedFileExtensions.join(', ')
            : 'any';
        return `| ${bucket.name} | \`${bucket.$id}\` | ${formatFileSize(bucket.maximumFileSize)} | ${extensions} | ${bucket.compression} | ${bucket.encryption ? '✅' : '—'} | ${bucket.antivirus ? '✅' : '—'} | ${bucket.enabled ? '✅' : '—'} |`;
    });
    return [
        '## Buckets',
        '',
        '| Name | ID | Max Size | Extensions | Compression | Encryption | Antivirus | Enabled |',
        '| --- | --- | --- | --- | --- | --- | --- | --- |',
        ...rows,
    ].join('\n');
};
|
|
168
|
+
/**
 * Generate the full schema documentation markdown from a snapshot.
 *
 * @param {string} snapshotPath - Path to the appwrite.config.json snapshot.
 * @param {string} version - Version label rendered into the document header.
 * @returns {string} Markdown document: per-database ER diagrams and
 *   collection details, plus an optional buckets section.
 * @throws If the snapshot file cannot be read or is not valid JSON.
 */
export const generateSchemaDoc = (snapshotPath, version) => {
    const raw = fs.readFileSync(snapshotPath, 'utf-8');
    const snapshot = JSON.parse(raw);
    // Partial snapshots are possible (pull is best-effort per resource), so
    // default each resource list to [] instead of crashing on a missing key.
    const allDatabases = snapshot.tablesDB ?? [];
    const allTables = snapshot.tables ?? [];
    const buckets = snapshot.buckets ?? [];
    // Load migration config to discover the system database name.
    const configPath = path.join(process.cwd(), 'appwrite', 'migration', 'config.json');
    let systemDbName = 'system';
    if (fs.existsSync(configPath)) {
        try {
            const cfg = JSON.parse(fs.readFileSync(configPath, 'utf-8'));
            if (cfg.database)
                systemDbName = cfg.database;
        }
        catch {
            // Ignore parse errors, keep default
        }
    }
    const sections = [];
    sections.push(`# Schema — ${version}`);
    sections.push('');
    sections.push(`> Auto-generated from \`appwrite.config.json\` (${version})`);
    // Filter out the internal system database.
    const userDatabases = allDatabases.filter((db) => db.$id !== systemDbName);
    for (const db of userDatabases) {
        const dbTables = allTables.filter((t) => t.databaseId === db.$id);
        if (dbTables.length === 0)
            continue;
        const dbStatus = db.enabled ? '🟢 Enabled' : '🔴 Disabled';
        sections.push('');
        sections.push(`## Database: ${db.name} (\`${db.$id}\`)`);
        sections.push('');
        sections.push(`- **Status:** ${dbStatus}`);
        sections.push(`- **Collections:** ${dbTables.length}`);
        // ER Diagram for this database
        sections.push('');
        sections.push('### ER Diagram');
        sections.push('');
        sections.push(buildErDiagram(dbTables));
        // Collection details
        sections.push('');
        sections.push('### Collections');
        for (const table of dbTables) {
            sections.push('');
            sections.push(buildCollectionDoc(table));
        }
    }
    // Buckets section
    if (buckets.length > 0) {
        sections.push('');
        sections.push(buildBucketsDoc(buckets));
    }
    return sections.join('\n') + '\n';
};
|
|
@@ -0,0 +1,183 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import { createJiti } from 'jiti';
|
|
4
|
+
import { loadConfig } from './config.js';
|
|
5
|
+
import { createAppwriteClient, getAppliedMigrations, recordMigration } from './appwrite.js';
|
|
6
|
+
import { configureClient, pushSnapshot, getSnapshotFilename } from './cli.js';
|
|
7
|
+
import chalk from 'chalk';
|
|
8
|
+
// jiti loader anchored at this module; used below to import migration files
// (index.ts or index.js) at runtime.
const jiti = createJiti(import.meta.url);
|
|
9
|
+
/**
 * Run pending migrations.
 *
 * Workflow: configure the Appwrite CLI, discover version directories under
 * appwrite/migration (v1, v2, ... sorted numerically), skip versions whose
 * migration id is already recorded, then for each pending version: optional
 * backup hook, schema push, attribute polling, migration script execution,
 * and recording.
 *
 * Exits the process with code 1 on any unrecoverable error.
 *
 * @param {string} [envPath='.env'] - Path to the .env file to load config from.
 */
export const runMigrations = async (envPath = '.env') => {
    const config = loadConfig(envPath);
    const { client, databases } = createAppwriteClient(config);
    console.log('Starting migration process...');
    // 0. Configure CLI with API key (non-interactive auth).
    console.log('Configuring Appwrite CLI...');
    await configureClient(config);
    // 1. Discovery.
    const migrationsDir = path.join(process.cwd(), 'appwrite', 'migration');
    if (!fs.existsSync(migrationsDir)) {
        console.error(`Migrations directory not found at ${migrationsDir}`);
        process.exit(1);
    }
    // Version directories are named v<number>; sort numerically, not lexically.
    const versionDirs = fs
        .readdirSync(migrationsDir)
        .filter((dir) => dir.startsWith('v') && fs.statSync(path.join(migrationsDir, dir)).isDirectory())
        .sort((a, b) => {
        const numA = parseInt(a.substring(1));
        const numB = parseInt(b.substring(1));
        return numA - numB;
    });
    console.log(`Found ${versionDirs.length} versions.`);
    // 2. State Check.
    const appliedMigrationIds = await getAppliedMigrations(databases, config);
    const appliedSet = new Set(appliedMigrationIds);
    const snapshotFilename = getSnapshotFilename();
    for (const version of versionDirs) {
        const versionPath = path.join(migrationsDir, version);
        // index.ts takes precedence over index.js when both exist.
        const indexFile = path.join(versionPath, 'index.ts');
        const indexFileJs = path.join(versionPath, 'index.js');
        const validIndexFile = fs.existsSync(indexFile)
            ? indexFile
            : fs.existsSync(indexFileJs)
                ? indexFileJs
                : null;
        if (!validIndexFile) {
            console.warn(`Skipping ${version}: No index.ts or index.js found.`);
            continue;
        }
        // Load migration file using jiti.
        let migrationModule;
        try {
            migrationModule = await jiti.import(validIndexFile);
        }
        catch (e) {
            console.error(`Failed to load migration file ${validIndexFile}:`, e);
            process.exit(1);
        }
        // A migration must default-export an object carrying at least an `id`.
        const migration = migrationModule.default;
        if (!migration || !migration.id) {
            console.error(`Invalid migration file in ${version}: Missing default export or id.`);
            process.exit(1);
        }
        if (appliedSet.has(migration.id)) {
            console.log(`Version ${version} (${migration.id}) already applied. Skipping.`);
            continue;
        }
        console.log(`Applying version ${version} (${migration.id})...`);
        // 3. Backup hook.
        if (migration.requiresBackup && config.backupCommand) {
            console.log('Running backup command...');
            try {
                const { exec } = await import('child_process');
                const { promisify } = await import('util');
                const execAsync = promisify(exec);
                await execAsync(config.backupCommand);
            }
            catch (error) {
                console.error('Backup failed:', error);
                process.exit(1);
            }
        }
        else if (migration.requiresBackup && !config.backupCommand) {
            // Deliberately non-fatal: warn and continue without a backup.
            console.warn('Migration requires backup but BACKUP_COMMAND is not set. Proceeding with caution...');
        }
        // 4. Schema sync via CLI push (only when the version ships a snapshot).
        const snapshotPath = path.join(versionPath, snapshotFilename);
        if (fs.existsSync(snapshotPath)) {
            console.log(`Pushing schema snapshot for ${version}...`);
            try {
                await pushSnapshot(snapshotPath, config);
            }
            catch (error) {
                console.error('Schema push failed:', error.message);
                console.error("Ensure 'appwrite-cli' is installed and accessible.");
                process.exit(1);
            }
        }
        else {
            console.warn(`No ${snapshotFilename} found in ${version}. Skipping schema sync.`);
        }
        // 5. Attribute polling: wait for pushed attributes before running scripts.
        if (fs.existsSync(snapshotPath)) {
            await waitForAttributes(databases, snapshotPath);
        }
        // 6. Data execution: run the migration's up() with a logging context.
        console.log('Executing migration script...');
        const context = {
            client,
            databases,
            log: (msg) => console.log(`[${version}] ${msg}`),
            error: (msg) => console.error(`[${version}] ${msg}`),
        };
        if (migration.up) {
            try {
                await migration.up(context);
            }
            catch (error) {
                console.error('Migration script failed:', error);
                process.exit(1);
            }
        }
        // 7. Finalization: record the migration as applied.
        console.log('Finalizing...');
        await recordMigration(databases, config, migration.id, version);
        console.log(`Version ${version} applied successfully.`);
    }
    console.log('All migrations applied.');
};
|
|
131
|
+
/**
 * Poll Appwrite until every attribute of every table in the snapshot reports
 * status 'available' (attributes created by a push become available
 * asynchronously — grounded in the status checks below).
 *
 * Polling is bounded per table so a permanently-stuck attribute or a
 * persistently failing API cannot hang the migration run forever (the
 * original loop had no upper bound).
 *
 * @param {object} databases - Appwrite Databases service (uses listAttributes).
 * @param {string} snapshotPath - Path to the appwrite.config.json snapshot.
 */
async function waitForAttributes(databases, snapshotPath) {
    console.log('Polling attribute status...');
    let schema;
    try {
        schema = JSON.parse(fs.readFileSync(snapshotPath, 'utf8'));
    }
    catch (e) {
        console.error('Failed to parse snapshot for attribute polling');
        return;
    }
    // appwrite.config.json format: each tables[] entry carries its databaseId.
    const tables = schema.tables || [];
    const POLL_INTERVAL_MS = 2000;
    // ~5 minutes per table at 2s intervals.
    const MAX_ATTEMPTS = 150;
    for (const table of tables) {
        const collectionId = table.$id;
        const databaseId = table.databaseId;
        if (!databaseId) {
            console.warn(`Table ${table.name} (${collectionId}) has no databaseId. Skipping polling.`);
            continue;
        }
        console.log(`Checking attributes for table ${table.name} (${collectionId})...`);
        let allAvailable = false;
        let attempts = 0;
        while (!allAvailable) {
            if (attempts >= MAX_ATTEMPTS) {
                console.warn(`Timed out waiting for attributes of ${collectionId} in DB ${databaseId}. Continuing.`);
                break;
            }
            attempts += 1;
            try {
                const response = await databases.listAttributes(databaseId, collectionId);
                const pending = response.attributes.filter((attr) => attr.status !== 'available');
                if (pending.length === 0) {
                    allAvailable = true;
                }
                else {
                    console.log(`Waiting for ${pending.length} attributes to be available...`);
                    await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS));
                }
            }
            catch (e) {
                if (e.code === 500) {
                    // Server errors are treated as non-retryable for this collection.
                    console.warn(chalk.red(` ⚠ Failed to list attributes for ${collectionId} in DB ${databaseId}: Server Error. Skipping polling for this collection.`));
                    allAvailable = true; // Force exit loop
                }
                else {
                    console.warn(`Failed to list attributes for ${collectionId} in DB ${databaseId}: ${e.message}. Retrying...`);
                    await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS));
                }
            }
        }
    }
}
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
import { Client, Databases, Query } from 'node-appwrite';
|
|
2
|
+
import chalk from 'chalk';
|
|
3
|
+
/**
 * Fetch the project's databases and their collections via the Appwrite SDK,
 * skipping the internal 'system' database.
 *
 * Uses Query.limit(100) on both listings, so results beyond 100
 * databases/collections are truncated — TODO confirm pagination is not needed.
 *
 * @param {{ endpoint: string, projectId: string, apiKey: string,
 *           knownCollectionIds?: string[] }} config - Connection settings;
 *   knownCollectionIds (optional) is only used to diagnose list failures.
 * @returns {Promise<{ projectId: string, projectName: string, collections: object[] }>}
 *   projectName is set to the projectId (no name lookup is performed here).
 * @throws Rethrows listCollections failures after the optional diagnosis pass.
 */
export const fetchProjectSchema = async (config) => {
    const client = new Client()
        .setEndpoint(config.endpoint)
        .setProject(config.projectId)
        .setKey(config.apiKey);
    const databases = new Databases(client);
    // 1. Fetch all databases
    const dbsList = await databases.list([Query.limit(100)]);
    const allCollections = [];
    console.log(`Found ${dbsList.databases.length} databases.`);
    for (const db of dbsList.databases) {
        if (db.$id === 'system')
            continue; // Skip internal system DB if strictly internal
        console.log(`Fetching collections for database ${db.$id}...`);
        try {
            // 2. Fetch all collections for this DB
            const colsList = await databases.listCollections(db.$id, [Query.limit(100)]);
            console.log(`Found ${colsList.collections.length} collections in ${db.$id}.`);
            for (const col of colsList.collections) {
                try {
                    // 3. For each collection, we need full details (attributes, indexes).
                    // NOTE(review): no extra per-collection fetch happens here — presumably
                    // listCollections already returns attributes/indexes; confirm.
                    const collectionData = {
                        ...col,
                        databaseId: db.$id, // Important for deployment
                    };
                    allCollections.push(collectionData);
                }
                catch (innerErr) {
                    console.warn(`Skipping collection processing due to error: ${innerErr.message}`);
                }
            }
        }
        catch (err) {
            console.error(`Error fetching collections for DB ${db.$id}:`, err.message);
            // Attempt to identify which collection is broken if we have a hint
            if (config.knownCollectionIds && config.knownCollectionIds.length > 0) {
                console.log(chalk.yellow('Attempting to diagnose problematic collections...'));
                for (const colId of config.knownCollectionIds) {
                    try {
                        await databases.getCollection(db.$id, colId);
                    }
                    catch (innerErr) {
                        console.error(chalk.red(` - Collection '${colId}' failed to load: ${innerErr.message}`));
                    }
                }
            }
            throw err;
        }
    }
    return {
        projectId: config.projectId,
        projectName: config.projectId,
        collections: allCollections,
    };
};
|