@njdamstra/appwrite-utils-cli 1.8.9 → 1.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +16 -0
- package/CONFIG_TODO.md +1189 -0
- package/SELECTION_DIALOGS.md +146 -0
- package/SERVICE_IMPLEMENTATION_REPORT.md +462 -0
- package/dist/adapters/index.d.ts +7 -8
- package/dist/adapters/index.js +7 -9
- package/dist/backups/operations/bucketBackup.js +2 -2
- package/dist/backups/operations/collectionBackup.d.ts +1 -1
- package/dist/backups/operations/collectionBackup.js +3 -3
- package/dist/backups/operations/comprehensiveBackup.d.ts +1 -1
- package/dist/backups/operations/comprehensiveBackup.js +2 -2
- package/dist/backups/tracking/centralizedTracking.d.ts +1 -1
- package/dist/backups/tracking/centralizedTracking.js +2 -2
- package/dist/cli/commands/configCommands.js +51 -7
- package/dist/cli/commands/databaseCommands.d.ts +1 -0
- package/dist/cli/commands/databaseCommands.js +119 -9
- package/dist/cli/commands/functionCommands.js +3 -3
- package/dist/cli/commands/importFileCommands.d.ts +7 -0
- package/dist/cli/commands/importFileCommands.js +674 -0
- package/dist/cli/commands/schemaCommands.js +3 -3
- package/dist/cli/commands/storageCommands.js +2 -3
- package/dist/cli/commands/transferCommands.js +3 -5
- package/dist/collections/attributes.d.ts +1 -1
- package/dist/collections/attributes.js +2 -35
- package/dist/collections/indexes.js +1 -3
- package/dist/collections/methods.d.ts +1 -1
- package/dist/collections/methods.js +111 -192
- package/dist/collections/tableOperations.d.ts +1 -0
- package/dist/collections/tableOperations.js +55 -23
- package/dist/collections/transferOperations.d.ts +1 -1
- package/dist/collections/transferOperations.js +3 -4
- package/dist/collections/wipeOperations.d.ts +4 -3
- package/dist/collections/wipeOperations.js +112 -39
- package/dist/databases/methods.js +2 -2
- package/dist/databases/setup.js +2 -2
- package/dist/examples/yamlTerminologyExample.js +2 -2
- package/dist/functions/deployments.d.ts +1 -1
- package/dist/functions/deployments.js +5 -5
- package/dist/functions/fnConfigDiscovery.js +2 -2
- package/dist/functions/methods.js +16 -4
- package/dist/init.js +1 -1
- package/dist/interactiveCLI.d.ts +6 -1
- package/dist/interactiveCLI.js +63 -9
- package/dist/main.js +130 -177
- package/dist/migrations/afterImportActions.js +2 -3
- package/dist/migrations/appwriteToX.d.ts +1 -1
- package/dist/migrations/appwriteToX.js +9 -7
- package/dist/migrations/comprehensiveTransfer.js +3 -5
- package/dist/migrations/dataLoader.js +2 -5
- package/dist/migrations/importController.js +3 -4
- package/dist/migrations/importDataActions.js +3 -3
- package/dist/migrations/relationships.js +1 -2
- package/dist/migrations/services/DataTransformationService.js +2 -2
- package/dist/migrations/services/FileHandlerService.js +1 -1
- package/dist/migrations/services/ImportOrchestrator.js +4 -4
- package/dist/migrations/services/RateLimitManager.js +1 -1
- package/dist/migrations/services/RelationshipResolver.js +1 -1
- package/dist/migrations/services/UserMappingService.js +1 -1
- package/dist/migrations/services/ValidationService.js +1 -1
- package/dist/migrations/transfer.d.ts +8 -4
- package/dist/migrations/transfer.js +106 -55
- package/dist/migrations/yaml/YamlImportConfigLoader.js +1 -1
- package/dist/migrations/yaml/YamlImportIntegration.js +2 -2
- package/dist/migrations/yaml/generateImportSchemas.js +1 -1
- package/dist/setupCommands.d.ts +1 -1
- package/dist/setupCommands.js +5 -6
- package/dist/setupController.js +1 -1
- package/dist/shared/backupTracking.d.ts +1 -1
- package/dist/shared/backupTracking.js +2 -2
- package/dist/shared/confirmationDialogs.js +1 -1
- package/dist/shared/migrationHelpers.d.ts +1 -1
- package/dist/shared/migrationHelpers.js +3 -3
- package/dist/shared/operationQueue.d.ts +1 -1
- package/dist/shared/operationQueue.js +2 -3
- package/dist/shared/operationsTable.d.ts +1 -1
- package/dist/shared/operationsTable.js +2 -2
- package/dist/shared/progressManager.js +1 -1
- package/dist/shared/selectionDialogs.js +9 -8
- package/dist/storage/methods.js +4 -4
- package/dist/storage/schemas.d.ts +2 -2
- package/dist/tables/indexManager.d.ts +65 -0
- package/dist/tables/indexManager.js +294 -0
- package/dist/types.d.ts +2 -2
- package/dist/types.js +1 -1
- package/dist/users/methods.js +2 -3
- package/dist/utils/configMigration.js +1 -1
- package/dist/utils/index.d.ts +1 -1
- package/dist/utils/index.js +1 -1
- package/dist/utils/loadConfigs.d.ts +2 -2
- package/dist/utils/loadConfigs.js +6 -7
- package/dist/utils/setupFiles.js +5 -7
- package/dist/utilsController.d.ts +15 -8
- package/dist/utilsController.js +57 -28
- package/package.json +7 -3
- package/src/adapters/index.ts +8 -34
- package/src/backups/operations/bucketBackup.ts +2 -2
- package/src/backups/operations/collectionBackup.ts +4 -4
- package/src/backups/operations/comprehensiveBackup.ts +3 -3
- package/src/backups/tracking/centralizedTracking.ts +3 -3
- package/src/cli/commands/configCommands.ts +72 -8
- package/src/cli/commands/databaseCommands.ts +161 -9
- package/src/cli/commands/functionCommands.ts +4 -3
- package/src/cli/commands/importFileCommands.ts +815 -0
- package/src/cli/commands/schemaCommands.ts +3 -3
- package/src/cli/commands/storageCommands.ts +2 -3
- package/src/cli/commands/transferCommands.ts +3 -6
- package/src/collections/attributes.ts +3 -39
- package/src/collections/indexes.ts +2 -4
- package/src/collections/methods.ts +115 -150
- package/src/collections/tableOperations.ts +57 -21
- package/src/collections/transferOperations.ts +4 -5
- package/src/collections/wipeOperations.ts +154 -51
- package/src/databases/methods.ts +2 -2
- package/src/databases/setup.ts +2 -2
- package/src/examples/yamlTerminologyExample.ts +2 -2
- package/src/functions/deployments.ts +6 -5
- package/src/functions/fnConfigDiscovery.ts +2 -2
- package/src/functions/methods.ts +17 -4
- package/src/init.ts +1 -1
- package/src/interactiveCLI.ts +75 -10
- package/src/main.ts +143 -287
- package/src/migrations/afterImportActions.ts +2 -3
- package/src/migrations/appwriteToX.ts +12 -8
- package/src/migrations/comprehensiveTransfer.ts +6 -6
- package/src/migrations/dataLoader.ts +2 -5
- package/src/migrations/importController.ts +3 -4
- package/src/migrations/importDataActions.ts +3 -3
- package/src/migrations/relationships.ts +1 -2
- package/src/migrations/services/DataTransformationService.ts +2 -2
- package/src/migrations/services/FileHandlerService.ts +1 -1
- package/src/migrations/services/ImportOrchestrator.ts +4 -4
- package/src/migrations/services/RateLimitManager.ts +1 -1
- package/src/migrations/services/RelationshipResolver.ts +1 -1
- package/src/migrations/services/UserMappingService.ts +1 -1
- package/src/migrations/services/ValidationService.ts +1 -1
- package/src/migrations/transfer.ts +126 -83
- package/src/migrations/yaml/YamlImportConfigLoader.ts +1 -1
- package/src/migrations/yaml/YamlImportIntegration.ts +2 -2
- package/src/migrations/yaml/generateImportSchemas.ts +1 -1
- package/src/setupCommands.ts +5 -6
- package/src/setupController.ts +1 -1
- package/src/shared/backupTracking.ts +3 -3
- package/src/shared/confirmationDialogs.ts +1 -1
- package/src/shared/migrationHelpers.ts +4 -4
- package/src/shared/operationQueue.ts +3 -4
- package/src/shared/operationsTable.ts +3 -3
- package/src/shared/progressManager.ts +1 -1
- package/src/shared/selectionDialogs.ts +9 -8
- package/src/storage/methods.ts +4 -4
- package/src/tables/indexManager.ts +409 -0
- package/src/types.ts +2 -2
- package/src/users/methods.ts +2 -3
- package/src/utils/configMigration.ts +1 -1
- package/src/utils/index.ts +1 -1
- package/src/utils/loadConfigs.ts +15 -7
- package/src/utils/setupFiles.ts +5 -7
- package/src/utilsController.ts +86 -32
- package/dist/adapters/AdapterFactory.d.ts +0 -94
- package/dist/adapters/AdapterFactory.js +0 -405
- package/dist/adapters/DatabaseAdapter.d.ts +0 -233
- package/dist/adapters/DatabaseAdapter.js +0 -50
- package/dist/adapters/LegacyAdapter.d.ts +0 -50
- package/dist/adapters/LegacyAdapter.js +0 -612
- package/dist/adapters/TablesDBAdapter.d.ts +0 -45
- package/dist/adapters/TablesDBAdapter.js +0 -571
- package/dist/config/ConfigManager.d.ts +0 -445
- package/dist/config/ConfigManager.js +0 -625
- package/dist/config/configMigration.d.ts +0 -87
- package/dist/config/configMigration.js +0 -390
- package/dist/config/configValidation.d.ts +0 -66
- package/dist/config/configValidation.js +0 -358
- package/dist/config/index.d.ts +0 -8
- package/dist/config/index.js +0 -7
- package/dist/config/services/ConfigDiscoveryService.d.ts +0 -126
- package/dist/config/services/ConfigDiscoveryService.js +0 -374
- package/dist/config/services/ConfigLoaderService.d.ts +0 -129
- package/dist/config/services/ConfigLoaderService.js +0 -540
- package/dist/config/services/ConfigMergeService.d.ts +0 -208
- package/dist/config/services/ConfigMergeService.js +0 -308
- package/dist/config/services/ConfigValidationService.d.ts +0 -214
- package/dist/config/services/ConfigValidationService.js +0 -310
- package/dist/config/services/SessionAuthService.d.ts +0 -225
- package/dist/config/services/SessionAuthService.js +0 -456
- package/dist/config/services/__tests__/ConfigMergeService.test.d.ts +0 -1
- package/dist/config/services/__tests__/ConfigMergeService.test.js +0 -271
- package/dist/config/services/index.d.ts +0 -13
- package/dist/config/services/index.js +0 -10
- package/dist/config/yamlConfig.d.ts +0 -722
- package/dist/config/yamlConfig.js +0 -702
- package/dist/functions/pathResolution.d.ts +0 -37
- package/dist/functions/pathResolution.js +0 -185
- package/dist/shared/attributeMapper.d.ts +0 -20
- package/dist/shared/attributeMapper.js +0 -203
- package/dist/shared/errorUtils.d.ts +0 -54
- package/dist/shared/errorUtils.js +0 -95
- package/dist/shared/functionManager.d.ts +0 -48
- package/dist/shared/functionManager.js +0 -336
- package/dist/shared/indexManager.d.ts +0 -24
- package/dist/shared/indexManager.js +0 -151
- package/dist/shared/jsonSchemaGenerator.d.ts +0 -50
- package/dist/shared/jsonSchemaGenerator.js +0 -290
- package/dist/shared/logging.d.ts +0 -61
- package/dist/shared/logging.js +0 -116
- package/dist/shared/messageFormatter.d.ts +0 -39
- package/dist/shared/messageFormatter.js +0 -162
- package/dist/shared/pydanticModelGenerator.d.ts +0 -17
- package/dist/shared/pydanticModelGenerator.js +0 -615
- package/dist/shared/schemaGenerator.d.ts +0 -40
- package/dist/shared/schemaGenerator.js +0 -556
- package/dist/utils/ClientFactory.d.ts +0 -87
- package/dist/utils/ClientFactory.js +0 -212
- package/dist/utils/configDiscovery.d.ts +0 -78
- package/dist/utils/configDiscovery.js +0 -472
- package/dist/utils/constantsGenerator.d.ts +0 -31
- package/dist/utils/constantsGenerator.js +0 -321
- package/dist/utils/dataConverters.d.ts +0 -46
- package/dist/utils/dataConverters.js +0 -139
- package/dist/utils/directoryUtils.d.ts +0 -22
- package/dist/utils/directoryUtils.js +0 -59
- package/dist/utils/getClientFromConfig.d.ts +0 -39
- package/dist/utils/getClientFromConfig.js +0 -199
- package/dist/utils/helperFunctions.d.ts +0 -63
- package/dist/utils/helperFunctions.js +0 -156
- package/dist/utils/pathResolvers.d.ts +0 -53
- package/dist/utils/pathResolvers.js +0 -72
- package/dist/utils/projectConfig.d.ts +0 -119
- package/dist/utils/projectConfig.js +0 -171
- package/dist/utils/retryFailedPromises.d.ts +0 -2
- package/dist/utils/retryFailedPromises.js +0 -23
- package/dist/utils/sessionAuth.d.ts +0 -48
- package/dist/utils/sessionAuth.js +0 -164
- package/dist/utils/typeGuards.d.ts +0 -35
- package/dist/utils/typeGuards.js +0 -57
- package/dist/utils/validationRules.d.ts +0 -43
- package/dist/utils/validationRules.js +0 -42
- package/dist/utils/versionDetection.d.ts +0 -58
- package/dist/utils/versionDetection.js +0 -251
- package/dist/utils/yamlConverter.d.ts +0 -100
- package/dist/utils/yamlConverter.js +0 -428
- package/dist/utils/yamlLoader.d.ts +0 -70
- package/dist/utils/yamlLoader.js +0 -267
- package/src/adapters/AdapterFactory.ts +0 -510
- package/src/adapters/DatabaseAdapter.ts +0 -306
- package/src/adapters/LegacyAdapter.ts +0 -841
- package/src/adapters/TablesDBAdapter.ts +0 -773
- package/src/config/ConfigManager.ts +0 -808
- package/src/config/README.md +0 -274
- package/src/config/configMigration.ts +0 -575
- package/src/config/configValidation.ts +0 -445
- package/src/config/index.ts +0 -10
- package/src/config/services/ConfigDiscoveryService.ts +0 -463
- package/src/config/services/ConfigLoaderService.ts +0 -740
- package/src/config/services/ConfigMergeService.ts +0 -388
- package/src/config/services/ConfigValidationService.ts +0 -394
- package/src/config/services/SessionAuthService.ts +0 -565
- package/src/config/services/__tests__/ConfigMergeService.test.ts +0 -351
- package/src/config/services/index.ts +0 -29
- package/src/config/yamlConfig.ts +0 -761
- package/src/functions/pathResolution.ts +0 -227
- package/src/shared/attributeMapper.ts +0 -229
- package/src/shared/errorUtils.ts +0 -110
- package/src/shared/functionManager.ts +0 -525
- package/src/shared/indexManager.ts +0 -254
- package/src/shared/jsonSchemaGenerator.ts +0 -383
- package/src/shared/logging.ts +0 -149
- package/src/shared/messageFormatter.ts +0 -208
- package/src/shared/pydanticModelGenerator.ts +0 -618
- package/src/shared/schemaGenerator.ts +0 -644
- package/src/utils/ClientFactory.ts +0 -240
- package/src/utils/configDiscovery.ts +0 -557
- package/src/utils/constantsGenerator.ts +0 -369
- package/src/utils/dataConverters.ts +0 -159
- package/src/utils/directoryUtils.ts +0 -61
- package/src/utils/getClientFromConfig.ts +0 -257
- package/src/utils/helperFunctions.ts +0 -228
- package/src/utils/pathResolvers.ts +0 -81
- package/src/utils/projectConfig.ts +0 -299
- package/src/utils/retryFailedPromises.ts +0 -29
- package/src/utils/sessionAuth.ts +0 -230
- package/src/utils/typeGuards.ts +0 -65
- package/src/utils/validationRules.ts +0 -88
- package/src/utils/versionDetection.ts +0 -292
- package/src/utils/yamlConverter.ts +0 -542
- package/src/utils/yamlLoader.ts +0 -371
|
@@ -0,0 +1,815 @@
|
|
|
1
|
+
import inquirer from "inquirer";
|
|
2
|
+
import fs from "node:fs";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import readline from "node:readline";
|
|
5
|
+
import Papa from "papaparse";
|
|
6
|
+
import { ID, Query } from "node-appwrite";
|
|
7
|
+
import pLimit from "p-limit";
|
|
8
|
+
import {
|
|
9
|
+
MessageFormatter,
|
|
10
|
+
tryAwaitWithRetry,
|
|
11
|
+
delay,
|
|
12
|
+
} from "@njdamstra/appwrite-utils-helpers";
|
|
13
|
+
import type { DatabaseAdapter } from "@njdamstra/appwrite-utils-helpers";
|
|
14
|
+
import { ProgressManager } from "../../shared/progressManager.js";
|
|
15
|
+
import { fetchAllDatabases } from "../../databases/methods.js";
|
|
16
|
+
import type { InteractiveCLI } from "../../interactiveCLI.js";
|
|
17
|
+
|
|
18
|
+
// ── Schema helpers ──────────────────────────────────────────────────────

// Snapshot of a table's column metadata, used to cast incoming file values
// to the types the database expects before upserting.
interface SchemaInfo {
  columnTypeMap: Map<string, string>; // column key -> column type string (e.g. "integer", "enum")
  arrayColumns: Set<string>; // keys of columns declared as arrays
  enumElementsMap: Map<string, Set<string>>; // enum column key -> allowed element values
  hasRelationships: boolean; // true when any available column is a relationship
}

// Server-managed metadata fields that must never be written back on import.
const SYSTEM_FIELDS = new Set([
  "$databaseId",
  "$collectionId",
  "$tableId",
  "$sequence",
  "$tenant",
]);
// Special fields that ARE forwarded to the API (after validation/normalization)
// rather than dropped — see cleanRow for per-field handling.
const PASSTHROUGH_FIELDS = new Set(["$id", "$permissions", "$createdAt", "$updatedAt"]);
|
|
35
|
+
|
|
36
|
+
async function fetchSchema(
|
|
37
|
+
adapter: DatabaseAdapter,
|
|
38
|
+
databaseId: string,
|
|
39
|
+
tableId: string
|
|
40
|
+
): Promise<SchemaInfo> {
|
|
41
|
+
MessageFormatter.info(`Fetching schema for database="${databaseId}" table="${tableId}"`, { prefix: "Import" });
|
|
42
|
+
const tableInfo = await adapter.getTable({ databaseId, tableId });
|
|
43
|
+
const columns: any[] = (tableInfo.data as any)?.columns || (tableInfo.data as any)?.attributes || [];
|
|
44
|
+
const availableColumns = columns.filter((col: any) => !col.status || col.status === "available");
|
|
45
|
+
MessageFormatter.info(
|
|
46
|
+
`Schema columns: ${availableColumns.map((c: any) => c.key || c.$id).join(", ")}`,
|
|
47
|
+
{ prefix: "Import" }
|
|
48
|
+
);
|
|
49
|
+
|
|
50
|
+
const columnTypeMap = new Map<string, string>();
|
|
51
|
+
const enumElementsMap = new Map<string, Set<string>>();
|
|
52
|
+
const arrayColumns = new Set<string>();
|
|
53
|
+
const hasRelationships = availableColumns.some((col: any) => col.type === "relationship");
|
|
54
|
+
|
|
55
|
+
for (const col of availableColumns) {
|
|
56
|
+
const key = col.key || col.$id;
|
|
57
|
+
if (!key) continue;
|
|
58
|
+
columnTypeMap.set(key, col.type);
|
|
59
|
+
if (col.array) arrayColumns.add(key);
|
|
60
|
+
if (col.type === "enum" && Array.isArray(col.elements)) {
|
|
61
|
+
enumElementsMap.set(key, new Set(col.elements as string[]));
|
|
62
|
+
}
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
if (columnTypeMap.size > 0) {
|
|
66
|
+
MessageFormatter.info(
|
|
67
|
+
`Table schema: ${columns.length} columns detected, will cast values to match types`,
|
|
68
|
+
{ prefix: "Import" }
|
|
69
|
+
);
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
return { columnTypeMap, arrayColumns, enumElementsMap, hasRelationships };
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
// ── Row cleaning ────────────────────────────────────────────────────────
|
|
76
|
+
|
|
77
|
+
function cleanRow(row: any, schema: SchemaInfo): any {
|
|
78
|
+
const { columnTypeMap, arrayColumns } = schema;
|
|
79
|
+
const clean: any = {};
|
|
80
|
+
|
|
81
|
+
for (const [key, value] of Object.entries(row)) {
|
|
82
|
+
if (SYSTEM_FIELDS.has(key)) continue;
|
|
83
|
+
|
|
84
|
+
if (PASSTHROUGH_FIELDS.has(key)) {
|
|
85
|
+
if (value === null || value === undefined || value === "null" || value === "undefined" || value === "") continue;
|
|
86
|
+
|
|
87
|
+
if (key === "$permissions") {
|
|
88
|
+
if (Array.isArray(value)) {
|
|
89
|
+
clean[key] = value;
|
|
90
|
+
} else if (typeof value === "string" && value.trim()) {
|
|
91
|
+
clean[key] = value.match(/[a-z]+\([^)]*\)/g) || [];
|
|
92
|
+
}
|
|
93
|
+
} else if (key === "$createdAt" || key === "$updatedAt") {
|
|
94
|
+
const str = value instanceof Date ? value.toISOString() : String(value).trim();
|
|
95
|
+
// Only set if it looks like a valid ISO datetime
|
|
96
|
+
if (str && !isNaN(Date.parse(str))) {
|
|
97
|
+
clean[key] = str;
|
|
98
|
+
}
|
|
99
|
+
} else {
|
|
100
|
+
clean[key] = value;
|
|
101
|
+
}
|
|
102
|
+
continue;
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
if (columnTypeMap.size > 0 && !columnTypeMap.has(key)) continue;
|
|
106
|
+
if (value === null || value === undefined || value === "null") continue;
|
|
107
|
+
|
|
108
|
+
const colType = columnTypeMap.get(key);
|
|
109
|
+
if (colType === "relationship") continue;
|
|
110
|
+
|
|
111
|
+
// Array columns
|
|
112
|
+
if (arrayColumns.has(key)) {
|
|
113
|
+
let arr: any[];
|
|
114
|
+
if (Array.isArray(value)) {
|
|
115
|
+
arr = value;
|
|
116
|
+
} else if (typeof value === "string") {
|
|
117
|
+
const trimmed = value.trim();
|
|
118
|
+
if (!trimmed || trimmed === "[]") {
|
|
119
|
+
clean[key] = [];
|
|
120
|
+
continue;
|
|
121
|
+
}
|
|
122
|
+
if (trimmed.startsWith("[")) {
|
|
123
|
+
try {
|
|
124
|
+
arr = JSON.parse(trimmed);
|
|
125
|
+
} catch {
|
|
126
|
+
arr = trimmed.split(",").map((s) => s.trim()).filter(Boolean);
|
|
127
|
+
}
|
|
128
|
+
} else {
|
|
129
|
+
arr = trimmed.split(",").map((s) => s.trim()).filter(Boolean);
|
|
130
|
+
}
|
|
131
|
+
} else {
|
|
132
|
+
arr = [value];
|
|
133
|
+
}
|
|
134
|
+
// Cast elements to scalar type
|
|
135
|
+
if (colType === "integer") {
|
|
136
|
+
arr = arr.map((v) => typeof v === "number" ? Math.round(v) : parseInt(String(v), 10)).filter((v) => !isNaN(v));
|
|
137
|
+
} else if (colType === "float" || colType === "double") {
|
|
138
|
+
arr = arr.map((v) => typeof v === "number" ? v : parseFloat(String(v))).filter((v) => !isNaN(v));
|
|
139
|
+
} else if (colType === "boolean") {
|
|
140
|
+
arr = arr.map((v) => {
|
|
141
|
+
if (typeof v === "boolean") return v;
|
|
142
|
+
const s = String(v).toLowerCase().trim();
|
|
143
|
+
return s === "true" || s === "1" || s === "yes";
|
|
144
|
+
});
|
|
145
|
+
} else {
|
|
146
|
+
arr = arr.map((v) => String(v));
|
|
147
|
+
}
|
|
148
|
+
clean[key] = arr;
|
|
149
|
+
continue;
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
// Scalar casting
|
|
153
|
+
if (colType === "enum") {
|
|
154
|
+
clean[key] = String(value);
|
|
155
|
+
} else if (colType === "boolean") {
|
|
156
|
+
if (typeof value === "boolean") {
|
|
157
|
+
clean[key] = value;
|
|
158
|
+
} else if (typeof value === "string") {
|
|
159
|
+
const lower = value.toLowerCase().trim();
|
|
160
|
+
if (lower === "true" || lower === "1" || lower === "yes") {
|
|
161
|
+
clean[key] = true;
|
|
162
|
+
} else if (lower === "false" || lower === "0" || lower === "no" || lower === "") {
|
|
163
|
+
clean[key] = false;
|
|
164
|
+
} else {
|
|
165
|
+
clean[key] = Boolean(value);
|
|
166
|
+
}
|
|
167
|
+
} else if (typeof value === "number") {
|
|
168
|
+
clean[key] = value !== 0;
|
|
169
|
+
} else {
|
|
170
|
+
clean[key] = Boolean(value);
|
|
171
|
+
}
|
|
172
|
+
} else if (colType === "integer") {
|
|
173
|
+
if (typeof value === "number") {
|
|
174
|
+
clean[key] = Math.round(value);
|
|
175
|
+
} else if (typeof value === "string" && value.trim() !== "") {
|
|
176
|
+
const parsed = parseInt(value, 10);
|
|
177
|
+
if (!isNaN(parsed)) clean[key] = parsed;
|
|
178
|
+
}
|
|
179
|
+
} else if (colType === "float" || colType === "double") {
|
|
180
|
+
if (typeof value === "number") {
|
|
181
|
+
clean[key] = value;
|
|
182
|
+
} else if (typeof value === "string" && value.trim() !== "") {
|
|
183
|
+
const parsed = parseFloat(value);
|
|
184
|
+
if (!isNaN(parsed)) clean[key] = parsed;
|
|
185
|
+
}
|
|
186
|
+
} else if (typeof value === "string" && value === "") {
|
|
187
|
+
// Skip empty strings — let database use defaults
|
|
188
|
+
continue;
|
|
189
|
+
} else {
|
|
190
|
+
clean[key] = value;
|
|
191
|
+
}
|
|
192
|
+
}
|
|
193
|
+
return clean;
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
// ── Upsert helpers ──────────────────────────────────────────────────────
|
|
197
|
+
|
|
198
|
+
function makeUpsertRow(
|
|
199
|
+
adapter: DatabaseAdapter,
|
|
200
|
+
databaseId: string,
|
|
201
|
+
tableId: string
|
|
202
|
+
) {
|
|
203
|
+
return async (row: any) => {
|
|
204
|
+
const { $id, $permissions, ...data } = row;
|
|
205
|
+
const rowId = $id || ID.unique();
|
|
206
|
+
const permissions = Array.isArray($permissions) ? $permissions : undefined;
|
|
207
|
+
try {
|
|
208
|
+
await adapter.createRow({ databaseId, tableId, id: rowId, data, permissions });
|
|
209
|
+
} catch (error: any) {
|
|
210
|
+
const code = error?.originalError?.code || error?.code;
|
|
211
|
+
if (code === 409) {
|
|
212
|
+
await adapter.updateRow({ databaseId, tableId, id: rowId, data, permissions });
|
|
213
|
+
} else {
|
|
214
|
+
throw error;
|
|
215
|
+
}
|
|
216
|
+
}
|
|
217
|
+
};
|
|
218
|
+
}
|
|
219
|
+
|
|
220
|
+
// Outcome counters for one flushed batch of rows.
interface FlushStats {
  created: number; // rows successfully upserted
  errors: number; // rows that failed
  firstError: string | null; // first error message seen, kept for diagnostics
}

// Track the max working bulk size across calls so we don't retry sizes that already failed
let effectiveBulkMax = 2500;
|
|
228
|
+
|
|
229
|
+
/**
 * Flush one batch of cleaned rows into the table.
 *
 * Bulk mode: dedupes by $id (last occurrence wins), splits into sub-batches of
 * at most `effectiveBulkMax`, and calls bulkUpsertRows concurrently. When the
 * server rejects a sub-batch for being too large (413 / size-related message),
 * the shared `effectiveBulkMax` is shrunk and the failing sub-batch is retried
 * in two halves. Non-bulk mode: one upsertRow call per row via a concurrency
 * limiter.
 *
 * @returns per-batch created/error counts plus the first error message seen
 */
async function flushBatch(
  batch: any[],
  adapter: DatabaseAdapter,
  databaseId: string,
  tableId: string,
  schema: SchemaInfo,
  useBulk: boolean,
  upsertRow: (row: any) => Promise<void>
): Promise<FlushStats> {
  let created = 0;
  let errors = 0;
  let firstError: string | null = null;

  if (useBulk) {
    // Deduplicate by $id within the batch — keep last occurrence
    const deduped = new Map<string, any>();
    for (const row of batch) {
      const id = row.$id || ID.unique();
      if (!row.$id) row.$id = id;
      deduped.set(id, row);
    }
    const dedupedRows = Array.from(deduped.values());

    // Split into sub-batches of effectiveBulkMax and run them concurrently (pLimit cap of 500)
    const subBatches: any[][] = [];
    for (let i = 0; i < dedupedRows.length; i += effectiveBulkMax) {
      subBatches.push(dedupedRows.slice(i, i + effectiveBulkMax));
    }
    const bulkLimit = pLimit(500);
    const results = await Promise.all(
      subBatches.map((sub) =>
        bulkLimit(async () => {
          try {
            const result = await tryAwaitWithRetry(async () =>
              adapter.bulkUpsertRows!({ databaseId, tableId, rows: sub })
            );
            // Prefer the server-reported total; fall back to the sub-batch size.
            return { created: (result as any)?.total || (result as any)?.rows?.length || sub.length, errors: 0, firstError: null as string | null };
          } catch (error: any) {
            const msg = error?.message || error?.originalError?.message || String(error);
            // Heuristic: treat 413s and size/limit-ish messages as "batch too large".
            const isSizeError = /too many|payload too large|batch.*size|limit|no longer than \d+ items|array.*\d+ items/i.test(msg)
              || error?.originalError?.code === 413
              || error?.code === 413
              || error?.originalError?.type === "general_argument_invalid";

            if (isSizeError && sub.length > 50) {
              // Shrink the shared cap so later batches don't repeat this failure.
              const newMax = Math.floor(sub.length / 2);
              if (newMax < effectiveBulkMax) {
                effectiveBulkMax = newMax;
                MessageFormatter.warning(`Bulk batch too large, reducing to ${effectiveBulkMax} per call`, { prefix: "Import" });
              }
              // Retry halved sequentially within this slot
              const mid = Math.ceil(sub.length / 2);
              const halves = [sub.slice(0, mid), sub.slice(mid)];
              let hCreated = 0, hErrors = 0;
              let hFirst: string | null = null;
              for (const half of halves) {
                try {
                  const r = await tryAwaitWithRetry(async () =>
                    adapter.bulkUpsertRows!({ databaseId, tableId, rows: half })
                  );
                  hCreated += (r as any)?.total || (r as any)?.rows?.length || half.length;
                } catch (e: any) {
                  hErrors += half.length;
                  if (!hFirst) hFirst = e?.message || String(e);
                }
              }
              return { created: hCreated, errors: hErrors, firstError: hFirst };
            }
            // Not a size problem — count the whole sub-batch as failed.
            return { created: 0, errors: sub.length, firstError: msg };
          }
        })
      )
    );
    for (const r of results) {
      created += r.created;
      errors += r.errors;
      if (r.firstError && !firstError) firstError = r.firstError;
    }
  } else {
    // Individual upserts, up to 500 in flight at once.
    const limit = pLimit(500);
    const promises = batch.map((row) =>
      limit(async () => {
        try {
          await upsertRow(row);
          created++;
        } catch (error: any) {
          errors++;
          const msg = error?.message || error?.originalError?.message || String(error);
          if (!firstError) firstError = msg;
        }
      })
    );
    await Promise.all(promises);
  }

  return { created, errors, firstError };
}
|
|
326
|
+
|
|
327
|
+
// ── Streaming CSV/TSV importer ──────────────────────────────────────────
|
|
328
|
+
|
|
329
|
+
async function countLines(filePath: string): Promise<number> {
|
|
330
|
+
return new Promise((resolve, reject) => {
|
|
331
|
+
let count = 0;
|
|
332
|
+
const stream = fs.createReadStream(filePath, { highWaterMark: 64 * 1024 });
|
|
333
|
+
stream.on("data", (chunk: string | Buffer) => {
|
|
334
|
+
const buf = typeof chunk === "string" ? Buffer.from(chunk) : chunk;
|
|
335
|
+
for (let i = 0; i < buf.length; i++) {
|
|
336
|
+
if (buf[i] === 0x0a) count++;
|
|
337
|
+
}
|
|
338
|
+
});
|
|
339
|
+
stream.on("end", () => resolve(count)); // header line means count ≈ data rows
|
|
340
|
+
stream.on("error", reject);
|
|
341
|
+
});
|
|
342
|
+
}
|
|
343
|
+
|
|
344
|
+
async function importCsvStream(
|
|
345
|
+
adapter: DatabaseAdapter,
|
|
346
|
+
filePath: string,
|
|
347
|
+
databaseId: string,
|
|
348
|
+
tableId: string,
|
|
349
|
+
schema: SchemaInfo,
|
|
350
|
+
delimiter?: string
|
|
351
|
+
): Promise<void> {
|
|
352
|
+
const useBulk = false; // individual upserts with high concurrency
|
|
353
|
+
const BATCH_SIZE = 500;
|
|
354
|
+
const upsertRow = makeUpsertRow(adapter, databaseId, tableId);
|
|
355
|
+
|
|
356
|
+
if (schema.hasRelationships) {
|
|
357
|
+
MessageFormatter.info("Table has relationships — using individual upserts (25 concurrent)", { prefix: "Import" });
|
|
358
|
+
}
|
|
359
|
+
if (useBulk) {
|
|
360
|
+
MessageFormatter.info("Using bulk upsert", { prefix: "Import" });
|
|
361
|
+
}
|
|
362
|
+
|
|
363
|
+
// Get approximate row count for progress
|
|
364
|
+
const lineCount = await countLines(filePath);
|
|
365
|
+
MessageFormatter.info(`~${lineCount} rows detected (streaming)`, { prefix: "Import" });
|
|
366
|
+
|
|
367
|
+
const progress = ProgressManager.create(`import-${tableId}`, lineCount, {
|
|
368
|
+
title: `Importing into ${tableId}`,
|
|
369
|
+
});
|
|
370
|
+
|
|
371
|
+
let totalCreated = 0;
|
|
372
|
+
let totalErrors = 0;
|
|
373
|
+
let firstError: string | null = null;
|
|
374
|
+
let buffer: any[] = [];
|
|
375
|
+
let headerLogged = false;
|
|
376
|
+
|
|
377
|
+
await new Promise<void>((resolve, reject) => {
|
|
378
|
+
const stream = fs.createReadStream(filePath, { encoding: "utf8" });
|
|
379
|
+
|
|
380
|
+
Papa.parse(stream, {
|
|
381
|
+
header: true,
|
|
382
|
+
dynamicTyping: false,
|
|
383
|
+
skipEmptyLines: true,
|
|
384
|
+
delimiter,
|
|
385
|
+
step: async (result: any, parser: any) => {
|
|
386
|
+
if (result.errors?.length > 0) {
|
|
387
|
+
const critical = result.errors.filter((e: any) => e.type !== "FieldMismatch");
|
|
388
|
+
if (critical.length > 0 && !firstError) {
|
|
389
|
+
firstError = critical[0].message;
|
|
390
|
+
}
|
|
391
|
+
}
|
|
392
|
+
|
|
393
|
+
const cleaned = cleanRow(result.data, schema);
|
|
394
|
+
|
|
395
|
+
if (!headerLogged && cleaned) {
|
|
396
|
+
MessageFormatter.info(`Columns: ${Object.keys(cleaned).join(", ")}`, { prefix: "Import" });
|
|
397
|
+
headerLogged = true;
|
|
398
|
+
}
|
|
399
|
+
|
|
400
|
+
buffer.push(cleaned);
|
|
401
|
+
|
|
402
|
+
if (buffer.length >= BATCH_SIZE) {
|
|
403
|
+
parser.pause();
|
|
404
|
+
const batch = buffer.splice(0, buffer.length);
|
|
405
|
+
const stats = await flushBatch(batch, adapter, databaseId, tableId, schema, useBulk, upsertRow);
|
|
406
|
+
totalCreated += stats.created;
|
|
407
|
+
totalErrors += stats.errors;
|
|
408
|
+
if (stats.firstError && !firstError) firstError = stats.firstError;
|
|
409
|
+
progress.update(totalCreated + totalErrors);
|
|
410
|
+
parser.resume();
|
|
411
|
+
}
|
|
412
|
+
},
|
|
413
|
+
complete: async () => {
|
|
414
|
+
// Flush remaining rows
|
|
415
|
+
if (buffer.length > 0) {
|
|
416
|
+
const stats = await flushBatch(buffer, adapter, databaseId, tableId, schema, useBulk, upsertRow);
|
|
417
|
+
totalCreated += stats.created;
|
|
418
|
+
totalErrors += stats.errors;
|
|
419
|
+
if (stats.firstError && !firstError) firstError = stats.firstError;
|
|
420
|
+
}
|
|
421
|
+
resolve();
|
|
422
|
+
},
|
|
423
|
+
error: (error: any) => {
|
|
424
|
+
reject(error);
|
|
425
|
+
},
|
|
426
|
+
});
|
|
427
|
+
});
|
|
428
|
+
|
|
429
|
+
progress.setTotal(totalCreated + totalErrors);
|
|
430
|
+
progress.stop();
|
|
431
|
+
|
|
432
|
+
if (firstError) {
|
|
433
|
+
MessageFormatter.error(`Sample error: ${firstError}`, undefined, { prefix: "Import" });
|
|
434
|
+
}
|
|
435
|
+
if (totalErrors > 0) {
|
|
436
|
+
MessageFormatter.warning(
|
|
437
|
+
`Imported ${totalCreated} rows with ${totalErrors} errors into ${tableId}`,
|
|
438
|
+
{ prefix: "Import" }
|
|
439
|
+
);
|
|
440
|
+
} else {
|
|
441
|
+
MessageFormatter.success(
|
|
442
|
+
`Successfully imported ${totalCreated} rows into ${tableId}`,
|
|
443
|
+
{ prefix: "Import" }
|
|
444
|
+
);
|
|
445
|
+
}
|
|
446
|
+
}
|
|
447
|
+
|
|
448
|
+
// ── Streaming JSONL importer ────────────────────────────────────────────
|
|
449
|
+
|
|
450
|
+
async function importJsonlStream(
|
|
451
|
+
adapter: DatabaseAdapter,
|
|
452
|
+
filePath: string,
|
|
453
|
+
databaseId: string,
|
|
454
|
+
tableId: string,
|
|
455
|
+
schema: SchemaInfo
|
|
456
|
+
): Promise<void> {
|
|
457
|
+
const useBulk = false; // individual upserts with high concurrency
|
|
458
|
+
const BATCH_SIZE = 500;
|
|
459
|
+
const upsertRow = makeUpsertRow(adapter, databaseId, tableId);
|
|
460
|
+
|
|
461
|
+
if (useBulk) MessageFormatter.info("Using bulk upsert", { prefix: "Import" });
|
|
462
|
+
|
|
463
|
+
const lineCount = await countLines(filePath);
|
|
464
|
+
MessageFormatter.info(`~${lineCount} rows detected (streaming)`, { prefix: "Import" });
|
|
465
|
+
|
|
466
|
+
const progress = ProgressManager.create(`import-${tableId}`, lineCount, {
|
|
467
|
+
title: `Importing into ${tableId}`,
|
|
468
|
+
});
|
|
469
|
+
|
|
470
|
+
let totalCreated = 0;
|
|
471
|
+
let totalErrors = 0;
|
|
472
|
+
let firstError: string | null = null;
|
|
473
|
+
let buffer: any[] = [];
|
|
474
|
+
let headerLogged = false;
|
|
475
|
+
|
|
476
|
+
const rl = readline.createInterface({
|
|
477
|
+
input: fs.createReadStream(filePath, { encoding: "utf8" }),
|
|
478
|
+
crlfDelay: Infinity,
|
|
479
|
+
});
|
|
480
|
+
|
|
481
|
+
for await (const line of rl) {
|
|
482
|
+
const trimmed = line.trim();
|
|
483
|
+
if (!trimmed) continue;
|
|
484
|
+
|
|
485
|
+
let row: any;
|
|
486
|
+
try {
|
|
487
|
+
row = JSON.parse(trimmed);
|
|
488
|
+
} catch {
|
|
489
|
+
totalErrors++;
|
|
490
|
+
if (!firstError) firstError = `Invalid JSON line: ${trimmed.slice(0, 80)}`;
|
|
491
|
+
continue;
|
|
492
|
+
}
|
|
493
|
+
|
|
494
|
+
const cleaned = cleanRow(row, schema);
|
|
495
|
+
if (!headerLogged && cleaned) {
|
|
496
|
+
MessageFormatter.info(`Columns: ${Object.keys(cleaned).join(", ")}`, { prefix: "Import" });
|
|
497
|
+
headerLogged = true;
|
|
498
|
+
}
|
|
499
|
+
|
|
500
|
+
buffer.push(cleaned);
|
|
501
|
+
|
|
502
|
+
if (buffer.length >= BATCH_SIZE) {
|
|
503
|
+
const batch = buffer.splice(0, buffer.length);
|
|
504
|
+
const stats = await flushBatch(batch, adapter, databaseId, tableId, schema, useBulk, upsertRow);
|
|
505
|
+
totalCreated += stats.created;
|
|
506
|
+
totalErrors += stats.errors;
|
|
507
|
+
if (stats.firstError && !firstError) firstError = stats.firstError;
|
|
508
|
+
progress.update(totalCreated + totalErrors);
|
|
509
|
+
}
|
|
510
|
+
}
|
|
511
|
+
|
|
512
|
+
// Flush remaining
|
|
513
|
+
if (buffer.length > 0) {
|
|
514
|
+
const stats = await flushBatch(buffer, adapter, databaseId, tableId, schema, useBulk, upsertRow);
|
|
515
|
+
totalCreated += stats.created;
|
|
516
|
+
totalErrors += stats.errors;
|
|
517
|
+
if (stats.firstError && !firstError) firstError = stats.firstError;
|
|
518
|
+
}
|
|
519
|
+
|
|
520
|
+
progress.setTotal(totalCreated + totalErrors);
|
|
521
|
+
progress.stop();
|
|
522
|
+
|
|
523
|
+
if (firstError) {
|
|
524
|
+
MessageFormatter.error(`Sample error: ${firstError}`, undefined, { prefix: "Import" });
|
|
525
|
+
}
|
|
526
|
+
if (totalErrors > 0) {
|
|
527
|
+
MessageFormatter.warning(
|
|
528
|
+
`Imported ${totalCreated} rows with ${totalErrors} errors into ${tableId}`,
|
|
529
|
+
{ prefix: "Import" }
|
|
530
|
+
);
|
|
531
|
+
} else {
|
|
532
|
+
MessageFormatter.success(
|
|
533
|
+
`Successfully imported ${totalCreated} rows into ${tableId}`,
|
|
534
|
+
{ prefix: "Import" }
|
|
535
|
+
);
|
|
536
|
+
}
|
|
537
|
+
}
|
|
538
|
+
|
|
539
|
+
// ── JSON importer (non-streaming, for .json files) ──────────────────────
|
|
540
|
+
|
|
541
|
+
async function importJsonFile(
|
|
542
|
+
adapter: DatabaseAdapter,
|
|
543
|
+
filePath: string,
|
|
544
|
+
databaseId: string,
|
|
545
|
+
tableId: string,
|
|
546
|
+
schema: SchemaInfo
|
|
547
|
+
): Promise<void> {
|
|
548
|
+
const raw = fs.readFileSync(filePath, "utf8");
|
|
549
|
+
const parsed = JSON.parse(raw);
|
|
550
|
+
|
|
551
|
+
let rows: any[];
|
|
552
|
+
if (Array.isArray(parsed)) {
|
|
553
|
+
rows = parsed;
|
|
554
|
+
} else if (typeof parsed === "object" && parsed !== null) {
|
|
555
|
+
const arrayKey = Object.keys(parsed).find((k) => Array.isArray(parsed[k]));
|
|
556
|
+
if (arrayKey) {
|
|
557
|
+
MessageFormatter.info(
|
|
558
|
+
`Using "${arrayKey}" array from JSON (${parsed[arrayKey].length} items)`,
|
|
559
|
+
{ prefix: "Import" }
|
|
560
|
+
);
|
|
561
|
+
rows = parsed[arrayKey];
|
|
562
|
+
} else {
|
|
563
|
+
throw new Error("JSON file must be an array or an object containing an array");
|
|
564
|
+
}
|
|
565
|
+
} else {
|
|
566
|
+
throw new Error("JSON file must be an array or an object containing an array");
|
|
567
|
+
}
|
|
568
|
+
|
|
569
|
+
if (!rows.length) {
|
|
570
|
+
MessageFormatter.warning("No rows found in file", { prefix: "Import" });
|
|
571
|
+
return;
|
|
572
|
+
}
|
|
573
|
+
|
|
574
|
+
MessageFormatter.info(`Parsed ${rows.length} rows`, { prefix: "Import" });
|
|
575
|
+
|
|
576
|
+
const useBulk = false; // individual upserts with high concurrency
|
|
577
|
+
const BATCH_SIZE = 500;
|
|
578
|
+
const upsertRow = makeUpsertRow(adapter, databaseId, tableId);
|
|
579
|
+
|
|
580
|
+
if (useBulk) MessageFormatter.info("Using bulk upsert", { prefix: "Import" });
|
|
581
|
+
if (schema.hasRelationships) {
|
|
582
|
+
MessageFormatter.info("Table has relationships — using individual upserts (25 concurrent)", { prefix: "Import" });
|
|
583
|
+
}
|
|
584
|
+
|
|
585
|
+
const cleanedRows = rows.map((row) => cleanRow(row, schema));
|
|
586
|
+
|
|
587
|
+
MessageFormatter.info(`Columns: ${Object.keys(cleanedRows[0]).join(", ")}`, { prefix: "Import" });
|
|
588
|
+
|
|
589
|
+
const progress = ProgressManager.create(`import-${tableId}`, cleanedRows.length, {
|
|
590
|
+
title: `Importing into ${tableId}`,
|
|
591
|
+
});
|
|
592
|
+
|
|
593
|
+
let totalCreated = 0;
|
|
594
|
+
let totalErrors = 0;
|
|
595
|
+
let firstError: string | null = null;
|
|
596
|
+
|
|
597
|
+
// Process in batches
|
|
598
|
+
for (let i = 0; i < cleanedRows.length; i += BATCH_SIZE) {
|
|
599
|
+
const batch = cleanedRows.slice(i, i + BATCH_SIZE);
|
|
600
|
+
const stats = await flushBatch(batch, adapter, databaseId, tableId, schema, useBulk, upsertRow);
|
|
601
|
+
totalCreated += stats.created;
|
|
602
|
+
totalErrors += stats.errors;
|
|
603
|
+
if (stats.firstError && !firstError) firstError = stats.firstError;
|
|
604
|
+
progress.update(totalCreated + totalErrors);
|
|
605
|
+
}
|
|
606
|
+
|
|
607
|
+
progress.setTotal(totalCreated + totalErrors);
|
|
608
|
+
progress.stop();
|
|
609
|
+
|
|
610
|
+
if (firstError) {
|
|
611
|
+
MessageFormatter.error(`Sample error: ${firstError}`, undefined, { prefix: "Import" });
|
|
612
|
+
}
|
|
613
|
+
if (totalErrors > 0) {
|
|
614
|
+
MessageFormatter.warning(
|
|
615
|
+
`Imported ${totalCreated} rows with ${totalErrors} errors into ${tableId}`,
|
|
616
|
+
{ prefix: "Import" }
|
|
617
|
+
);
|
|
618
|
+
} else {
|
|
619
|
+
MessageFormatter.success(
|
|
620
|
+
`Successfully imported ${totalCreated} rows into ${tableId}`,
|
|
621
|
+
{ prefix: "Import" }
|
|
622
|
+
);
|
|
623
|
+
}
|
|
624
|
+
}
|
|
625
|
+
|
|
626
|
+
// ── Main entry point ────────────────────────────────────────────────────
|
|
627
|
+
|
|
628
|
+
export async function importFileFromPath(
|
|
629
|
+
adapter: DatabaseAdapter,
|
|
630
|
+
filePath: string,
|
|
631
|
+
databaseId: string,
|
|
632
|
+
tableId: string
|
|
633
|
+
): Promise<void> {
|
|
634
|
+
const resolvedPath = path.resolve(filePath);
|
|
635
|
+
|
|
636
|
+
if (!fs.existsSync(resolvedPath)) {
|
|
637
|
+
MessageFormatter.error(`File not found: ${resolvedPath}`, undefined, {
|
|
638
|
+
prefix: "Import",
|
|
639
|
+
});
|
|
640
|
+
return;
|
|
641
|
+
}
|
|
642
|
+
|
|
643
|
+
const ext = path.extname(resolvedPath).toLowerCase();
|
|
644
|
+
const supported = new Set([".csv", ".tsv", ".json", ".jsonl"]);
|
|
645
|
+
if (!supported.has(ext)) {
|
|
646
|
+
MessageFormatter.error(
|
|
647
|
+
`Unsupported file format: ${ext}. Use .csv, .tsv, .json, or .jsonl`,
|
|
648
|
+
undefined,
|
|
649
|
+
{ prefix: "Import" }
|
|
650
|
+
);
|
|
651
|
+
return;
|
|
652
|
+
}
|
|
653
|
+
|
|
654
|
+
const stat = fs.statSync(resolvedPath);
|
|
655
|
+
const sizeMB = (stat.size / 1024 / 1024).toFixed(1);
|
|
656
|
+
MessageFormatter.info(`Importing ${path.basename(resolvedPath)} (${sizeMB} MB)`, { prefix: "Import" });
|
|
657
|
+
|
|
658
|
+
const schema = await fetchSchema(adapter, databaseId, tableId);
|
|
659
|
+
|
|
660
|
+
try {
|
|
661
|
+
if (ext === ".csv" || ext === ".tsv") {
|
|
662
|
+
await importCsvStream(adapter, resolvedPath, databaseId, tableId, schema, ext === ".tsv" ? "\t" : undefined);
|
|
663
|
+
} else if (ext === ".jsonl") {
|
|
664
|
+
await importJsonlStream(adapter, resolvedPath, databaseId, tableId, schema);
|
|
665
|
+
} else {
|
|
666
|
+
await importJsonFile(adapter, resolvedPath, databaseId, tableId, schema);
|
|
667
|
+
}
|
|
668
|
+
} catch (error: any) {
|
|
669
|
+
MessageFormatter.error(
|
|
670
|
+
`Failed to import file: ${error.message}`,
|
|
671
|
+
error,
|
|
672
|
+
{ prefix: "Import" }
|
|
673
|
+
);
|
|
674
|
+
}
|
|
675
|
+
}
|
|
676
|
+
|
|
677
|
+
// ── Prompt for missing targetDb / targetTable ───────────────────────────
|
|
678
|
+
|
|
679
|
+
export async function importFilePromptMissing(
|
|
680
|
+
adapter: DatabaseAdapter,
|
|
681
|
+
database: any,
|
|
682
|
+
filePath: string,
|
|
683
|
+
targetDb?: string,
|
|
684
|
+
targetTable?: string
|
|
685
|
+
): Promise<void> {
|
|
686
|
+
let databaseId = targetDb;
|
|
687
|
+
let tableId = targetTable;
|
|
688
|
+
|
|
689
|
+
if (!databaseId) {
|
|
690
|
+
const allDatabases = await fetchAllDatabases(database);
|
|
691
|
+
if (allDatabases.length === 0) {
|
|
692
|
+
MessageFormatter.error("No databases found", undefined, { prefix: "Import" });
|
|
693
|
+
return;
|
|
694
|
+
}
|
|
695
|
+
const { selectedDb } = await inquirer.prompt([
|
|
696
|
+
{
|
|
697
|
+
type: "list",
|
|
698
|
+
name: "selectedDb",
|
|
699
|
+
message: "Select target database:",
|
|
700
|
+
choices: allDatabases.map((db) => ({
|
|
701
|
+
name: `${db.name} (${db.$id})`,
|
|
702
|
+
value: db.$id,
|
|
703
|
+
})),
|
|
704
|
+
},
|
|
705
|
+
]);
|
|
706
|
+
databaseId = selectedDb;
|
|
707
|
+
}
|
|
708
|
+
|
|
709
|
+
if (!tableId) {
|
|
710
|
+
const tablesResponse = await adapter.listTables({
|
|
711
|
+
databaseId: databaseId!,
|
|
712
|
+
queries: [Query.limit(500)],
|
|
713
|
+
});
|
|
714
|
+
const tables: any[] = tablesResponse.tables || tablesResponse.data || [];
|
|
715
|
+
if (tables.length === 0) {
|
|
716
|
+
MessageFormatter.error("No tables found in selected database", undefined, { prefix: "Import" });
|
|
717
|
+
return;
|
|
718
|
+
}
|
|
719
|
+
const { selectedTable } = await inquirer.prompt([
|
|
720
|
+
{
|
|
721
|
+
type: "list",
|
|
722
|
+
name: "selectedTable",
|
|
723
|
+
message: "Select target table:",
|
|
724
|
+
choices: tables.map((t: any) => ({
|
|
725
|
+
name: `${t.name} (${t.$id})`,
|
|
726
|
+
value: t.$id,
|
|
727
|
+
})),
|
|
728
|
+
},
|
|
729
|
+
]);
|
|
730
|
+
tableId = selectedTable;
|
|
731
|
+
}
|
|
732
|
+
|
|
733
|
+
await importFileFromPath(adapter, filePath, databaseId!, tableId!);
|
|
734
|
+
}
|
|
735
|
+
|
|
736
|
+
// ── Interactive wrapper ─────────────────────────────────────────────────
|
|
737
|
+
|
|
738
|
+
export const importFileCommands = {
|
|
739
|
+
async importFile(cli: InteractiveCLI): Promise<void> {
|
|
740
|
+
const controller = (cli as any).controller!;
|
|
741
|
+
const adapter = controller.adapter as DatabaseAdapter;
|
|
742
|
+
const database = controller.database!;
|
|
743
|
+
|
|
744
|
+
// 1. Prompt for file path
|
|
745
|
+
const { filePath } = await inquirer.prompt([
|
|
746
|
+
{
|
|
747
|
+
type: "input",
|
|
748
|
+
name: "filePath",
|
|
749
|
+
message: "Path to CSV or JSON file:",
|
|
750
|
+
validate: (input: string) => {
|
|
751
|
+
if (!input || !input.trim()) return "Please provide a file path";
|
|
752
|
+
const resolved = path.resolve(input.trim());
|
|
753
|
+
if (!fs.existsSync(resolved))
|
|
754
|
+
return `File not found: ${resolved}`;
|
|
755
|
+
const ext = path.extname(resolved).toLowerCase();
|
|
756
|
+
if (![".csv", ".tsv", ".json", ".jsonl"].includes(ext))
|
|
757
|
+
return "Supported formats: .csv, .tsv, .json, .jsonl";
|
|
758
|
+
return true;
|
|
759
|
+
},
|
|
760
|
+
},
|
|
761
|
+
]);
|
|
762
|
+
|
|
763
|
+
// 2. Select database
|
|
764
|
+
const allDatabases = await fetchAllDatabases(database);
|
|
765
|
+
if (allDatabases.length === 0) {
|
|
766
|
+
MessageFormatter.error("No databases found", undefined, {
|
|
767
|
+
prefix: "Import",
|
|
768
|
+
});
|
|
769
|
+
return;
|
|
770
|
+
}
|
|
771
|
+
|
|
772
|
+
const { selectedDb } = await inquirer.prompt([
|
|
773
|
+
{
|
|
774
|
+
type: "list",
|
|
775
|
+
name: "selectedDb",
|
|
776
|
+
message: "Select target database:",
|
|
777
|
+
choices: allDatabases.map((db) => ({
|
|
778
|
+
name: `${db.name} (${db.$id})`,
|
|
779
|
+
value: db.$id,
|
|
780
|
+
})),
|
|
781
|
+
},
|
|
782
|
+
]);
|
|
783
|
+
|
|
784
|
+
// 3. Select table
|
|
785
|
+
const tablesResponse = await adapter.listTables({
|
|
786
|
+
databaseId: selectedDb,
|
|
787
|
+
queries: [Query.limit(500)],
|
|
788
|
+
});
|
|
789
|
+
const tables: any[] = tablesResponse.tables || tablesResponse.data || [];
|
|
790
|
+
|
|
791
|
+
if (tables.length === 0) {
|
|
792
|
+
MessageFormatter.error(
|
|
793
|
+
"No tables found in selected database",
|
|
794
|
+
undefined,
|
|
795
|
+
{ prefix: "Import" }
|
|
796
|
+
);
|
|
797
|
+
return;
|
|
798
|
+
}
|
|
799
|
+
|
|
800
|
+
const { selectedTable } = await inquirer.prompt([
|
|
801
|
+
{
|
|
802
|
+
type: "list",
|
|
803
|
+
name: "selectedTable",
|
|
804
|
+
message: "Select target table:",
|
|
805
|
+
choices: tables.map((t: any) => ({
|
|
806
|
+
name: `${t.name} (${t.$id})`,
|
|
807
|
+
value: t.$id,
|
|
808
|
+
})),
|
|
809
|
+
},
|
|
810
|
+
]);
|
|
811
|
+
|
|
812
|
+
// 4. Import
|
|
813
|
+
await importFileFromPath(adapter, filePath.trim(), selectedDb, selectedTable);
|
|
814
|
+
},
|
|
815
|
+
};
|