salesforce-to-sqlite 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +164 -0
- package/index.js +349 -0
- package/package.json +28 -0
package/README.md
ADDED
@@ -0,0 +1,164 @@

# Salesforce to SQLite CLI Tool

> A Node.js CLI tool that extracts data from Salesforce and loads it into a SQLite database based on a load plan configuration.

## Features

- Extracts data from Salesforce using SF CLI
- Creates SQLite tables automatically based on query results
- Handles Salesforce relationship fields (e.g., `Parent.Name` becomes `Parent_Name` in SQLite)
- Automatic data type inference
- Progress logging with colored output
- Verbose mode for debugging

## Prerequisites

- Node.js (v14 or higher)
- Salesforce CLI (`sf`) installed and authenticated
- Active Salesforce org connection

## Installation

```bash
npm install -g salesforce-to-sqlite
```

## Usage

### Basic Usage

```bash
sf2sqlite -o <salesforce-username> -l load-plan.json
```

### With Custom Database Name

```bash
sf2sqlite -o myorg@example.com -l load-plan.json -d my-data.db
```

### Verbose Mode

```bash
sf2sqlite -o myorg@example.com -l load-plan.json -v
```

## Options

- `-o, --org <username>` - Salesforce org username or alias (required)
- `-l, --load-plan <file>` - Path to load plan JSON file (required)
- `-d, --database <file>` - SQLite database file path (default: `salesforce.db`)
- `-v, --verbose` - Enable verbose logging
- `-h, --help` - Display help information
- `-V, --version` - Display version number

## Load Plan Format

The load plan is a JSON array of object configurations. Each configuration includes:

```json
{
  "object": "ObjectName",
  "compositeKeys": ["Field1", "Field2"],
  "query": "SELECT Field1, Field2, Related.Field FROM ObjectName",
  "fieldMappings": {
    "Field1": "Field1",
    "Field2": {
      "lookup": {
        "object": "RelatedObject",
        "key": "KeyField",
        "field": "Related.Field"
      }
    }
  }
}
```
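
A complete load plan is simply an array of these entries. The objects and fields below are illustrative (standard `Product2` and `PricebookEntry` fields) and only instantiate the shape documented above; adjust them to match your own org and queries:

```json
[
  {
    "object": "Product2",
    "compositeKeys": ["StockKeepingUnit"],
    "query": "SELECT Name, StockKeepingUnit, IsActive, Family FROM Product2",
    "fieldMappings": {
      "Name": "Name",
      "StockKeepingUnit": "StockKeepingUnit"
    }
  },
  {
    "object": "PricebookEntry",
    "compositeKeys": ["Product2Id", "Pricebook2Id"],
    "query": "SELECT UnitPrice, IsActive, Product2.StockKeepingUnit, Pricebook2.Name FROM PricebookEntry",
    "fieldMappings": {
      "UnitPrice": "UnitPrice"
    }
  }
]
```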

## How It Works

1. **Read Load Plan**: Parses the JSON configuration file
2. **Query Salesforce**: Executes each SOQL query using `sf data query`
3. **Field Name Sanitization**: Converts relationship fields (e.g., `Account.Name`) to SQLite-compatible names (`Account_Name`)
4. **Table Creation**: Creates SQLite tables with inferred data types
5. **Data Insertion**: Inserts queried data into SQLite tables
6. **Type Conversion**: Automatically converts Salesforce data types to SQLite equivalents
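
Step 2 shells out to the Salesforce CLI. For the first entry in the example plan above, the command the tool assembles looks roughly like this (`my-org` is a placeholder org alias):

```bash
# Approximate command built by querySalesforce() in index.js; "my-org" is a placeholder alias
sf data query -q "SELECT Name, StockKeepingUnit, IsActive, Family FROM Product2" -r csv -o my-org
```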

## Field Name Handling

Salesforce relationship fields like `Parent.Name` or `Account.Owner.Email` are automatically converted to SQLite-compatible field names:

- `Parent.Name` → `Parent_Name`
- `Account.Owner.Email` → `Account_Owner_Email`
- `Product2.StockKeepingUnit` → `Product2_StockKeepingUnit`
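
The conversion is a plain dot-to-underscore substitution, mirroring the `sanitizeFieldName()` helper in `index.js`:

```js
// Mirrors sanitizeFieldName() in index.js: dots in relationship paths become underscores
const sanitizeFieldName = (fieldName) => fieldName.replace(/\./g, '_');

console.log(sanitizeFieldName('Account.Owner.Email')); // Account_Owner_Email
```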

## Data Type Mapping

| Salesforce Type | SQLite Type | Notes |
|------------------|-------------|-------|
| String/Text | TEXT | Default for most fields |
| Boolean | INTEGER | true=1, false=0 |
| Number (Integer) | INTEGER | Whole numbers |
| Number (Decimal) | REAL | Decimal numbers |
| Date/DateTime | TEXT | ISO format strings |
| ID | TEXT | Salesforce IDs |
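
As a rough illustration of how these mappings surface in the generated schema, a query such as `SELECT Name, IsActive, UnitPrice, CreatedDate FROM PricebookEntry` would produce a table along these lines (types are inferred from field names and sample values, so your results may differ):

```sql
-- Illustrative CREATE TABLE as generated by createTable() in index.js; exact types depend on your data
CREATE TABLE PricebookEntry (
  Id INTEGER PRIMARY KEY AUTOINCREMENT,
  Name TEXT,
  IsActive INTEGER,
  UnitPrice REAL,
  CreatedDate TEXT
)
```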

## Examples

### Extract Product Catalog Data

```bash
sf2sqlite -o production-org -l load-plan.json -d catalog.db
```

### Debug Query Issues

```bash
sf2sqlite -o sandbox -l load-plan.json -v
```

## Output

The tool provides colored console output:

- 🔵 Info messages
- ✅ Success messages
- ⚠️ Warning messages
- ❌ Error messages

At the end, a summary shows:

- Number of successfully processed objects
- Number of failed objects
- Location of the SQLite database file

## Error Handling

- Invalid load plan JSON: Exits with an error message
- Failed Salesforce queries: Logs a warning and continues with the next object
- Table creation errors: Logs an error and skips the object
- Record insertion errors: Logs an error but continues with other records

## Performance Notes

- Large queries use a 50MB buffer for CSV data
- SQLite WAL mode is enabled for better performance
- Inserts are batched in transactions for speed (see the sketch after this list)
- Progress is logged for each object
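
The batching pattern is the one `insertData()` in `index.js` uses: a prepared statement wrapped in a better-sqlite3 transaction. A minimal, self-contained sketch (the database file, table, columns, and rows below are made up for illustration):

```js
const Database = require('better-sqlite3');

// Standalone sketch of the insert path in index.js; 'example.db' and the Product2
// columns below are illustrative, not necessarily what the tool will create for you.
const db = new Database('example.db');
db.pragma('journal_mode = WAL'); // same pragma the tool enables at startup
db.exec('CREATE TABLE IF NOT EXISTS Product2 (Id INTEGER PRIMARY KEY AUTOINCREMENT, Name TEXT, IsActive INTEGER)');

const insert = db.prepare('INSERT INTO Product2 (Name, IsActive) VALUES (?, ?)');
const insertMany = db.transaction((rows) => {
  // One transaction per batch: better-sqlite3 commits all rows at once
  for (const row of rows) insert.run(row.Name, row.IsActive === 'true' ? 1 : 0);
});

insertMany([
  { Name: 'Widget', IsActive: 'true' },
  { Name: 'Gadget', IsActive: 'false' }
]);
db.close();
```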

## Troubleshooting

### "Command not found: sf"

Install the Salesforce CLI: https://developer.salesforce.com/tools/salesforcecli

### "No auth found for org"

Authenticate with: `sf org login web -a <alias>`

### "Query timeout"

Try splitting large objects into smaller queries, or increase the buffer size in the code.
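
The buffer in question is the `maxBuffer` passed to `execSync` inside `querySalesforce()` in `index.js` (50 MB by default). Raising it means editing that value; the sketch below shows the same call shape with an example 100 MB limit (`my-org` and the query are placeholders):

```js
const { execSync } = require('child_process');

// Same call shape as querySalesforce() in index.js, with maxBuffer raised to 100 MB (example value);
// requires the sf CLI and an authenticated org aliased "my-org".
const command = 'sf data query -q "SELECT Id, Name FROM Product2" -r csv -o my-org';
const csv = execSync(command, {
  encoding: 'utf-8',
  maxBuffer: 100 * 1024 * 1024
});
console.log(csv.split('\n', 1)[0]); // print the CSV header row
```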

### "Failed to parse CSV"

Check the query syntax and ensure the referenced fields exist in your org.

## License

MIT (c) Mohan Chinnappan
package/index.js
ADDED
@@ -0,0 +1,349 @@

#!/usr/bin/env node

const { Command } = require('commander');
const fs = require('fs');
const path = require('path');
const { execSync } = require('child_process');
const Database = require('better-sqlite3');
const { parse } = require('csv-parse/sync');
const chalk = require('chalk');

const program = new Command();

program
  .name('sf-to-sqlite')
  .description('Extract Salesforce data and load into SQLite database')
  .version('1.0.0')
  .requiredOption('-o, --org <username>', 'Salesforce org username or alias')
  .requiredOption('-l, --load-plan <file>', 'Load plan JSON file')
  .option('-d, --database <file>', 'SQLite database file', 'salesforce.db')
  .option('-v, --verbose', 'Verbose output')
  .parse(process.argv);

const options = program.opts();

// Utility functions
function log(message, type = 'info') {
  if (type === 'info') {
    console.log(chalk.blue('ℹ'), message);
  } else if (type === 'success') {
    console.log(chalk.green('✓'), message);
  } else if (type === 'error') {
    console.log(chalk.red('✗'), message);
  } else if (type === 'warn') {
    console.log(chalk.yellow('⚠'), message);
  }
}

function logVerbose(message) {
  if (options.verbose) {
    console.log(chalk.gray(message));
  }
}

// Read and parse load plan
function readLoadPlan(filePath) {
  try {
    const content = fs.readFileSync(filePath, 'utf-8');
    return JSON.parse(content);
  } catch (error) {
    log(`Failed to read load plan: ${error.message}`, 'error');
    process.exit(1);
  }
}

// Sanitize field names for SQLite (replace dots with underscores)
function sanitizeFieldName(fieldName) {
  return fieldName.replace(/\./g, '_');
}

// Extract field names from query
function extractFieldsFromQuery(query) {
  const selectMatch = query.match(/SELECT\s+(.+?)\s+FROM/i);
  if (!selectMatch) {
    return [];
  }

  const fieldsString = selectMatch[1];
  const fields = fieldsString.split(',').map(f => f.trim());

  return fields.map(field => {
    // Handle field with alias or relationship
    const parts = field.split('.');
    return {
      original: field,
      sanitized: sanitizeFieldName(field)
    };
  });
}

// Get SQLite data type based on Salesforce field type
function getSQLiteType(fieldName, fieldValue) {
  // Check common patterns
  if (fieldName.toLowerCase().includes('date')) {
    return 'TEXT'; // Store dates as ISO strings
  }
  if (fieldName.toLowerCase().includes('is') ||
      fieldName.toLowerCase().includes('active') ||
      fieldName.toLowerCase().includes('required') ||
      fieldName.toLowerCase().includes('hidden')) {
    return 'INTEGER'; // Boolean as 0/1
  }
  if (fieldName.toLowerCase().includes('price') ||
      fieldName.toLowerCase().includes('quantity') ||
      fieldName.toLowerCase().includes('amount')) {
    return 'REAL';
  }

  // Infer from value
  if (fieldValue === null || fieldValue === undefined || fieldValue === '') {
    return 'TEXT';
  }

  if (fieldValue === 'true' || fieldValue === 'false') {
    return 'INTEGER';
  }

  if (!isNaN(fieldValue) && fieldValue !== '') {
    if (fieldValue.includes('.')) {
      return 'REAL';
    }
    return 'INTEGER';
  }

  return 'TEXT';
}

// Create table in SQLite
function createTable(db, objectConfig, sampleData) {
  const tableName = objectConfig.object;

  logVerbose(`Creating table: ${tableName}`);

  // Drop table if exists
  db.exec(`DROP TABLE IF EXISTS ${tableName}`);

  // Get fields from query
  const fields = extractFieldsFromQuery(objectConfig.query);

  if (fields.length === 0) {
    log(`No fields found in query for ${tableName}`, 'warn');
    return;
  }

  // Build CREATE TABLE statement
  let createTableSQL = `CREATE TABLE ${tableName} (\n`;
  createTableSQL += `  Id INTEGER PRIMARY KEY AUTOINCREMENT,\n`;

  const columnDefs = fields.map(field => {
    let dataType = 'TEXT';

    // Try to infer type from sample data
    if (sampleData && sampleData.length > 0) {
      const sampleValue = sampleData[0][field.original];
      dataType = getSQLiteType(field.sanitized, sampleValue);
    }

    return `  ${field.sanitized} ${dataType}`;
  });

  createTableSQL += columnDefs.join(',\n');
  createTableSQL += '\n)';

  logVerbose(`SQL: ${createTableSQL}`);

  try {
    db.exec(createTableSQL);
    log(`Created table: ${tableName}`, 'success');
  } catch (error) {
    log(`Failed to create table ${tableName}: ${error.message}`, 'error');
    throw error;
  }

  return fields;
}

// Query Salesforce using SF CLI
function querySalesforce(query, orgUsername) {
  log(`Executing query...`);
  logVerbose(`Query: ${query}`);

  try {
    const command = `sf data query -q "${query}" -r csv -o ${orgUsername}`;
    logVerbose(`Command: ${command}`);

    const result = execSync(command, {
      encoding: 'utf-8',
      maxBuffer: 50 * 1024 * 1024 // 50MB buffer
    });

    return result;
  } catch (error) {
    log(`Query failed: ${error.message}`, 'error');
    if (error.stderr) {
      logVerbose(`Error details: ${error.stderr}`);
    }
    return null;
  }
}

// Parse CSV data
function parseCSV(csvData) {
  if (!csvData || csvData.trim() === '') {
    return [];
  }

  try {
    const records = parse(csvData, {
      columns: true,
      skip_empty_lines: true,
      trim: true,
      relax_column_count: true
    });

    return records;
  } catch (error) {
    log(`Failed to parse CSV: ${error.message}`, 'error');
    return [];
  }
}

// Insert data into SQLite
function insertData(db, tableName, fields, records) {
  if (!records || records.length === 0) {
    log(`No records to insert for ${tableName}`, 'warn');
    return 0;
  }

  logVerbose(`Inserting ${records.length} records into ${tableName}`);

  // Build INSERT statement
  const sanitizedFields = fields.map(f => f.sanitized);
  const placeholders = sanitizedFields.map(() => '?').join(', ');
  const insertSQL = `INSERT INTO ${tableName} (${sanitizedFields.join(', ')}) VALUES (${placeholders})`;

  logVerbose(`Insert SQL: ${insertSQL}`);

  const insert = db.prepare(insertSQL);

  let inserted = 0;
  const insertMany = db.transaction((records) => {
    for (const record of records) {
      try {
        const values = fields.map(field => {
          let value = record[field.original];

          // Convert boolean strings to integers
          if (value === 'true') return 1;
          if (value === 'false') return 0;

          // Handle null/empty values
          if (value === '' || value === null || value === undefined) {
            return null;
          }

          return value;
        });

        insert.run(values);
        inserted++;
      } catch (error) {
        logVerbose(`Failed to insert record: ${error.message}`);
        if (options.verbose) {
          console.log('Record:', record);
        }
      }
    }
  });

  insertMany(records);

  log(`Inserted ${inserted} records into ${tableName}`, 'success');
  return inserted;
}

// Process single object
function processObject(db, objectConfig, orgUsername) {
  log(`\n${'='.repeat(60)}`);
  log(`Processing: ${objectConfig.object}`);
  log(`${'='.repeat(60)}`);

  // Query Salesforce
  const csvData = querySalesforce(objectConfig.query, orgUsername);

  if (!csvData) {
    log(`Skipping ${objectConfig.object} due to query failure`, 'warn');
    return;
  }

  // Parse CSV
  const records = parseCSV(csvData);

  if (records.length === 0) {
    log(`No records returned for ${objectConfig.object}`, 'warn');
  }

  // Create table with sample data for type inference
  const fields = createTable(db, objectConfig, records);

  if (!fields || fields.length === 0) {
    return;
  }

  // Insert data
  insertData(db, objectConfig.object, fields, records);
}

// Main function
function main() {
  log(`Salesforce to SQLite Extractor`, 'info');
  log(`Org: ${options.org}`);
  log(`Load Plan: ${options.loadPlan}`);
  log(`Database: ${options.database}`);

  // Read load plan
  const loadPlan = readLoadPlan(options.loadPlan);
  log(`Loaded ${loadPlan.length} object configurations`, 'success');

  // Initialize SQLite database
  const db = new Database(options.database);
  db.pragma('journal_mode = WAL');

  log(`Database initialized: ${options.database}`, 'success');

  // Process each object
  let successCount = 0;
  let failCount = 0;

  for (const objectConfig of loadPlan) {
    try {
      processObject(db, objectConfig, options.org);
      successCount++;
    } catch (error) {
      log(`Failed to process ${objectConfig.object}: ${error.message}`, 'error');
      failCount++;
    }
  }

  // Close database
  db.close();

  // Summary
  log(`\n${'='.repeat(60)}`);
  log(`Summary`, 'info');
  log(`${'='.repeat(60)}`);
  log(`Successfully processed: ${successCount} objects`, 'success');
  if (failCount > 0) {
    log(`Failed to process: ${failCount} objects`, 'error');
  }
  log(`Database saved: ${options.database}`, 'success');
}

// Run main function
try {
  main();
} catch (error) {
  log(`Fatal error: ${error.message}`, 'error');
  console.error(error);
  process.exit(1);
}
package/package.json
ADDED
@@ -0,0 +1,28 @@

{
  "name": "salesforce-to-sqlite",
  "version": "1.0.0",
  "description": "CLI tool to extract Salesforce data and load into SQLite",
  "main": "index.js",
  "bin": {
    "sf-to-sqlite": "./index.js",
    "sf2sqlite": "./index.js"

  },
  "scripts": {
    "start": "node index.js"
  },
  "keywords": [
    "salesforce",
    "sqlite",
    "cli"
  ],
  "author": "Mohan Chinnappan",
  "license": "MIT",
  "dependencies": {
    "commander": "^11.1.0",
    "express": "^4.18.2",
    "better-sqlite3": "^9.2.2",
    "csv-parse": "^5.5.3",
    "chalk": "^4.1.2"
  }
}