sfmc-dataloader 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +66 -0
- package/bin/mcdata.mjs +9 -0
- package/lib/batch.mjs +42 -0
- package/lib/clear-de.mjs +13 -0
- package/lib/cli.mjs +216 -0
- package/lib/config.mjs +95 -0
- package/lib/confirm-clear.mjs +42 -0
- package/lib/export-de.mjs +86 -0
- package/lib/file-resolve.mjs +57 -0
- package/lib/filename.mjs +80 -0
- package/lib/import-de.mjs +48 -0
- package/lib/import-routes.mjs +67 -0
- package/lib/index.mjs +5 -0
- package/lib/paths.mjs +11 -0
- package/lib/read-rows.mjs +35 -0
- package/lib/retry.mjs +40 -0
- package/package.json +49 -0
package/README.md
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
# sfmc-dataloader
|
|
2
|
+
|
|
3
|
+
Command-line tool **`mcdata`** to export and import Salesforce Marketing Cloud Data Extension rows using the same project files as [mcdev](https://github.com/Accenture/sfmc-devtools) (`.mcdevrc.json`, `.mcdev-auth.json`) and [sfmc-sdk](https://www.npmjs.com/package/sfmc-sdk) for REST/SOAP.
|
|
4
|
+
|
|
5
|
+
## Requirements
|
|
6
|
+
|
|
7
|
+
- Node.js `^20.19.0 || ^22.13.0 || >=24` (aligned with `sfmc-sdk`)
|
|
8
|
+
- An mcdev-style project with credentials on disk
|
|
9
|
+
- Peer: `mcdev` `>=7` (declare alongside your project tooling)
|
|
10
|
+
|
|
11
|
+
## Install
|
|
12
|
+
|
|
13
|
+
```bash
|
|
14
|
+
npm install -g sfmc-dataloader
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
## Usage
|
|
18
|
+
|
|
19
|
+
Run from your mcdev project root (where `.mcdevrc.json` lives).
|
|
20
|
+
|
|
21
|
+
### Export
|
|
22
|
+
|
|
23
|
+
```bash
|
|
24
|
+
mcdata export MyCred/MyBU --de MyDE_CustomerKey --format csv
|
|
25
|
+
```
|
|
26
|
+
|
|
27
|
+
Writes to `./data/MyCred/MyBU/<encodedKey>+MCDATA+<timestamp>.csv` (TSV/JSON with `--format`).
|
|
28
|
+
|
|
29
|
+
### Import
|
|
30
|
+
|
|
31
|
+
```bash
|
|
32
|
+
mcdata import MyCred/MyBU --de MyDE_CustomerKey --format csv --api async --mode upsert
|
|
33
|
+
```
|
|
34
|
+
|
|
35
|
+
Resolves the latest matching export file under `./data/MyCred/MyBU/` for that DE key.
|
|
36
|
+
|
|
37
|
+
Import from explicit paths (DE key is recovered from the `+MCDATA+` filename):
|
|
38
|
+
|
|
39
|
+
```bash
|
|
40
|
+
mcdata import MyCred/MyBU --file ./data/MyCred/MyBU/encoded%2Bkey+MCDATA+2026-04-06T12-00-00.000Z.csv
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
### Clear all rows before import
|
|
44
|
+
|
|
45
|
+
**Dangerous:** removes every row in the target Data Extension before uploading.
|
|
46
|
+
|
|
47
|
+
```bash
|
|
48
|
+
mcdata import MyCred/MyBU --de MyKey --clear-before-import
|
|
49
|
+
```
|
|
50
|
+
|
|
51
|
+
Interactive: type `YES` when prompted. In CI, add `--i-accept-clear-data-risk` after reviewing the risk.
|
|
52
|
+
|
|
53
|
+
## Options
|
|
54
|
+
|
|
55
|
+
| Option | Description |
|
|
56
|
+
|--------|-------------|
|
|
57
|
+
| `-p, --project` | Project root (default: cwd) |
|
|
58
|
+
| `--format` | `csv` (default), `tsv`, or `json` |
|
|
59
|
+
| `--api` | `async` (default) or `sync` |
|
|
60
|
+
| `--mode` | `upsert` (default); `insert` and `update` require `--api sync` |
|
|
61
|
+
| `--clear-before-import` | SOAP `ClearData` before REST import |
|
|
62
|
+
| `--i-accept-clear-data-risk` | Non-interactive consent for clear |
|
|
63
|
+
|
|
64
|
+
## License
|
|
65
|
+
|
|
66
|
+
MIT — Author: Jörn Berkefeld
|
package/bin/mcdata.mjs
ADDED
package/lib/batch.mjs
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
/** Default max UTF-8 bytes per request body (Data API family; margin below 5.9 MB). */
export const DEFAULT_MAX_BODY_BYTES = 5_500_000;

/** Salesforce hard cap on objects per batch. */
export const MAX_OBJECTS_PER_BATCH = 5_000;

/**
 * Split rows into chunks that respect both max row count and serialized JSON body size.
 * Byte size is measured on `JSON.stringify({ items: chunk })` — the exact REST body shape.
 * A single row that exceeds the byte budget still goes out as a one-row chunk.
 *
 * @param {object[]} rows - row objects (flat field map)
 * @param {object} [opts]
 * @param {number} [opts.maxBytes] - byte budget per request body
 * @param {number} [opts.maxObjects] - object cap per request
 * @returns {object[][]} chunks in original row order
 */
export function chunkItemsForPayload(rows, opts = {}) {
  const byteLimit = opts.maxBytes ?? DEFAULT_MAX_BODY_BYTES;
  const countLimit = opts.maxObjects ?? MAX_OBJECTS_PER_BATCH;
  // Estimate the serialized request size for a prospective chunk.
  const bodyBytes = (arr) => Buffer.byteLength(JSON.stringify({ items: arr }), 'utf8');

  const chunks = [];
  let current = [];
  for (const row of rows) {
    if (current.length >= countLimit) {
      chunks.push(current);
      current = [];
    }
    const candidate = [...current, row];
    if (bodyBytes(candidate) > byteLimit && current.length > 0) {
      // Adding this row would overflow: seal the current chunk, start a new one.
      chunks.push(current);
      current = [row];
    } else {
      // Fits — or is a single oversized row, which must travel alone anyway.
      current = candidate;
    }
  }
  if (current.length > 0) {
    chunks.push(current);
  }
  return chunks;
}
|
package/lib/clear-de.mjs
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
/**
 * Remove all rows from a Data Extension via SOAP Perform (ClearData).
 * Requires appropriate tenant and user rights; may fail for shared DEs from non-owner BUs.
 *
 * @param {*} soap - SDK soap instance
 * @param {string} customerKey - DE external key
 * @returns {Promise<any>} raw SOAP Perform response
 */
export async function clearDataExtensionRows(soap, customerKey) {
  const objectProps = { CustomerKey: customerKey };
  return soap.perform('DataExtension', 'ClearData', objectProps);
}
|
package/lib/cli.mjs
ADDED
|
@@ -0,0 +1,216 @@
|
|
|
1
|
+
import { parseArgs } from 'node:util';
|
|
2
|
+
import process from 'node:process';
|
|
3
|
+
import path from 'node:path';
|
|
4
|
+
import SDK from 'sfmc-sdk';
|
|
5
|
+
import {
|
|
6
|
+
loadMcdevProject,
|
|
7
|
+
parseCredBu,
|
|
8
|
+
resolveCredentialAndMid,
|
|
9
|
+
buildSdkAuthObject,
|
|
10
|
+
} from './config.mjs';
|
|
11
|
+
import { dataDirectoryForBu } from './paths.mjs';
|
|
12
|
+
import { exportDataExtensionToFile } from './export-de.mjs';
|
|
13
|
+
import { findImportCandidates, pickLatestByMtime } from './file-resolve.mjs';
|
|
14
|
+
import { parseExportBasename } from './filename.mjs';
|
|
15
|
+
import { importFromFile } from './import-de.mjs';
|
|
16
|
+
import { clearDataExtensionRows } from './clear-de.mjs';
|
|
17
|
+
import { confirmClearBeforeImport } from './confirm-clear.mjs';
|
|
18
|
+
|
|
19
|
+
/** Print CLI usage, option summary, and the destructive clear-data warning to stdout. */
function printHelp() {
  console.log(`mcdata — SFMC Data Extension export/import (mcdev project)

Usage:
  mcdata export <credential>/<bu> --de <key> [--de <key> ...] [options]
  mcdata import <credential>/<bu> (--de <key> ... | --file <path> ...) [options]

Options:
  -p, --project <dir>            mcdev project root (default: cwd)
  --format <csv|tsv|json>        File format (default: csv)
  --json-pretty                  Pretty-print JSON on export

Import options:
  --api <async|sync>             REST row API family (default: async)
  --mode <upsert|insert|update>  (default: upsert; insert/update require --api sync)
  --clear-before-import          SOAP ClearData before import (destructive; see below)
  --i-accept-clear-data-risk     Non-interactive acknowledgement for --clear-before-import

Notes:
  Exports are written under ./data/<credential>/<bu>/ with "+MCDATA+" in the filename.
  Import with --de resolves the latest matching file in that folder (by mtime).
  Import with --file parses the DE key from the basename (+MCDATA+ prefix).

Clear data warning:
  --clear-before-import deletes ALL existing rows in the target DE(s) before upload.
  Interactive: type YES when prompted. CI: also pass --i-accept-clear-data-risk.
`);
}
|
|
47
|
+
|
|
48
|
+
/**
 * CLI entry point for `mcdata`.
 *
 * Parses argv, loads the mcdev project config/auth, builds an sfmc-sdk client,
 * and dispatches to the export or import flow.
 *
 * Fix vs. previous revision: with `--de` + `--clear-before-import`, files are
 * now resolved BEFORE ClearData runs. Previously a missing export file aborted
 * the run AFTER the target DE had already been irreversibly emptied.
 *
 * @param {string[]} argv - full process argv (node binary, script path, ...args)
 * @returns {Promise<number>} process exit code (0 = success)
 */
export async function main(argv) {
  let values;
  let positionals;
  try {
    const parsed = parseArgs({
      args: argv.slice(2),
      allowPositionals: true,
      strict: true,
      options: {
        project: { type: 'string', short: 'p' },
        format: { type: 'string' },
        de: { type: 'string', multiple: true },
        file: { type: 'string', multiple: true },
        api: { type: 'string' },
        mode: { type: 'string' },
        'clear-before-import': { type: 'boolean', default: false },
        'i-accept-clear-data-risk': { type: 'boolean', default: false },
        'json-pretty': { type: 'boolean', default: false },
        help: { type: 'boolean', short: 'h', default: false },
      },
    });
    values = parsed.values;
    positionals = parsed.positionals;
  } catch (e) {
    // parseArgs throws on unknown options / bad values in strict mode.
    console.error(e.message);
    printHelp();
    return 1;
  }

  if (values.help) {
    printHelp();
    return 0;
  }
  if (positionals.length === 0) {
    printHelp();
    return 1;
  }

  const sub = positionals[0];
  const credBuRaw = positionals[1];
  if (!credBuRaw) {
    console.error('Missing <credential>/<businessUnit>.');
    printHelp();
    return 1;
  }

  const projectRoot = path.resolve(values.project ?? process.cwd());
  const fmt = values.format ?? 'csv';
  if (!['csv', 'tsv', 'json'].includes(fmt)) {
    console.error(`Invalid --format: ${fmt}`);
    return 1;
  }

  // Project config + auth, then an sfmc-sdk client scoped to the BU's MID.
  const { mcdevrc, mcdevAuth } = loadMcdevProject(projectRoot);
  const { credential, bu } = parseCredBu(credBuRaw);
  const { mid, authCred } = resolveCredentialAndMid(mcdevrc, mcdevAuth, credential, bu);
  const sdk = new SDK(buildSdkAuthObject(authCred, mid), { requestAttempts: 3 });

  if (sub === 'export') {
    const des = [].concat(values.de ?? []);
    if (des.length === 0) {
      console.error('export requires at least one --de <customerKey>');
      return 1;
    }
    for (const deKey of des) {
      const out = await exportDataExtensionToFile(sdk, {
        projectRoot,
        credentialName: credential,
        buName: bu,
        deKey,
        format: /** @type {'csv'|'tsv'|'json'} */ (fmt),
        jsonPretty: values['json-pretty'],
      });
      // Status goes to stderr so stdout stays clean for piping.
      console.error(`Exported: ${out}`);
    }
    return 0;
  }

  if (sub === 'import') {
    const api = values.api ?? 'async';
    const mode = values.mode ?? 'upsert';
    if (!['async', 'sync'].includes(api)) {
      console.error(`Invalid --api: ${api}`);
      return 1;
    }
    if (!['upsert', 'insert', 'update'].includes(mode)) {
      console.error(`Invalid --mode: ${mode}`);
      return 1;
    }

    const hasDe = values.de?.length > 0;
    const hasFile = values.file?.length > 0;
    if (hasDe === hasFile) {
      console.error('import requires exactly one of: repeated --de <key> OR repeated --file <path>');
      return 1;
    }

    const clear = values['clear-before-import'];
    const acceptRisk = values['i-accept-clear-data-risk'];

    // Resolve every (deKey, filePath) pair up front, BEFORE any destructive
    // action, so a resolution failure can never follow a ClearData call.
    /** @type {{ deKey: string, filePath: string }[]} */
    let jobs;
    if (hasDe) {
      const deKeys = [].concat(values.de ?? []);
      const dataDir = dataDirectoryForBu(projectRoot, credential, bu);
      jobs = [];
      for (const deKey of deKeys) {
        const candidates = await findImportCandidates(dataDir, deKey, fmt);
        if (candidates.length === 0) {
          console.error(`No ${fmt} file found for DE "${deKey}" under ${dataDir}`);
          return 1;
        }
        const filePath =
          candidates.length === 1 ? candidates[0] : await pickLatestByMtime(candidates);
        jobs.push({ deKey, filePath });
      }
    } else {
      // DE key is recovered from the "+MCDATA+" basename convention;
      // parseExportBasename throws on malformed names before anything runs.
      const fileList = values.file ?? [];
      jobs = fileList.map((filePath) => ({
        deKey: parseExportBasename(path.basename(filePath)).customerKey,
        filePath,
      }));
    }

    if (clear) {
      await confirmClearBeforeImport({
        deKeys: jobs.map((job) => job.deKey),
        acceptRiskFlag: acceptRisk,
        isTTY: process.stdin.isTTY === true,
      });
      for (const { deKey } of jobs) {
        await clearDataExtensionRows(sdk.soap, deKey);
      }
    }

    for (const { deKey, filePath } of jobs) {
      await importFromFile(sdk, {
        filePath,
        deKey,
        format: /** @type {'csv'|'tsv'|'json'} */ (fmt),
        api: /** @type {'async'|'sync'} */ (api),
        mode: /** @type {'upsert'|'insert'|'update'} */ (mode),
      });
      console.error(`Imported ${filePath} -> DE ${deKey}`);
    }
    return 0;
  }

  console.error(`Unknown command: ${sub}`);
  printHelp();
  return 1;
}
|
package/lib/config.mjs
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* @typedef {object} McdevrcCredentials
|
|
6
|
+
* @property {Record<string, Record<string, number|string>>} businessUnits
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
/**
|
|
10
|
+
* @typedef {object} Mcdevrc
|
|
11
|
+
* @property {Record<string, McdevrcCredentials>} credentials
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
/**
|
|
15
|
+
* @typedef {object} AuthCredential
|
|
16
|
+
* @property {string} client_id
|
|
17
|
+
* @property {string} client_secret
|
|
18
|
+
* @property {string} auth_url
|
|
19
|
+
*/
|
|
20
|
+
|
|
21
|
+
/**
|
|
22
|
+
* @param {string} projectRoot
|
|
23
|
+
* @returns {{ mcdevrc: Mcdevrc, mcdevAuth: Record<string, AuthCredential> }}
|
|
24
|
+
*/
|
|
25
|
+
export function loadMcdevProject(projectRoot) {
|
|
26
|
+
const rcPath = path.join(projectRoot, '.mcdevrc.json');
|
|
27
|
+
const authPath = path.join(projectRoot, '.mcdev-auth.json');
|
|
28
|
+
if (!fs.existsSync(rcPath)) {
|
|
29
|
+
throw new Error(`Missing ${rcPath}`);
|
|
30
|
+
}
|
|
31
|
+
if (!fs.existsSync(authPath)) {
|
|
32
|
+
throw new Error(`Missing ${authPath}`);
|
|
33
|
+
}
|
|
34
|
+
const mcdevrc = JSON.parse(fs.readFileSync(rcPath, 'utf8'));
|
|
35
|
+
const mcdevAuth = JSON.parse(fs.readFileSync(authPath, 'utf8'));
|
|
36
|
+
return { mcdevrc, mcdevAuth };
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
/**
 * Split a `CredentialName/BUName` positional into its two parts.
 * The BU part may itself contain slashes; only the first `/` separates.
 *
 * @param {string} credBu - `CredentialName/BUName`
 * @returns {{ credential: string, bu: string }}
 * @throws {Error} when there is no separator or either side is empty
 */
export function parseCredBu(credBu) {
  const sep = credBu.indexOf('/');
  const wellFormed = sep > 0 && sep < credBu.length - 1;
  if (!wellFormed) {
    throw new Error(`Expected <credential>/<businessUnit>, got: ${credBu}`);
  }
  const credential = credBu.slice(0, sep);
  const bu = credBu.slice(sep + 1);
  return { credential, bu };
}
|
|
53
|
+
|
|
54
|
+
/**
 * Look up a BU's MID and the matching auth credential block.
 *
 * @param {Mcdevrc} mcdevrc - parsed `.mcdevrc.json`
 * @param {Record<string, AuthCredential>} mcdevAuth - parsed `.mcdev-auth.json`
 * @param {string} credentialName
 * @param {string} buName
 * @returns {{ mid: number, authCred: AuthCredential }}
 * @throws {Error} on unknown credential/BU, non-integer MID, or incomplete auth fields
 */
export function resolveCredentialAndMid(mcdevrc, mcdevAuth, credentialName, buName) {
  const credBlock = mcdevrc.credentials?.[credentialName];
  if (!credBlock) {
    throw new Error(`Unknown credential "${credentialName}" in .mcdevrc.json`);
  }

  const midRaw = credBlock.businessUnits?.[buName];
  if (midRaw == null) {
    throw new Error(`Unknown business unit "${buName}" under credential "${credentialName}"`);
  }

  // MIDs may be stored as numbers or numeric strings; anything else is rejected.
  const mid = typeof midRaw === 'number' ? midRaw : Number.parseInt(String(midRaw), 10);
  if (!Number.isInteger(mid)) {
    throw new Error(`Invalid MID for ${credentialName}/${buName}: ${midRaw}`);
  }

  const authCred = mcdevAuth[credentialName];
  const authComplete = Boolean(
    authCred?.client_id && authCred?.client_secret && authCred?.auth_url
  );
  if (!authComplete) {
    throw new Error(`Missing auth fields for credential "${credentialName}" in .mcdev-auth.json`);
  }
  return { mid, authCred };
}
|
|
81
|
+
|
|
82
|
+
/**
 * Auth object for sfmc-sdk `Auth` / `SDK` constructor.
 * Copies only the three required credential fields and pins the BU via `account_id`.
 *
 * @param {AuthCredential} authCred
 * @param {number} mid - business unit MID
 * @returns {import('sfmc-sdk').AuthObject}
 */
export function buildSdkAuthObject(authCred, mid) {
  const { client_id, client_secret, auth_url } = authCred;
  return { client_id, client_secret, auth_url, account_id: mid };
}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import readline from 'node:readline/promises';
|
|
2
|
+
import { stdin as input, stdout as output } from 'node:process';
|
|
3
|
+
|
|
4
|
+
/**
 * Gate the destructive ClearData step behind explicit consent.
 * Resolves silently when consent is given; throws otherwise.
 *
 * Consent paths:
 *  - `acceptRiskFlag` true: proceed without prompting (CI mode).
 *  - interactive TTY: user must type exactly `YES`.
 *  - non-TTY without flag: refuse outright.
 *
 * @param {object} opts
 * @param {string[]} opts.deKeys - DE keys about to be cleared (shown in the prompt)
 * @param {boolean} opts.acceptRiskFlag - `--i-accept-clear-data-risk`
 * @param {boolean} opts.isTTY - whether stdin is interactive
 * @param {NodeJS.ReadableStream} [opts.stdin] - override for testing
 * @param {NodeJS.WritableStream} [opts.stdout] - override for testing
 * @returns {Promise<void>}
 * @throws {Error} when consent is refused or cannot be collected
 */
export async function confirmClearBeforeImport(opts) {
  const { deKeys, acceptRiskFlag, isTTY } = opts;
  const stdin = opts.stdin ?? input;
  const stdout = opts.stdout ?? output;

  if (acceptRiskFlag) {
    return; // explicit non-interactive consent
  }
  if (!isTTY) {
    throw new Error(
      'Refusing to clear data in non-interactive mode without --i-accept-clear-data-risk. ' +
        'All rows in the target Data Extension(s) would be permanently deleted.'
    );
  }

  const promptParts = [
    '\n*** DANGER: CLEAR DATA ***\n',
    'This will permanently DELETE ALL ROWS in:\n',
    ...deKeys.map((k) => ` - ${k}\n`),
    'This cannot be undone. Enterprise 2.0 / admin / shared-DE rules may apply.\n',
    'Type YES to continue, anything else to abort: ',
  ];
  stdout.write(promptParts.join(''));

  const rl = readline.createInterface({ input: stdin, output: stdout });
  try {
    const answer = await rl.question('');
    if (answer.trim() !== 'YES') {
      throw new Error('Aborted by user (clear not confirmed).');
    }
  } finally {
    rl.close(); // always release the readline interface
  }
}
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
import fs from 'node:fs/promises';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import { stringify } from 'csv-stringify/sync';
|
|
4
|
+
import { rowsetGetPath } from './import-routes.mjs';
|
|
5
|
+
import { buildExportBasename, filesystemSafeTimestamp } from './filename.mjs';
|
|
6
|
+
import { dataDirectoryForBu } from './paths.mjs';
|
|
7
|
+
|
|
8
|
+
/**
 * Fetch every row of a DE via the paged rowset endpoint and flatten each item's
 * `keys` + `values` maps into one object per row.
 *
 * Pagination stops when: the page is empty, the API reports `hasMoreRows: false`,
 * or (when the flag is absent) the page came back less than full.
 *
 * @param {{ rest: { get: (path: string) => Promise<any> } }} sdk
 * @param {string} deKey - DE external (customer) key
 * @returns {Promise<object[]>}
 */
export async function fetchAllRowObjects(sdk, deKey) {
  const pageSize = 2500;
  const rows = [];
  for (let page = 1; ; page++) {
    const qs = new URLSearchParams({ page: String(page), pageSize: String(pageSize) });
    const data = await sdk.rest.get(`${rowsetGetPath(deKey)}?${qs.toString()}`);
    const items = data.items ?? [];
    for (const { keys, values } of items) {
      rows.push({ ...keys, ...values });
    }
    if (items.length === 0) break;
    if (data.hasMoreRows === false) break;
    if (data.hasMoreRows === true) continue;
    // No hasMoreRows flag: infer from whether the page was completely filled.
    if (items.length < pageSize) break;
  }
  return rows;
}
|
|
43
|
+
|
|
44
|
+
/**
 * Serialize row objects for an export file.
 * JSON: array dump plus trailing newline (optionally pretty-printed).
 * CSV/TSV: headered, fully-quoted, BOM-prefixed output via csv-stringify.
 *
 * @param {object[]} rows
 * @param {'csv'|'tsv'|'json'} format
 * @param {boolean} jsonPretty - 2-space indentation when format is json
 * @returns {string}
 */
export function serializeRows(rows, format, jsonPretty) {
  if (format === 'json') {
    return `${JSON.stringify(rows, null, jsonPretty ? 2 : undefined)}\n`;
  }
  const csvOptions = {
    header: true,
    quoted: true,
    bom: true,
    delimiter: format === 'tsv' ? '\t' : ',',
  };
  return stringify(rows, csvOptions);
}
|
|
63
|
+
|
|
64
|
+
/**
 * Export one DE's rows to `./data/<cred>/<bu>/<encodedKey>+MCDATA+<ts>.<ext>`.
 * Fetches all rows, ensures the target directory exists, then writes the
 * serialized payload in one shot.
 *
 * @param {{ rest: { get: (path: string) => Promise<any> } }} sdk
 * @param {object} params
 * @param {string} params.projectRoot
 * @param {string} params.credentialName
 * @param {string} params.buName
 * @param {string} params.deKey
 * @param {'csv'|'tsv'|'json'} params.format
 * @param {boolean} [params.jsonPretty]
 * @returns {Promise<string>} written file path
 */
export async function exportDataExtensionToFile(sdk, params) {
  const { projectRoot, credentialName, buName, deKey, format, jsonPretty = false } = params;
  const rows = await fetchAllRowObjects(sdk, deKey);
  const targetDir = dataDirectoryForBu(projectRoot, credentialName, buName);
  await fs.mkdir(targetDir, { recursive: true });
  const fileName = buildExportBasename(deKey, filesystemSafeTimestamp(), format);
  const target = path.join(targetDir, fileName);
  await fs.writeFile(target, serializeRows(rows, format, jsonPretty), 'utf8');
  return target;
}
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
import fs from 'node:fs/promises';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import { filterIllegalFilenames, MCDATA_SENTINEL } from './filename.mjs';
|
|
4
|
+
|
|
5
|
+
/**
 * Find export files under the data dir matching the encoded DE key prefix and
 * the requested extension. A missing/unreadable directory yields an empty list
 * rather than an error.
 *
 * @param {string} dataDir
 * @param {string} customerKey - raw DE customer key (encoded before matching)
 * @param {'csv'|'tsv'|'json'} format
 * @returns {Promise<string[]>} full paths, in directory-listing order
 */
export async function findImportCandidates(dataDir, customerKey, format) {
  const wantedPrefix = filterIllegalFilenames(customerKey) + MCDATA_SENTINEL;
  const wantedSuffix = `.${format}`;
  let entries;
  try {
    entries = await fs.readdir(dataDir, { withFileTypes: true });
  } catch {
    return []; // directory absent or unreadable: no candidates
  }
  return entries
    .filter((ent) => ent.isFile() && ent.name.endsWith(wantedSuffix) && ent.name.startsWith(wantedPrefix))
    .map((ent) => path.join(dataDir, ent.name));
}
|
|
37
|
+
|
|
38
|
+
/**
 * Of the given paths, return the one with the newest modification time.
 * Ties resolve to the later entry in the list (`>=` comparison).
 *
 * @param {string[]} paths
 * @returns {Promise<string>} path with newest mtime
 * @throws {Error} when the list is empty
 */
export async function pickLatestByMtime(paths) {
  if (paths.length === 0) {
    throw new Error('No candidate files');
  }
  let newest = paths[0];
  let newestMtime = 0;
  for (const candidate of paths) {
    const { mtimeMs } = await fs.stat(candidate);
    if (mtimeMs >= newestMtime) {
      newestMtime = mtimeMs;
      newest = candidate;
    }
  }
  return newest;
}
|
package/lib/filename.mjs
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Mirrors sfmc-devtools `File.filterIllegalFilenames` / `reverseFilterIllegalFilenames`
|
|
3
|
+
* so export filenames stay consistent with mcdev retrieve-style paths.
|
|
4
|
+
* @see https://github.com/Accenture/sfmc-devtools (lib/util/file.js)
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
/**
 * Encode a DE key into a filesystem-safe filename segment.
 * Percent-encodes everything, replaces `*` with `_STAR_`, then restores a small
 * set of characters that are safe in filenames (space, braces, brackets, `@`).
 *
 * @param {string} filename - raw DE customer key
 * @returns {string} encoded segment
 */
export function filterIllegalFilenames(filename) {
  const restoreSafe = [
    ['%20', ' '],
    ['%7B', '{'],
    ['%7D', '}'],
    ['%5B', '['],
    ['%5D', ']'],
    ['%40', '@'],
  ];
  // '*' survives encodeURIComponent, so the star substitution runs afterwards.
  let encoded = encodeURIComponent(filename).replaceAll('*', '_STAR_');
  for (const [escaped, literal] of restoreSafe) {
    encoded = encoded.split(escaped).join(literal);
  }
  return encoded;
}
|
|
29
|
+
|
|
30
|
+
/**
 * Invert `filterIllegalFilenames`: percent-decode, then restore `*` from `_STAR_`.
 *
 * @param {string} filename - encoded filename segment
 * @returns {string} original DE customer key
 */
export function reverseFilterIllegalFilenames(filename) {
  const decoded = decodeURIComponent(filename);
  return decoded.replaceAll('_STAR_', '*');
}
|
|
37
|
+
|
|
38
|
+
/** Sentinel between encoded DE key and timestamp in export basenames; cannot appear in the key segment after encoding. */
export const MCDATA_SENTINEL = '+MCDATA+';

/**
 * Compose an export basename: `<encodedKey>+MCDATA+<timestamp>.<ext>`.
 *
 * @param {string} customerKey - raw DE customer key (encoded here)
 * @param {string} safeTs - filesystem-safe UTC timestamp
 * @param {'csv'|'tsv'|'json'} ext - extension without leading dot
 * @returns {string} basename without directory
 */
export function buildExportBasename(customerKey, safeTs, ext) {
  const encodedKey = filterIllegalFilenames(customerKey);
  return [encodedKey, MCDATA_SENTINEL, safeTs, '.', ext].join('');
}
|
|
50
|
+
|
|
51
|
+
/**
 * UTC timestamp safe for filenames: ISO-8601 with `:` replaced by `-`.
 * Milliseconds are retained, so the result looks like
 * `2026-04-06T15-48-30.000Z` (the previous doc example wrongly omitted `.000`).
 *
 * @param {Date} [d] - defaults to now
 * @returns {string} e.g. 2026-04-06T15-48-30.000Z
 */
export function filesystemSafeTimestamp(d = new Date()) {
  return d.toISOString().replaceAll(':', '-');
}
|
|
58
|
+
|
|
59
|
+
/**
 * Decompose an export basename back into its parts.
 * The extension is whatever follows the LAST dot (lowercased); the sentinel
 * splits the stem into encoded key and timestamp.
 *
 * @param {string} basename - e.g. `encodedKey+MCDATA+2026-04-06T15-00-00.000Z.csv`
 * @returns {{ customerKey: string, timestampPart: string, ext: string }}
 * @throws {Error} when the sentinel is absent
 */
export function parseExportBasename(basename) {
  const dotAt = basename.lastIndexOf('.');
  const hasExt = dotAt !== -1;
  const stem = hasExt ? basename.slice(0, dotAt) : basename;
  const ext = hasExt ? basename.slice(dotAt + 1) : '';

  const sentinelAt = stem.indexOf(MCDATA_SENTINEL);
  if (sentinelAt === -1) {
    throw new Error(
      `Filename must contain "${MCDATA_SENTINEL}" between encoded key and timestamp: ${basename}`
    );
  }
  return {
    customerKey: reverseFilterIllegalFilenames(stem.slice(0, sentinelAt)),
    timestampPart: stem.slice(sentinelAt + MCDATA_SENTINEL.length),
    ext: ext.toLowerCase(),
  };
}
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
import { chunkItemsForPayload } from './batch.mjs';
|
|
2
|
+
import { resolveImportRoute } from './import-routes.mjs';
|
|
3
|
+
import { withRetry429 } from './retry.mjs';
|
|
4
|
+
import { readRowsFromFile } from './read-rows.mjs';
|
|
5
|
+
|
|
6
|
+
/**
 * Upload rows to a DE in size-safe batches through the resolved REST route,
 * retrying each request on 429 via `withRetry429`. Batches are sent strictly
 * sequentially, in row order.
 *
 * @param {{ rest: { put: Function, post: Function } }} sdk
 * @param {object} params
 * @param {string} params.deKey - target DE customer key
 * @param {object[]} params.rows - flat field-map row objects
 * @param {'async'|'sync'} params.api
 * @param {'upsert'|'insert'|'update'} params.mode
 * @returns {Promise<void>}
 */
export async function importRowsForDe(sdk, params) {
  const { deKey, rows, api, mode } = params;
  const route = resolveImportRoute(api, mode);
  // Bind so the rest client keeps its `this` when invoked indirectly.
  const send = route.method === 'PUT' ? sdk.rest.put.bind(sdk.rest) : sdk.rest.post.bind(sdk.rest);
  const targetPath = route.path(deKey);
  for (const batch of chunkItemsForPayload(rows)) {
    await withRetry429(() => send(targetPath, { items: batch }));
  }
}
|
|
29
|
+
|
|
30
|
+
/**
 * Read rows from an export file and push them into the target DE.
 * Thin composition of `readRowsFromFile` + `importRowsForDe`.
 *
 * @param {{ rest: { put: Function, post: Function } }} sdk
 * @param {object} params
 * @param {string} params.filePath - export file on disk
 * @param {string} params.deKey - target DE customer key for the API call
 * @param {'csv'|'tsv'|'json'} params.format
 * @param {'async'|'sync'} params.api
 * @param {'upsert'|'insert'|'update'} params.mode
 * @returns {Promise<void>}
 */
export async function importFromFile(sdk, params) {
  const { filePath, deKey, format, api, mode } = params;
  const rows = await readRowsFromFile(filePath, format);
  await importRowsForDe(sdk, { deKey, rows, api, mode });
}
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
/**
 * REST paths for Data Extension row writes (relative to REST base URL).
 * Confirm against current Salesforce reference hubs when adjusting behavior.
 */

/**
 * Rowset read path used by exports.
 * @param {string} deKey - DE external (customer) key
 * @returns {string} path starting with /
 */
export function rowsetGetPath(deKey) {
  return '/data/v1/customobjectdata/key/' + encodeURIComponent(deKey) + '/rowset';
}

/**
 * Async bulk upsert (default for `--api async`).
 * @param {string} deKey
 * @returns {string}
 */
export function asyncUpsertPath(deKey) {
  return '/data/v1/async/dataextensions/key:' + encodeURIComponent(deKey) + '/rows';
}

/**
 * Synchronous upsert row set (used with PUT).
 * @param {string} deKey
 * @returns {string}
 */
export function syncUpsertPath(deKey) {
  return '/data/v1/customobjectdata/key/' + encodeURIComponent(deKey) + '/rows';
}

/**
 * Synchronous insert row set (used with POST); same path as upsert — the verb differs.
 * @param {string} deKey
 * @returns {string}
 */
export function syncInsertPath(deKey) {
  return '/data/v1/customobjectdata/key/' + encodeURIComponent(deKey) + '/rows';
}

/**
 * Map an (api, mode) pair to HTTP verb plus path builder.
 * NOTE(review): sync "update" is routed to the upsert path with PUT — verify
 * this matches the intended update-only semantics for the Data API.
 *
 * @param {'async'|'sync'} api
 * @param {'upsert'|'insert'|'update'} mode
 * @returns {{ method: 'PUT'|'POST', path: (de: string) => string }}
 * @throws {Error} for unsupported combinations
 */
export function resolveImportRoute(api, mode) {
  if (api === 'async') {
    if (mode === 'upsert') {
      return { method: 'PUT', path: asyncUpsertPath };
    }
    throw new Error(
      `Import mode "${mode}" is not supported with --api async (use --api sync or --mode upsert).`
    );
  }
  if (api === 'sync') {
    switch (mode) {
      case 'upsert':
      case 'update':
        return { method: 'PUT', path: syncUpsertPath };
      case 'insert':
        return { method: 'POST', path: syncInsertPath };
    }
  }
  throw new Error(`Unsupported --api / --mode combination: ${api} + ${mode}`);
}
|
package/lib/index.mjs
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
// Public entry points of sfmc-dataloader; everything else under lib/ is internal.
// CLI entry point (mcdata command driver).
export { main } from './cli.mjs';
// Filename encoding helpers for DE keys that contain filesystem-illegal characters.
export { filterIllegalFilenames, reverseFilterIllegalFilenames, parseExportBasename } from './filename.mjs';
// Payload chunking used to keep REST import bodies within size/object limits.
export { chunkItemsForPayload, DEFAULT_MAX_BODY_BYTES, MAX_OBJECTS_PER_BATCH } from './batch.mjs';
// REST route builders for DE row reads/writes.
export { resolveImportRoute, rowsetGetPath, asyncUpsertPath } from './import-routes.mjs';
// mcdev project config loading and credential/BU resolution.
export { loadMcdevProject, parseCredBu, resolveCredentialAndMid, buildSdkAuthObject } from './config.mjs';
|
package/lib/paths.mjs
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import path from 'node:path';
|
|
2
|
+
|
|
3
|
+
/**
 * Resolve the on-disk data directory for one Business Unit.
 * @param {string} projectRoot
 * @param {string} credentialName
 * @param {string} buName
 * @returns {string} absolute path ./data/<cred>/<bu>/
 */
export function dataDirectoryForBu(projectRoot, credentialName, buName) {
    const segments = ['data', credentialName, buName];
    return path.join(projectRoot, ...segments);
}
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { createReadStream, promises as fsPromises } from 'node:fs';
|
|
2
|
+
import csv from 'csv-parser';
|
|
3
|
+
|
|
4
|
+
/**
 * Read row objects from a CSV/TSV/JSON file on disk.
 * @param {string} filePath
 * @param {'csv'|'tsv'|'json'} format - anything other than 'json'/'tsv' is parsed as comma-separated
 * @returns {Promise<object[]>} plain row objects keyed by header/column name
 * @throws {Error} when a JSON file is neither an array nor { items: [...] }
 */
export async function readRowsFromFile(filePath, format) {
    if (format === 'json') {
        const fileContent = await fsPromises.readFile(filePath, 'utf8');
        const parsed = JSON.parse(fileContent);
        if (Array.isArray(parsed)) {
            return parsed;
        }
        // Accept the { "items": [...] } envelope as an alternative to a bare array.
        const items = parsed && typeof parsed === 'object' ? parsed.items : undefined;
        if (Array.isArray(items)) {
            return items;
        }
        throw new Error('JSON import must be an array of row objects or { "items": [...] }');
    }
    // Delimited text: stream-parse via csv-parser, tolerating a leading BOM.
    const separator = format === 'tsv' ? '\t' : ',';
    return new Promise((resolve, reject) => {
        const collected = [];
        createReadStream(filePath)
            .pipe(csv({ separator, bom: true }))
            .on('data', (row) => collected.push(row))
            .on('end', () => resolve(collected))
            .on('error', reject);
    });
}
|
package/lib/retry.mjs
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import { RestError } from 'sfmc-sdk/util';
|
|
2
|
+
|
|
3
|
+
/**
 * Run `fn`, retrying with exponential backoff whenever the SDK raises an HTTP 429.
 * Honors a numeric Retry-After header (seconds) when present; any other error,
 * or exhausting the attempt budget, propagates to the caller.
 * @param {() => Promise<any>} fn
 * @param {object} [opts]
 * @param {number} [opts.maxAttempts] default 5
 * @returns {Promise<any>} the resolved value of `fn`
 */
export async function withRetry429(fn, opts = {}) {
    const maxAttempts = opts.maxAttempts ?? 5;
    let backoffMs = 1000;
    for (let attempt = 1; ; attempt++) {
        try {
            return await fn();
        } catch (err) {
            const status = err instanceof RestError ? err.response?.status : undefined;
            if (status !== 429 || attempt >= maxAttempts) {
                throw err;
            }
            // Prefer the server-provided Retry-After (seconds); a missing or
            // unparsable header falls back to our own backoff.
            const retryAfter = err.response?.headers?.['retry-after'];
            const waitMs =
                retryAfter !== undefined
                    ? Number.parseInt(String(retryAfter), 10) * 1000 || backoffMs
                    : backoffMs;
            await sleep(waitMs);
            backoffMs = Math.min(backoffMs * 2, 60_000);
        }
    }
}
|
|
33
|
+
|
|
34
|
+
/**
 * Promise-based delay.
 * @param {number} ms - milliseconds to wait
 * @returns {Promise<void>} resolves (with undefined) after roughly `ms`
 */
function sleep(ms) {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
}
|
package/package.json
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "sfmc-dataloader",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "CLI (mcdata) to export and import Marketing Cloud Data Extension rows using mcdev project config and sfmc-sdk",
|
|
5
|
+
"author": "Jörn Berkefeld <joern.berkefeld@gmail.com>",
|
|
6
|
+
"license": "MIT",
|
|
7
|
+
"repository": {
|
|
8
|
+
"type": "git",
|
|
9
|
+
"url": "https://github.com/JoernBerkefeld/sfmc-dataloader.git"
|
|
10
|
+
},
|
|
11
|
+
"bugs": {
|
|
12
|
+
"url": "https://github.com/JoernBerkefeld/sfmc-dataloader/issues"
|
|
13
|
+
},
|
|
14
|
+
"keywords": [
|
|
15
|
+
"sfmc",
|
|
16
|
+
"marketing-cloud",
|
|
17
|
+
"data-extension",
|
|
18
|
+
"mcdev",
|
|
19
|
+
"cli",
|
|
20
|
+
"exacttarget"
|
|
21
|
+
],
|
|
22
|
+
"type": "module",
|
|
23
|
+
"bin": {
|
|
24
|
+
"mcdata": "./bin/mcdata.mjs"
|
|
25
|
+
},
|
|
26
|
+
"main": "./lib/index.mjs",
|
|
27
|
+
"exports": {
|
|
28
|
+
".": "./lib/index.mjs"
|
|
29
|
+
},
|
|
30
|
+
"files": [
|
|
31
|
+
"bin",
|
|
32
|
+
"lib",
|
|
33
|
+
"README.md"
|
|
34
|
+
],
|
|
35
|
+
"engines": {
|
|
36
|
+
"node": "^20.19.0 || ^22.13.0 || >=24"
|
|
37
|
+
},
|
|
38
|
+
"peerDependencies": {
|
|
39
|
+
"mcdev": ">=7"
|
|
40
|
+
},
|
|
41
|
+
"dependencies": {
|
|
42
|
+
"csv-parser": "3.2.0",
|
|
43
|
+
"csv-stringify": "6.5.2",
|
|
44
|
+
"sfmc-sdk": "3.0.3"
|
|
45
|
+
},
|
|
46
|
+
"scripts": {
|
|
47
|
+
"test": "node --test test/**/*.test.js"
|
|
48
|
+
}
|
|
49
|
+
}
|