@cparra/apexdocs 3.12.1 → 3.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +38 -19
- package/dist/cli/generate.js +159 -44
- package/dist/index.d.ts +8 -4
- package/dist/index.js +1 -1
- package/dist/{logger-dCI_xpGU.js → logger-BiPQ_i9-.js} +194 -21
- package/package.json +2 -2
package/README.md
CHANGED
@@ -84,6 +84,12 @@ Run the following command to generate markdown files for your global Salesforce

 ```bash
 apexdocs markdown -s force-app
+
+# Use sfdx-project.json as the source of directories
+apexdocs markdown --useSfdxProjectJson
+
+# Specify multiple source directories
+apexdocs markdown --sourceDirs force-app force-lwc force-utils
 ```

 #### OpenApi
@@ -111,21 +117,31 @@ apexdocs changelog --previousVersionDir force-app-previous --currentVersionDir f

 #### Flags

-| Flag
-
-| `--sourceDir`
-| `--
-| `--
-| `--
-| `--
-| `--
-| `--
-| `--
-| `--
-| `--
-| `--
-| `--
-| `--
+| Flag | Alias | Description | Default | Required |
+|------|-------|-------------|---------|----------|
+| `--sourceDir` | `-s` | The directory where the source files are located. | N/A | * |
+| `--sourceDirs` | N/A | Multiple source directories (space-separated). Cannot be used with `--sourceDir` or `--useSfdxProjectJson`. | N/A | * |
+| `--useSfdxProjectJson` | N/A | Read source directories from `sfdx-project.json` packageDirectories. Cannot be used with `--sourceDir` or `--sourceDirs`. | `false` | * |
+| `--sfdxProjectPath` | N/A | Path to the directory containing `sfdx-project.json` (defaults to the current directory). Only used with `--useSfdxProjectJson`. | `process.cwd()` | No |
+| `--targetDir` | `-t` | The directory where the generated files will be placed. | `docs` | No |
+| `--scope` | `-p` | A list of scopes to document. Values should be separated by a space, e.g. --scope global public namespaceaccessible. | `[global]` | No |
+| `--customObjectVisibility` | `-v` | Controls which custom objects are documented. Values should be separated by a space. | `[public]` | No |
+| `--defaultGroupName` | N/A | The default group name to use when a group is not specified. | `Miscellaneous` | No |
+| `--namespace` | N/A | The package namespace, if any. If provided, it will be added to the generated files. | N/A | No |
+| `--sortAlphabetically` | N/A | Sorts files appearing in the Reference Guide alphabetically, as well as the members of a class, interface or enum alphabetically. If false, the members will be displayed in the same order as the code. | `false` | No |
+| `--includeMetadata` | N/A | Whether to include the file's meta.xml information: whether it is active and the API version. | `false` | No |
+| `--linkingStrategy` | N/A | The strategy to use when linking to other classes. Possible values are `relative`, `no-link`, and `none`. | `relative` | No |
+| `--customObjectsGroupName` | N/A | The name under which custom objects will be grouped in the Reference Guide. | `Custom Objects` | No |
+| `--triggersGroupName` | N/A | The name under which triggers will be grouped in the Reference Guide. | `Triggers` | No |
+| `--includeFieldSecurityMetadata` | N/A | Whether to include the compliance category and security classification for fields in the generated files. | `false` | No |
+| `--includeInlineHelpTextMetadata` | N/A | Whether to include the inline help text for fields in the generated files. | `false` | No |
+
+> **Note:** The `*` in the Required column indicates that **one** of the source directory options must be specified:
+> - `--sourceDir` (single directory)
+> - `--sourceDirs` (multiple directories)
+> - `--useSfdxProjectJson` (read from sfdx-project.json)
+>
+> These options are mutually exclusive; you cannot use more than one at the same time.

 ##### Linking Strategy
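For reference, `--useSfdxProjectJson` reads the `packageDirectories` entries of a standard `sfdx-project.json` at the project root (or at `--sfdxProjectPath`). A minimal project file of that shape looks like the following; the directory names are placeholders, not taken from this package:

```json
{
  "packageDirectories": [
    { "path": "force-app", "default": true },
    { "path": "force-utils" }
  ],
  "sourceApiVersion": "61.0"
}
```

With such a file in place, `apexdocs markdown --useSfdxProjectJson` documents `force-app` and `force-utils` without listing them on the command line.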
@@ -365,7 +381,8 @@ having to copy-paste the same text across multiple classes, polluting your
 source code.

 A macro can be defined in your documentation using the `{{macro_name}}` syntax.
-In the configuration file, you can then define the macro behavior as a key-value pair, where the key is the name of the
+In the configuration file, you can then define the macro behavior as a key-value pair, where the key is the name of the
+macro, and the value is a function that returns the text to inject in place of the macro.

 **Type**

@@ -379,7 +396,8 @@ type MacroSourceMetadata = {
 type MacroFunction = (metadata: MacroSourceMetadata) => string;
 ```

-Notice that the `metadata` object contains information about the source of the file for which the macro is being
+Notice that the `metadata` object contains information about the source of the file for which the macro is being
+injected. This allows you to optionally
 return different text based on the source of the file.

 Example: Injecting a copyright notice
@@ -402,13 +420,14 @@ And then in your source code, you can use the macro like this:
  * @description This is a class
  */
 public class MyClass {
-
+  //...
 }
 ```

 ##### **transformReferenceGuide**

-Allows changing the frontmatter and content of the reference guide, or if creating a reference guide page altogether
+Allows changing the frontmatter and content of the reference guide, or if creating a reference guide page altogether
+should be skipped.

 **Type**

package/dist/cli/generate.js
CHANGED
@@ -1,8 +1,8 @@
 #!/usr/bin/env node
 'use strict';

-var logger$1 = require('../logger-dCI_xpGU.js');
-var
+var logger$1 = require('../logger-BiPQ_i9-.js');
+var require$$0 = require('yargs');
 var cosmiconfig = require('cosmiconfig');
 var E = require('fp-ts/Either');
 var cosmiconfigTypescriptLoader = require('cosmiconfig-typescript-loader');
@@ -24,38 +24,53 @@ require('minimatch');
 require('@salesforce/source-deploy-retrieve');
 require('chalk');

-var _documentCurrentScript = typeof document !== 'undefined' ? document.currentScript : null;
 function _interopNamespaceDefault(e) {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  var n = Object.create(null);
+  if (e) {
+    Object.keys(e).forEach(function (k) {
+      if (k !== 'default') {
+        var d = Object.getOwnPropertyDescriptor(e, k);
+        Object.defineProperty(n, k, d.get ? d : {
+          enumerable: true,
+          get: function () { return e[k]; }
+        });
+      }
+    });
+  }
+  n.default = e;
+  return Object.freeze(n);
 }

 var E__namespace = /*#__PURE__*/_interopNamespaceDefault(E);

-
-
-
-
-
-
+function validateMarkdownArgs(argv) {
+  const hasSourceDir = argv.sourceDir && (typeof argv.sourceDir === "string" || Array.isArray(argv.sourceDir) && argv.sourceDir.length > 0);
+  const hasUseSfdxProjectJson = argv.useSfdxProjectJson;
+  if (!hasSourceDir && !hasUseSfdxProjectJson) {
+    throw new Error("Must specify one of: --sourceDir or --useSfdxProjectJson");
+  }
+  return true;
+}
 const markdownOptions = {
   sourceDir: {
     type: "string",
+    array: true,
     alias: "s",
-    demandOption:
-    describe: "The directory location which
+    demandOption: false,
+    describe: "The directory location(s) which contain your apex .cls classes. Can specify a single directory or multiple directories. Cannot be used with useSfdxProjectJson.",
+    conflicts: ["useSfdxProjectJson"]
+  },
+  useSfdxProjectJson: {
+    type: "boolean",
+    demandOption: false,
+    describe: "Read source directories from sfdx-project.json packageDirectories. Cannot be used with sourceDir.",
+    conflicts: ["sourceDir"]
+  },
+  sfdxProjectPath: {
+    type: "string",
+    demandOption: false,
+    describe: "Path to the directory containing sfdx-project.json (defaults to current working directory). Only used with useSfdxProjectJson.",
+    implies: "useSfdxProjectJson"
   },
   targetDir: {
     type: "string",
@@ -129,12 +144,34 @@ const markdownOptions = {
   }
 };

+function validateOpenApiArgs(argv) {
+  const hasSourceDir = argv.sourceDir && (typeof argv.sourceDir === "string" || Array.isArray(argv.sourceDir) && argv.sourceDir.length > 0);
+  const hasUseSfdxProjectJson = argv.useSfdxProjectJson;
+  if (!hasSourceDir && !hasUseSfdxProjectJson) {
+    throw new Error("Must specify one of: --sourceDir or --useSfdxProjectJson");
+  }
+  return true;
+}
 const openApiOptions = {
   sourceDir: {
     type: "string",
+    array: true,
     alias: "s",
-    demandOption:
-    describe: "The directory location which
+    demandOption: false,
+    describe: "The directory location(s) which contain your apex .cls classes. Can specify a single directory or multiple directories. Cannot be used with useSfdxProjectJson.",
+    conflicts: ["useSfdxProjectJson"]
+  },
+  useSfdxProjectJson: {
+    type: "boolean",
+    demandOption: false,
+    describe: "Read source directories from sfdx-project.json packageDirectories. Cannot be used with sourceDir.",
+    conflicts: ["sourceDir"]
+  },
+  sfdxProjectPath: {
+    type: "string",
+    demandOption: false,
+    describe: "Path to the directory containing sfdx-project.json (defaults to current working directory). Only used with useSfdxProjectJson.",
+    implies: "useSfdxProjectJson"
   },
   targetDir: {
     type: "string",
@@ -163,18 +200,31 @@ const openApiOptions = {
   }
 };

+function validateChangelogArgs(argv) {
+  const hasPreviousVersionDir = argv.previousVersionDir && (typeof argv.previousVersionDir === "string" || Array.isArray(argv.previousVersionDir) && argv.previousVersionDir.length > 0);
+  const hasCurrentVersionDir = argv.currentVersionDir && (typeof argv.currentVersionDir === "string" || Array.isArray(argv.currentVersionDir) && argv.currentVersionDir.length > 0);
+  if (!hasPreviousVersionDir) {
+    throw new Error("Must specify --previousVersionDir");
+  }
+  if (!hasCurrentVersionDir) {
+    throw new Error("Must specify --currentVersionDir");
+  }
+  return true;
+}
 const changeLogOptions = {
   previousVersionDir: {
     type: "string",
+    array: true,
     alias: "p",
-    demandOption:
-    describe: "The directory location of the previous version of the source code."
+    demandOption: false,
+    describe: "The directory location(s) of the previous version of the source code. Can specify a single directory or multiple directories."
   },
   currentVersionDir: {
     type: "string",
+    array: true,
     alias: "c",
-    demandOption:
-    describe: "The directory location of the current version of the source code."
+    demandOption: false,
+    describe: "The directory location(s) of the current version of the source code. Can specify a single directory or multiple directories."
   },
   targetDir: {
     type: "string",
@@ -248,7 +298,7 @@ var __async$1 = (__this, __arguments, generator) => {
     step((generator = generator.apply(__this, __arguments)).next());
   });
 };
-const yargs = require
+const yargs = require$$0;
 const configOnlyMarkdownDefaults = {
   targetGenerator: "markdown",
   excludeTags: [],
@@ -304,11 +354,23 @@ function extractArgsForCommandProvidedThroughCli(extractFromProcessFn, config) {
   const mergedConfig = __spreadProps(__spreadValues(__spreadValues({}, config.config), cliArgs), { targetGenerator: commandName });
   switch (mergedConfig.targetGenerator) {
     case "markdown":
-      return
+      return _function.pipe(
+        logger$1.validateSourceDirectoryConfig(extractSourceDirectoryConfig(mergedConfig)),
+        E__namespace.mapLeft((error) => new Error(`Invalid markdown configuration: ${error.message}`)),
+        E__namespace.map(() => __spreadValues(__spreadValues({}, configOnlyMarkdownDefaults), mergedConfig))
+      );
     case "openapi":
-      return
+      return _function.pipe(
+        logger$1.validateSourceDirectoryConfig(extractSourceDirectoryConfig(mergedConfig)),
+        E__namespace.mapLeft((error) => new Error(`Invalid openapi configuration: ${error.message}`)),
+        E__namespace.map(() => __spreadValues(__spreadValues({}, configOnlyOpenApiDefaults), mergedConfig))
+      );
     case "changelog":
-      return
+      return _function.pipe(
+        validateChangelogConfig(mergedConfig),
+        E__namespace.mapLeft((error) => new Error(`Invalid changelog configuration: ${error.message}`)),
+        E__namespace.map(() => __spreadValues(__spreadValues({}, configOnlyChangelogDefaults), mergedConfig))
+      );
     default:
       return E__namespace.left(new Error(`Invalid command provided: ${mergedConfig.targetGenerator}`));
   }
@@ -324,12 +386,26 @@ function extractArgsForCommandsProvidedInConfig(extractFromProcessFn, config) {
         E__namespace.map((cliArgs) => {
           return cliArgs;
         }),
-        E__namespace.
+        E__namespace.flatMap((cliArgs) => {
+          const mergedConfig = __spreadValues(__spreadValues(__spreadValues({}, configOnlyMarkdownDefaults), generatorConfig), cliArgs);
+          return _function.pipe(
+            logger$1.validateSourceDirectoryConfig(extractSourceDirectoryConfig(mergedConfig)),
+            E__namespace.mapLeft((error) => new Error(`Invalid markdown configuration: ${error.message}`)),
+            E__namespace.map(() => mergedConfig)
+          );
+        })
       );
     case "openapi":
       return _function.pipe(
         extractMultiCommandConfig(extractFromProcessFn, "openapi", generatorConfig),
-        E__namespace.
+        E__namespace.flatMap((cliArgs) => {
+          const mergedConfig = __spreadValues(__spreadValues(__spreadValues({}, configOnlyOpenApiDefaults), generatorConfig), cliArgs);
+          return _function.pipe(
+            logger$1.validateSourceDirectoryConfig(extractSourceDirectoryConfig(mergedConfig)),
+            E__namespace.mapLeft((error) => new Error(`Invalid openapi configuration: ${error.message}`)),
+            E__namespace.map(() => mergedConfig)
+          );
+        })
       );
     case "changelog":
       return _function.pipe(
@@ -337,7 +413,14 @@ function extractArgsForCommandsProvidedInConfig(extractFromProcessFn, config) {
         E__namespace.map((cliArgs) => {
           return cliArgs;
         }),
-        E__namespace.
+        E__namespace.flatMap((cliArgs) => {
+          const mergedConfig = __spreadValues(__spreadValues(__spreadValues({}, configOnlyChangelogDefaults), generatorConfig), cliArgs);
+          return _function.pipe(
+            validateChangelogConfig(mergedConfig),
+            E__namespace.mapLeft((error) => new Error(`Invalid changelog configuration: ${error.message}`)),
+            E__namespace.map(() => mergedConfig)
+          );
+        })
       );
   }
 });
@@ -367,15 +450,15 @@ function extractYargsDemandingCommand(extractFromProcessFn, config) {
   return yargs.config(config.config).command(
     "markdown",
     "Generate documentation from Apex classes as a Markdown site.",
-    (yargs2) => yargs2.options(markdownOptions)
+    (yargs2) => yargs2.options(markdownOptions).check(validateMarkdownArgs)
   ).command(
     "openapi",
     "Generate an OpenApi REST specification from Apex classes.",
-    () =>
+    (yargs2) => yargs2.options(openApiOptions).check(validateOpenApiArgs)
   ).command(
     "changelog",
     "Generate a changelog from 2 versions of the source code.",
-    () =>
+    (yargs2) => yargs2.options(changeLogOptions).check(validateChangelogArgs)
   ).demandCommand().parseSync(extractFromProcessFn());
 }
 function extractMultiCommandConfig(extractFromProcessFn, command, config) {
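The `.check()` calls added here are what keep a source-directory option effectively required even though `demandOption` is now `false` on the individual flags. As a rough standalone sketch of that yargs pattern (not code from this package; the demo command and flag set are assumed), a validator that throws makes yargs print the message and exit instead of running the command:

```ts
import yargs from 'yargs';

// Minimal sketch of the yargs .check() pattern used above.
yargs(process.argv.slice(2))
  .command('markdown', 'demo command (illustrative)', (y) =>
    y
      .options({
        sourceDir: { type: 'string', array: true, alias: 's' },
        useSfdxProjectJson: { type: 'boolean' },
      })
      .check((args) => {
        // Mirrors the validation idea: at least one source option is required.
        if (!args.sourceDir?.length && !args.useSfdxProjectJson) {
          throw new Error('Must specify one of: --sourceDir or --useSfdxProjectJson');
        }
        return true;
      })
  )
  .demandCommand()
  .parseSync();
```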
@@ -389,13 +472,45 @@ function extractMultiCommandConfig(extractFromProcessFn, command, config) {
         return changeLogOptions;
     }
   }
+  function getValidationFunction(generator) {
+    switch (generator) {
+      case "markdown":
+        return validateMarkdownArgs;
+      case "openapi":
+        return validateOpenApiArgs;
+      case "changelog":
+        return validateChangelogArgs;
+    }
+  }
   const options = getOptions(command);
+  const validator = getValidationFunction(command);
   return E__namespace.tryCatch(() => {
-    return yargs(extractFromProcessFn()).config(config).options(options).fail((msg) => {
+    return yargs(extractFromProcessFn()).config(config).options(options).check(validator).fail((msg) => {
       throw new Error(`Invalid configuration for command "${command}": ${msg}`);
     }).parseSync();
   }, E__namespace.toError);
 }
+function extractSourceDirectoryConfig(config) {
+  return {
+    sourceDir: config.sourceDir,
+    useSfdxProjectJson: config.useSfdxProjectJson,
+    sfdxProjectPath: config.sfdxProjectPath
+  };
+}
+function validateChangelogConfig(config) {
+  const previousVersionConfig = {
+    sourceDir: config.previousVersionDir
+  };
+  const currentVersionConfig = {
+    sourceDir: config.currentVersionDir
+  };
+  return _function.pipe(
+    E__namespace.Do,
+    E__namespace.bind("previousValid", () => logger$1.validateSourceDirectoryConfig(previousVersionConfig)),
+    E__namespace.bind("currentValid", () => logger$1.validateSourceDirectoryConfig(currentVersionConfig)),
+    E__namespace.map(() => config)
+  );
+}

 var __async = (__this, __arguments, generator) => {
   return new Promise((resolve, reject) => {
@@ -428,8 +543,8 @@ function main() {
     logger.error(`\u274C An error occurred while processing the request: ${error}`);
     process.exit(1);
   }
-  extractArgs().then((maybeConfigs) => __async(
-    E__namespace.match(catchUnexpectedError, (configs) => __async(
+  extractArgs().then((maybeConfigs) => __async(null, null, function* () {
+    E__namespace.match(catchUnexpectedError, (configs) => __async(null, null, function* () {
      for (const config of configs) {
        yield logger$1.Apexdocs.generate(config, logger).then(parseResult);
      }
package/dist/index.d.ts
CHANGED
@@ -20,7 +20,9 @@ type MacroSourceMetadata = {
 type MacroFunction = (metadata: MacroSourceMetadata) => string;

 type CliConfigurableMarkdownConfig = {
-    sourceDir
+    sourceDir?: string | string[];
+    useSfdxProjectJson?: boolean;
+    sfdxProjectPath?: string;
     targetDir: string;
     scope: string[];
     customObjectVisibility: string[];
@@ -45,7 +47,9 @@ type UserDefinedMarkdownConfig = {

 type UserDefinedOpenApiConfig = {
     targetGenerator: 'openapi';
-    sourceDir
+    sourceDir?: string | string[];
+    useSfdxProjectJson?: boolean;
+    sfdxProjectPath?: string;
     targetDir: string;
     fileName: string;
     namespace?: string;
@@ -56,8 +60,8 @@ type UserDefinedOpenApiConfig = {

 type UserDefinedChangelogConfig = {
     targetGenerator: 'changelog';
-    previousVersionDir
-    currentVersionDir
+    previousVersionDir?: string | string[];
+    currentVersionDir?: string | string[];
     targetDir: string;
     fileName: string;
     scope: string[];
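Taken together with the CLI changes, these now-optional fields mean a markdown configuration can list several source directories or defer to `sfdx-project.json`. A minimal sketch of such a config object (the file name, export style, and field values here are assumptions for illustration, not part of this diff):

```ts
// apexdocs.config.ts (file name assumed) - illustrative only.
// Exactly one of sourceDir / useSfdxProjectJson should be set; they are
// mutually exclusive under the new validation.
const config = {
  targetGenerator: 'markdown' as const,
  sourceDir: ['force-app', 'force-utils'], // string or string[]
  // useSfdxProjectJson: true,             // alternative to sourceDir
  // sfdxProjectPath: './my-project',      // only meaningful with useSfdxProjectJson
  targetDir: 'docs',
  scope: ['global', 'public'],
};

export default config;
```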
package/dist/index.js
CHANGED
@@ -2719,8 +2719,8 @@ function transformDocumentationBundleHook(config) {
     (error) => new HookError(error)
   );
 }
-const execTransformReferenceHook = (_0, ..._1) => __async$3(
-  const hooked = references.map((reference) => __async$3(
+const execTransformReferenceHook = (_0, ..._1) => __async$3(null, [_0, ..._1], function* (references, hook = passThroughHook) {
+  const hooked = references.map((reference) => __async$3(null, null, function* () {
     const hookedResult = yield hook(reference);
     return __spreadValues$8(__spreadValues$8({}, reference), hookedResult);
   }));
@@ -2730,24 +2730,24 @@ const execTransformReferenceHook = (_0, ..._1) => __async$3(void 0, [_0, ..._1],
     return acc;
   }, {});
 });
-const documentationBundleHook = (bundle, config) => __async$3(
+const documentationBundleHook = (bundle, config) => __async$3(null, null, function* () {
   return {
     referenceGuide: yield transformReferenceGuide(bundle.referenceGuide, config.transformReferenceGuide),
     docs: yield transformDocs(bundle.docs, config.transformDocs, config.transformDocPage)
   };
 });
-const transformReferenceGuide = (_0, ..._1) => __async$3(
+const transformReferenceGuide = (_0, ..._1) => __async$3(null, [_0, ..._1], function* (referenceGuide, hook = passThroughHook) {
   const result = yield hook(referenceGuide);
   if (isSkip(result)) {
     return result;
   }
   return __spreadValues$8(__spreadValues$8({}, referenceGuide), yield hook(referenceGuide));
 });
-const transformDocs = (_0, ..._1) => __async$3(
+const transformDocs = (_0, ..._1) => __async$3(null, [_0, ..._1], function* (docs, transformDocsHook = passThroughHook, transformDocPageHook = passThroughHook) {
   const transformed = yield transformDocsHook(docs);
   return Promise.all(transformed.map((doc) => transformDocPage(doc, transformDocPageHook)));
 });
-const transformDocPage = (_0, ..._1) => __async$3(
+const transformDocPage = (_0, ..._1) => __async$3(null, [_0, ..._1], function* (doc, hook = passThroughHook) {
   return __spreadValues$8(__spreadValues$8({}, doc), yield hook(doc));
 });
 function postHookCompile$1(bundle) {
@@ -3829,8 +3829,9 @@ var __async$2 = (__this, __arguments, generator) => {
 };
 function openApi(logger, fileBodies, config) {
   return __async$2(this, null, function* () {
+    const sourceDirectory = (Array.isArray(config.sourceDir) ? config.sourceDir[0] : config.sourceDir) || (fileBodies.length > 0 ? fileBodies[0].filePath.split("/").slice(0, -1).join("/") : process.cwd());
     OpenApiSettings.build({
-      sourceDirectory
+      sourceDirectory,
       outputDir: config.targetDir,
       openApiFileName: config.fileName,
       openApiTitle: config.title,
@@ -4779,9 +4780,11 @@ function processFiles(fileSystem) {
     )
   };
   const convertersToUse = componentTypesToRetrieve.map((componentType) => converters[componentType]);
-  return (
+  return (rootPaths, exclude) => {
+    const paths = Array.isArray(rootPaths) ? rootPaths : [rootPaths];
+    const allComponents = paths.flatMap((path) => fileSystem.getComponents(path));
     return _function.pipe(
-
+      allComponents,
       (components) => {
         return components.map((component) => {
           const pathLocation = component.type.name === "ApexClass" ? component.content : component.xml;
@@ -4827,6 +4830,132 @@ class DefaultFileSystem {
   }
 }

+function createSfdxProjectReadError(message, cause) {
+  return {
+    _tag: "SfdxProjectReadError",
+    message,
+    cause
+  };
+}
+function readSfdxProjectConfig(projectRoot) {
+  const sfdxProjectPath = path__namespace.join(projectRoot, "sfdx-project.json");
+  return _function.pipe(
+    E__namespace.tryCatch(
+      () => {
+        if (!fs__namespace.existsSync(sfdxProjectPath)) {
+          throw new Error(`sfdx-project.json not found at ${sfdxProjectPath}`);
+        }
+        return fs__namespace.readFileSync(sfdxProjectPath, "utf8");
+      },
+      (error) => createSfdxProjectReadError(`Failed to read sfdx-project.json: ${error}`, error)
+    ),
+    E__namespace.flatMap(
+      (content) => E__namespace.tryCatch(
+        () => JSON.parse(content),
+        (error) => createSfdxProjectReadError(`Failed to parse sfdx-project.json: ${error}`, error)
+      )
+    ),
+    E__namespace.flatMap((config) => {
+      if (!config.packageDirectories || !Array.isArray(config.packageDirectories)) {
+        return E__namespace.left(
+          createSfdxProjectReadError("sfdx-project.json does not contain a valid packageDirectories array")
+        );
+      }
+      return E__namespace.right(config);
+    })
+  );
+}
+function getSfdxSourceDirectories(projectRoot, absolutePaths = true) {
+  return _function.pipe(
+    readSfdxProjectConfig(projectRoot),
+    E__namespace.map((config) => config.packageDirectories.map((dir) => dir.path)),
+    E__namespace.map((paths) => {
+      if (absolutePaths) {
+        return paths.map((dirPath) => path__namespace.resolve(projectRoot, dirPath));
+      }
+      return paths;
+    }),
+    E__namespace.flatMap((paths) => {
+      const nonExistentPaths = paths.filter((dirPath) => !fs__namespace.existsSync(dirPath));
+      if (nonExistentPaths.length > 0) {
+        return E__namespace.left(
+          createSfdxProjectReadError(
+            `The following package directories do not exist: ${nonExistentPaths.join(", ")}`
+          )
+        );
+      }
+      return E__namespace.right(paths);
+    })
+  );
+}
+
+function createSourceDirectoryResolutionError(message, cause) {
+  return {
+    _tag: "SourceDirectoryResolutionError",
+    message,
+    cause
+  };
+}
+function resolveSourceDirectories(config) {
+  const { sourceDir, useSfdxProjectJson, sfdxProjectPath } = config;
+  const hasSourceDir = sourceDir && (typeof sourceDir === "string" || Array.isArray(sourceDir) && sourceDir.length > 0);
+  const methodsSpecified = [hasSourceDir, useSfdxProjectJson].filter(Boolean).length;
+  if (methodsSpecified === 0) {
+    return E__namespace.left(
+      createSourceDirectoryResolutionError(
+        "No source directory method specified. Must provide one of: sourceDir or useSfdxProjectJson."
+      )
+    );
+  }
+  if (methodsSpecified > 1) {
+    return E__namespace.left(
+      createSourceDirectoryResolutionError(
+        "Multiple source directory methods specified. Only one of sourceDir or useSfdxProjectJson can be used."
+      )
+    );
+  }
+  if (sourceDir) {
+    if (typeof sourceDir === "string") {
+      return E__namespace.right([sourceDir]);
+    } else if (Array.isArray(sourceDir)) {
+      return E__namespace.right(sourceDir);
+    }
+  }
+  if (useSfdxProjectJson) {
+    const projectPath = sfdxProjectPath || process.cwd();
+    return _function.pipe(
+      getSfdxSourceDirectories(projectPath),
+      E__namespace.mapLeft(
+        (sfdxError) => createSourceDirectoryResolutionError(
+          `Failed to read source directories from sfdx-project.json: ${sfdxError.message}`,
+          sfdxError
+        )
+      )
+    );
+  }
+  return E__namespace.left(createSourceDirectoryResolutionError("Invalid source directory configuration."));
+}
+function validateSourceDirectoryConfig(config) {
+  const { sourceDir, useSfdxProjectJson, sfdxProjectPath } = config;
+  if (sourceDir && useSfdxProjectJson) {
+    return E__namespace.left(
+      createSourceDirectoryResolutionError("Cannot specify both sourceDir and useSfdxProjectJson. Use only one.")
+    );
+  }
+  if (sfdxProjectPath && !useSfdxProjectJson) {
+    return E__namespace.left(createSourceDirectoryResolutionError("sfdxProjectPath can only be used with useSfdxProjectJson."));
+  }
+  if (Array.isArray(sourceDir) && sourceDir.length === 0) {
+    return E__namespace.left(
+      createSourceDirectoryResolutionError("sourceDir array cannot be empty. Provide at least one directory.")
+    );
+  }
+  return E__namespace.right(config);
+}
+function resolveAndValidateSourceDirectories(config) {
+  return _function.pipe(validateSourceDirectoryConfig(config), E__namespace.flatMap(resolveSourceDirectories));
+}
+
 var __async = (__this, __arguments, generator) => {
   return new Promise((resolve, reject) => {
@@ -4874,11 +5003,15 @@ const readFiles = processFiles(new DefaultFileSystem());
 function processMarkdown(config) {
   return __async(this, null, function* () {
     return _function.pipe(
-
-
-
-
-
+      resolveAndValidateSourceDirectories(config),
+      E__namespace.mapLeft((error) => new FileReadingError(`Failed to resolve source directories: ${error.message}`, error)),
+      E__namespace.flatMap(
+        (sourceDirs) => E__namespace.tryCatch(
+          () => readFiles(allComponentTypes, {
+            includeMetadata: config.includeMetadata
+          })(sourceDirs, config.exclude),
+          (e) => new FileReadingError("An error occurred while reading files.", e)
+        )
       ),
       TE__namespace.fromEither,
      TE__namespace.flatMap((fileBodies) => generate$1(fileBodies, config)),
@@ -4889,20 +5022,59 @@ function processMarkdown(config) {
 }
 }
 function processOpenApi(config, logger) {
   return __async(this, null, function* () {
-
-
+    return _function.pipe(
+      resolveAndValidateSourceDirectories(config),
+      E__namespace.mapLeft((error) => new FileReadingError(`Failed to resolve source directories: ${error.message}`, error)),
+      TE__namespace.fromEither,
+      TE__namespace.flatMap(
+        (sourceDirs) => TE__namespace.tryCatch(
+          () => {
+            const fileBodies = readFiles(["ApexClass"])(sourceDirs, config.exclude);
+            return openApi(logger, fileBodies, config);
+          },
+          (e) => new FileReadingError("An error occurred while generating OpenAPI documentation.", e)
+        )
+      )
+    );
   });
 }
 function processChangeLog(config) {
   return __async(this, null, function* () {
     function loadFiles() {
-
-
-
-
+      const previousVersionConfig = {
+        sourceDir: config.previousVersionDir
+      };
+      const currentVersionConfig = {
+        sourceDir: config.currentVersionDir
+      };
+      return _function.pipe(
+        E__namespace.Do,
+        E__namespace.bind(
+          "previousVersionDirs",
+          () => _function.pipe(
+            resolveAndValidateSourceDirectories(previousVersionConfig),
+            E__namespace.mapLeft(
+              (error) => new FileReadingError(`Failed to resolve previous version source directories: ${error.message}`, error)
+            )
+          )
+        ),
+        E__namespace.bind(
+          "currentVersionDirs",
+          () => _function.pipe(
+            resolveAndValidateSourceDirectories(currentVersionConfig),
+            E__namespace.mapLeft(
+              (error) => new FileReadingError(`Failed to resolve current version source directories: ${error.message}`, error)
+            )
+          )
+        ),
+        E__namespace.map(({ previousVersionDirs, currentVersionDirs }) => [
+          readFiles(allComponentTypes)(previousVersionDirs, config.exclude),
+          readFiles(allComponentTypes)(currentVersionDirs, config.exclude)
+        ])
+      );
     }
     return _function.pipe(
-
+      loadFiles(),
      TE__namespace.fromEither,
      TE__namespace.flatMap(([previous, current]) => generate(previous, current, config)),
      TE__namespace.mapLeft(toErrors)
@@ -4978,3 +5150,4 @@ exports.changeLogDefaults = changeLogDefaults;
 exports.markdownDefaults = markdownDefaults;
 exports.openApiDefaults = openApiDefaults;
 exports.skip = skip;
+exports.validateSourceDirectoryConfig = validateSourceDirectoryConfig;
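`validateSourceDirectoryConfig` is newly exported from the compiled bundle. A rough usage sketch, assuming the fp-ts `Either` return shape visible above (note the export may not be reflected in the published type declarations):

```ts
import * as E from 'fp-ts/Either';
// Import style is illustrative; the compiled bundle exposes a CommonJS export.
import { validateSourceDirectoryConfig } from '@cparra/apexdocs';

// Invalid on purpose: sourceDir and useSfdxProjectJson are mutually exclusive.
const result = validateSourceDirectoryConfig({
  sourceDir: 'force-app',
  useSfdxProjectJson: true,
});

if (E.isLeft(result)) {
  // "Cannot specify both sourceDir and useSfdxProjectJson. Use only one."
  console.error(result.left.message);
}
```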
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@cparra/apexdocs",
-  "version": "3.12.1",
+  "version": "3.13.0",
   "description": "Library with CLI capabilities to generate documentation for Salesforce Apex classes.",
   "keywords": [
     "apex",
@@ -94,7 +94,7 @@
   },
   "dependencies": {
     "@cparra/apex-reflection": "2.19.0",
-    "@salesforce/source-deploy-retrieve": "^12.
+    "@salesforce/source-deploy-retrieve": "^12.20.1",
     "@types/js-yaml": "^4.0.9",
     "@types/yargs": "^17.0.32",
     "chalk": "^4.1.2",