@haxtheweb/create 9.0.15 → 9.0.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/create.js +19 -1
- package/dist/lib/micro-frontend-registry.js +6 -5
- package/dist/lib/programs/site.js +205 -16
- package/package.json +3 -3
package/dist/create.js
CHANGED
@@ -34,7 +34,7 @@ async function main() {
 .option('--org <char>', 'organization for package.json').option('--author <char>', 'author for site / package.json').option('--writeHaxProperties', 'Write haxProperties for the element')
 
 // options for site
-.option('--import-site <char>', 'URL of site to import').option('--import-structure <char>', `import method to use:\n\rpressbooksToSite\n\relmslnToSite\n\rhaxcmsToSite\n\rnotionToSite\n\rgitbookToSite\n\revolutionToSite\n\rhtmlToSite\n\rdocxToSite`).option('--node-op <char>', 'node operation to perform').option('--item-id <char>', 'node ID to operate on').option('--name <char>', 'name of the project').option('--domain <char>', 'published domain name').option('--items-import <char>', 'import items from a file / site').helpCommand(true);
+.option('--import-site <char>', 'URL of site to import').option('--import-structure <char>', `import method to use:\n\rpressbooksToSite\n\relmslnToSite\n\rhaxcmsToSite\n\rnotionToSite\n\rgitbookToSite\n\revolutionToSite\n\rhtmlToSite\n\rdocxToSite`).option('--node-op <char>', 'node operation to perform').option('--item-id <char>', 'node ID to operate on').option('--name <char>', 'name of the project').option('--domain <char>', 'published domain name').option('--title-scrape <char>', 'CSS Selector for `title` in resource').option('--content-scrape <char>', 'CSS Selector for `body` in resource').option('--items-import <char>', 'import items from a file / site').helpCommand(true);
 
 // default command which runs interactively
 _commander.program.command('start').description('Interactive program to pick options').action(() => {
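The two new flags take CSS selectors that are later applied to fetched or imported markup (see the node:add changes in site.js below). A minimal sketch of that selector step, assuming the node-html-parser dependency already declared in package.json; the sample markup and selector values here are hypothetical:

    // Sketch only: how a --title-scrape / --content-scrape pair could be applied.
    const { parse } = require('node-html-parser');

    const html = '<html><body><h1 class="page-title">Intro</h1><div id="main"><p>Hello</p></div></body></html>';
    const dom = parse(html);
    // --title-scrape '.page-title' -> node title
    const title = dom.querySelector('.page-title').textContent;   // "Intro"
    // --content-scrape '#main' -> node contents
    const contents = dom.querySelector('#main').innerHTML;        // "<p>Hello</p>"
    console.log(title, contents);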
@@ -347,6 +347,24 @@ async function main() {
 });
 }
 },
+theme: async ({
+results
+}) => {
+let themes = await (0, _site.siteThemeList)();
+if (!commandRun.options.theme) {
+// support having no theme but autoselecting
+if (!commandRun.options.auto && !commandRun.options.skip) {
+commandRun.options.theme = themes[0];
+} else {
+return p.select({
+message: "Theme:",
+required: false,
+options: themes,
+initialValue: themes[0]
+});
+}
+}
+},
 extras: ({
 results
 }) => {
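The new theme step either fills in a theme automatically or falls back to an interactive prompt built on @clack/prompts (already a dependency). A small sketch of that prompt pattern, assuming a theme list shaped as {value, label} pairs like the select options above; the list contents are hypothetical:

    // Sketch only: the @clack/prompts select call the theme step falls back to.
    const p = require('@clack/prompts');

    async function pickTheme(themes) {
      // Resolves to the chosen option's `value`; cancelling returns a cancel symbol.
      const choice = await p.select({
        message: 'Theme:',
        options: themes,
        initialValue: themes[0].value,
      });
      return p.isCancel(choice) ? themes[0].value : choice;
    }

    // Hypothetical theme list; in the CLI this comes from siteThemeList().
    pickTheme([
      { value: 'clean-one', label: 'Clean One' },
      { value: 'clean-two', label: 'Clean Two' },
    ]).then(console.log);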
package/dist/lib/micro-frontend-registry.js
CHANGED

@@ -8,6 +8,7 @@ exports.enableCoreServices = enableCoreServices;
 exports.enableExperimentalServices = enableExperimentalServices;
 exports.enableHAXcmsServices = enableHAXcmsServices;
 exports.enableServices = enableServices;
+var _statements = require("./statements.js");
 // because node hates mixing modern web at times this is a fork of @haxtheweb/micro-frontend-registry
 // and can fall out of date
 /**
@@ -50,8 +51,8 @@ const MicroFrontendRegCapabilities = function (SuperClass) {
 */
 define(item) {
 if (!(item instanceof MicroFrontend)) {
-
-
+(0, _statements.log)("MicroFrontendRegistry: use class MicroFrontend instance but if keys match it will register still.", 'warn');
+(0, _statements.log)(item, 'warn');
 }
 // validate item has all keys we care about
 if (Object.keys(item).every(key => MicroFrontendKeys.includes(key))) {

@@ -102,7 +103,7 @@ const MicroFrontendRegCapabilities = function (SuperClass) {
 }
 }
 if (!testOnly) {
-
+(0, _statements.log)(`call for ${name} but not found in micro-frontend-registry`, 'error');
 }
 return null;
 }

@@ -170,7 +171,7 @@ const MicroFrontendRegCapabilities = function (SuperClass) {
 data: null
 };
 }).catch((e, d) => {
-
+(0, _statements.log)("Request failed", 'warn');
 // this is endpoint completely failed to respond
 return {
 status: 500,

@@ -190,7 +191,7 @@ const MicroFrontendRegCapabilities = function (SuperClass) {
 data: null
 };
 }).catch((e, d) => {
-
+(0, _statements.log)("Request failed", 'warn');
 // this is endpoint completely failed to respond
 return {
 status: 500,
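These hunks replace direct console warnings with the shared log() helper from ./statements.js, called as log(message) or log(message, level). statements.js is not part of this diff, so the following is only a hypothetical stand-in that shows the call shape the new lines assume:

    // Hypothetical stand-in for the log(message, level) helper in ./statements.js.
    // The real implementation is not shown in this diff.
    function log(message, level = 'info') {
      switch (level) {
        case 'error':
          console.error(message);
          break;
        case 'warn':
          console.warn(message);
          break;
        default:
          console.log(message);
      }
    }

    log('Request failed', 'warn');
    log('call for mdToHtml but not found in micro-frontend-registry', 'error');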
package/dist/lib/programs/site.js
CHANGED

@@ -29,7 +29,7 @@ function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return
 function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
 // trick MFR into giving local paths
 globalThis.MicroFrontendRegistryConfig = {
-base: `@haxtheweb/open-apis
+base: `@haxtheweb/open-apis`
 };
 // emable HAXcms routes so we have name => path just like on frontend!
 _microFrontendRegistry.MicroFrontendRegistry.enableServices(['core', 'haxcms', 'experimental']);
@@ -110,6 +110,9 @@ function siteActions() {
 }, {
 value: 'site:sync',
 label: "Sync git repo"
+}, {
+value: 'site:surge',
+label: "Publish site to Surge.sh"
 }];
 }
 async function siteCommandDetected(commandRun) {

@@ -137,12 +140,6 @@ async function siteCommandDetected(commandRun) {
 // infinite loop until quitting the cli
 while (operation.action !== 'quit') {
 let actions = siteActions();
-if (sysSurge) {
-actions.push({
-value: 'site:surge',
-label: "Publish site to Surge.sh"
-});
-}
 actions.push({
 value: 'quit',
 label: "🚪 Quit"

@@ -432,7 +429,6 @@ async function siteCommandDetected(commandRun) {
 break;
 case "node:add":
 try {
-// @todo accept title if not supplied
 if (!commandRun.options.title) {
 commandRun.options.title = await p.text({
 message: `Title for this page`,
@@ -445,16 +441,66 @@ async function siteCommandDetected(commandRun) {
 }
 });
 }
-
+var createNodeBody = {
 site: activeHaxsite,
 node: {
 title: commandRun.options.title
 }
-}
+};
+// this would be odd but could be direct with no format specified
+if (commandRun.options.content && !commandRun.options.format) {
+// only API where it's called contents and already out there {facepalm}
+// but user already has commands where it's --content as arg
+createNodeBody.node.contents = commandRun.options.content;
+} else if (commandRun.options.content && commandRun.options.format) {
+let locationContent = '';
+// if we have format set, then we need to interpret content as a url
+let location = commandRun.options.content;
+// support for address, as in import from some place else
+if (location.startsWith('https://') || location.startsWith('http://')) {
+locationContent = await fetch(location).then(d => d.ok ? d.text() : '');
+}
+// look on prem
+else if (fs.existsSync(location)) {
+locationContent = await fs.readFileSync(location);
+}
+// format dictates additional processing; html is default
+switch (commandRun.options.format) {
+case 'json':
+locationContent = JSON.parse(locationContent);
+break;
+case 'yaml':
+locationContent = await (0, _jsYaml.load)(locationContent);
+break;
+case 'md':
+let resp = await openApiBroker('@core', 'mdToHtml', {
+md: locationContent,
+raw: true
+});
+if (resp.res.data) {
+locationContent = resp.res.data;
+}
+break;
+}
+// support for scraper mode to find title from the content responsee
+if (commandRun.options.titleScrape) {
+let dom = (0, _nodeHtmlParser.parse)(`${locationContent}`);
+createNodeBody.node.title = dom.querySelector(`${commandRun.options.titleScrape}`).textContent;
+}
+// support scraper mode which targets a wrapper for the actual content
+if (commandRun.options.contentScrape) {
+let dom = (0, _nodeHtmlParser.parse)(`${locationContent}`);
+locationContent = dom.querySelector(`${commandRun.options.contentScrape}`).innerHTML;
+}
+createNodeBody.node.contents = locationContent;
+}
+let resp = await haxcmsNodejsCli.cliBridge('createNode', createNodeBody);
 if (commandRun.options.v) {
 (0, _statements.log)(resp.res.data);
 }
-
+if (!commandRun.options.quiet) {
+(0, _statements.log)(`"${createNodeBody.node.title}" added to site`);
+}
 } catch (e) {
 (0, _statements.log)(e.stderr);
 }
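node:add now resolves --content either as inline markup or, when --format is set, as a URL or local file whose contents are converted before the page is created. A condensed sketch of that resolution step, assuming the js-yaml and global fetch (Node 18+) pieces the hunk itself relies on; the mdToHtml microservice call is left out here and the sample file path is hypothetical:

    // Sketch only: resolve --content by --format (json | yaml | default html/text).
    const fs = require('fs');
    const yaml = require('js-yaml');

    async function resolveContent(content, format) {
      if (!format) return content; // treat --content as inline markup
      // With a format, --content points at a URL or an on-disk file.
      let raw = '';
      if (content.startsWith('http://') || content.startsWith('https://')) {
        raw = await fetch(content).then((d) => (d.ok ? d.text() : ''));
      } else if (fs.existsSync(content)) {
        raw = fs.readFileSync(content, 'utf8');
      }
      switch (format) {
        case 'json':
          return JSON.parse(raw);
        case 'yaml':
          return yaml.load(raw);
        default:
          return raw; // html passes through; 'md' goes to the mdToHtml service in the CLI
      }
    }

    resolveContent('./notes.yaml', 'yaml').then(console.log); // hypothetical local file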
@@ -538,8 +584,52 @@ async function siteCommandDetected(commandRun) {
 // ensure we set empty values, just not completely undefined values
 if (typeof commandRun.options[commandRun.options.nodeOp] !== "undefined") {
 if (commandRun.options.nodeOp === 'content') {
-
-
+let locationContent = '';
+// this would be odd but could be direct with no format specified
+if (commandRun.options.content && !commandRun.options.format) {
+locationContent = commandRun.options.content;
+}
+// this implies what we were given needs processing as a file / url
+else if (commandRun.options.content && commandRun.options.format) {
+// if we have format set, then we need to interpret content as a url
+let location = commandRun.options.content;
+// support for address, as in import from some place else
+if (location.startsWith('https://') || location.startsWith('http://')) {
+locationContent = await fetch(location).then(d => d.ok ? d.text() : '');
+}
+// look on prem
+else if (fs.existsSync(location)) {
+locationContent = await fs.readFileSync(location);
+}
+// format dictates additional processing; html is default
+switch (commandRun.options.format) {
+case 'json':
+locationContent = JSON.parse(locationContent);
+break;
+case 'yaml':
+locationContent = await (0, _jsYaml.load)(locationContent);
+break;
+case 'md':
+let resp = await openApiBroker('@core', 'mdToHtml', {
+md: locationContent,
+raw: true
+});
+if (resp.res.data) {
+locationContent = resp.res.data;
+}
+break;
+}
+// support scraper mode which targets a wrapper for the actual content
+if (commandRun.options.contentScrape) {
+let dom = (0, _nodeHtmlParser.parse)(`${locationContent}`);
+locationContent = dom.querySelector(`${commandRun.options.contentScrape}`).innerHTML;
+}
+}
+// if we have content (meaning it's not blank) then try to write the page location
+if (locationContent && (await page.writeLocation(locationContent))) {
+if (!commandRun.options.quiet) {
+(0, _statements.log)(`node:edit success updated page content: "${page.id}`);
+}
 } else {
 console.warn(`node:edit failure to write page content : ${page.id}`);
 }
@@ -639,9 +729,20 @@ async function siteCommandDetected(commandRun) {
 break;
 case "site:surge":
 try {
+// attempt to install; implies they asked to publish with surge but
+// system test did not see it globally
+if (!sysSurge) {
+let s = p.spinner();
+s.start((0, _statements.merlinSays)('Installing Surge.sh globally so we can publish'));
+let execOutput = await exec(`npm install --global surge`);
+s.stop((0, _statements.merlinSays)('surge.sh installed globally'));
+(0, _statements.log)(execOutput.stdout.trim());
+sysSurge = true;
+}
 if (!commandRun.options.domain) {
 commandRun.options.domain = await p.text({
 message: `Domain for surge`,
+initialValue: `haxcli-${activeHaxsite.name}.surge.sh`,
 defaultValue: `haxcli-${activeHaxsite.name}.surge.sh`,
 required: true,
 validate: value => {

@@ -653,6 +754,7 @@ async function siteCommandDetected(commandRun) {
 }
 let execOutput = await exec(`cd ${activeHaxsite.directory} && surge . ${commandRun.options.domain}`);
 (0, _statements.log)(execOutput.stdout.trim());
+(0, _statements.log)(`Site published: https://${commandRun.options.domain}`);
 } catch (e) {
 (0, _statements.log)(e.stderr);
 }
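site:surge now installs the surge CLI globally on demand, suggests a haxcli-<name>.surge.sh domain, and reports the published URL. A minimal sketch of the underlying shell steps via a promisified exec, which is how the execOutput.stdout usage above reads; the directory and project name here are hypothetical:

    // Sketch only: on-demand surge install plus publish, using promisified exec.
    const util = require('util');
    const exec = util.promisify(require('child_process').exec);

    async function publishToSurge(directory, name) {
      const domain = `haxcli-${name}.surge.sh`; // suggested default; can be overridden
      // Install the surge CLI globally if a prior system check did not find it.
      await exec('npm install --global surge');
      // Publish the built site directory (surge may prompt for login on first use).
      const out = await exec(`cd ${directory} && surge . ${domain}`);
      console.log(out.stdout.trim());
      console.log(`Site published: https://${domain}`);
    }

    publishToSurge('./mysite', 'mysite'); // hypothetical directory and name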
@@ -726,11 +828,21 @@ async function siteCommandDetected(commandRun) {
 });
 if (commandRun.options.toFile) {
 fs.writeFileSync(commandRun.options.toFile, resp.res.data.data);
+if (!commandRun.options.quiet) {
+(0, _statements.log)(`${commandRun.options.toFile} written`);
+}
 } else {
 (0, _statements.log)(resp.res.data.data);
 }
 } else {
-(
+if (commandRun.options.toFile) {
+fs.writeFileSync(commandRun.options.toFile, siteContent);
+if (!commandRun.options.quiet) {
+(0, _statements.log)(`${commandRun.options.toFile} written`);
+}
+} else {
+(0, _statements.log)(siteContent);
+}
 }
 }
 break;

@@ -739,7 +851,8 @@ async function siteCommandDetected(commandRun) {
 process.exit(0);
 break;
 }
-
+// y or noi need to act like it ran and finish instead of looping options
+if (commandRun.options.y || !commandRun.options.i) {
 process.exit(0);
 }
 operation.action = null;
@@ -853,7 +966,7 @@ async function siteProcess(commandRun, project, port = '3000') {
 "site": {
 "name": project.name,
 "description": "own course",
-"theme": commandRun.options.theme ? commandRun.options.theme : "clean-one"
+"theme": commandRun.options.theme ? commandRun.options.theme : project.theme ? project.theme : "clean-one"
 },
 "build": {
 "type": "own",
@@ -885,6 +998,82 @@ async function siteProcess(commandRun, project, port = '3000') {
 siteRequest.build.files = resp.res.data.data.files;
 }
 }
+// hidden import methodologies
+else if (commandRun.options.importStructure) {
+if (commandRun.options.importStructure === 'drupal7-book-print-html') {
+let siteContent = await fetch(commandRun.options.importSite).then(d => d.ok ? d.text() : '');
+if (siteContent) {
+// @todo refactor to support 9 levels of heirarchy as this is technically what Drupal supports
+let dom = (0, _nodeHtmlParser.parse)(siteContent);
+// pull all of level 1 of hierarchy
+let depth;
+let order = 0;
+let parent = null;
+let items = [];
+for (let branch1 of dom.querySelectorAll('.section-2')) {
+parent = null;
+depth = 0;
+let itemID = branch1.getAttribute('id');
+let item = {
+id: itemID,
+order: order,
+indent: depth,
+title: branch1.querySelector('h1').innerText,
+slug: itemID.replace('-', '/'),
+contents: branch1.querySelector(`.field.field-name-body .field-item`).innerHTML,
+parent: parent
+};
+items.push(item);
+order++;
+depth = 1;
+let parent2 = itemID;
+let order2 = 0;
+for (let branch2 of branch1.querySelectorAll('.section-3')) {
+itemID = branch2.getAttribute('id');
+let item = {
+id: itemID,
+order: order2,
+indent: depth,
+title: branch2.querySelector('h1').innerText,
+slug: itemID.replace('-', '/'),
+contents: branch2.querySelector(`.field.field-name-body .field-item`).innerHTML,
+parent: parent2
+};
+items.push(item);
+order2++;
+depth = 2;
+let parent3 = itemID;
+let order3 = 0;
+for (let branch3 of branch2.querySelectorAll('.section-4')) {
+itemID = branch3.getAttribute('id');
+let item = {
+id: itemID,
+order: order3,
+indent: depth,
+title: branch3.querySelector('h1').innerText,
+slug: itemID.replace('-', '/'),
+contents: branch3.querySelector(`.field.field-name-body .field-item`).innerHTML,
+parent: parent3
+};
+items.push(item);
+order3++;
+}
+}
+// obtain all images on the system to bring along with additional spider request
+let location = new URL(commandRun.options.importSite).origin;
+var files = {};
+for (let image of dom.querySelectorAll("img[src^='/']")) {
+if (!image.getAttribute('src').startsWith('//')) {
+files[image.getAttribute('src')] = `${location}${image.getAttribute('src')}`;
+}
+}
+siteRequest.build.files = files;
+}
+siteRequest.build.structure = 'import';
+siteRequest.build.items = items;
+}
+}
+}
 }
 HAXCMS.cliWritePath = `${project.path}`;
 let res = new Res();
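The drupal7-book-print-html importer walks the printed book's .section-2/.section-3/.section-4 wrappers to rebuild a three-level outline and collects site-relative images for a follow-up spider request. A compact sketch of that walk, generalized into a single recursive pass over the three selectors; it assumes the same node-html-parser dependency, and the sample markup is hypothetical:

    // Sketch only: flatten Drupal 7 "book print" sections into ordered outline items.
    const { parse } = require('node-html-parser');

    function collectItems(dom) {
      const items = [];
      const levels = ['.section-2', '.section-3', '.section-4']; // indent 0, 1, 2
      const walk = (scope, depth, parent) => {
        let order = 0;
        for (const branch of scope.querySelectorAll(levels[depth])) {
          const id = branch.getAttribute('id');
          items.push({
            id,
            order: order++,
            indent: depth,
            title: branch.querySelector('h1').innerText,
            slug: id.replace('-', '/'),
            contents: branch.querySelector('.field.field-name-body .field-item').innerHTML,
            parent,
          });
          if (depth + 1 < levels.length) walk(branch, depth + 1, id);
        }
      };
      walk(dom, 0, null);
      return items;
    }

    // Hypothetical markup shaped like a Drupal 7 book print page.
    const dom = parse('<div class="section-2" id="ch-1"><h1>Chapter 1</h1><div class="field field-name-body"><div class="field-item">Hi</div></div></div>');
    console.log(collectItems(dom));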
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@haxtheweb/create",
-"version": "9.0.
+"version": "9.0.16",
 "publishConfig": {
 "access": "public"
 },

@@ -42,8 +42,8 @@
 "dependencies": {
 "@clack/core": "0.3.4",
 "@clack/prompts": "0.7.0",
-"@haxtheweb/haxcms-nodejs": "^9.0.
-"@haxtheweb/open-apis": "^9.0.
+"@haxtheweb/haxcms-nodejs": "^9.0.20",
+"@haxtheweb/open-apis": "^9.0.12",
 "commander": "12.1.0",
 "node-html-parser": "6.1.13",
 "ejs": "3.1.10",