@haxtheweb/create 9.0.10 → 9.0.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -5,16 +5,55 @@ Rapidly build web components for the Web that work with HAX. HAX The Web's CLI t
5
5
  # this allows you to then use hax command
6
6
  npm install @haxtheweb/create --global
7
7
  # then run
8
- hax
8
+ hax start
9
9
  ```
10
10
 
11
11
  # Commands
12
12
 
13
13
  ## Default / global / new context
14
- - `hax` - fun ascii art and interactive CLI (via [clack](https://www.clack.cc/) )
14
+ - `hax start` - fun ascii art and interactive CLI (via [clack](https://www.clack.cc/) )
15
15
  - `hax --name my-element --y` - Make a new HAX capable, i18n wired, Design system (DDD) driven web component
16
16
  - if in a monorepo root, will place in correct location / inherit settings
17
- - `hax --type haxsite --name mysite --y` - create a new HAXsite (HAXcms, single site)
17
+ - `hax site mysite --y` - create a new HAXsite (HAXcms, single site)
18
+
19
+ ## --help
20
+ ```
21
+ Usage: hax [options] [command]
22
+
23
+ Options:
24
+ --
25
+ --v Verbose output for developers
26
+ --path <char> where to perform operation
27
+ --npm-client <char> npm client to use (must be installed) npm,
28
+ yarn, pnpm (default: "npm")
29
+ --y yes to all questions
30
+ --skip skip frills like animations
31
+ --auto yes to all questions, alias of y
32
+ --org <char> organization for package.json
33
+ --author <char> author for site / package.json
34
+ --import-site <char> URL of site to import
35
+ --node-op <char> node operation to perform
36
+ --item-id <char> node ID to operate on
37
+ --name <char> name of the project
38
+ --domain <char> published domain name
39
+ --title <char> Title
40
+ --content <char> Page content
41
+ --slug <char> Path (slug)
42
+ --published <char> Publishing status
43
+ --tags <char> Tags
44
+ --parent <char> Parent
45
+ --order <char> Order
46
+ --theme <char> Theme
47
+ --hide-in-menu <char> Hide in menu
48
+ -h, --help display help for command
49
+
50
+ Commands:
51
+ start Interactive program to pick options
52
+ site [options] [action]
53
+ webcomponent [options] [name] Create Lit based web components, with HAX
54
+ recommendations
55
+ help [command] display help for command
56
+ ```
18
57
 
19
58
  ## Site context
20
59
  - listing stats
package/dist/create.js CHANGED
@@ -34,7 +34,7 @@ async function main() {
34
34
  .option('--org <char>', 'organization for package.json').option('--author <char>', 'author for site / package.json')
35
35
 
36
36
  // options for site
37
- .option('--node-op <char>', 'node operation to perform').option('--item-id <char>', 'node ID to operate on').option('--name <char>', 'name of the project').option('--domain <char>', 'published domain name').helpCommand(true);
37
+ .option('--import-site <char>', 'URL of site to import').option('--import-structure <char>', `import method to use:\n\rpressbooksToSite\n\relmslnToSite\n\rhaxcmsToSite\n\rnotionToSite\n\rgitbookToSite\n\revolutionToSite\n\rhtmlToSite\n\rdocxToSite`).option('--node-op <char>', 'node operation to perform').option('--item-id <char>', 'node ID to operate on').option('--name <char>', 'name of the project').option('--domain <char>', 'published domain name').helpCommand(true);
38
38
 
39
39
  // default command which runs interactively
40
40
  _commander.program.command('start').description('Interactive program to pick options').action(() => {
@@ -61,7 +61,7 @@ async function main() {
61
61
  commandRun.arguments.action = action;
62
62
  commandRun.options.skip = true;
63
63
  }
64
- }).option('--path <char>', 'path the project should be created in').option('--name <char>', 'name of the site (when creating a new one)').option('--domain <char>', 'published domain name').option('--node-op <char>', 'node operation to perform').version(await HAXCMS.getHAXCMSVersion());
64
+ }).option('--path <char>', 'path the project should be created in').option('--import-site <char>', 'URL of site to import').option('--import-structure <char>', `import method to use:\n\rpressbooksToSite\n\relmslnToSite\n\rhaxcmsToSite\n\rnotionToSite\n\rgitbookToSite\n\revolutionToSite\n\rhtmlToSite\n\rdocxToSite`).option('--name <char>', 'name of the site (when creating a new one)').option('--domain <char>', 'published domain name').option('--node-op <char>', 'node operation to perform').version(await HAXCMS.getHAXCMSVersion());
65
65
  let siteNodeOps = (0, _site.siteNodeOperations)();
66
66
  for (var i in siteNodeOps) {
67
67
  _commander.program.option(`--${(0, _utils.camelToDash)(siteNodeOps[i].value)} <char>`, `${siteNodeOps[i].label}`);
@@ -169,6 +169,12 @@ async function main() {
169
169
  while (project.type !== 'quit') {
170
170
  if (activeProject) {
171
171
  p.note(` 🧙🪄 BE GONE ${_picocolors.default.bold(_picocolors.default.black(_picocolors.default.bgGreen(activeProject)))} sub-process daemon! 🪄 + ✨ 👹 = 💀 `);
172
+ // if the command was run non-interactively (--y), we're done; exit
173
+ if (commandRun.options.y) {
174
+ (0, _statements.communityStatement)();
175
+ process.exit(0);
176
+ }
177
+ // otherwise reset everything to null so the program can run again
172
178
  commandRun = {
173
179
  command: null,
174
180
  arguments: {},
@@ -206,6 +212,12 @@ async function main() {
206
212
  }
207
213
  });
208
214
  }
215
+ // detect being in a haxcms scaffold. easiest way is _sites being in this directory
216
+ // set the path automatically so we skip the question
217
+ if (commandRun.command === "site" && fs.existsSync(`${process.cwd()}/_sites`)) {
218
+ p.intro(`${_picocolors.default.bgBlack(_picocolors.default.white(` HAXcms detected : Path set automatically `))}`);
219
+ commandRun.options.path = `${process.cwd()}/_sites`;
220
+ }
209
221
  activeProject = project.type;
210
222
  // silly but this way we don't have to take options for quitting
211
223
  if (project.type !== 'quit') {
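
The new `--import-site` and `--import-structure` options added here key into the micro-frontend registry by name: each listed structure (`pressbooksToSite`, `elmslnToSite`, and so on) corresponds to a `@haxcms/*` converter. The large hunk that follows bundles what appears to be a Node fork of `@haxtheweb/micro-frontend-registry` (its own header comment says as much). A minimal sketch of that name mapping, using illustrative require paths and a hypothetical helper name:

```
// Minimal sketch, not part of the package: check an --import-structure value
// the way siteProcess() later does, by looking for a matching registry entry.
// The require path and isKnownImportStructure() are illustrative only.
globalThis.MicroFrontendRegistryConfig = { base: "@haxtheweb/open-apis/" };
const {
  MicroFrontendRegistry,
  enableServices,
} = require("./micro-frontend-registry.js");

enableServices(["haxcms"]); // registers @haxcms/pressbooksToSite, @haxcms/docxToSite, ...

function isKnownImportStructure(structure) {
  return MicroFrontendRegistry.has(`@haxcms/${structure}`);
}

console.log(isKnownImportStructure("pressbooksToSite")); // true
console.log(isKnownImportStructure("wordpressToSite"));  // false; no such converter registered
```
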
@@ -0,0 +1,722 @@
1
+ "use strict";
2
+
3
+ Object.defineProperty(exports, "__esModule", {
4
+ value: true
5
+ });
6
+ exports.MicroFrontendRegistryNodeJS = exports.MicroFrontendRegistry = exports.MicroFrontendRegCapabilities = exports.MicroFrontend = void 0;
7
+ exports.enableCoreServices = enableCoreServices;
8
+ exports.enableExperimentalServices = enableExperimentalServices;
9
+ exports.enableHAXcmsServices = enableHAXcmsServices;
10
+ exports.enableServices = enableServices;
11
+ // because node hates mixing modern web at times this is a fork of @haxtheweb/micro-frontend-registry
12
+ // and can fall out of date
13
+ /**
14
+ * Copyright 2022 The Pennsylvania State University
15
+ * @license Apache-2.0, see License.md for full text.
16
+ */
17
+
18
+ // very basic class for micro
19
+ const MicroFrontendKeys = ["endpoint", "name", "title", "description", "params", "callback", "method"];
20
+
21
+ // new micro
22
+ class MicroFrontend {
23
+ constructor(values = {}) {
24
+ // set defaults for each key expected
25
+ MicroFrontendKeys.map(key => key === "params" ? this[key] = values[key] || {} : this[key] = values[key] || null);
26
+ }
27
+ }
28
+ exports.MicroFrontend = MicroFrontend;
29
+ const MicroFrontendRegCapabilities = function (SuperClass) {
30
+ return class extends SuperClass {
31
+ constructor() {
32
+ super();
33
+ this.list = [];
34
+ this.MicroFrontend = MicroFrontend;
35
+ }
36
+
37
+ /**
38
+ * Adding more or less alias for define
39
+ * @param {Object} params
40
+ */
41
+ add(params) {
42
+ this.define(new MicroFrontend(params));
43
+ }
44
+
45
+ /**
46
+ * define a new micro frontend
47
+ *
48
+ * @param {MicroFrontend} item - instanceof MicroFrontend
49
+ * @returns {Boolean} status of definition being accepted
50
+ */
51
+ define(item) {
52
+ if (!(item instanceof MicroFrontend)) {
53
+ console.warn("MicroFrontendRegistry: use class MicroFrontend instance but if keys match it will register still.");
54
+ console.warn(item);
55
+ }
56
+ // validate item has all keys we care about
57
+ if (Object.keys(item).every(key => MicroFrontendKeys.includes(key))) {
58
+ // support for local resolution of vercel vs serve for things that are
59
+ // built off of the main registry on localhost
60
+ if (item.endpoint.startsWith("/api/")) {
61
+ var base = "";
62
+ // support base rewrite
63
+ if (globalThis.MicroFrontendRegistryConfig.base) {
64
+ base = globalThis.MicroFrontendRegistryConfig.base;
65
+ }
66
+ // keep local based on if we're local, otherwise we need to leverage deployed address
67
+ else if ((!globalThis.HAXCMSContext || globalThis.HAXCMSContext !== "nodejs") && (globalThis.location.origin.startsWith("http://127.0.0.1") || globalThis.location.origin.startsWith("http://localhost"))) {
68
+ base = globalThis.location.origin.replace(/127.0.0.1:8(.*)/, "localhost:3000").replace(/localhost:8(.*)/, "localhost:3000");
69
+ }
70
+ // most common case, hit production open api address
71
+ else {
72
+ base = "https://open-apis.hax.cloud";
73
+ }
74
+ item.endpoint = `${base}${item.endpoint}`;
75
+ }
76
+ // check for registry config object
77
+ if (globalThis.MicroFrontendRegistryConfig[item.name]) {
78
+ Object.keys(globalThis.MicroFrontendRegistryConfig[item.name]).map(key => {
79
+ item[key] = globalThis.MicroFrontendRegistryConfig[item.name][key];
80
+ });
81
+ }
82
+ if (!this.has(item.name)) {
83
+ this.list.push(item);
84
+ return true;
85
+ }
86
+ } else {
87
+ return false;
88
+ }
89
+ }
90
+
91
+ /**
92
+ * get the definition for a machine named micro
93
+ *
94
+ * @param {String} name - machine name of the micro record requested
95
+ * @returns {MicroFrontend} the micro in question
96
+ */
97
+ get(name, testOnly = false) {
98
+ if (name && this.list.length > 0) {
99
+ const found = this.list.find(item => item.name === name);
100
+ if (found) {
101
+ return found;
102
+ }
103
+ }
104
+ if (!testOnly) {
105
+ console.error(`call for ${name} but not found in micro-frontend-registry`);
106
+ }
107
+ return null;
108
+ }
109
+
110
+ /**
111
+ * boolean for having the definition for a machine named micro
112
+ *
113
+ * @param {String} name - machine name of the micro record requested
114
+ * @returns {Boolean} if we have this micro
115
+ */
116
+ has(name) {
117
+ return this.get(name, true) !== null;
118
+ }
119
+
120
+ /**
121
+ * set the definition for a machine named micro that was already registered
122
+ *
123
+ * @param {String} name - machine name of the micro record requested
124
+ * @param {MicroFrontend} item - updated micro data
125
+ * @returns {MicroFrontend} the micro in question
126
+ */
127
+ set(name, item = {}) {
128
+ if (name && this.list.length > 0 && this.has(name)) {
129
+ const index = this.list.findIndex(item => item.name === name);
130
+ this.list[index] = item;
131
+ }
132
+ return null;
133
+ }
134
+
135
+ /**
136
+ * generate the call to the micro based on accepting name and params
137
+ *
138
+ * @param {String} name - machine name for the micro to call
139
+ * @param {Object} params - data to send to endpoint
140
+ * @param {Function} callback - Function callback on data return
141
+ * @param {Object} caller - reference to DOM node that called this
142
+ * @param {String} urlStringAddon - a string to add onto the fetch at the end. edge of edge of edge land here
143
+ * @returns {Object} Response object from microservice, otherwise `null`
144
+ */
145
+ async call(name, params = {}, callback = null, caller = null, urlStringAddon = "") {
146
+ if (this.has(name)) {
147
+ const item = this.get(name);
148
+ // default post, but this is not cacheable
149
+ let method = "POST";
150
+ // support definition requiring a certain method
151
+ if (item.method) {
152
+ method = item.method;
153
+ }
154
+ // support override when calling
155
+ if (params.__method) {
156
+ method = params.__method;
157
+ delete params.__method;
158
+ }
159
+ let data = null;
160
+ switch (method) {
161
+ case "GET":
162
+ case "HEAD":
163
+ // support for formdata which is already encoded
164
+ const searchParams = new URLSearchParams(params).toString();
165
+ data = await fetch(searchParams ? `${item.endpoint}?${searchParams}${urlStringAddon}` : item.endpoint + urlStringAddon, {
166
+ method: method
167
+ }).then(d => {
168
+ return d.ok ? d.json() : {
169
+ status: d.status,
170
+ data: null
171
+ };
172
+ }).catch((e, d) => {
173
+ console.warn("Request failed", e);
174
+ // this means the endpoint completely failed to respond
175
+ return {
176
+ status: 500,
177
+ data: null
178
+ };
179
+ });
180
+ break;
181
+ case "POST":
182
+ default:
183
+ // support for formdata which is already encoded
184
+ data = await fetch(item.endpoint + urlStringAddon, {
185
+ method: method,
186
+ body: params instanceof FormData ? params : JSON.stringify(params)
187
+ }).then(d => {
188
+ return d.ok ? d.json() : {
189
+ status: d.status,
190
+ data: null
191
+ };
192
+ }).catch((e, d) => {
193
+ console.warn("Request failed", e);
194
+ // this means the endpoint completely failed to respond
195
+ return {
196
+ status: 500,
197
+ data: null
198
+ };
199
+ });
200
+ break;
201
+ }
202
+ // endpoints can require a callback be hit every time
203
+ if (item.callback) {
204
+ await item.callback(data, caller);
205
+ }
206
+ if (callback) {
207
+ await callback(data, caller);
208
+ }
209
+ return data;
210
+ }
211
+ return null;
212
+ }
213
+
214
+ /**
215
+ * generate the call to the micro as a URL
216
+ *
217
+ * @param {String} name - machine name for the micro to call
218
+ * @param {Object} params - data to send to endpoint
219
+ * @returns {String} URL with parameters for a GET
220
+ */
221
+ url(name, params = {}) {
222
+ if (this.has(name)) {
223
+ const item = this.get(name);
224
+ // no null submissions
225
+ for (var key in params) {
226
+ if (params.hasOwnProperty(key)) {
227
+ if (params[key] == null) delete params[key];
228
+ }
229
+ }
230
+ return new URL(item.endpoint).toString() + `?${new URLSearchParams(params).toString()}`;
231
+ }
232
+ return "";
233
+ }
234
+ };
235
+ };
236
+ exports.MicroFrontendRegCapabilities = MicroFrontendRegCapabilities;
237
+ class MicroFrontendRegistryNodeJS extends MicroFrontendRegCapabilities(Object) {
238
+ constructor() {
239
+ super();
240
+ }
241
+ }
242
+
243
+ // register globally so we can make sure there is only one
244
+ exports.MicroFrontendRegistryNodeJS = MicroFrontendRegistryNodeJS;
245
+ globalThis.MicroFrontendRegistry = globalThis.MicroFrontendRegistry || {};
246
+ globalThis.MicroFrontendRegistryConfig = globalThis.MicroFrontendRegistryConfig || {};
247
+ globalThis.MicroFrontendRegistry.requestAvailability = () => {
248
+ if (!globalThis.MicroFrontendRegistry.instance) {
249
+ // weird but this would imply no DOM and thus node
250
+ if (globalThis.document && globalThis.document.body && globalThis.document.body.appendChild) {
251
+ globalThis.MicroFrontendRegistry.instance = globalThis.document.createElement(MicroFrontendRegistryEl.tag);
252
+ globalThis.document.body.appendChild(globalThis.MicroFrontendRegistry.instance);
253
+ } else {
254
+ globalThis.MicroFrontendRegistry.instance = new MicroFrontendRegistryNodeJS();
255
+ }
256
+ }
257
+ return globalThis.MicroFrontendRegistry.instance;
258
+ };
259
+ // most common way to access registry
260
+ const MicroFrontendRegistry = exports.MicroFrontendRegistry = globalThis.MicroFrontendRegistry.requestAvailability();
261
+
262
+ // integrate the core services of our webcomponents API layer
263
+ // While not required, this is the home for non-visual aspects of
264
+ // our ecosystem that can be leveraged independent of other things
265
+ // Examples of a platform specific implementation would be HAXcms
266
+ // and its namespacing
267
+
268
+ function enableServices(services) {
269
+ services.forEach(service => {
270
+ switch (service) {
271
+ case "core":
272
+ enableCoreServices();
273
+ break;
274
+ case "experimental":
275
+ enableExperimentalServices();
276
+ break;
277
+ case "haxcms":
278
+ enableHAXcmsServices();
279
+ break;
280
+ }
281
+ });
282
+ }
283
+ // map service enable to global
284
+ MicroFrontendRegistry.enableServices = enableServices;
285
+
286
+ // core services
287
+ function enableCoreServices() {
288
+ // linkValidator
289
+ MicroFrontendRegistry.add({
290
+ endpoint: "/api/services/website/linkValidator",
291
+ name: "@core/linkValidator",
292
+ method: "GET",
293
+ title: "Validate URLs",
294
+ description: "Validates that an array of URLs are valid by returning status codes of a HEAD request",
295
+ params: {
296
+ links: "link for processing as link otherwise unused"
297
+ }
298
+ });
299
+
300
+ // metadata
301
+ MicroFrontendRegistry.add({
302
+ endpoint: "/api/services/website/metadata",
303
+ name: "@core/metadata",
304
+ method: "GET",
305
+ title: "URL Metadata",
306
+ description: "Skims metadata off a link",
307
+ params: {
308
+ q: "url to process"
309
+ },
310
+ userContext: [{
311
+ action: "paste",
312
+ data: "url",
313
+ memory: {
314
+ isLoggedIn: true
315
+ },
316
+ result: function (data) {
317
+ return {
318
+ tag: "a",
319
+ content: data.title || data["og:site_name"] || data["og:title"] || data.url,
320
+ properties: {
321
+ href: data.url,
322
+ rel: "nofollow"
323
+ }
324
+ };
325
+ }
326
+ }]
327
+ });
328
+
329
+ // mdToHtml
330
+ MicroFrontendRegistry.add({
331
+ endpoint: "/api/services/media/format/mdToHtml",
332
+ name: "@core/mdToHtml",
333
+ title: "Markdown to HTML",
334
+ description: "Convert Markdown string (or file) to HTML",
335
+ params: {
336
+ md: "MD or link to be converted",
337
+ type: "link for processing as link otherwise unused"
338
+ }
339
+ });
340
+
341
+ // htmlToMd
342
+ MicroFrontendRegistry.add({
343
+ endpoint: "/api/services/media/format/htmlToMd",
344
+ name: "@core/htmlToMd",
345
+ title: "HTML to MD",
346
+ description: "Convert HTML string (or file) to MD",
347
+ params: {
348
+ html: "HTML or link to be converted",
349
+ type: "link for processing as link otherwise unused"
350
+ }
351
+ });
352
+ // htmlToPdf
353
+ MicroFrontendRegistry.add({
354
+ endpoint: "/api/services/media/format/htmlToPdf",
355
+ name: "@core/htmlToPdf",
356
+ title: "HTML to PDF",
357
+ description: "Convert HTML string (or file) to a PDF",
358
+ params: {
359
+ html: "HTML or link to be converted",
360
+ type: "link for processing as link otherwise unused"
361
+ }
362
+ });
363
+
364
+ // prettyHtml
365
+ MicroFrontendRegistry.add({
366
+ endpoint: "/api/services/media/format/prettyHtml",
367
+ name: "@core/prettyHtml",
368
+ title: "Pretty HTML",
369
+ description: "Format HTML string (or file) to be more human readable",
370
+ params: {
371
+ html: "HTML or link to be converted",
372
+ type: "link for processing as link otherwise unused"
373
+ }
374
+ });
375
+ // crypto
376
+ MicroFrontendRegistry.add({
377
+ endpoint: "/api/services/security/aes256",
378
+ name: "@core/crypto",
379
+ title: "Cryptography from string",
380
+ description: "Convert a string to or from an aes256 based hash",
381
+ params: {
382
+ data: "HTML or link to be converted",
383
+ op: "decrypt or hash"
384
+ }
385
+ });
386
+ // duckDuckGo
387
+ MicroFrontendRegistry.add({
388
+ endpoint: "/api/services/website/duckDuckGo",
389
+ name: "@core/duckDuckGo",
390
+ method: "GET",
391
+ title: "Duck Duck Go",
392
+ description: "Search results from duck duck go",
393
+ params: {
394
+ q: "query param to search on"
395
+ }
396
+ });
397
+
398
+ // screenshot - kept by itself bc of size of getBrowserInstance
399
+ MicroFrontendRegistry.add({
400
+ endpoint: "https://screenshoturl.open-apis.hax.cloud/api/screenshotUrl",
401
+ name: "@core/screenshotUrl",
402
+ method: "GET",
403
+ title: "Screenshot page",
404
+ description: "Takes screenshot of a URL and returns image",
405
+ params: {
406
+ urlToCapture: "full url with https",
407
+ quality: "Optional image quality parameter"
408
+ }
409
+ });
410
+
411
+ // docxToPdf
412
+ MicroFrontendRegistry.add({
413
+ endpoint: "/api/services/media/format/docxToPdf",
414
+ name: "@core/docxToPdf",
415
+ title: "Docx to pdf",
416
+ description: "Convert .docx file to PDF response (downloaded)",
417
+ params: {
418
+ body: "FormData class w/ uploaded file encoded into it"
419
+ }
420
+ });
421
+
422
+ // docxToHtml
423
+ MicroFrontendRegistry.add({
424
+ endpoint: "/api/services/media/format/docxToHtml",
425
+ name: "@core/docxToHtml",
426
+ title: "Docx to HTML",
427
+ description: "Convert .docx file to HTML",
428
+ params: {
429
+ body: "FormData class w/ uploaded file encoded into it"
430
+ }
431
+ });
432
+
433
+ // htmlToDocx
434
+ MicroFrontendRegistry.add({
435
+ endpoint: "/api/services/media/format/htmlToDocx",
436
+ name: "@core/htmlToDocx",
437
+ title: "HTML to docx",
438
+ description: "Convert HTML to .docx file",
439
+ params: {
440
+ html: "html body to be converted to a docx file download"
441
+ }
442
+ });
443
+
444
+ // imgToAscii
445
+ MicroFrontendRegistry.add({
446
+ endpoint: "/api/services/media/format/imgToAscii",
447
+ name: "@core/imgToAscii",
448
+ title: "Image to ascii art",
449
+ description: "Convert any valid image formatted file to ASCII terminal style art",
450
+ params: {
451
+ body: "FormData class w/ uploaded file encoded into it"
452
+ }
453
+ });
454
+
455
+ // imgManipulation
456
+ MicroFrontendRegistry.add({
457
+ endpoint: "/api/services/media/image/manipulate",
458
+ name: "@core/imgManipulate",
459
+ title: "simple image manipulation",
460
+ description: "scale, resize, convert and perform operations to manipulate any image",
461
+ params: {
462
+ src: "image source",
463
+ height: "height in numbers",
464
+ width: "width in numbers",
465
+ quality: "0-100, jpeg quality to reduce image by if jpeg",
466
+ fit: "how to crop if height and width are supplied (https://sharp.pixelplumbing.com/api-resize)",
467
+ watermark: "SRC for an image to watermark on the output",
468
+ wmspot: "nw,ne,se,sw for moving the location of the watermark",
469
+ rotate: "https://sharp.pixelplumbing.com/api-operation#rotate",
470
+ format: "png, jpg, gif, webp"
471
+ }
472
+ });
473
+ MicroFrontendRegistry.add({
474
+ endpoint: "/api/services/text/readability",
475
+ name: "@core/readability",
476
+ title: "readability score",
477
+ description: "Readability metrics from analyzing text",
478
+ params: {
479
+ body: "Block of text to enhance"
480
+ }
481
+ });
482
+ }
483
+
484
+ // HAXcms services
485
+ function enableHAXcmsServices() {
486
+ // docxToSite
487
+ MicroFrontendRegistry.add({
488
+ endpoint: "/api/apps/haxcms/docxToSite",
489
+ name: "@haxcms/docxToSite",
490
+ title: "Docx to Site",
491
+ description: "Convert .docx file to Site schema",
492
+ params: {
493
+ body: "FormData class w/ uploaded file encoded into it"
494
+ }
495
+ });
496
+ // htmlToSite
497
+ MicroFrontendRegistry.add({
498
+ endpoint: "/api/apps/haxcms/convert/htmlToSite",
499
+ name: "@haxcms/htmlToSite",
500
+ title: "HTML to Site",
501
+ description: "Convert HTML file location to Site schema",
502
+ params: {
503
+ repoUrl: "Location of the repo"
504
+ }
505
+ });
506
+ // evolutionToSite
507
+ MicroFrontendRegistry.add({
508
+ endpoint: "/system/api/importEvolution",
509
+ name: "@haxcms/evolutionToSite",
510
+ title: "Evolution to Site",
511
+ description: "Convert .zip and schema to valid HAXcms",
512
+ params: {
513
+ body: "FormData class w/ uploaded file encoded into it"
514
+ }
515
+ });
516
+ // gitbookToSite
517
+ MicroFrontendRegistry.add({
518
+ endpoint: "/api/apps/haxcms/convert/gitbookToSite",
519
+ name: "@haxcms/gitbookToSite",
520
+ title: "Gitbook to Site",
521
+ description: "Convert Gitbook baseed repo to valid HAXcms",
522
+ params: {
523
+ md: "Location of the repo"
524
+ }
525
+ });
526
+ // notionToSite
527
+ MicroFrontendRegistry.add({
528
+ endpoint: "/api/apps/haxcms/convert/notionToSite",
529
+ name: "@haxcms/notionToSite",
530
+ title: "Notion to Site",
531
+ description: "Convert notion baseed repo to valid HAXcms",
532
+ params: {
533
+ repoUrl: "Location of the repo"
534
+ }
535
+ });
536
+ // haxcmsToSite
537
+ MicroFrontendRegistry.add({
538
+ endpoint: "/api/apps/haxcms/convert/haxcmsToSite",
539
+ name: "@haxcms/haxcmsToSite",
540
+ title: "HAXcms to Site",
541
+ description: "Use a HAXcms site as the basis for a new one",
542
+ params: {
543
+ repoUrl: "Location of the site"
544
+ }
545
+ });
546
+ // elmslnToSite
547
+ MicroFrontendRegistry.add({
548
+ endpoint: "/api/apps/haxcms/convert/elmslnToSite",
549
+ name: "@haxcms/elmslnToSite",
550
+ title: "ELMS:LN to Site",
551
+ description: "Import an ELMS:LN site to HAXcms",
552
+ params: {
553
+ repoUrl: "Location of the site"
554
+ }
555
+ });
556
+ // pressbooksToSite
557
+ MicroFrontendRegistry.add({
558
+ endpoint: "/api/apps/haxcms/convert/pressbooksToSite",
559
+ name: "@haxcms/pressbooksToSite",
560
+ title: "Pressbooks to Site",
561
+ description: "Convert pressbooks HTML export to Site schema",
562
+ params: {
563
+ body: "FormData class w/ uploaded file encoded into it"
564
+ }
565
+ });
566
+ // insights
567
+ MicroFrontendRegistry.add({
568
+ endpoint: "/api/apps/haxcms/insights",
569
+ name: "@haxcms/insights",
570
+ title: "Site insights",
571
+ description: "States relative to the page, lesson, and site as a whole. Used for content authors.",
572
+ params: {
573
+ site: "location of the HAXcms site OR site.json data",
574
+ type: "site for site.json or link for remote loading",
575
+ activeId: "id to query from"
576
+ }
577
+ });
578
+ // contentBrowser
579
+ MicroFrontendRegistry.add({
580
+ endpoint: "/api/apps/haxcms/contentBrowser",
581
+ name: "@haxcms/contentBrowser",
582
+ title: "Content browser",
583
+ description: "Returns details about content relative to an activeID",
584
+ params: {
585
+ site: "location of the HAXcms site OR site.json data",
586
+ type: "site for site.json or link for remote loading",
587
+ activeId: "id to query from"
588
+ }
589
+ });
590
+ // mediaBrowser
591
+ MicroFrontendRegistry.add({
592
+ endpoint: "/api/apps/haxcms/mediaBrowser",
593
+ name: "@haxcms/mediaBrowser",
594
+ title: "Media browser",
595
+ description: "Returns details about media relative to an activeID",
596
+ params: {
597
+ site: "location of the HAXcms site OR site.json data",
598
+ type: "site for site.json or link for remote loading",
599
+ activeId: "id to query from"
600
+ }
601
+ });
602
+ // linkChecker
603
+ MicroFrontendRegistry.add({
604
+ endpoint: "/api/apps/haxcms/linkChecker",
605
+ name: "@haxcms/linkChecker",
606
+ title: "Check site links",
607
+ description: "Returns details about links relative to an activeID",
608
+ params: {
609
+ site: "location of the HAXcms site OR site.json data",
610
+ type: "site for site.json or link for remote loading",
611
+ activeId: "id to query from"
612
+ }
613
+ });
614
+ // courseStats
615
+ MicroFrontendRegistry.add({
616
+ endpoint: "/api/apps/haxcms/courseStats",
617
+ name: "@haxcms/courseStats",
618
+ title: "Course stats",
619
+ description: "Relevant stats for teaching relative to the ancestor in question",
620
+ params: {
621
+ site: "location of the HAXcms site OR site.json data",
622
+ type: "site for site.json or link for remote loading",
623
+ ancestor: "optional: ancestor to print from as opposed to entire site"
624
+ }
625
+ });
626
+ // siteToHtml
627
+ MicroFrontendRegistry.add({
628
+ endpoint: "/api/apps/haxcms/siteToHtml",
629
+ name: "@haxcms/siteToHtml",
630
+ title: "HAXcms Full Site HTML",
631
+ description: "Load entire HAXcms site via URL as HTML",
632
+ params: {
633
+ site: "location of the HAXcms site OR site.json data",
634
+ type: "site for site.json or link for remote loading",
635
+ ancestor: "optional: ancestor to print from as opposed to entire site"
636
+ }
637
+ });
638
+ // pageCache
639
+ MicroFrontendRegistry.add({
640
+ endpoint: "/api/apps/haxcms/pageCache",
641
+ method: "GET",
642
+ name: "@haxcms/pageCache",
643
+ title: "HAXcms Page cache",
644
+ description: "Load a page from a site via uuid",
645
+ params: {
646
+ site: "location of the HAXcms site OR site.json data",
647
+ type: "site for site.json or link for remote loading",
648
+ uuid: "page to return content of"
649
+ }
650
+ });
651
+ // siteManifest
652
+ MicroFrontendRegistry.add({
653
+ endpoint: "/api/apps/haxcms/siteManifest",
654
+ name: "@haxcms/siteManifest",
655
+ title: "HAXcms manifest",
656
+ description: "Load the manifest for a site based on URL",
657
+ params: {
658
+ site: "location of the HAXcms site OR site.json data"
659
+ }
660
+ });
661
+
662
+ // siteGlossary
663
+ MicroFrontendRegistry.add({
664
+ endpoint: "/api/apps/haxcms/siteGlossary",
665
+ name: "@haxcms/siteGlossary",
666
+ title: "HAXcms site Glossary",
667
+ description: "array of terms found in the glossary slug",
668
+ params: {
669
+ url: "location of the HAXcms site"
670
+ }
671
+ });
672
+
673
+ // views
674
+ MicroFrontendRegistry.add({
675
+ endpoint: "/api/apps/haxcms/views",
676
+ name: "@haxcms/views",
677
+ title: "Views, but for HAX",
678
+ description: "Views criteria for slicing and remixing HAX site data",
679
+ params: {
680
+ site: "location of the HAXcms site"
681
+ }
682
+ });
683
+
684
+ // termsInPage
685
+ MicroFrontendRegistry.add({
686
+ endpoint: "/api/apps/haxcms/termsInPage",
687
+ name: "@haxcms/termsInPage",
688
+ title: "HAXcms Page terms",
689
+ description: "array of terms from glossary found in a blob of html",
690
+ params: {
691
+ body: "HTML blob to process",
692
+ site: "location of the HAXcms site OR site.json data",
693
+ type: "site for site.json or link for remote loading",
694
+ wikipedia: "if wikipedia links should be included in response, if found",
695
+ terms: "Optional array of term objects. This is intended for future use / forcibly passing a list from elsewhere"
696
+ }
697
+ });
698
+ // siteToEpub
699
+ MicroFrontendRegistry.add({
700
+ endpoint: "/api/apps/haxcms/siteToEpub",
701
+ name: "@haxcms/siteToEpub",
702
+ title: "HAXcms Full Site EPUB",
703
+ description: "generate .epub of entire HAXcms site via URL",
704
+ params: {
705
+ url: "location of the HAXcms site"
706
+ }
707
+ });
708
+ }
709
+
710
+ // experimental service
711
+ function enableExperimentalServices() {
712
+ // hydrateSsr
713
+ MicroFrontendRegistry.add({
714
+ endpoint: "https://webcomponents.hax.cloud/api/hydrateSsr",
715
+ name: "@experiments/hydrateSsr",
716
+ title: "Hydrate SSR",
717
+ description: "Hydrate web components via lit server side",
718
+ params: {
719
+ html: "blob of HTML or link to html file to load"
720
+ }
721
+ });
722
+ }
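
Taken together, the fork above mirrors the browser registry's surface (`add`, `define`, `get`, `has`, `call`, `url`). A minimal usage sketch, assuming Node 18+ for the global `fetch` and an illustrative require path, and leaving the remote open-apis endpoints in place rather than the local base the CLI configures; the hunks that follow appear to come from the site program, which wires this registry into the CLI:

```
// Hedged usage sketch, not part of the diff; require path is illustrative.
const {
  MicroFrontendRegistry,
  enableServices,
} = require("./micro-frontend-registry.js");

// Outside a browser there is no window.location, so give define() a base
// to resolve "/api/..." endpoints against (here, the hosted deployment).
globalThis.MicroFrontendRegistryConfig.base = "https://open-apis.hax.cloud";
enableServices(["core"]);

(async () => {
  // @core/metadata is registered with method "GET", so params become a query string
  const meta = await MicroFrontendRegistry.call("@core/metadata", {
    q: "https://haxtheweb.org/",
  });
  console.log(meta); // parsed JSON on success, { status, data: null } on failure

  // Build the GET URL for another registered service without calling it
  console.log(MicroFrontendRegistry.url("@core/duckDuckGo", { q: "hax" }));
})();
```
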
@@ -14,6 +14,7 @@ var _promises = require("node:timers/promises");
14
14
  var p = _interopRequireWildcard(require("@clack/prompts"));
15
15
  var _picocolors = _interopRequireDefault(require("picocolors"));
16
16
  var _statements = require("../statements.js");
17
+ var _microFrontendRegistry = require("../micro-frontend-registry.js");
17
18
  var haxcmsNodejsCli = _interopRequireWildcard(require("@haxtheweb/haxcms-nodejs/dist/cli.js"));
18
19
  var hax = _interopRequireWildcard(require("@haxtheweb/haxcms-nodejs"));
19
20
  var child_process = _interopRequireWildcard(require("child_process"));
@@ -21,6 +22,12 @@ var util = _interopRequireWildcard(require("node:util"));
21
22
  function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
22
23
  function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); }
23
24
  function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; }
25
+ // trick MFR into giving local paths
26
+ globalThis.MicroFrontendRegistryConfig = {
27
+ base: `@haxtheweb/open-apis/`
28
+ };
29
+ // enable HAXcms routes so we have name => path just like on the frontend!
30
+ _microFrontendRegistry.MicroFrontendRegistry.enableServices(['haxcms']);
24
31
  const HAXCMS = hax.HAXCMS;
25
32
  const exec = util.promisify(child_process.exec);
26
33
  var sysSurge = true;
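
For the CLI, the same registry is pointed at the locally installed `@haxtheweb/open-apis` package rather than the hosted deployment: because the configured base is prefixed onto each route, every `/api/...` endpoint becomes a module path that `openApiBroker()` (added below) can rewrite to `/dist/...` and `require()` directly. A small sketch of that resolution, again with an illustrative require path:

```
// Hedged illustration of the configuration applied above; not part of the diff.
globalThis.MicroFrontendRegistryConfig = { base: "@haxtheweb/open-apis/" };
const {
  MicroFrontendRegistry,
  enableServices,
} = require("./micro-frontend-registry.js"); // illustrative path

enableServices(["haxcms"]);

// define() prefixed the route with the configured base, so the "endpoint"
// is now a path into the installed @haxtheweb/open-apis package:
//   "@haxtheweb/open-apis/" + "/api/apps/haxcms/haxcmsToSite"
// openApiBroker() later swaps "/api/" for "/dist/" before require()ing it.
console.log(MicroFrontendRegistry.get("@haxcms/haxcmsToSite").endpoint);
```
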
@@ -410,6 +417,51 @@ function siteNodeOperations(search = null) {
410
417
  return obj;
411
418
  }
412
419
 
420
+ // fake response class so we can capture the response from the headless route instead of printing it to the console
421
+ class Res {
422
+ constructor() {
423
+ this.query = {};
424
+ this.data = null;
425
+ this.statusCode = null;
426
+ }
427
+ send(data) {
428
+ this.data = data;
429
+ return this;
430
+ }
431
+ status(status) {
432
+ this.statusCode = status;
433
+ return this;
434
+ }
435
+ setHeader() {
436
+ return this;
437
+ }
438
+ }
439
+
440
+ // broker a call to the open-api repo which is an express based wrapper for vercel (originally)
441
+ // this ensures the calls are identical and yet are converted to something the CLI can leverage
442
+ async function openApiBroker(call, body) {
443
+ let mfItem = _microFrontendRegistry.MicroFrontendRegistry.get(`@haxcms/${call}`);
444
+ // ensure we have a MFR record to do the connection
445
+ // fun thing is this is local file access directly via import()
446
+ if (mfItem) {
447
+ // dynamic import... this might upset some stuff later bc it's not a direct reference
448
+ // but it's working locally at least.
449
+ const handler = await (specifier => new Promise(r => r(specifier)).then(s => _interopRequireWildcard(require(s))))(`${mfItem.endpoint.replace('/api/', '/dist/')}.js`);
450
+ // start the fake response
451
+ let res = new Res();
452
+ let req = {
453
+ body: JSON.stringify(body),
454
+ method: "post"
455
+ };
456
+ // js pass by ref for the win; these will both update bc of how we structured the calls
457
+ await handler.default(req, res);
458
+ // they'll need to be unpacked but that's a small price!
459
+ return {
460
+ req: req,
461
+ res: res
462
+ };
463
+ }
464
+ }
413
465
  // process site creation
414
466
  async function siteProcess(commandRun, project, port = '3000') {
415
467
  // auto select operations to perform if requested
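
The `Res` stub and `openApiBroker()` above let the CLI run the same Express-style handlers that back the hosted open-apis endpoints, just loaded from the local package. A hedged sketch of driving the broker from inside this module (where `openApiBroker` is in scope), with a placeholder repo URL and a hypothetical `demoImport()` wrapper:

```
// Illustrative only; this would live inside an async function in this file,
// which is what siteProcess() does with the --import-site option.
async function demoImport() {
  const resp = await openApiBroker("haxcmsToSite", {
    repoUrl: "https://example.com/some-haxcms-site/", // placeholder
  });
  // Res captured what the handler tried to send back:
  //   resp.res.statusCode  <- set via res.status()
  //   resp.res.data        <- set via res.send()
  if (resp && resp.res.data && resp.res.data.data) {
    console.log(`importable items: ${resp.res.data.data.items.length}`);
  }
}
```
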
@@ -436,6 +488,26 @@ async function siteProcess(commandRun, project, port = '3000') {
436
488
  "icon": "av:library-add"
437
489
  }
438
490
  };
491
+ // allow for importSite option
492
+ if (commandRun.options.importSite) {
493
+ if (!commandRun.options.importStructure) {
494
+ // assume hax to hax if it's not defined
495
+ commandRun.options.importStructure = 'haxcmsToSite';
496
+ }
497
+ // verify this is a valid way to do an import
498
+ if (commandRun.options.importStructure && _microFrontendRegistry.MicroFrontendRegistry.get(`@haxcms/${commandRun.options.importStructure}`)) {
499
+ let resp = await openApiBroker(commandRun.options.importStructure, {
500
+ repoUrl: commandRun.options.importSite
501
+ });
502
+ if (resp.res.data && resp.res.data.data && resp.res.data.data.items) {
503
+ siteRequest.build.structure = 'import';
504
+ siteRequest.build.items = resp.res.data.data.items;
505
+ }
506
+ if (resp.res.data && resp.res.data.data && resp.res.data.data.files) {
507
+ siteRequest.build.files = resp.res.data.data.files;
508
+ }
509
+ }
510
+ }
439
511
  HAXCMS.cliWritePath = `${project.path}`;
440
512
  await hax.RoutesMap.post.createSite({
441
513
  body: siteRequest
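
In short, when `--import-site` is supplied, the block above defaults the converter to `haxcmsToSite`, verifies a matching `@haxcms/*` entry exists, runs it through `openApiBroker()`, and grafts the result onto the build before `createSite()` is called. A condensed paraphrase of that flow, which simplifies the separate items/files checks:

```
// Condensed paraphrase of the import branch inside siteProcess(); not verbatim.
const structure = commandRun.options.importStructure || "haxcmsToSite";
if (_microFrontendRegistry.MicroFrontendRegistry.get(`@haxcms/${structure}`)) {
  const resp = await openApiBroker(structure, {
    repoUrl: commandRun.options.importSite,
  });
  const payload = resp && resp.res.data && resp.res.data.data;
  if (payload && payload.items) {
    siteRequest.build.structure = "import"; // mark the build as an import
    siteRequest.build.items = payload.items;
    siteRequest.build.files = payload.files; // may be undefined if none returned
  }
}
```
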
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@haxtheweb/create",
3
- "version": "9.0.10",
3
+ "version": "9.0.11",
4
4
  "publishConfig": {
5
5
  "access": "public"
6
6
  },
@@ -43,6 +43,7 @@
43
43
  "@clack/core": "0.3.4",
44
44
  "@clack/prompts": "0.7.0",
45
45
  "@haxtheweb/haxcms-nodejs": "^9.0.15",
46
+ "@haxtheweb/open-apis": "^9.0.11",
46
47
  "ejs": "3.1.10",
47
48
  "picocolors": "1.0.1",
48
49
  "commander": "12.1.0"