@griddo/cx 10.4.7 → 10.4.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. package/build/create-build-data.js +18 -18
  2. package/build/index.js +42 -40
  3. package/exporter/adapters/gatsby/index.ts +67 -15
  4. package/exporter/adapters/gatsby/utils.ts +1 -5
  5. package/exporter/adapters/index.ts +2 -3
  6. package/exporter/services/robots.ts +12 -9
  7. package/exporter/services/store.ts +1 -1
  8. package/exporter/types/global.ts +10 -2
  9. package/exporter/utils/folders.ts +40 -65
  10. package/exporter/utils/instance.ts +5 -2
  11. package/exporter/utils/shared.ts +1 -1
  12. package/exporter/utils/sites.ts +21 -15
  13. package/exporter/utils/store.ts +7 -2
  14. package/exporter/utils/temp-utils.ts +86 -0
  15. package/gatsby-browser.tsx +1 -2
  16. package/gatsby-config.ts +2 -1
  17. package/gatsby-node.ts +18 -47
  18. package/gatsby-ssr.tsx +1 -2
  19. package/package.json +2 -2
  20. package/src/components/Head.tsx +1 -2
  21. package/src/components/template.tsx +1 -2
  22. package/src/gatsby-node-utils.ts +1 -57
  23. package/exporter/adapters/astro/index.ts +0 -36
  24. package/exporter/adapters/astro/utils.ts +0 -30
  25. package/exporter/index-width-adapter.ts +0 -25
  26. package/static/.gitkeep +0 -0
  27. package/static/README.md +0 -3
  28. package/static/fonts/dm-sans/dm-sans-v14-latin-500.woff2 +0 -0
  29. package/static/fonts/dm-sans/dm-sans-v14-latin-500italic.woff2 +0 -0
  30. package/static/fonts/dm-sans/dm-sans-v14-latin-600.woff2 +0 -0
  31. package/static/fonts/dm-sans/dm-sans-v14-latin-600italic.woff2 +0 -0
  32. package/static/fonts/dm-sans/dm-sans-v14-latin-700.woff2 +0 -0
  33. package/static/fonts/dm-sans/dm-sans-v14-latin-700italic.woff2 +0 -0
  34. package/static/fonts/dm-sans/dm-sans-v14-latin-800.woff2 +0 -0
  35. package/static/fonts/dm-sans/dm-sans-v14-latin-800italic.woff2 +0 -0
  36. package/static/fonts/dm-sans/dm-sans-v14-latin-900.woff2 +0 -0
  37. package/static/fonts/dm-sans/dm-sans-v14-latin-900italic.woff2 +0 -0
  38. package/static/fonts/dm-sans/dm-sans-v14-latin-italic.woff2 +0 -0
  39. package/static/fonts/dm-sans/dm-sans-v14-latin-regular.woff2 +0 -0
  40. package/static/fonts/dm-serif-display/dm-serif-display-v15-latin-italic.woff2 +0 -0
  41. package/static/fonts/dm-serif-display/dm-serif-display-v15-latin-regular.woff2 +0 -0
  42. package/static/fonts/gilroy/Gilroy-Black.woff2 +0 -0
  43. package/static/fonts/gilroy/Gilroy-BlackItalic.woff2 +0 -0
  44. package/static/fonts/gilroy/Gilroy-Bold.woff2 +0 -0
  45. package/static/fonts/gilroy/Gilroy-BoldItalic.woff2 +0 -0
  46. package/static/fonts/gilroy/Gilroy-ExtraBold.woff2 +0 -0
  47. package/static/fonts/gilroy/Gilroy-ExtraBoldItalic.woff2 +0 -0
  48. package/static/fonts/gilroy/Gilroy-Heavy.woff2 +0 -0
  49. package/static/fonts/gilroy/Gilroy-HeavyItalic.woff2 +0 -0
  50. package/static/fonts/gilroy/Gilroy-Light.woff2 +0 -0
  51. package/static/fonts/gilroy/Gilroy-LightItalic.woff2 +0 -0
  52. package/static/fonts/gilroy/Gilroy-Medium.woff2 +0 -0
  53. package/static/fonts/gilroy/Gilroy-MediumItalic.woff2 +0 -0
  54. package/static/fonts/gilroy/Gilroy-Regular.woff2 +0 -0
  55. package/static/fonts/gilroy/Gilroy-RegularItalic.woff2 +0 -0
  56. package/static/fonts/gilroy/Gilroy-SemiBold.woff2 +0 -0
  57. package/static/fonts/gilroy/Gilroy-SemiBoldItalic.woff2 +0 -0
  58. package/static/fonts/gilroy/Gilroy-Thin.woff2 +0 -0
  59. package/static/fonts/gilroy/Gilroy-ThinItalic.woff2 +0 -0
  60. package/static/fonts/gilroy/Gilroy-UltraLight.woff2 +0 -0
  61. package/static/fonts/gilroy/Gilroy-UltraLightItalic.woff2 +0 -0
  62. package/static/fonts/work-sans/work-sans-v17-latin-700.woff +0 -0
  63. package/static/fonts/work-sans/work-sans-v17-latin-700.woff2 +0 -0
  64. package/static/fonts/work-sans/work-sans-v17-latin-regular.woff +0 -0
  65. package/static/fonts/work-sans/work-sans-v17-latin-regular.woff2 +0 -0
  66. package/static/grisso.css +0 -1
  67. package/static/robots.txt +0 -3
  68. package/static/webfonts.css +0 -115
@@ -1,17 +1,51 @@
1
1
  import { attempts, getGatsbyDomainRunner } from "./utils";
2
+ import { getGatsbyAssetPrefixSlug } from "../../../src/gatsby-node-utils";
3
+ import { RobotsService } from "../../services/robots";
2
4
  import { getInstanceDomains } from "../../utils/domains";
3
5
  import { createBuildData } from "../../utils/download-build-data";
4
- import { clearEmptyDirs, removeMultiPagesFromStore } from "../../utils/folders";
6
+ import {
7
+ clearEmptyDirs,
8
+ copyArtifacts,
9
+ removeArtifacts,
10
+ removeMultiPagesFromStore,
11
+ } from "../../utils/folders";
5
12
  import { uploadSearchContentToAPI } from "../../utils/searches";
6
13
  import { doLifeCycle, pause, printExporterLogo } from "../../utils/shared";
14
+ import { generateBuildReport, generateSitemaps } from "../../utils/sites";
15
+ import {
16
+ getConfig,
17
+ legacy__createDistFromGatsbyPublic,
18
+ } from "../../utils/temp-utils";
7
19
 
8
20
  async function runGatsbyAdapter() {
9
21
  printExporterLogo("gatsby");
10
22
 
11
23
  const domains = await getInstanceDomains();
24
+ const config = await getConfig();
12
25
 
13
26
  for (const domain of domains) {
14
27
  const runner = getGatsbyDomainRunner(domain);
28
+ const { __components, __cx } = config.dirs(domain);
29
+ const assetPrefix = getGatsbyAssetPrefixSlug(domain);
30
+ const needsAssetPrefix = !!assetPrefix && assetPrefix !== "";
31
+
32
+ /**
33
+ * Prepare
34
+ */
35
+ await doLifeCycle({
36
+ name: "Prepare",
37
+ steps: [
38
+ () =>
39
+ copyArtifacts({
40
+ artifacts: ["static"],
41
+ from: __components,
42
+ to: __cx,
43
+ }),
44
+ runner.init,
45
+ () => pause("Prepared LifeCycle"),
46
+ ],
47
+ attempts: attempts.prepare,
48
+ });
15
49
 
16
50
  /**
17
51
  * Restore
@@ -22,12 +56,9 @@ async function runGatsbyAdapter() {
22
56
  name: "Restore",
23
57
  attempts: attempts.restore,
24
58
  steps: [
25
- runner.init,
26
- () => pause("init done!"),
27
59
  runner.restoreArtifacts,
28
- () => pause("Restore `dist` and `assets` dirs done!"),
29
60
  runner.restoreCacheArtifacts,
30
- () => pause("Restore `apiCache`, `store` and `.cache` restored!"),
61
+ () => pause("Restore LifeCycle"),
31
62
  ],
32
63
  });
33
64
 
@@ -40,10 +71,7 @@ async function runGatsbyAdapter() {
40
71
  await doLifeCycle({
41
72
  name: "Data",
42
73
  attempts: attempts.data,
43
- steps: [
44
- () => createBuildData(domain),
45
- () => pause("Download data from API to `store` done!"),
46
- ],
74
+ steps: [() => createBuildData(domain), () => pause("Data LifeCycle")],
47
75
  });
48
76
 
49
77
  /**
@@ -56,7 +84,18 @@ async function runGatsbyAdapter() {
56
84
  await doLifeCycle({
57
85
  name: "SSG",
58
86
  attempts: attempts.ssg,
59
- steps: [runner.runGatsbyBuild, () => pause("Gatsby build done!")],
87
+ steps: [runner.runGatsbyBuild, () => pause("SSG LifeCycle")],
88
+ });
89
+
90
+ /**
91
+ * Relocation
92
+ */
93
+ await doLifeCycle({
94
+ name: "Relocation",
95
+ steps: [
96
+ () => legacy__createDistFromGatsbyPublic(domain, needsAssetPrefix),
97
+ () => pause("Relocation LifeCycle"),
98
+ ],
60
99
  });
61
100
 
62
101
  /**
@@ -72,7 +111,11 @@ async function runGatsbyAdapter() {
72
111
  name: "Meta",
73
112
  attempts: attempts.meta,
74
113
  steps: [
114
+ () => generateBuildReport(),
115
+ () => RobotsService.writeFiles(domain),
116
+ () => generateSitemaps(),
75
117
  () => (shouldUploadSearchData ? uploadSearchContentToAPI() : undefined),
118
+ () => pause("Meta LifeCycle"),
76
119
  ],
77
120
  });
78
121
 
@@ -90,16 +133,25 @@ async function runGatsbyAdapter() {
90
133
  // aseguramos que se vuelven a crear por Gatsby siempre
91
134
  // actualizadas.
92
135
  removeMultiPagesFromStore,
93
- () => pause("MultiPages removed from `store`"),
94
136
  () => clearEmptyDirs(),
95
- () => pause("Clean empty dirs done!"),
96
137
  runner.removeDisposableArtifacts,
97
- () => pause("Clean `public` done!"),
98
138
  runner.archiveArtifacts,
99
- () => pause("Archive `dist` and `assets` done!"),
100
139
  runner.archiveCacheArtifacts,
101
- () => pause("Archive `apiCache`, `.cache` and `store` done!"),
140
+ () => pause("Archive LifeCycle"),
141
+ ],
142
+ });
143
+
144
+ await doLifeCycle({
145
+ name: "Clean",
146
+ steps: [
147
+ () =>
148
+ removeArtifacts({
149
+ from: __cx,
150
+ artifacts: ["static", "apiCache"],
151
+ }),
152
+ () => pause("Clean LifeCycle"),
102
153
  ],
154
+ attempts: attempts.clean,
103
155
  });
104
156
  }
105
157
  }
@@ -24,7 +24,6 @@ const attempts = {
24
24
  clean: JSON.parse(process.env.GRIDDO_CLEAN_LIFECYCLE_MAX_ATTEMPTS || "1"),
25
25
  };
26
26
 
27
-
28
27
  /**
29
28
  * Return a runner, a series of functions to manage the Gatsby render artifacts.
30
29
  */
@@ -58,10 +57,8 @@ function getGatsbyDomainRunner(domain: string) {
58
57
  const assetPrefix = getGatsbyAssetPrefixSlug(domain);
59
58
 
60
59
  const run = getEnvRunner({
61
- DOMAIN: domain,
62
60
  GRIDDO_ASSET_PREFIX: assetPrefix,
63
- NEEDS_ASSET_DOMAIN_PREFIX: assetPrefix && assetPrefix !== "",
64
- GRIDDO_RENDERID: new Date().valueOf(),
61
+ FOO: "true-and-false",
65
62
  });
66
63
 
67
64
  /**
@@ -378,4 +375,3 @@ function getGatsbyAssetPrefixSlug(domain: string) {
378
375
  }
379
376
 
380
377
  export { attempts, getGatsbyDomainRunner };
381
-
@@ -1,5 +1,4 @@
1
- import { runAstroAdapter } from "./astro";
2
1
  import { runGatsbyAdapter } from "./gatsby";
3
2
 
4
- export type Adapters = "gatsby" | "astro";
5
- export { runAstroAdapter, runGatsbyAdapter };
3
+ export type Adapters = "gatsby";
4
+ export { runGatsbyAdapter };
@@ -8,6 +8,7 @@ dotenv.config();
8
8
  import dotenv from "dotenv";
9
9
 
10
10
  import { get } from "../utils/api";
11
+ import { getConfig } from "../utils/temp-utils";
11
12
 
12
13
  /**
13
14
  * TODO: JSDoc
@@ -48,27 +49,29 @@ class RobotsService {
48
49
  /**
49
50
  * Write robots.txt files for the current rendering domain.
50
51
  */
51
- async writeFiles(basePath: string) {
52
+ async writeFiles(domain: string) {
53
+ const config = await getConfig();
54
+ const dirs = config.dirs(domain);
55
+ const distDirectory = path.join(dirs.__cx, "dist");
56
+
52
57
  await this.getRobots();
53
58
 
54
- const robot = this.robots.find(
55
- ({ path }) => path === `/${process.env.DOMAIN}`
56
- );
59
+ const robot = this.robots.find(({ path }) => path === `/${domain}`);
57
60
 
58
61
  if (!robot) {
59
- console.log(`Robots not found for ${process.env.DOMAIN}`);
62
+ console.log(`Robots not found for ${domain}`);
60
63
  return;
61
64
  }
62
65
 
63
- if (fs.existsSync(basePath)) {
66
+ if (fs.existsSync(distDirectory)) {
64
67
  const fileLocation = JSON.parse(
65
68
  process.env.GRIDDO_EXPORT_STRIP_DOMAIN_FROM_PATH || "false"
66
69
  )
67
- ? path.join(basePath, "robots.txt")
68
- : path.join(basePath, robot.path, "robots.txt");
70
+ ? path.join(distDirectory, "robots.txt")
71
+ : path.join(distDirectory, robot.path, "robots.txt");
69
72
  fs.writeFileSync(fileLocation, robot?.content);
70
73
  } else {
71
- console.log(`${basePath} not found`);
74
+ console.log(`${distDirectory} not found`);
72
75
  }
73
76
  }
74
77
  }
@@ -39,7 +39,7 @@ dotenv.config();
39
39
  /* prettier-ignore */ const REACT_APP_INSTANCE = process.env.GRIDDO_REACT_APP_INSTANCE || process.env.REACT_APP_INSTANCE;
40
40
  /* prettier-ignore */ const API_CONCURRENCY_COUNT = parseInt(process.env.GRIDDO_API_CONCURRENCY_COUNT || "10");
41
41
  /* prettier-ignore */ const PUBLIC_API_URL = process.env.PUBLIC_API_URL as string;
42
- /* prettier-ignore */ const RENDER_ID = process.env.GRIDDO_RENDERID || new Date().valueOf().toString();
42
+ /* prettier-ignore */ const RENDER_ID = new Date().valueOf().toString();
43
43
 
44
44
  /**
45
45
  * Fetch, process and save object pages and sites data into the file system to
@@ -86,7 +86,13 @@ type LifeCyclesNames =
86
86
  | "Archive"
87
87
  | "Clean";
88
88
 
89
- type CXDir = "__exports" | "__cache" | "__cx" | "__ssg";
89
+ type CXDir =
90
+ | "__exports"
91
+ | "__cache"
92
+ | "__cx"
93
+ | "__ssg"
94
+ | "__components"
95
+ | "__root";
90
96
 
91
97
  interface CXConfig {
92
98
  proDomain: string;
@@ -95,8 +101,10 @@ interface CXConfig {
95
101
  CACHE: "__cache";
96
102
  CX: "__cx";
97
103
  SSG: "__ssg";
104
+ COMPONENTS: "__components";
105
+ ROOT: "__root";
98
106
  };
99
- dirs: (domain: string) => Record<CXDir, string>;
107
+ dirs: (domain?: string) => Record<CXDir, string>;
100
108
  }
101
109
 
102
110
  export {
@@ -4,12 +4,11 @@ import type { Site } from "../types/sites";
4
4
  import { spawnSync } from "node:child_process";
5
5
  import path from "node:path";
6
6
 
7
- import fs from "fs-extra";
7
+ import fsx from "fs-extra";
8
8
 
9
9
  import { CXRootDir, instanceRootDir, logInfo } from "./shared";
10
10
  import { getPageInStoreDir, removePagesFromStore } from "./store";
11
- // eslint-disable-next-line node/no-unpublished-import
12
- import config from "../../cx.config";
11
+ import { getConfig } from "./temp-utils";
13
12
 
14
13
  export const STORE_DIR = path.resolve(__dirname, "../store/");
15
14
  export const DIST_DIR = path.resolve(__dirname, "../dist/");
@@ -45,19 +44,19 @@ async function deleteSites(updatedSites: Array<Site>, DOMAIN: string) {
45
44
  logInfo(`Page data dir ${pageDataDir}`);
46
45
 
47
46
  // delete directory recursively
48
- if (!fs.existsSync(siteDir)) continue;
47
+ if (!fsx.existsSync(siteDir)) continue;
49
48
 
50
49
  try {
51
- await fs.rm(siteDir, { recursive: true });
50
+ await fsx.rm(siteDir, { recursive: true });
52
51
  logInfo(`${siteDir} was deleted!`);
53
52
  } catch (err) {
54
53
  console.log(err);
55
54
  }
56
55
 
57
- if (!fs.existsSync(pageDataDir)) continue;
56
+ if (!fsx.existsSync(pageDataDir)) continue;
58
57
 
59
58
  try {
60
- await fs.rm(pageDataDir, { recursive: true });
59
+ await fsx.rm(pageDataDir, { recursive: true });
61
60
  console.info(`${pageDataDir} was deleted!`);
62
61
  } catch (err) {
63
62
  console.log(err);
@@ -72,13 +71,13 @@ async function deleteSites(updatedSites: Array<Site>, DOMAIN: string) {
72
71
  const clearEmptyDirs = (baseDir?: string) => {
73
72
  const dir = baseDir || path.resolve(CXRootDir, "dist");
74
73
 
75
- const isDir = fs.statSync(dir).isDirectory();
74
+ const isDir = fsx.statSync(dir).isDirectory();
76
75
  if (!isDir) {
77
76
  return;
78
77
  }
79
78
 
80
79
  // archivos o directorios dentro de `dir`
81
- let children = fs.readdirSync(dir);
80
+ let children = fsx.readdirSync(dir);
82
81
  // let children = childrenRaw.filter((file) => {
83
82
  // return path.extname(file).toLowerCase() !== ".xml";
84
83
  // });
@@ -97,9 +96,9 @@ const clearEmptyDirs = (baseDir?: string) => {
97
96
  if (childrenCount === xmlCount) {
98
97
  children.forEach(function (xmlFile) {
99
98
  const fullPath = path.join(dir, xmlFile);
100
- fs.rmSync(fullPath);
99
+ fsx.rmSync(fullPath);
101
100
  });
102
- children = fs.readdirSync(dir);
101
+ children = fsx.readdirSync(dir);
103
102
  }
104
103
 
105
104
  children.forEach(function (file) {
@@ -109,12 +108,12 @@ const clearEmptyDirs = (baseDir?: string) => {
109
108
 
110
109
  // re-evaluate files; after deleting subdir we may have parent dir
111
110
  // empty now...
112
- children = fs.readdirSync(dir);
111
+ children = fsx.readdirSync(dir);
113
112
  }
114
113
 
115
114
  // Si no tiene hijos, lo borramos
116
115
  if (children.length === 0) {
117
- fs.rmdirSync(dir);
116
+ fsx.rmdirSync(dir);
118
117
  return;
119
118
  }
120
119
  };
@@ -122,29 +121,25 @@ const clearEmptyDirs = (baseDir?: string) => {
122
121
  /**
123
122
  * Copy artifacts between CX valid directories.
124
123
  *
125
- * @param args.domain - The render domain
126
124
  * @param args.from - Source dir
127
125
  * @param args.to - Destination dir
128
126
  * @param args.artifacts - Artifact names
129
127
  * @param args.withBackup - Create a previous backup
130
128
  */
131
- async function copyArtifacts(args: {
132
- domain: string;
133
- from: CXDir;
134
- to: CXDir;
129
+ function copyArtifacts(args: {
130
+ from: string;
131
+ to: string;
135
132
  artifacts: Array<string>;
136
133
  withBackup?: boolean;
137
134
  }) {
138
- const { artifacts, domain, from, to, withBackup } = args;
139
-
140
- const dirs = config.dirs(domain);
135
+ const { artifacts, from, to, withBackup } = args;
141
136
 
142
137
  for (const artifact of artifacts) {
143
- const src = path.join(dirs[from], artifact);
144
- const dest = path.join(dirs[to], artifact);
138
+ const src = path.join(from, artifact);
139
+ const dest = path.join(to, artifact);
145
140
 
146
141
  // The dir we want to copy, doesn't exist.
147
- if (!fs.existsSync(src)) {
142
+ if (!fsx.existsSync(src)) {
148
143
  console.log(`Source directory does not exist: ${src}`);
149
144
  continue;
150
145
  }
@@ -157,7 +152,7 @@ async function copyArtifacts(args: {
157
152
  // Copy artifact
158
153
  try {
159
154
  // First clean destination
160
- if (fs.existsSync(dest)) {
155
+ if (fsx.existsSync(dest)) {
161
156
  spawnSync("rm", ["-rf", dest]);
162
157
  }
163
158
  spawnSync("cp", ["-Rp", src, dest]);
@@ -177,44 +172,30 @@ async function copyArtifacts(args: {
177
172
  /**
178
173
  * Move artifacts between CX valid directories.
179
174
  *
180
- * @param domain - The render domain
181
175
  * @param from - Source dir
182
176
  * @param to - Destination dir
183
177
  * @param artifacts - Artifact names
184
178
  *
185
179
  * @example
186
180
  * // normal
187
- * moveArtifacts({domain: domain, from: "__cx", to: "__ssg", ["dist"]})
188
- * // renaming
189
- * moveArtifacts({
190
- *
191
- * domain: domain,
192
- * from: "__cx",
193
- * to: "__ssg",
194
- * ["foo", "taz", "bar"],
195
- * ["foo", "taz", "bar-renamed"]
196
- *
197
- * })
181
+ * moveArtifacts({from: __cx, to: __ssg, ["dist"]})
198
182
  * // with backup
199
- * moveArtifacts({domain: domain, from: "__cx", to: "__ssg", ["dist"]})
183
+ * moveArtifacts({from: __cx, to: __ssg, ["dist"]})
200
184
  */
201
185
  function moveArtifacts(args: {
202
- domain: string;
203
- from: CXDir;
204
- to: CXDir;
186
+ from: string;
187
+ to: string;
205
188
  artifacts: Array<string>;
206
189
  withBackup?: boolean;
207
190
  }) {
208
- const { artifacts, domain, from, to, withBackup } = args;
209
-
210
- const dirs = config.dirs(domain);
191
+ const { artifacts, from, to, withBackup } = args;
211
192
 
212
193
  for (const artifact of artifacts) {
213
- const src = path.join(dirs[from], artifact);
214
- const dest = path.join(dirs[to], artifact);
194
+ const src = path.join(from, artifact);
195
+ const dest = path.join(to, artifact);
215
196
 
216
197
  // The dir we want to move, doesn't exist.
217
- if (!fs.existsSync(src)) {
198
+ if (!fsx.existsSync(src)) {
218
199
  console.log(`Source directory does not exist: ${src}`);
219
200
  continue;
220
201
  }
@@ -225,7 +206,7 @@ function moveArtifacts(args: {
225
206
 
226
207
  try {
227
208
  // First clean destination
228
- if (fs.existsSync(dest)) {
209
+ if (fsx.existsSync(dest)) {
229
210
  spawnSync("rm", ["-rf", dest]);
230
211
  }
231
212
  spawnSync("mv", [src, dest]);
@@ -245,27 +226,20 @@ function moveArtifacts(args: {
245
226
  /**
246
227
  * Remove artifacts from CX valid directories.
247
228
  *
248
- * @param domain - The render domain
249
229
  * @param from - Source dir
250
230
  * @param to - Destination dir
251
231
  * @param artifacts - Artifact names
252
232
  *
253
233
  * @example
254
- * removeArtifacts(domain, "__cx", "__ssg", ["dist"], ["public"])
234
+ * removeArtifacts({from: __cx, to: __ssg, ["dist", "public"]})
255
235
  */
256
- function removeArtifacts(args: {
257
- domain: string;
258
- from: CXDir;
259
- artifacts: Array<string>;
260
- }) {
261
- const { artifacts, domain, from } = args;
262
-
263
- const dirs = config.dirs(domain);
236
+ function removeArtifacts(args: { from: string; artifacts: Array<string> }) {
237
+ const { artifacts, from } = args;
264
238
 
265
239
  for (const artifact of artifacts) {
266
240
  if (artifact) {
267
- const src = path.join(dirs[from], artifact);
268
- if (fs.existsSync(src)) {
241
+ const src = path.join(from, artifact);
242
+ if (fsx.existsSync(src)) {
269
243
  spawnSync("rm", ["-rf", src]);
270
244
  }
271
245
  }
@@ -285,7 +259,7 @@ function restoreBackup(src: string, suffix = "-BACKUP") {
285
259
  function deleteBackup(src: string, suffix = "-BACKUP") {
286
260
  const dest = src + suffix;
287
261
 
288
- if (!fs.existsSync(dest)) {
262
+ if (!fsx.existsSync(dest)) {
289
263
  console.log(`Source ${dest} does not exist`);
290
264
  return;
291
265
  }
@@ -301,12 +275,12 @@ function deleteBackup(src: string, suffix = "-BACKUP") {
301
275
  function createBackup(src: string, suffix = "-BACKUP") {
302
276
  const dest = src + suffix;
303
277
 
304
- if (!fs.existsSync(src)) {
278
+ if (!fsx.existsSync(src)) {
305
279
  console.log(`Source ${src} does not exist`);
306
280
  return;
307
281
  }
308
282
 
309
- if (fs.existsSync(dest)) {
283
+ if (fsx.existsSync(dest)) {
310
284
  console.log(`Destination ${dest} already exists`);
311
285
  return;
312
286
  }
@@ -323,8 +297,9 @@ function isMultiPageId(id: number) {
323
297
  return Number.isInteger(id) && id < 0;
324
298
  }
325
299
 
326
- function removeMultiPagesFromStore() {
327
- const dirs = config.dirs("");
300
+ async function removeMultiPagesFromStore() {
301
+ const config = await getConfig();
302
+ const dirs = config.dirs();
328
303
  const storePath = path.join(dirs.__cx, "store");
329
304
  try {
330
305
  const multiPageFiles = getPageInStoreDir(storePath).filter(isMultiPageId);
@@ -62,15 +62,18 @@ function getComponentsLibAliases() {
62
62
  };
63
63
  },
64
64
  {
65
+ // Por este motivo se puede hacer `... import from "@components" en
66
+ // los packages del monorepo.
67
+ "@components": `${resolveComponentsPath()}/src/index.js`,
65
68
  components: `${resolveComponentsPath()}/src/index.js`,
66
69
  }
67
70
  );
68
71
  }
69
72
 
70
73
  export {
71
- getComponentsJSConfig,
72
- getComponentsLibAliases,
73
74
  IS_COMPONENT_LIBRARY,
74
75
  PROJECT_ALIASES,
76
+ getComponentsJSConfig,
77
+ getComponentsLibAliases,
75
78
  resolveComponentsPath,
76
79
  };
@@ -292,7 +292,7 @@ function pause(title: string) {
292
292
 
293
293
  return new Promise<void>((resolve) => {
294
294
  console.log("\n");
295
- logBox(title, "🥓", 1, 0);
295
+ logBox(`⌛️ ${title}`, "", 1, 0);
296
296
  process.stdin.once("data", () => {
297
297
  resolve();
298
298
  });
@@ -1,4 +1,3 @@
1
- import type { BuildProcessData } from "../types/global";
2
1
  import type { Site, SiteData } from "../types/sites";
3
2
 
4
3
  import path from "node:path";
@@ -7,6 +6,8 @@ import fs from "fs-extra";
7
6
  import { parse } from "js2xmlparser";
8
7
 
9
8
  import { logInfo } from "./shared";
9
+ import { getBuildMetadata } from "./store";
10
+ import { getConfig } from "./temp-utils";
10
11
  import { AuthService } from "../services/auth";
11
12
  import { SitesService } from "../services/sites";
12
13
 
@@ -148,14 +149,16 @@ async function getSiteData(siteID: number) {
148
149
 
149
150
  /**
150
151
  * Save a file with the end of build process
151
- *
152
- * @param filePathName The pathname for the file report
153
- * @param buildProcessData The whole build process data.
154
152
  */
155
- async function generateBuildReport(
156
- filePathName: string,
157
- buildProcessData: BuildProcessData
158
- ) {
153
+ async function generateBuildReport() {
154
+ const config = await getConfig();
155
+ const dirs = config.dirs();
156
+
157
+ const DIST_FOLDER = path.join(dirs.__cx, "dist");
158
+
159
+ const { buildProcessData } = await getBuildMetadata();
160
+ const filePathName = path.join(DIST_FOLDER, "__build-report__.json");
161
+
159
162
  // Get the token
160
163
  const authControl = await AuthService.login();
161
164
 
@@ -179,8 +182,13 @@ async function generateBuildReport(
179
182
  *
180
183
  * @param sites An array of sites
181
184
  */
182
- async function generateSitemaps(sites: Array<Site>) {
183
- const promisesOfSites = sites.map(async (site) => {
185
+ async function generateSitemaps() {
186
+ const { sitesToPublish } = await getBuildMetadata();
187
+ const config = await getConfig();
188
+ const dirs = config.dirs();
189
+ const basePath = path.resolve(dirs.__cx, "dist");
190
+
191
+ const promisesOfSites = sitesToPublish.map(async (site) => {
184
192
  const { id: siteID, languages } = site;
185
193
 
186
194
  const promisesOfLanguages = languages.map(async (lang) => {
@@ -207,11 +215,9 @@ async function generateSitemaps(sites: Array<Site>) {
207
215
  const sitemaps: Array<string> = [];
208
216
  const sitemapPageGroupKeys = Object.keys(sitemapPagesGroup);
209
217
 
210
- const sitemapBasePath = path.resolve(
211
- __dirname,
212
- `../../public/${
213
- STRIP_DOMAIN_FROM_PATH ? slug.replace(domain, "") : slug
214
- }`
218
+ const sitemapBasePath = path.join(
219
+ basePath,
220
+ `${STRIP_DOMAIN_FROM_PATH ? slug.replace(domain, "") : slug}`
215
221
  );
216
222
 
217
223
  for (const templateId of sitemapPageGroupKeys) {
@@ -9,6 +9,7 @@ import path from "node:path";
9
9
  import fsx from "fs-extra";
10
10
 
11
11
  import { removeProperties, walk } from "./shared";
12
+ import { getConfig } from "./temp-utils";
12
13
  import { Site } from "../types/sites";
13
14
 
14
15
  /**
@@ -44,9 +45,13 @@ async function* getBuildPages<PageType extends GriddoPageObject>(
44
45
  * Get the build metadata from the Store.
45
46
  * TODO: Refactorizar para leer un solo archivo: __metadata__.json
46
47
  */
47
- function getBuildMetadata(basePath: string): BuildMetaData {
48
+ async function getBuildMetadata(): Promise<BuildMetaData> {
49
+ const config = await getConfig();
50
+ const dirs = config.dirs();
51
+ const storePath = path.join(dirs.__cx, "store");
52
+
48
53
  const { sitesToPublish, createdPages, buildProcessData } = fsx.readJSONSync(
49
- path.resolve(basePath, "metadata", "render-info.json")
54
+ path.resolve(storePath, "metadata", "render-info.json")
50
55
  );
51
56
  return {
52
57
  buildProcessData,