@griddo/cx 11.9.7-rc.2 → 11.9.8-rc.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (222) hide show
  1. package/README.md +78 -193
  2. package/build/commands/end-render.js +31 -0
  3. package/build/commands/end-render.js.map +7 -0
  4. package/build/commands/prepare-assets-directory.js +9 -0
  5. package/build/commands/prepare-assets-directory.js.map +7 -0
  6. package/build/commands/prepare-domains-render.js +38 -0
  7. package/build/commands/prepare-domains-render.js.map +7 -0
  8. package/build/commands/reset-render.js +31 -0
  9. package/build/commands/reset-render.js.map +7 -0
  10. package/build/commands/start-embeddings.js +31 -0
  11. package/build/commands/start-embeddings.js.map +7 -0
  12. package/build/commands/start-render.js +66 -0
  13. package/build/commands/start-render.js.map +7 -0
  14. package/build/commands/upload-search-content.js +31 -0
  15. package/build/commands/upload-search-content.js.map +7 -0
  16. package/build/core/GriddoLog.d.ts +16 -0
  17. package/build/core/db.d.ts +4 -0
  18. package/build/core/dist-rollback.d.ts +2 -0
  19. package/build/{errors/index.d.ts → core/errors.d.ts} +5 -4
  20. package/build/core/fs.d.ts +69 -0
  21. package/build/core/logger.d.ts +18 -0
  22. package/build/index.d.ts +10 -29
  23. package/build/index.js +406 -73
  24. package/build/services/auth.d.ts +2 -5
  25. package/build/services/manage-store.d.ts +48 -0
  26. package/build/services/render.d.ts +70 -0
  27. package/build/shared/envs.d.ts +19 -0
  28. package/build/{errors/errors-data.d.ts → shared/errors.d.ts} +5 -3
  29. package/build/shared/npm-modules/brush.d.ts +18 -0
  30. package/build/shared/npm-modules/find-up-simple.d.ts +34 -0
  31. package/build/shared/npm-modules/pkg-dir.d.ts +7 -0
  32. package/build/{types → shared/types}/api.d.ts +18 -18
  33. package/build/{types → shared/types}/global.d.ts +15 -16
  34. package/build/{types → shared/types}/navigation.d.ts +5 -5
  35. package/build/{types → shared/types}/pages.d.ts +9 -9
  36. package/build/shared/types/render.d.ts +54 -0
  37. package/build/{types → shared/types}/sites.d.ts +18 -19
  38. package/cli.mjs +239 -0
  39. package/exporter/build-esbuild.noop +42 -0
  40. package/exporter/build.sh +17 -28
  41. package/exporter/commands/README.md +151 -0
  42. package/exporter/commands/end-render.ts +65 -86
  43. package/exporter/commands/prepare-assets-directory.ts +34 -0
  44. package/exporter/commands/prepare-domains-render.ts +143 -35
  45. package/exporter/commands/reset-render.ts +12 -7
  46. package/exporter/commands/single-domain-upload-search-content.noop +206 -0
  47. package/exporter/commands/start-embeddings.ts +29 -0
  48. package/exporter/commands/start-render.ts +26 -64
  49. package/exporter/commands/upload-search-content.ts +201 -26
  50. package/exporter/core/GriddoLog.ts +45 -0
  51. package/exporter/core/check-env-health.ts +200 -0
  52. package/exporter/core/db-class.ts +54 -0
  53. package/exporter/core/db.ts +33 -0
  54. package/exporter/core/dist-rollback.ts +40 -0
  55. package/exporter/core/errors.ts +82 -0
  56. package/exporter/core/fs.ts +385 -0
  57. package/exporter/{utils → core}/images.ts +1 -6
  58. package/exporter/{utils → core}/instance.ts +9 -13
  59. package/exporter/core/life-cycle.ts +73 -0
  60. package/exporter/core/logger.ts +141 -0
  61. package/exporter/core/objects.ts +37 -0
  62. package/exporter/core/print-logos.ts +21 -0
  63. package/exporter/index.ts +14 -56
  64. package/exporter/services/api.ts +306 -0
  65. package/exporter/services/auth.ts +8 -10
  66. package/exporter/services/domains.ts +23 -8
  67. package/exporter/services/manage-sites.ts +116 -0
  68. package/exporter/services/manage-store.ts +235 -0
  69. package/exporter/services/navigation.ts +12 -18
  70. package/exporter/{utils → services}/pages.ts +27 -92
  71. package/exporter/services/reference-fields.ts +14 -32
  72. package/exporter/services/render-artifacts.ts +44 -0
  73. package/exporter/services/render.ts +229 -0
  74. package/exporter/services/robots.ts +33 -61
  75. package/exporter/services/sitemaps.ts +129 -0
  76. package/exporter/services/sites.ts +40 -28
  77. package/exporter/services/store.ts +386 -319
  78. package/exporter/shared/context.ts +49 -0
  79. package/exporter/{constants → shared}/endpoints.ts +12 -11
  80. package/exporter/shared/envs.ts +62 -0
  81. package/exporter/{errors/errors-data.ts → shared/errors.ts} +24 -14
  82. package/exporter/shared/npm-modules/README.md +36 -0
  83. package/exporter/shared/npm-modules/brush.ts +34 -0
  84. package/exporter/shared/npm-modules/find-up-simple.ts +100 -0
  85. package/exporter/shared/npm-modules/pkg-dir.ts +17 -0
  86. package/exporter/shared/npm-modules/xml-parser.ts +57 -0
  87. package/exporter/{types → shared/types}/api.ts +40 -41
  88. package/exporter/{types → shared/types}/global.ts +17 -21
  89. package/exporter/{types → shared/types}/navigation.ts +3 -3
  90. package/exporter/{types → shared/types}/pages.ts +10 -11
  91. package/exporter/shared/types/render.ts +63 -0
  92. package/exporter/{types → shared/types}/sites.ts +18 -19
  93. package/exporter/ssg-adapters/gatsby/actions/clean.ts +26 -0
  94. package/exporter/ssg-adapters/gatsby/actions/close.ts +17 -0
  95. package/exporter/ssg-adapters/gatsby/actions/data.ts +22 -0
  96. package/exporter/ssg-adapters/gatsby/actions/healthCheck.ts +10 -0
  97. package/exporter/ssg-adapters/gatsby/actions/init.ts +12 -0
  98. package/exporter/ssg-adapters/gatsby/actions/logs.ts +10 -0
  99. package/exporter/ssg-adapters/gatsby/actions/meta.ts +13 -0
  100. package/exporter/ssg-adapters/gatsby/actions/prepare.ts +9 -0
  101. package/exporter/ssg-adapters/gatsby/actions/relocation.ts +15 -0
  102. package/exporter/ssg-adapters/gatsby/actions/restore.ts +21 -0
  103. package/exporter/ssg-adapters/gatsby/actions/ssg.ts +12 -0
  104. package/exporter/ssg-adapters/gatsby/actions/sync.ts +65 -0
  105. package/exporter/ssg-adapters/gatsby/index.ts +114 -0
  106. package/exporter/ssg-adapters/gatsby/shared/artifacts.ts +16 -0
  107. package/exporter/ssg-adapters/gatsby/shared/diff-assets.ts +128 -0
  108. package/exporter/ssg-adapters/gatsby/shared/extract-assets.ts +75 -0
  109. package/exporter/ssg-adapters/gatsby/shared/gatsby-build.ts +58 -0
  110. package/exporter/ssg-adapters/gatsby/shared/sync-render.ts +300 -0
  111. package/exporter/ssg-adapters/gatsby/shared/types.ts +35 -0
  112. package/exporter/ssg-adapters/gatsby/shared/utils.ts +33 -0
  113. package/gatsby-browser.tsx +41 -58
  114. package/gatsby-config.ts +10 -17
  115. package/gatsby-node.ts +20 -80
  116. package/gatsby-ssr.tsx +2 -1
  117. package/package.json +41 -92
  118. package/plugins/gatsby-plugin-svgr-loader/gatsby-node.js +55 -0
  119. package/plugins/gatsby-plugin-svgr-loader/package.json +8 -0
  120. package/react/DynamicScript/index.tsx +33 -0
  121. package/{exporter/react/Favicon → react/GriddoFavicon}/index.tsx +3 -9
  122. package/{exporter/react → react}/GriddoIntegrations/index.tsx +17 -23
  123. package/{exporter/react → react}/GriddoIntegrations/utils.ts +24 -12
  124. package/react/GriddoOpenGraph/index.tsx +39 -0
  125. package/src/components/Head.tsx +30 -73
  126. package/src/components/template.tsx +8 -30
  127. package/src/gatsby-node-utils.ts +76 -2
  128. package/src/html.tsx +2 -11
  129. package/src/types.ts +5 -5
  130. package/tsconfig.commands.json +36 -0
  131. package/tsconfig.exporter.json +20 -0
  132. package/tsconfig.json +5 -3
  133. package/build/adapters/gatsby/index.d.ts +0 -4
  134. package/build/adapters/gatsby/utils.d.ts +0 -22
  135. package/build/artifacts/index.d.ts +0 -6
  136. package/build/commands/end-render.d.ts +0 -2
  137. package/build/commands/move-assets.d.ts +0 -1
  138. package/build/commands/prepare-domains-render.d.ts +0 -1
  139. package/build/commands/reset-render.d.ts +0 -2
  140. package/build/commands/start-render.d.ts +0 -2
  141. package/build/commands/upload-search-content.d.ts +0 -2
  142. package/build/constants/envs.d.ts +0 -37
  143. package/build/constants/index.d.ts +0 -57
  144. package/build/end-render.js +0 -74
  145. package/build/end-render.js.map +0 -7
  146. package/build/index.js.map +0 -7
  147. package/build/prepare-domains-render.js +0 -73
  148. package/build/prepare-domains-render.js.map +0 -7
  149. package/build/react/Favicon/index.d.ts +0 -5
  150. package/build/react/Favicon/utils.d.ts +0 -9
  151. package/build/react/GriddoIntegrations/index.d.ts +0 -20
  152. package/build/react/GriddoIntegrations/utils.d.ts +0 -26
  153. package/build/react/index.d.ts +0 -3
  154. package/build/react/index.js +0 -3
  155. package/build/registers/api.d.ts +0 -9
  156. package/build/registers/gatsby.d.ts +0 -9
  157. package/build/registers/index.d.ts +0 -3
  158. package/build/reset-render.js +0 -74
  159. package/build/reset-render.js.map +0 -7
  160. package/build/services/domains.d.ts +0 -6
  161. package/build/services/navigation.d.ts +0 -50
  162. package/build/services/reference-fields.d.ts +0 -20
  163. package/build/services/register.d.ts +0 -36
  164. package/build/services/robots.d.ts +0 -19
  165. package/build/services/settings.d.ts +0 -4
  166. package/build/services/sites.d.ts +0 -29
  167. package/build/services/store.d.ts +0 -6
  168. package/build/start-render.js +0 -100
  169. package/build/start-render.js.map +0 -7
  170. package/build/types/templates.d.ts +0 -8
  171. package/build/upload-search-content.js +0 -74
  172. package/build/upload-search-content.js.map +0 -7
  173. package/build/utils/alerts.d.ts +0 -3
  174. package/build/utils/api.d.ts +0 -23
  175. package/build/utils/cache.d.ts +0 -35
  176. package/build/utils/core-utils.d.ts +0 -107
  177. package/build/utils/create-build-data.d.ts +0 -8
  178. package/build/utils/domains.d.ts +0 -13
  179. package/build/utils/folders.d.ts +0 -53
  180. package/build/utils/health-checks.d.ts +0 -7
  181. package/build/utils/images.d.ts +0 -16
  182. package/build/utils/loggin.d.ts +0 -51
  183. package/build/utils/pages.d.ts +0 -34
  184. package/build/utils/render.d.ts +0 -13
  185. package/build/utils/searches.d.ts +0 -15
  186. package/build/utils/sites.d.ts +0 -31
  187. package/build/utils/store.d.ts +0 -81
  188. package/cx.config.d.ts +0 -5
  189. package/cx.config.js +0 -36
  190. package/exporter/adapters/gatsby/index.ts +0 -162
  191. package/exporter/adapters/gatsby/utils.ts +0 -161
  192. package/exporter/artifacts/README.md +0 -34
  193. package/exporter/artifacts/index.ts +0 -33
  194. package/exporter/commands/move-assets.ts +0 -11
  195. package/exporter/constants/envs.ts +0 -94
  196. package/exporter/constants/index.ts +0 -129
  197. package/exporter/errors/index.ts +0 -40
  198. package/exporter/react/index.tsx +0 -11
  199. package/exporter/registers/api.ts +0 -14
  200. package/exporter/registers/gatsby.ts +0 -14
  201. package/exporter/registers/index.ts +0 -4
  202. package/exporter/services/register.ts +0 -113
  203. package/exporter/services/settings.ts +0 -17
  204. package/exporter/utils/alerts.ts +0 -29
  205. package/exporter/utils/api.ts +0 -243
  206. package/exporter/utils/cache.ts +0 -142
  207. package/exporter/utils/core-utils.ts +0 -458
  208. package/exporter/utils/create-build-data.ts +0 -17
  209. package/exporter/utils/domains.ts +0 -39
  210. package/exporter/utils/folders.ts +0 -320
  211. package/exporter/utils/health-checks.ts +0 -64
  212. package/exporter/utils/loggin.ts +0 -184
  213. package/exporter/utils/render.ts +0 -71
  214. package/exporter/utils/searches.ts +0 -156
  215. package/exporter/utils/sites.ts +0 -312
  216. package/exporter/utils/store.ts +0 -314
  217. package/src/README.md +0 -7
  218. package/start-render.js +0 -7
  219. /package/build/{utils → core}/instance.d.ts +0 -0
  220. /package/build/{constants → shared}/endpoints.d.ts +0 -0
  221. /package/exporter/{types → shared/types}/templates.ts +0 -0
  222. /package/{exporter/react/Favicon → react/GriddoFavicon}/utils.ts +0 -0
package/README.md CHANGED
@@ -1,246 +1,131 @@
1
- # Griddo CX
1
+ # Proceso de Renderizado en CX
2
2
 
3
- Griddo CX es un package dentro del monorepo de Griddo (`packages/griddo-cx`) que se encarga de ofrecer herramientas para orquestar el render de una instancia utilizando la biblioteca de componentes, un framework SSG y los datos obtenidos de la API privada de Griddo.
3
+ Este documento describe el flujo del render en Griddo CX y otros aspectos técnicos relevantes.
4
4
 
5
- # Arquitectura
5
+ _Nota: Se asume que todos los comandos se invocan a través del [CLI de CX](#cli-de-cx)._
6
6
 
7
- CX está escrito como una biblioteca en TypeScript. \**Los consumidores de la misma son el *Adapter, el framework SSG y una serie de scripts en TypeScript que viven en el propio `package/griddo-cx` y que son utilizados por infra, normalmente invocados mediante un `npm run ...`
7
+ ---
8
8
 
9
- \*Para los casos del Adapter y los “scripts para infra”, estos utilizarán directamente el código Typescript de la biblioteca de CX. Para el caso del SSG, este utilizará el código bundlelizado disponible en `@griddo/cx` y `griddo/cx/react`
9
+ ## Flujo General del render
10
10
 
11
- Como ejemplo aquí vemos un snippet dentro de Gatsby (actual framework SSG) importando un componente `<GriddoIntegrations>` que forma parte de la biblioteca de CX, en concreto del export de react.
11
+ El proceso sigue una secuencia de fases bien definida:
12
12
 
13
- ```tsx
14
- // src/components/template.tsx
15
- import { GriddoIntegrations } from "@griddo/cx/react";
16
- ```
17
-
18
- ## Exports
19
-
20
- CX tiene dos exports separados: **main y react**.
21
-
22
- - **main** se exporta en `@griddo/cx` . Es código que se ejecuta en un entorno nodejs.
23
- - **react** se exporta en `@griddo-cx/react` . Es código React :)
24
-
25
- **Ejemplo de import**
26
-
27
- ```tsx
28
- // React import
29
- import { GriddoIntegrations } from "@griddo/cx/react";
30
- // Core import
31
- import {
32
- IS_COMPONENT_LIBRARY,
33
- PROJECT_ALIASES,
34
- resolveComponentsPath,
35
- } from "@griddo/cx";
36
- ```
37
-
38
- ## Bundle
13
+ **preparación → renderizado → reseteo (en caso de error) → finalización**
39
14
 
40
- El bundle del código TypeScript se genera con [esbuild](https://esbuild.github.io/). Compilando el código a CommonJS junto con las definiciones de tipos.
15
+ El ciclo se inicia cuando la **infra** detecta la necesidad de un nuevo render y orquesta el proceso invocando al CLI de CX con una serie de comandos. A continuación, se detallan las fases y los comandos asociados.
41
16
 
42
- Se puede ejecutar el bundle de todo CX con `yarn run build` desde `packages/griddo-cx` . Esto creará los distintos exports: _node_, _react_ y también los scripts para _infra_: reset-render, build-complete y upload-search-content junto con los archivos de definición de tipos.
17
+ ### 1. `prepare-domains-render`
43
18
 
44
- Este comando, `yarn run build` se ejecuta en el despliegue del monorepo, npm prepare, etc. No es necesario que manualmente hagamos un build para los despliegues.
19
+ La **infra** inicia esta fase ejecutando:
20
+ `node cli.mjs prepare-domains-render --root=<builder-root-dir>`
45
21
 
46
- # Features
22
+ 1. **Creación de la caché**: Se crea el directorio `<builder-root-dir>/.cx-cache` y, dentro de él, el archivo `db.json`. Este archivo actúa como una base de datos interna para almacenar metadatos del render (dominios, modos de render, hashes, etc.). El directorio `.cx-cache` también almacena archivos de caché de Gatsby y de CX.
23
+ 2. **Generación de la lista de dominios**: Se genera el archivo `<cx-package>/domains.json`, que contiene la lista de dominios a renderizar (ej: `["pro-griddo", "pre-griddo"]`), ordenados por la cantidad de páginas de menor a mayor. Está previsto que este archivo sea reemplazado por `db.json` en el futuro.
47
24
 
48
- ## Archivo de configuración
25
+ ### 2. `start-render` (por dominio)
49
26
 
50
- CX tiene un archivo de configuración en el raíz del package `griddo-cx/cx.config.js` donde se establecen ciertos valores globales para todo el package.
27
+ Para cada dominio, la **infra** ejecuta:
28
+ `node cli.mjs start-render`
51
29
 
52
- Los siguientes puntos están incluidos en el archivo de configuración y deben ser leídos de este, evitando hardcodear o volver a calcularlos en el resto del código.
30
+ Este es el comando principal y gestiona la mayor parte del proceso de renderizado.
53
31
 
54
- ```jsx
55
- const config = {
56
- proDomain: "pro-", // Prefijo para los dominios "pro"
57
- griddoVersion, // Versión de griddo obtenida del package.json
58
- buildReportFileName: "build-report.json", // Archivo de reporte de render
59
- // función que resuelve la ruta absoluta a los placeholders
60
- paths: (domain) => ({
61
- __cache: path.join(CX_CACHE_DIR, domain || ""),
62
- __components: COMPONENTS_DIR,
63
- __cx: CX_ROOT_DIR,
64
- __exports: path.join(EXPORTS_DIR, domain || ""),
65
- __root: REPO_ROOT_DIR,
66
- __ssg: SSG_DIR,
67
- }),
68
- };
69
- ```
32
+ 1. Se realiza una comprobación de estado (**`check-health`**). Si esta verificación falla, el render se aborta y el proceso de Node finaliza con un código de error.
33
+ 2. Si el `check-health` es exitoso, comienza el renderizado con sus sub-fases correspondientes: **restore**, **data**, **ssg**, **archive**, etc.
34
+ 3. **Manejo de errores**: Si ocurre un error durante el renderizado, se aplican las siguientes lógicas de recuperación:
35
+ - Si el fallo ocurre en una fase inicial segura (ej: durante el login), el render se reintenta automáticamente.
36
+ - Si el fallo ocurre tras haber modificado archivos del _bundle_ o descargado datos, el sistema realiza una limpieza para evitar un estado corrupto:
37
+ - Se elimina el directorio `exports/sites/<domain>/dist`.
38
+ - Se restaura una copia de seguridad desde `exports/sites/<domain>/dist-backup`, si existe. Si no hay _backup_, el siguiente render será más lento, ya que deberá reconstruir el _bundle_ desde cero.
39
+ - En cualquier caso de error, el `RenderMode` del dominio se forzará a **`FROM_SCRATCH`** por seguridad. Para más información, consulta la sección [Rollback por Errores](#rollback-por-errores-funcionamiento-optimista).
70
40
 
71
- El contenido del archivo de configuración se leerá con la función `getConfig()` donde sea que necesitemos acceder a la misma.
41
+ Una vez `start-render` finaliza, la **infra** actúa según el resultado:
72
42
 
73
- **Ejemplo**
43
+ - **Si falla**: La **infra** ejecuta `node cli.mjs reset-render` para limpiar el estado.
44
+ - **Si tiene éxito**: La **infra** comienza la subida de los artefactos generados a S3.
74
45
 
75
- ```tsx
76
- const config = getConfig()
77
- const { proDomain, ... } = config
78
- ```
46
+ ### 3. `end-render` (por dominio)
79
47
 
80
- ### Dominio \*pro-\*\*
48
+ Cuando la subida a S3 ha finalizado con éxito, la **infra** notifica a CX ejecutando:
49
+ `node cli.mjs end-render --domain=<domain-name>`
81
50
 
82
- En los renders de Griddo se diferencia cuando el render es de un dominio de producción, esto es que el dominio interno empieza por `pro-` , por ejemplo `pro-griddo`
51
+ ### `upload-search-content`
83
52
 
84
- Este `pro-` se especifica directamente y una sola vez en el archivo de configuración.
53
+ La **infra** ejecuta este comando para todos los dominios de la instancia:
54
+ `node cli.mjs upload-search-content`
85
55
 
86
- ### Versión de Griddo
56
+ Este script sube contenido a la base de datos de búsqueda mediante una llamada `POST` a la API. Su ejecución depende de una variable de entorno; si no está activada, el script no realiza ninguna acción.
87
57
 
88
- Si es necesario obtener la versión de CX la podemos tomar directamente del archivo de configuración
58
+ Aunque es un proceso independiente, requiere que un render previo se haya completado para disponer de contenido actualizado. Si la funcionalidad de _embeddings_ está activa, también se encarga de invocarlos.
89
59
 
90
- ### Sistema de paths interno
60
+ ---
91
61
 
92
- Mediante el archivo de configuración se establecen unas rutas absolutas globales a todo CX e instancia (ya sea instancia interna del monorepo o la de un cliente) que nos ayudará a orquestar los artefactos durante los LifeCycles de un render. Un uso parecido a los `__dirname` o `__filename` de javascript CommonJS.
62
+ ## Render Incremental y `RenderMode`
93
63
 
94
- **Rutas con el dominio actual concatenado**
64
+ El flujo del render incremental utiliza las mismas fases descritas anteriormente. La principal diferencia radica en cómo `prepare-domains-render` determina la estrategia a seguir para cada dominio y cómo se llama a Gatsby tan solo con las páginas necesarias, necesitando después una sincronización entre la salida de Gatsby y el render previo.
95
65
 
96
- Ya que la mayoría de las veces el uso de estas rutas son durante el render de un dominio, la ruta incluirá el dominio para así hacer operaciones más fácilmente sin tener que estar adjuntándolo (concat) constantemente. Esto es así para los placeholders `__exports` y `__cache` . Para ello a `paths()` hay que pasarle el dominio como único argumento cuando obtengamos las rutas con `config.paths()`
66
+ Para optimizar el proceso, `prepare-domains-render` analiza cada dominio y le asigna un `RenderMode`, que define si un dominio tiene cambios, si es su primer render, o si puede ser ignorado.
97
67
 
98
- **Ejemplo**
68
+ Posteriormente, `start-render` realiza una segunda evaluación de los `RenderMode` para manejar posibles efectos secundarios o inconsistencias detectadas tras la primera evaluación. Por ejemplo, un dominio marcado como `INCREMENTAL` podría pasar a `FROM_SCRATCH` si se detecta un error en un render previo.
99
69
 
100
- ```tsx
101
- const { __exports, __cache } = getConfig().paths("mi-dominio");
102
- console.log(__exports); // ...export/sites/**mi-dominio**
103
- console.log(__cache); // ...griddo-cx/.cx-cache/**mi-dominio**
104
- ```
70
+ Los `RenderMode` disponibles son:
105
71
 
106
- Esta son las rutas existentes.
72
+ - **`FROM_SCRATCH`**: Indica que el dominio debe ser renderizado completamente desde cero. Esto ocurre en el primer render, tras un cambio de código en la instancia, o si un render anterior falló.
73
+ - **`INCREMENTAL`**: Indica que el dominio ya tiene un render previo válido. El nuevo render solo procesará las páginas nuevas, modificadas o eliminadas. Tras el cómputo, CX realiza una **sincronización** entre los nuevos artefactos y los existentes.
74
+ > Este modo implicó un refactor significativo en CX. La sincronización que antes delegaba en Gatsby ahora es gestionada al 100% por CX, lo que requirió un análisis detallado de su funcionamiento interno (ej: `page-data.json`).
75
+ - **`IDLE`**: El dominio no presenta ningún cambio y, por tanto, se ignora. Aunque `start-render` se ejecuta, el proceso finaliza inmediatamente indicando que no hay tareas que realizar. En este caso, no se ejecutan `end-render` ni `upload-search-content`.
107
76
 
108
- - `__cx` La ruta absoluta del package de CX
109
- - `__ssg` La ruta absoluta del SGG configurado
110
- - `__exports` La ruta donde se aloja el render final
111
- - `__cache` La ruta del caché de CX, donde se guardan artefactos entre renders
112
- - `__components` La ruta de la instancia. En el monorepo la de `griddo-components`
113
- - `__root` El directorio raíz siempre, en el monorepo y en la instancia. (uso residual)
77
+ ---
114
78
 
115
- **Ejemplo de uso real**
79
+ ## Carpetas de CX
116
80
 
117
- ```tsx
118
- import { getConfig } from "./utils/config";
81
+ En CX existen dos ubicaciones principales relacionadas con el render: una donde CX guarda sus cachés y otra donde CX deja el resultado del render para que infra lo suba definitivamente a S3 para publicar los sites.
119
82
 
120
- // Sin dominio
121
- const config = await getConfig();
122
- const { __cx, __ssg } = config.paths();
123
- const storeDir = path.join(__cx, "store");
124
- const templateFile = path.join(__ssg, "src/components/template.tsx");
83
+ Ambos directorios se guardan en el directorio pasado como argumento al cli mediante `--root`
125
84
 
126
- // Con dominio
127
- const { __cache } = config.paths("pre-griddo");
128
- console.log(__exports); // /griddo/packages/griddo-cx/.cx-cache/**pre-griddo**
129
- ```
85
+ ## CLI de CX
130
86
 
131
- ## LifeCycles
87
+ CX proporciona una interfaz de línea de comandos para ejecutar las distintas fases del renderizado. Se puede obtener ayuda sobre los comandos disponibles ejecutando `node cli.mjs` o `node cli.mjs --help`.
132
88
 
133
- Los LifeCycles se utilizan dentro del contexto de un _Adapter_. Se usa a través de la función `doLifeCycle` que ejecuta un batch de funciones (`actions`) de forma secuencial. Informando por consola del inicio, fin y tiempo invertido en ejecutar todas las funciones del `actions`, manejando cualquier error en las mismas. En caso de error, es posible indicar un número de _attempts_ que hará que se ejecute de nuevo el clico de vida las veces indicadas.
89
+ **Ejemplos de uso:**
134
90
 
135
- En CX existen estos LifeCycles: `Prepare`, `Restore`, `Data`, `SSG`, `Relocation`, `Meta`, `Archive`, `Clean`, `HealthCheck` y uno de `__DEBUG__` internamente son iguales (usan `doLifeCycle`) y se utilizan estos distintos nombres para identificarlos dentro de un render, poner distintos _attempts_, etc..
91
+ ```bash
92
+ # Resetear un render fallido
93
+ node cli.mjs reset-render
136
94
 
137
- **Ejemplo:**
95
+ # Preparar los dominios para el renderizado
96
+ node cli.mjs prepare-domains-render --root=<path-builder-cx-root>
138
97
 
139
- ```tsx
140
- await doLifeCycle({
141
- name: "SSG",
142
- attempts: 2, // intentará hacer **todos** las actions dos veces si hay un error en alguno de ellos
143
- actions: [func1, func2, func3],
144
- });
98
+ # Iniciar el render de un dominio específico
99
+ node cli.mjs start-render --domain=<domain-name>
145
100
  ```
146
101
 
147
- # Scripts para infra.
148
-
149
- Como hemos visto uno de los consumidores de CX son scripts “individuales” que están alojados en `griddo-cx/src/scripts` Estos scripts son siempre llamados por infra, o por el desarrollador cuando se hacen render en local.
150
-
151
- ## `start-render`
152
-
153
- CX es una biblioteca por lo tanto no tiene nada ejecutable como tal, no hay un entry point desde el punto de vista del _package_. En el package.json existe un binario `griddo-cx` que usa infra/API para ejecutar un render, este binario apunta a `griddo-cx/start-render.js` con el que se desencadena el proceso de publicación.
154
-
155
- ## `reset-render`
156
-
157
- Lo ejecuta infra mediante `yarn run reset-render` . Este resetea la API en caso de que un render salga mal. De esa manera la API al ser preguntada volverá a comunicar que hay un render pendiente y comenzará con ello de nuevo.
158
-
159
- Si no se llamase correctamente al script, la API se quedaría esperando a que finalice el render “eternamente”. Hay un time-out de X horas.
160
-
161
- ## `build-complete`
162
-
163
- Lo ejecuta infra mediante `yarn run build-complete` cuando un render acaba de manera exitosa y el contenido ha sido subido al servidor, es decir, cuando se ha completado **una publicación**. Este script envía a la API información del render y le comunica que este ha terminado y que está disponible para un nuevo render.
164
-
165
- ## `upload-search-content`
166
-
167
- Lo ejecuta infra mediante `yarn run upload-search-content` cuando un render ha acabado o con cierta frecuencia. Sube contenido de los estáticos del render a un endpoint para el uso en buscadores.
168
-
169
- # Adapter
170
-
171
- Un Adapter es una función que se ejecuta en el script `start-render.js` que es el que se triggea cuando API avisa de un nuevo render. Los Adapters están en el directorio `griddo-cx/exporter/adapters`
172
-
173
- El Adapter es el responsable de manejar el proceso de render mediante las utilidades de la biblioteca. En el proceso puede hacer lo que estime oportuno salvando ciertas obligaciones para que un render sea Griddo-compliant.
174
-
175
- <aside>
176
- 💡 Los Adapters utilizarán el código TypeScript de CX, no el bundlelizado. Ya que el propio adapter también se bundleliza.
177
-
178
- </aside>
179
-
180
- ### **Obligaciones de un Adapter**
181
-
182
- **Exports**
183
-
184
- **Dist**
185
-
186
- Dejar una carpeta con los archivos estáticos finales en el path `__exports` , que es una carpeta `exports/sites/<dominio>/dist` donde `dominio` es cada dominio de la instancia de Griddo. Una vez terminado el render, _infra_ tomará esa carpeta y la subirá. Infra la sube mediante sincronización por lo que siempre tiene que estar actualizada y con la totalidad de los datos. Si en la carpeta destino hay un archivo que no existe en la carpeta fuente `exports/sites/<dominio>/dist` se borrará.
187
-
188
- **Assets**
189
-
190
- Igualmente dejar una carpeta con los “assets” de javascript. Esto es verdad en el mundo Gatsby no sabremos qué pasará con otros frameworks.
191
-
192
- **Caches**
193
-
194
- El adapter deberá manejar manualmente la caché de Griddo utilizando las funciones `moveDirsSync`, `copyDirsSync` y `removeDirsSync`.
195
-
196
- La caché de Griddo son dos directorios que se crean en `griddo-cx` por cada render y dominio: `store` y `apiCache` . Para facilitar el trabajo CX cuenta con placeholders para las rutas, en este caso `__cache` que haría referencia `griddo-cx/.cx-cache/<domain>`
197
-
198
- ```tsx
199
- griddo-cx
200
- |-.cx-cache
201
- |- store
202
- |- apiCache
203
- ```
204
-
205
- ¿Cómo se maneja la caché? ¿Qué hago con ella?
206
-
207
- Los datos de la caché se generan de forma automática en `griddo-cx`El manejo se basa en _restaurar (Restore)_, _archivar_ (Arhive) o _invalidar (Clean)_ los directorios de la caché, tanto `store` como `apiCache`
208
-
209
- **Restaurando la caché**
210
-
211
- Cuando se inicia un render debemos mover tanto `store` como `apiCache` que estarán dentro de `griddo-cx/.rendrr-cache/<dominio>` al raíz de CX `griddo-cx` para que el proceso de render haga uso de las mismas.
212
-
213
- **Archivando la caché**
214
-
215
- Cuando el render de un dominio termina correctamente, se deben mover las carpetas `store` y `apiCache` a la carpeta de caches `griddo-cx/.cx-cache/<domain>` para poder restaurarlas en un próximo render.
216
-
217
- **Invalidando la caché**
218
-
219
- Si un render ha dado error pueden quedarse en el raíz de CX las carpetas `store` y `apiCache`. **Estas deben ser borradas** antes de la nueva fase de restauración. De hecho probablemente no exista nada que restaurar porque el render dio error. En ese caso lo que ocurre es que efectivamente no hay nada que restaurar y habrá que descargarse de nuevo todos los datos.
220
-
221
- A su vez, en cada despliegue que exista en la instancia también se borrarán ya que de alguna manera se alojan en lo que sería el `node_modules` de la instancia. Y se borrará CX enteramente.
102
+ ## Argumentos:
222
103
 
223
- <aside>
224
- 💡 Coming soon: Gestión automática de la caché de Griddo (no de los frameworks SSGS)
104
+ `--domain`: El nombre del dominio sobre el que actuará el comando.
225
105
 
226
- </aside>
106
+ `--root`: Directorio raíz del builder. Este directorio contiene las carpetas `exports/sites` y `.cx-cache`.
227
107
 
228
- # Logs
108
+ ## Rollback por Errores (Funcionamiento Optimista)
229
109
 
230
-
110
+ El render de CX opera de forma "optimista", lo que significa que modifica directamente los archivos en la carpeta de destino `exports/sites/<domain>/*` en lugar de trabajar en un directorio temporal.
231
111
 
232
- # Errores
112
+ Este enfoque mejora el rendimiento, pero implica que si se produce un error, es crucial revertir los cambios para no dejar el sitio en un estado corrupto. Este proceso de restauración y limpieza se detalla en la sección de manejo de errores de `start-render`.
233
113
 
234
-
114
+ ## Tareas Pendientes (TODO)
235
115
 
236
- # Testing
116
+ - Implementar `--root` en el CLI, solo en el `prepare-domains-render` y este ya lo deja escrito en `db.json`
117
+ - Optimizar el caso en que un dominio solo tenga páginas para despublicar y/o eliminar. En esta situación, no sería necesario ejecutar Gatsby; bastaría con una sincronización para eliminar los archivos html y page-data correspondientes y regenerar los sitemaps.
237
118
 
238
-
239
119
 
240
- # FAQ’s
120
+ ## Flujos de errores
241
121
 
242
- ### ¿Griddo procesa imágenes en tiempo de render?
122
+ - (start-render) login error -> exit
123
+ - (start-render) retry n -> exit -> rollback? -> reset-render
124
+ - (end-render) exit -> reset-render
243
125
 
244
- No, los proyectos actuales de Griddo se apoyan en imágenes alojadas en remoto, en concreto en un servicio externo “DAM” propiedad de Secuoyas. Este servicio ofrece las transformaciones necesarias. Existen un componente de React en Griddo `<GriddoImage>` que las instancias pueden utilizar y que se integra con el “DAM”.
126
+ ## TODO:
245
127
 
246
- Algunas de las primeras instancias utilizan la misma estrategia con cloudinary, usando un componente de React proporcionado también por Griddo: `<CloudinaryImage>`
128
+ - Usar el CLI.
129
+ - Rollback. Ver qué hacemos con el dist-backup... copy y después rsync.
130
+ - Llamar a search y embedding sigue igual: se sube para todos los dominios que tengan algo en exports...
131
+ - Usar db.json en lugar de archivo domains si es posible... (actualmente se lee domains.json)
@@ -0,0 +1,31 @@
1
+ "use strict";var Fe=Object.create;var Y=Object.defineProperty;var Be=Object.getOwnPropertyDescriptor;var Le=Object.getOwnPropertyNames;var Ne=Object.getPrototypeOf,Ge=Object.prototype.hasOwnProperty;var Ce=(r,e,t,o)=>{if(e&&typeof e=="object"||typeof e=="function")for(let n of Le(e))!Ge.call(r,n)&&n!==t&&Y(r,n,{get:()=>e[n],enumerable:!(o=Be(e,n))||o.enumerable});return r};var c=(r,e,t)=>(t=r!=null?Fe(Ne(r)):{},Ce(e||!r||!r.__esModule?Y(t,"default",{value:r,enumerable:!0}):t,r));var Oe=c(require("node:fs/promises")),Te=c(require("node:path"));var he=c(require("node:path"));var x=c(require("node:path"));var C=c(require("node:fs/promises")),L=c(require("node:path"));var X=c(require("node:path"));var V=c(require("node:fs"));var S=c(require("node:path")),q=c(require("node:process")),Z=require("node:url"),K=r=>r instanceof URL?(0,Z.fileURLToPath)(r):r;function z(r,e={}){let{cwd:t=q.default.cwd(),type:o="file",stopAt:n}=e,a=S.default.resolve(K(t)??""),{root:l}=S.default.parse(a),d=n?S.default.resolve(a,K(n)):l,m=S.default.isAbsolute(r);for(;;){let E=m?r:S.default.join(a,r);try{let A=V.default.statSync(E,{throwIfNoEntry:!1});if(o==="file"&&A?.isFile()||o==="directory"&&A?.isDirectory())return E}catch{}if(a===d||a===l)break;a=S.default.dirname(a)}}function Q(r){let{cwd:e}=r||{},t=z("package.json",{cwd:e});return t&&X.default.dirname(t)}var{env:s}=process;function g(r){if(!r)return!1;switch(r.trim().toLowerCase()){case"1":case"true":case"yes":case"y":case"on":return!0;default:return!1}}var 
cr=s.GRIDDO_API_URL||s.API_URL,dr=s.GRIDDO_PUBLIC_API_URL||s.PUBLIC_API_URL,ee=s.botEmail||s.GRIDDO_BOT_USER,re=s.botPassword||s.GRIDDO_BOT_PASSWORD,fr=Number.parseInt(s.GRIDDO_API_CONCURRENCY_COUNT||"10"),pr=g(s.GRIDDO_SKIP_BUILD_CHECKS),te=g(s.GRIDDO_RENDER_BY_DOMAINS),B=g(s.GRIDDO_BUILD_LOGS),mr=g(s.GRIDDO_BUILD_LOGS_TO_FILE),oe=Number.parseInt(s.GRIDDO_BUILD_LOGS_BUFFER_SIZE||"500"),lr=g(s.GRIDDO_SSG_VERBOSE_LOGS),ur=g(s.GRIDDO_SEARCH_FEATURE),gr=s.GRIDDO_ASSET_PREFIX||s.ASSET_PREFIX,hr=s.GRIDDO_REACT_APP_INSTANCE||s.REACT_APP_INSTANCE,Dr=g(s.GRIDDO_AI_EMBEDDINGS),ne=g(s.GRIDDO_VERBOSE_LOGS),yr=g(s.GRIDDO_USE_DIST_BACKUP),Rr=g(s.GRIDDO_SSG_BUNDLE_ANALYZER);var ve="\x1B[0m",ie={black:"\x1B[30m",red:"\x1B[31m",green:"\x1B[32m",yellow:"\x1B[33m",blue:"\x1B[34m",magenta:"\x1B[35m",cyan:"\x1B[36m",white:"\x1B[37m",gray:"\x1B[90m",bold:"\x1B[1m",dim:"\x1B[2m"},f={};for(let r in ie){let e=r;f[e]=t=>`${ie[e]}${t}${ve}`}var i=class r{constructor(){}static verbose(...e){ne&&console.log(f.yellow("verbose"),f.dim(e.join(" ")))}static build(...e){B&&r.log(...e)}static info(...e){console.log(`${f.blue("info")} ${e.join(" ")}`)}static success(...e){console.log(`${f.green("success")} ${e.join(" ")}`)}static error(...e){console.error(`${f.red("error")} ${e.join(" ")}`)}static warn(...e){console.warn(`${f.yellow("warn")} ${e.join(" ")}`)}static log(...e){console.log(...e)}};var ke=Q({cwd:L.default.resolve(__dirname,"../../..")})||"",Ue=L.default.join(ke,".griddo/cache"),se=L.default.join(Ue,"db.json");async function u(r=""){let e=r||se;try{return JSON.parse(await C.default.readFile(e,"utf-8"))}catch(t){throw i.error(`Failed to read DB file at ${e}:`,t),t}}async function N(r,e=""){let t=e||se;try{await C.default.writeFile(t,JSON.stringify(r,null," "))}catch(o){throw i.error(`Failed to write DB file at ${t}:`,o),o}}var de=c(require("node:fs/promises"));var v={error:"ArtifactError",message:"There was a problem with an artifact",expected:"An external process may have has modified or 
deleted one of the artifacts (files and directories).",hint:"Have there been any recent deployments? These can delete directories from the current render."};var ae={error:"LoginError",message:"There was a problem logging in to the API",expected:"This happens if the API is currently not working or the credentials are incorrect."},ce={error:"NoDomainsFoundError",message:"No domains were found in this instance. The process cannot continue.",expected:"This may happen if the API is not functioning, or the site is not properly configured, or the domains are not registered.",hint:"You can contact the instance administrator."};async function R(r){try{return await de.default.access(r),!0}catch{return!1}}var k={FROM_SCRATCH:"FROM_SCRATCH",INCREMENTAL:"INCREMENTAL",IDLE:"IDLE",ERROR:"ERROR",COMPLETED:"COMPLETED"};var h=process.env.GRIDDO_API_URL,je=process.env.GRIDDO_PUBLIC_API_URL,Lr=`${h}/ai/embeddings`,Nr=`${je}/alert`,fe=`${h}/domains`,Me=`${h}/sites/all`,He=`${h}/page`,pe=`${h}/login_check`,Gr=`${h}/debug/reset-render`,Cr=`${h}/domains/robots`,vr=`${h}/search`,kr=`${h}/settings`,Je=`${h}/site/`,me=[Je,"/build/end"];var U=class{headers;async login(){try{let e=await fetch(pe,{method:"POST",headers:{"Content-Type":"application/json",Connection:"close"},body:JSON.stringify({username:ee,password:re})});if(!e.ok)throw new Error("Error while login in the API");let{token:t}=await e.json();return this.headers={Authorization:`bearer ${t}`,"Cache-Control":"no-store"},this.headers}catch(e){D(ae,e)}}},T=new U;async function le(r){let e=await u();if(!e.domains[r])throw new Error(f.red(`[!] Error: Domain ${r} not found in DB`));if(!e.domains[r].renderMode)throw new Error(f.red(`[!] 
Error: Render mode not found for domain ${r}`));return{renderMode:e.domains[r].renderMode,reason:e.domains[r].renderModeReason}}async function _(r){let{domain:e,dbFilePath:t}=r||{},n=(await u(t)).paths;return{__root:n.root,__cache:x.default.join(n.cxCache,e||""),__components:n.components,__cx:n.cx,__sites:n.exportsDir,__exports:x.default.join(n.exportsDir,e||""),__exports_backup:x.default.join(n.exportsDirBackup,e||""),__ssg:n.ssg,__exports_dist:x.default.join(n.exportsDir,e||"","dist")}}async function ue(){let r=await u();return{griddoVersion:r.griddoVersion,buildReportFileName:r.buildReportFileName}}var j=c(require("node:fs/promises")),w=c(require("node:path"));async function ge(r){let e=await u(),{exportsDir:t,exportsDirBackup:o}=e.paths;i.info(`Cleaning exports dir for the domain ${r}`),i.verbose(`Deleting ${w.default.join(t,r)}...`),await j.default.rm(w.default.join(t,r),{recursive:!0,force:!0}),await R(w.default.join(o,r))?(await j.default.cp(w.default.join(o,r),w.default.join(t,r),{recursive:!0}),i.info(`export-backup dir for the domain ${r} found. Restoring before exit...`),i.verbose(`Copying ${w.default.join(o,r)} -> ${w.default.join(t,r)}...`)):i.info("No export-backup found, skipping rollback. Next render will create a new exports dir from scratch...")}var I=class extends Error{constructor(e){super(e instanceof Error?e.message:String(e)),this.name="InternalCXError",this.stack=e instanceof Error?e.stack:""}};function D(r,e){let{error:t,message:o,expected:n,hint:a}=r,l=i.log(f.red(`[ ${t} ]`)),d=[n,a].filter(Boolean).join(`
2
+ `);throw i.log(`
3
+ ${l}
4
+ ${o}
5
+ ${d}
6
+
7
+ ${f.red("stack")}
8
+ ${JSON.stringify(e,null,2)}`),new I(e)}async function De(r){try{await r()}catch(e){e instanceof I?i.error("Internal Griddo RenderError"):e instanceof Error?i.error(e.message):i.error(`An unexpected error occurred ${e}`);try{let{__root:o}=await _(),n=await u();n.needsRollbackOnError?(i.info("Cleaning exports dir..."),i.verbose(`Deleting ${he.default.join(o,"exports")}...`),await ge(n.currentRenderingDomain)):i.info("No rollback needed, skipping...")}catch{i.info("Early render stage, no db.json created yet...")}let t=await u();throw t.domains[t.currentRenderingDomain].isRendering=!1,t.domains[t.currentRenderingDomain].renderMode="ERROR",await N(t),e}}var _e=c(require("node:crypto")),G=c(require("node:fs/promises")),H=c(require("node:path"));var Re=c(require("node:fs/promises"));var b=[],ye=null,P=null,We=oe;var Ye=!B;function M(r){Ye||(b.push(r.toString()),b.length>=We&&Ke().catch(e=>{i.error("Background log flush failed:",e)}))}async function Ke(){if(P&&await P,b.length===0||!ye)return;P=(async()=>{let e=[...b];b.length=0;try{await Re.default.appendFile(ye,`${e.join(`
9
+ `)}
10
+ `)}catch(t){b.unshift(...e),i.error("Error flushing logs:",t)}})();try{await P}finally{P=null}}var{env:Ve}=process,{RETRY_WAIT_SECONDS:qe="4",RETRY_ATTEMPTS:we="4"}=Ve;async function J(r,e,t=""){let{endpoint:o,body:n,cacheKey:a="",attempt:l=1,headers:d,useApiCacheDir:m=!0,logToFile:E=!0}=r,A={endpoint:o,body:n,headers:d,cacheKey:a};if(a&&m){let $=new Date,p=await Qe(A);if(p){if(E){let y=Ee(p),O=y?`site: ${y}`:"",F=Se(Date.now()-$.getTime());M(`${e} (cache) ${O} ${o} - ${F}s ${t}`)}return p}}try{let $=new Date,p={method:e.toUpperCase(),headers:Object.assign({},d,T.headers)};e.toLowerCase()!=="get"&&n&&(p.body=JSON.stringify(n),p.headers||(p.headers={}),p.headers["Content-Type"]="application/json");let y=await fetch(o,p);if(!y.ok){if(y.status===404)return null;throw new Error(`HTTP ${y.status}: ${y.statusText}`)}let O=await y.json();if(E){let F=Ee(O),xe=F?`site: ${F}`:"",Pe=Se(Date.now()-$.getTime());M(`${e} (fetch) ${xe} ${o} - ${Pe}s ${t}`)}return m&&await Xe(A,O),O}catch($){let p=$;if(l>parseInt(we))throw i.log(`
11
+ Max attempts ${we} reached
12
+ --------------------------------------
13
+ - ${e.toUpperCase()} ${o}
14
+ - BODY: ${JSON.stringify(n)}
15
+ - HEADERS: ${JSON.stringify(d)}
16
+ - ERROR: ${p.message}
17
+ --------------------------------------
18
+ `),new I(p);return Ze(p,{callInfo:{endpoint:o,body:n}}),i.warn(`Waiting for retry: ${e}`,o),await ze(parseInt(qe)*1e3),J({endpoint:o,body:n,headers:d,cacheKey:a,attempt:l+1},e,t)}}async function W(r){return J(r,"get")}async function Ie(r){let{endpoint:e,body:t,headers:o}=r,n=e.endsWith("/distributor")&&`# ReferenceField body: ${JSON.stringify(t)} lang: ${JSON.stringify(o?.lang)}`;return J(r,"post",n||"")}function Ze(r,e){let{message:t,stack:o}=r,{callInfo:n}=e,a=[];for(let m of Object.keys(n))a.push(`${m}: ${typeof n[m]=="object"?JSON.stringify(n[m]):n[m]}`);let l=a.join(`
19
+ `),d=`${t}
20
+ ${o}`;i.warn(f.red(`
21
+ =============
22
+
23
+ { Call info }
24
+ ${l}
25
+
26
+ { Error details }
27
+ ${d}
28
+
29
+ =============
30
+ `))}function Ee(r){if(!(typeof r!="object"||r===null||Array.isArray(r)))return"site"in r&&r.site?r.site:void 0}function ze(r){return new Promise(e=>setTimeout(e,r))}function Se(r,e=3){return Number.parseFloat((r/1e3).toFixed(e))}async function be(r){let{__root:e}=await _(),t=H.default.join(e,"apiCache"),o=_e.default.createHash("sha256");return o.update(JSON.stringify(r)),`${t}/${o.digest("hex")}`}async function Xe(r,e){let t=typeof e=="string"?e:JSON.stringify(e),o=await be(r),n=H.default.dirname(o);await R(n)||await G.default.mkdir(n,{recursive:!0}),await G.default.writeFile(o,t,"utf8")}async function Qe(r){try{let e=await be(r),t=await G.default.readFile(e,"utf8");return JSON.parse(t)}catch{return null}}async function Ae(){let r=await W({endpoint:fe,useApiCacheDir:!1});r.length||D(ce),i.verbose(`getting domains names (${r.length})`);let e=r.filter(({slug:t})=>!!t).map(({slug:t})=>t.replace("/",""));return[...new Set(e)]}async function $e(r,e){let[t,o]=me;await Ie({endpoint:`${t}${r}${o}`,body:e,useApiCacheDir:!1,logToFile:!1})}async function er(r){let{buildReportFileName:e,domain:t,exportsSiteDir:o}=r,n=Te.default.resolve(o,t,"dist",e);return await R(n)||(i.info(`Build report file "${n}" not found.`),D(v)),JSON.parse(await Oe.default.readFile(n,"utf-8"))}async function rr(){let{buildReportFileName:r}=await ue(),{__sites:e}=await _();await T.login();let t=[];if(te){let[o]=process.argv.slice(2);t.push(o)}else i.warn("Legacy Render Mode"),t=await Ae();for(let o of t){let{renderMode:n,reason:a}=await le(o);if(n===k.IDLE){i.info(`(From Current Render) [${o}]: Skipping end-render as it is marked as IDLE with the reason ${a}.`);return}let l=await er({domain:o,exportsSiteDir:e,buildReportFileName:r});i.info(`Sending ending call to ${o} sites`);for(let d of l.sites){d.publishHashes=[...new Set(d.publishHashes)],d.unpublishHashes=[...new Set(d.unpublishHashes)];let{siteId:m,...E}=d;i.info(`Site (${m})`),await $e(m,E)}}}async function tr(){await rr()}De(tr);
31
+ //# sourceMappingURL=end-render.js.map
@@ -0,0 +1,7 @@
1
+ {
2
+ "version": 3,
3
+ "sources": ["../../exporter/commands/end-render.ts", "../../exporter/core/errors.ts", "../../exporter/services/render.ts", "../../exporter/core/db.ts", "../../exporter/shared/npm-modules/pkg-dir.ts", "../../exporter/shared/npm-modules/find-up-simple.ts", "../../exporter/shared/envs.ts", "../../exporter/shared/npm-modules/brush.ts", "../../exporter/core/GriddoLog.ts", "../../exporter/core/fs.ts", "../../exporter/shared/errors.ts", "../../exporter/shared/types/render.ts", "../../exporter/shared/endpoints.ts", "../../exporter/services/auth.ts", "../../exporter/core/dist-rollback.ts", "../../exporter/services/api.ts", "../../exporter/core/logger.ts", "../../exporter/services/domains.ts", "../../exporter/services/sites.ts"],
4
+ "sourcesContent": ["import fsp from \"node:fs/promises\";\nimport path from \"node:path\";\n\nimport { throwError, withErrorHandler } from \"../core/errors\";\nimport { pathExists } from \"../core/fs\";\nimport { GriddoLog } from \"../core/GriddoLog\";\nimport { AuthService } from \"../services/auth\";\nimport { getInstanceDomains } from \"../services/domains\";\nimport {\n\tgetRenderMetadataFromDB,\n\tgetRenderModeFromDB,\n\tgetRenderPathsHydratedWithDomainFromDB,\n} from \"../services/render\";\nimport { endSiteRender } from \"../services/sites\";\nimport { GRIDDO_RENDER_BY_DOMAINS } from \"../shared/envs\";\nimport { ArtifactError } from \"../shared/errors\";\nimport { RENDER_MODE, type Report } from \"../shared/types/render\";\n\nasync function getRenderReport(options: {\n\tdomain: string;\n\texportsSiteDir: string;\n\tbuildReportFileName: string;\n}) {\n\tconst { buildReportFileName, domain, exportsSiteDir } = options;\n\tconst buildReportFile = path.resolve(exportsSiteDir, domain, \"dist\", buildReportFileName);\n\n\tif (!(await pathExists(buildReportFile))) {\n\t\tGriddoLog.info(`Build report file \"${buildReportFile}\" not found.`);\n\t\tthrowError(ArtifactError);\n\t}\n\n\treturn JSON.parse(await fsp.readFile(buildReportFile, \"utf-8\")) as Report;\n}\n\nasync function endRender() {\n\tconst { buildReportFileName } = await getRenderMetadataFromDB();\n\tconst { __sites } = await getRenderPathsHydratedWithDomainFromDB();\n\n\tawait AuthService.login();\n\n\tlet domains = [];\n\n\tif (GRIDDO_RENDER_BY_DOMAINS) {\n\t\tconst [domainName] = process.argv.slice(2);\n\t\tdomains.push(domainName);\n\t} else {\n\t\tGriddoLog.warn(\"Legacy Render Mode\");\n\t\tdomains = await getInstanceDomains();\n\t}\n\n\tfor (const domain of domains) {\n\t\tconst { renderMode, reason } = await getRenderModeFromDB(domain);\n\n\t\tif (renderMode === RENDER_MODE.IDLE) {\n\t\t\tGriddoLog.info(\n\t\t\t\t`(From Current Render) [${domain}]: Skipping end-render as it is marked as IDLE 
with the reason ${reason}.`,\n\t\t\t);\n\t\t\treturn;\n\t\t}\n\n\t\tconst report = await getRenderReport({\n\t\t\tdomain,\n\t\t\texportsSiteDir: __sites,\n\t\t\tbuildReportFileName,\n\t\t});\n\n\t\tGriddoLog.info(`Sending ending call to ${domain} sites`);\n\n\t\tfor (const site of report.sites) {\n\t\t\tsite.publishHashes = [...new Set(site.publishHashes)];\n\t\t\tsite.unpublishHashes = [...new Set(site.unpublishHashes)];\n\n\t\t\tconst { siteId, ...body } = site;\n\t\t\tGriddoLog.info(`Site (${siteId})`);\n\t\t\tawait endSiteRender(siteId, body);\n\t\t}\n\t}\n}\n\nasync function main() {\n\tawait endRender();\n}\n\nwithErrorHandler(main);\n", "import type { ErrorsType } from \"../shared/errors\";\n\nimport path from \"node:path\";\n\nimport { getRenderPathsHydratedWithDomainFromDB } from \"../services/render\";\nimport { brush } from \"../shared/npm-modules/brush\";\nimport { readDB, writeDB } from \"./db\";\nimport { distRollback } from \"./dist-rollback\";\nimport { GriddoLog } from \"./GriddoLog\";\n\nexport type ErrorData = {\n\terror: ErrorsType;\n\tmessage: string;\n\texpected?: string;\n\thint?: string;\n};\n\nexport class RenderError extends Error {\n\tconstructor(originalError?: unknown) {\n\t\tsuper(originalError instanceof Error ? originalError.message : String(originalError));\n\n\t\tthis.name = \"InternalCXError\";\n\t\tthis.stack = originalError instanceof Error ? 
originalError.stack : \"\";\n\t}\n}\n\n/**\n * Throws an error with the provided error message, expected value, and hint.\n */\nfunction throwError(options: ErrorData, stack?: unknown): never {\n\tconst { error, message, expected, hint } = options;\n\n\tconst errorColor = GriddoLog.log(brush.red(`[ ${error} ]`));\n\tconst extraText = [expected, hint].filter(Boolean).join(\"\\n\");\n\n\tGriddoLog.log(`\n${errorColor}\n${message}\n${extraText}\n\n${brush.red(\"stack\")}\n${JSON.stringify(stack, null, 2)}`);\n\n\tthrow new RenderError(stack);\n}\n\nasync function withErrorHandler<T>(fn: () => Promise<T>) {\n\ttry {\n\t\tawait fn();\n\t} catch (error) {\n\t\tif (error instanceof RenderError) {\n\t\t\tGriddoLog.error(\"Internal Griddo RenderError\");\n\t\t} else if (error instanceof Error) {\n\t\t\tGriddoLog.error(error.message);\n\t\t} else {\n\t\t\tGriddoLog.error(`An unexpected error occurred ${error}`);\n\t\t}\n\n\t\ttry {\n\t\t\tconst { __root } = await getRenderPathsHydratedWithDomainFromDB();\n\t\t\tconst data = await readDB();\n\t\t\tif (data.needsRollbackOnError) {\n\t\t\t\tGriddoLog.info(\"Cleaning exports dir...\");\n\t\t\t\tGriddoLog.verbose(`Deleting ${path.join(__root, \"exports\")}...`);\n\n\t\t\t\tawait distRollback(data.currentRenderingDomain!);\n\t\t\t} else {\n\t\t\t\tGriddoLog.info(\"No rollback needed, skipping...\");\n\t\t\t}\n\t\t} catch (_e) {\n\t\t\tGriddoLog.info(\"Early render stage, no db.json created yet...\");\n\t\t}\n\n\t\tconst data = await readDB();\n\t\tdata.domains[data.currentRenderingDomain!].isRendering = false;\n\t\tdata.domains[data.currentRenderingDomain!].renderMode = \"ERROR\";\n\t\tawait writeDB(data);\n\t\tthrow error;\n\t}\n}\n\nexport { throwError, withErrorHandler };\n", "import type { RenderModeTuple } from \"../shared/types/render\";\n\nimport { execSync } from \"node:child_process\";\nimport fsp from \"node:fs/promises\";\nimport path from \"node:path\";\n\nimport { readDB, writeDB } from \"../core/db\";\nimport { 
throwError } from \"../core/errors\";\nimport { pathExists } from \"../core/fs\";\nimport { GriddoLog } from \"../core/GriddoLog\";\nimport { RenderUUIDError } from \"../shared/errors\";\nimport { brush } from \"../shared/npm-modules/brush\";\nimport { RENDER_MODE } from \"../shared/types/render\";\nimport { AuthService } from \"./auth\";\nimport { getBuildMetadata } from \"./manage-store\";\n\n/**\n * Creates a sentinel file with the current date and time.\n * This file is used to track later if node_modules/@griddo/cx was cleaned by a\n * npm install coming from a deploy.\n */\nasync function markRenderAsStarted(options: { domain: string; basePath: string }) {\n\tconst { domain } = options;\n\n\tconst db = await readDB();\n\tdb.domains[domain].isRendering = true;\n\tawait writeDB(db);\n\n\t// Creamos un archivo centinela, si al terminar el render este archivo no\n\t// existe es que ha habido un deploy por medio y hay que invalidar el render\n\tconst { __ssg } = await getRenderPathsHydratedWithDomainFromDB();\n\n\tconst renderSentinelFile = path.join(__ssg, `.render-sentinel-${domain}`);\n\tawait fsp.writeFile(renderSentinelFile, new Date().toISOString());\n}\n\nasync function markRenderAsCompleted(domain: string) {\n\tconst db = await readDB();\n\tdb.domains[domain].isRendering = false;\n\tdb.currentRenderingDomain = null;\n\tdb.domains[domain].renderMode = \"COMPLETED\";\n\t// db.domains[domain].shouldBeRendered = false;\n\tawait writeDB(db);\n\n\t// Borramos finalmente el archivo centinela\n\tconst { __ssg } = await getRenderPathsHydratedWithDomainFromDB();\n\tconst renderSentinelFile = path.join(__ssg, `.render-sentinel-${domain}`);\n\tawait fsp.unlink(renderSentinelFile);\n}\n\nasync function assertRenderIsValid(domain: string) {\n\t// Comprobamos que .render-sentinel exista, si no es que un deploy lo borro\n\t// y hay que invalidar el render.\n\tconst { __ssg } = await getRenderPathsHydratedWithDomainFromDB();\n\tconst renderSentinelFile = path.join(__ssg, 
`.render-sentinel-${domain}`);\n\tif (!(await pathExists(renderSentinelFile))) {\n\t\tthrowError(RenderUUIDError);\n\t}\n}\n\n/**\n * Determines the appropriate render mode for a given domain based on its current state,\n * previous render errors, deployment status, and whether rendering is required.\n *\n * @param options - An object containing:\n * - `domain`: The domain name to resolve the render mode for.\n * - `shouldBeRendered`: A boolean indicating if the domain should be rendered.\n * @returns An object with:\n * - `renderMode`: The resolved render mode (`FROM_SCRATCH`, `INCREMENTAL`, or `IDLE`).\n * - `reason`: A string describing the reason for the chosen render mode.\n *\n * @remarks\n * The function checks for missing exports, previous render errors, new deployments,\n * and whether rendering is necessary to decide the render mode.\n *\n * @todo\n * Improve ifs and reason concatenations...\n */\nasync function resolveDomainRenderMode(options: { domain: string; shouldBeRendered: boolean }) {\n\tconst { domain, shouldBeRendered } = options;\n\n\tconst db = await readDB();\n\n\tconst { __cache, __exports } = await getRenderPathsHydratedWithDomainFromDB({ domain });\n\tconst exportsAreMissing = !(await pathExists(path.join(__exports)));\n\tconst previousRenderFailed = db.domains[domain]?.isRendering;\n\tconst newDeployDetected = await hasNewCommit(__cache);\n\n\tif (exportsAreMissing) {\n\t\treturn {\n\t\t\trenderMode: RENDER_MODE.FROM_SCRATCH,\n\t\t\treason: \"missing exports directory\",\n\t\t};\n\t}\n\n\tif (previousRenderFailed) {\n\t\treturn {\n\t\t\trenderMode: RENDER_MODE.FROM_SCRATCH,\n\t\t\treason: \"error in previous render\",\n\t\t};\n\t}\n\n\tif (newDeployDetected) {\n\t\treturn {\n\t\t\trenderMode: RENDER_MODE.FROM_SCRATCH,\n\t\t\treason: \"new commit hash\",\n\t\t};\n\t}\n\n\tif (!shouldBeRendered) {\n\t\treturn {\n\t\t\trenderMode: RENDER_MODE.IDLE,\n\t\t\treason: \"no activity\",\n\t\t};\n\t}\n\n\treturn {\n\t\trenderMode: 
RENDER_MODE.INCREMENTAL,\n\t\treason: \"has changes\",\n\t};\n}\n\nasync function hasNewCommit(basePath: string): Promise<boolean> {\n\tconst commitFile = path.join(basePath, \"commit\");\n\tconst currentCommit = execSync(\"git rev-parse HEAD\").toString().trim();\n\n\tif (await pathExists(commitFile)) {\n\t\tconst savedCommit = (await fsp.readFile(commitFile, \"utf-8\")).trim();\n\t\tif (savedCommit === currentCommit) {\n\t\t\treturn false; // No hay nuevo commit\n\t\t}\n\n\t\treturn true;\n\t}\n\n\treturn true;\n}\n\nasync function updateCommitFile(options: { basePath: string }) {\n\tconst { basePath } = options;\n\tconst currentCommit = execSync(\"git rev-parse HEAD\").toString().trim();\n\tawait fsp.writeFile(path.join(basePath, \"commit\"), currentCommit);\n}\n\nasync function getRenderModeFromDB(domain: string): Promise<RenderModeTuple> {\n\tconst db = await readDB();\n\n\tif (!db.domains[domain]) {\n\t\tthrow new Error(brush.red(`[!] Error: Domain ${domain} not found in DB`));\n\t}\n\n\tif (!db.domains[domain].renderMode) {\n\t\tthrow new Error(brush.red(`[!] 
Error: Render mode not found for domain ${domain}`));\n\t}\n\n\treturn {\n\t\trenderMode: db.domains[domain].renderMode,\n\t\treason: db.domains[domain].renderModeReason,\n\t};\n}\n\nasync function getRenderPathsHydratedWithDomainFromDB(options?: {\n\tdomain?: string;\n\tdbFilePath?: string;\n}) {\n\tconst { domain, dbFilePath } = options || {};\n\n\tconst db = await readDB(dbFilePath);\n\tconst paths = db.paths;\n\n\treturn {\n\t\t__root: paths.root,\n\t\t__cache: path.join(paths.cxCache, domain || \"\"),\n\t\t__components: paths.components,\n\t\t__cx: paths.cx,\n\t\t__sites: paths.exportsDir,\n\t\t__exports: path.join(paths.exportsDir, domain || \"\"),\n\t\t__exports_backup: path.join(paths.exportsDirBackup, domain || \"\"),\n\t\t__ssg: paths.ssg,\n\t\t__exports_dist: path.join(paths.exportsDir, domain || \"\", \"dist\"),\n\t};\n}\n\nasync function getRenderMetadataFromDB() {\n\tconst db = await readDB();\n\treturn {\n\t\tgriddoVersion: db.griddoVersion,\n\t\tbuildReportFileName: db.buildReportFileName,\n\t};\n}\n\n/**\n * Save a file with the end of build process to use as `end-render` signal.\n */\nasync function generateBuildReport(domain: string) {\n\tconst authControl = await AuthService.login();\n\n\tconst { __root } = await getRenderPathsHydratedWithDomainFromDB();\n\tconst { buildReportFileName } = await getRenderMetadataFromDB();\n\tconst { buildProcessData } = await getBuildMetadata(domain);\n\n\tconst buildSitesInfo = Object.keys(buildProcessData).map((siteID) => ({\n\t\t...buildProcessData[siteID],\n\t\tsiteId: Number.parseInt(siteID),\n\t}));\n\n\tconst report = {\n\t\tauthControl,\n\t\tsites: buildSitesInfo,\n\t};\n\n\tconst reportFilePath = path.join(__root, \"current-dist\", buildReportFileName);\n\n\tawait fsp.writeFile(reportFilePath, JSON.stringify(report));\n\n\tGriddoLog.verbose(`build report saved in ${reportFilePath}`);\n}\n\nexport 
{\n\tassertRenderIsValid,\n\tgenerateBuildReport,\n\tgetRenderMetadataFromDB,\n\tgetRenderModeFromDB,\n\tgetRenderPathsHydratedWithDomainFromDB,\n\tmarkRenderAsCompleted,\n\tmarkRenderAsStarted,\n\tresolveDomainRenderMode,\n\tupdateCommitFile,\n};\n", "import type { RenderDB } from \"../shared/types/render\";\n\nimport fsp from \"node:fs/promises\";\nimport path from \"node:path\";\n\nimport { pkgDirSync } from \"../shared/npm-modules/pkg-dir\";\nimport { GriddoLog } from \"./GriddoLog\";\n\nconst root = pkgDirSync({ cwd: path.resolve(__dirname, \"../../..\") }) || \"\";\nconst cache = path.join(root, \".griddo/cache\");\nconst dbFilePath = path.join(cache, \"db.json\");\n\nasync function readDB(customDBPath = \"\") {\n\tconst file = customDBPath || dbFilePath;\n\ttry {\n\t\treturn JSON.parse(await fsp.readFile(file, \"utf-8\")) as RenderDB;\n\t} catch (error) {\n\t\tGriddoLog.error(`Failed to read DB file at ${file}:`, error);\n\t\tthrow error;\n\t}\n}\n\nasync function writeDB(renderDB: RenderDB, customDBPath = \"\") {\n\tconst file = customDBPath || dbFilePath;\n\ttry {\n\t\tawait fsp.writeFile(file, JSON.stringify(renderDB, null, \"\\t\"));\n\t} catch (error) {\n\t\tGriddoLog.error(`Failed to write DB file at ${file}:`, error);\n\t\tthrow error;\n\t}\n}\n\nexport { readDB, writeDB };\n", "import path from \"node:path\";\n\nimport { findUp, findUpSync } from \"./find-up-simple\";\n\nasync function pkgDir(options?: { readonly cwd?: string }) {\n\tconst { cwd } = options || {};\n\tconst filePath = await findUp(\"package.json\", { cwd });\n\treturn filePath && path.dirname(filePath);\n}\n\nfunction pkgDirSync(options?: { readonly cwd?: string }) {\n\tconst { cwd } = options || {};\n\tconst filePath = findUpSync(\"package.json\", { cwd });\n\treturn filePath && path.dirname(filePath);\n}\n\nexport { pkgDir, pkgDirSync };\n", "import fs from \"node:fs\";\nimport fsPromises from \"node:fs/promises\";\nimport path from \"node:path\";\nimport process from 
\"node:process\";\nimport { fileURLToPath } from \"node:url\";\n\n/**\n * Opciones para controlar el comportamiento de la b\u00FAsqueda.\n */\nexport type Options = {\n\t/**\n\tEl directorio desde donde empezar a buscar.\n\t@default process.cwd()\n\t*/\n\treadonly cwd?: URL | string;\n\n\t/**\n\tEl tipo de ruta a buscar.\n\t@default 'file'\n\t*/\n\treadonly type?: \"file\" | \"directory\";\n\n\t/**\n\tUn directorio en el que la b\u00FAsqueda se detiene si no se encuentran coincidencias.\n\t@default El directorio ra\u00EDz del sistema\n\t*/\n\treadonly stopAt?: URL | string;\n};\n\n// Funci\u00F3n auxiliar para convertir una URL en una ruta de archivo de tipo string.\nconst toPath = (urlOrPath: URL | string | undefined): string | undefined =>\n\turlOrPath instanceof URL ? fileURLToPath(urlOrPath) : urlOrPath;\n\n/**\n * Encuentra un archivo o directorio de forma as\u00EDncrona subiendo por los directorios padre.\n * @param name - El nombre del archivo o directorio a buscar.\n * @param options - Opciones de b\u00FAsqueda.\n * @returns La ruta encontrada o `undefined` si no se pudo encontrar.\n */\nexport async function findUp(name: string, options: Options = {}): Promise<string | undefined> {\n\tconst { cwd = process.cwd(), type = \"file\", stopAt: stopAtOption } = options;\n\n\tlet directory = path.resolve(toPath(cwd) ?? \"\");\n\tconst { root } = path.parse(directory);\n\tconst stopAt = stopAtOption ? path.resolve(directory, toPath(stopAtOption)!) : root;\n\tconst isAbsoluteName = path.isAbsolute(name);\n\n\twhile (true) {\n\t\tconst filePath = isAbsoluteName ? name : path.join(directory, name);\n\t\ttry {\n\t\t\t// eslint-disable-next-line no-await-in-loop\n\t\t\tconst stats = await fsPromises.stat(filePath);\n\t\t\tif ((type === \"file\" && stats.isFile()) || (type === \"directory\" && stats.isDirectory())) {\n\t\t\t\treturn filePath;\n\t\t\t}\n\t\t} catch {\n\t\t\t// Ignora errores (ej. 
el archivo no existe) y contin\u00FAa la b\u00FAsqueda.\n\t\t}\n\n\t\tif (directory === stopAt || directory === root) {\n\t\t\tbreak;\n\t\t}\n\n\t\tdirectory = path.dirname(directory);\n\t}\n}\n\n/**\n * Encuentra un archivo o directorio de forma s\u00EDncrona subiendo por los directorios padre.\n * @param name - El nombre del archivo o directorio a buscar.\n * @param options - Opciones de b\u00FAsqueda.\n * @returns La ruta encontrada o `undefined` si no se pudo encontrar.\n */\nexport function findUpSync(name: string, options: Options = {}): string | undefined {\n\tconst { cwd = process.cwd(), type = \"file\", stopAt: stopAtOption } = options;\n\n\tlet directory = path.resolve(toPath(cwd) ?? \"\");\n\tconst { root } = path.parse(directory);\n\tconst stopAt = stopAtOption ? path.resolve(directory, toPath(stopAtOption)!) : root;\n\tconst isAbsoluteName = path.isAbsolute(name);\n\n\twhile (true) {\n\t\tconst filePath = isAbsoluteName ? name : path.join(directory, name);\n\t\ttry {\n\t\t\tconst stats = fs.statSync(filePath, { throwIfNoEntry: false });\n\t\t\tif ((type === \"file\" && stats?.isFile()) || (type === \"directory\" && stats?.isDirectory())) {\n\t\t\t\treturn filePath;\n\t\t\t}\n\t\t} catch {\n\t\t\t// En teor\u00EDa, statSync con `throwIfNoEntry: false` no deber\u00EDa lanzar un error,\n\t\t\t// pero se mantiene por seguridad.\n\t\t}\n\n\t\tif (directory === stopAt || directory === root) {\n\t\t\tbreak;\n\t\t}\n\n\t\tdirectory = path.dirname(directory);\n\t}\n}\n", "const { env } = process;\n\n/**\n * Returns true/false from string\n */\nfunction envIsTruthy(value?: string): boolean {\n\tif (!value) return false;\n\n\tswitch (value.trim().toLowerCase()) {\n\t\tcase \"1\":\n\t\tcase \"true\":\n\t\tcase \"yes\":\n\t\tcase \"y\":\n\t\tcase \"on\":\n\t\t\treturn true;\n\t\tdefault:\n\t\t\treturn false;\n\t}\n}\n\n// Credentials\nconst GRIDDO_API_URL = env.GRIDDO_API_URL || env.API_URL;\nconst GRIDDO_PUBLIC_API_URL = env.GRIDDO_PUBLIC_API_URL || 
env.PUBLIC_API_URL;\nconst GRIDDO_BOT_USER = env.botEmail || env.GRIDDO_BOT_USER;\nconst GRIDDO_BOT_PASSWORD = env.botPassword || env.GRIDDO_BOT_PASSWORD;\n\n// Rendering\nconst GRIDDO_API_CONCURRENCY_COUNT = Number.parseInt(env.GRIDDO_API_CONCURRENCY_COUNT || \"10\");\nconst GRIDDO_SKIP_BUILD_CHECKS = envIsTruthy(env.GRIDDO_SKIP_BUILD_CHECKS);\nconst GRIDDO_RENDER_BY_DOMAINS = envIsTruthy(env.GRIDDO_RENDER_BY_DOMAINS);\nconst GRIDDO_BUILD_LOGS = envIsTruthy(env.GRIDDO_BUILD_LOGS);\nconst GRIDDO_BUILD_LOGS_TO_FILE = envIsTruthy(env.GRIDDO_BUILD_LOGS_TO_FILE);\nconst GRIDDO_BUILD_LOGS_BUFFER_SIZE = Number.parseInt(env.GRIDDO_BUILD_LOGS_BUFFER_SIZE || \"500\");\nconst GRIDDO_SSG_VERBOSE_LOGS = envIsTruthy(env.GRIDDO_SSG_VERBOSE_LOGS);\nconst GRIDDO_SEARCH_FEATURE = envIsTruthy(env.GRIDDO_SEARCH_FEATURE);\nconst GRIDDO_ASSET_PREFIX = env.GRIDDO_ASSET_PREFIX || env.ASSET_PREFIX;\nconst GRIDDO_REACT_APP_INSTANCE = env.GRIDDO_REACT_APP_INSTANCE || env.REACT_APP_INSTANCE;\nconst GRIDDO_AI_EMBEDDINGS = envIsTruthy(env.GRIDDO_AI_EMBEDDINGS);\nconst GRIDDO_VERBOSE_LOGS = envIsTruthy(env.GRIDDO_VERBOSE_LOGS);\nconst GRIDDO_USE_DIST_BACKUP = envIsTruthy(env.GRIDDO_USE_DIST_BACKUP);\nconst GRIDDO_SSG_BUNDLE_ANALYZER = envIsTruthy(env.GRIDDO_SSG_BUNDLE_ANALYZER);\n\nexport {\n\tGRIDDO_AI_EMBEDDINGS,\n\tGRIDDO_API_CONCURRENCY_COUNT,\n\tGRIDDO_API_URL,\n\tGRIDDO_ASSET_PREFIX,\n\tGRIDDO_BOT_PASSWORD,\n\tGRIDDO_BOT_USER,\n\tGRIDDO_BUILD_LOGS,\n\tGRIDDO_BUILD_LOGS_BUFFER_SIZE,\n\tGRIDDO_BUILD_LOGS_TO_FILE,\n\tGRIDDO_PUBLIC_API_URL,\n\tGRIDDO_REACT_APP_INSTANCE,\n\tGRIDDO_RENDER_BY_DOMAINS,\n\tGRIDDO_SEARCH_FEATURE,\n\tGRIDDO_SKIP_BUILD_CHECKS,\n\tGRIDDO_SSG_BUNDLE_ANALYZER,\n\tGRIDDO_SSG_VERBOSE_LOGS,\n\tGRIDDO_USE_DIST_BACKUP,\n\tGRIDDO_VERBOSE_LOGS,\n};\n", "//\n// Brush adds color to a string|number, it does not print it!\n// Its simple, no log, no chains, just color in a string|number\n// usage:\n// console.log(brush.green(\"sucess!\"))\n//\n\nconst RESET = \"\\x1b[0m\";\nconst 
CODES = {\n\tblack: \"\\x1b[30m\",\n\tred: \"\\x1b[31m\",\n\tgreen: \"\\x1b[32m\",\n\tyellow: \"\\x1b[33m\",\n\tblue: \"\\x1b[34m\",\n\tmagenta: \"\\x1b[35m\",\n\tcyan: \"\\x1b[36m\",\n\twhite: \"\\x1b[37m\",\n\tgray: \"\\x1b[90m\",\n\tbold: \"\\x1b[1m\",\n\tdim: \"\\x1b[2m\",\n} as const;\n\ntype ColorFunction = (text: string | number) => string;\ntype ColorName = keyof typeof CODES;\ntype Brush = Record<ColorName, ColorFunction>;\n\nconst brush = {} as Brush;\n\nfor (const color in CODES) {\n\tconst key = color as ColorName;\n\tbrush[key] = (text: string | number) => `${CODES[key]}${text}${RESET}`;\n}\n\nexport { brush };\n", "import { GRIDDO_BUILD_LOGS, GRIDDO_VERBOSE_LOGS } from \"../shared/envs\";\nimport { brush } from \"../shared/npm-modules/brush\";\n\n/**\n * Clase est\u00E1tica para gestionar los logs de la aplicaci\u00F3n.\n * No se puede instanciar, se usa directamente: GriddoLogs.info(\"mensaje\").\n */\nclass GriddoLog {\n\t/** El constructor es privado para prevenir la instanciaci\u00F3n de la clase. 
*/\n\tprivate constructor() {}\n\n\tpublic static verbose(...str: unknown[]): void {\n\t\tif (GRIDDO_VERBOSE_LOGS) {\n\t\t\tconsole.log(brush.yellow(\"verbose\"), brush.dim(str.join(\" \")));\n\t\t}\n\t}\n\n\tpublic static build(...str: unknown[]): void {\n\t\tif (GRIDDO_BUILD_LOGS) {\n\t\t\tGriddoLog.log(...str);\n\t\t}\n\t}\n\n\tpublic static info(...str: unknown[]): void {\n\t\tconsole.log(`${brush.blue(\"info\")} ${str.join(\" \")}`);\n\t}\n\n\tpublic static success(...str: unknown[]): void {\n\t\tconsole.log(`${brush.green(\"success\")} ${str.join(\" \")}`);\n\t}\n\n\tpublic static error(...str: unknown[]): void {\n\t\tconsole.error(`${brush.red(\"error\")} ${str.join(\" \")}`);\n\t}\n\n\tpublic static warn(...str: unknown[]): void {\n\t\tconsole.warn(`${brush.yellow(\"warn\")} ${str.join(\" \")}`);\n\t}\n\n\tpublic static log(...args: Parameters<typeof console.log>): void {\n\t\tconsole.log(...args);\n\t}\n}\n\nexport { GriddoLog };\n", "import type { MakeDirectoryOptions } from \"node:fs\";\n\nimport fsp from \"node:fs/promises\";\nimport path from \"node:path\";\n\nimport { ArtifactError } from \"../shared/errors\";\nimport { throwError } from \"./errors\";\nimport { GriddoLog } from \"./GriddoLog\";\n\n/**\n * Remove an empty directory from the basePath recursively.\n * If the directory has only .xml files it will handle as empty too (empty site)\n *\n * @param baseDir - The base directory.\n */\nasync function deleteDisposableSiteDirs(baseDir: string) {\n\tif (!(await pathExists(baseDir))) {\n\t\treturn;\n\t}\n\n\tconst sitesDirs = (await fsp.readdir(baseDir, { withFileTypes: true })).filter((file) =>\n\t\tfile.isDirectory(),\n\t);\n\n\tfor (const siteDir of sitesDirs) {\n\t\tconst sitePath = path.join(baseDir, siteDir.name);\n\t\tif (await siteIsEmpty(sitePath)) {\n\t\t\tawait fsp.rm(sitePath, { recursive: true });\n\t\t}\n\t}\n}\n\n/**\n * Creates multiple artifact directories.\n *\n * @param dirs - An array of directory paths.\n * @param options - Same 
option as `fs.mkdirSync()`\n */\nasync function mkDirs(dirs: string[], options?: MakeDirectoryOptions) {\n\tfor (const dir of dirs) {\n\t\ttry {\n\t\t\tif (!(await pathExists(dir))) {\n\t\t\t\tawait fsp.mkdir(dir, { recursive: true, ...options });\n\t\t\t\tGriddoLog.verbose(`create directory: ${dir}`);\n\t\t\t}\n\t\t} catch (error) {\n\t\t\tthrowError(ArtifactError, error);\n\t\t}\n\t}\n}\n\nasync function renamePath(src: string, dst: string) {\n\ttry {\n\t\tif (await pathExists(src)) {\n\t\t\tawait fsp.rename(src, dst);\n\t\t\tGriddoLog.verbose(`rename ${src} to ${dst}`);\n\t\t}\n\t} catch (error) {\n\t\tthrowError(ArtifactError, error);\n\t}\n}\n\n/**\n * Copy multiple directories with backup option.\n *\n * @param src - Source directory.\n * @param dst - Destination directory.\n * @param dirs - Directories to copy.\n * @param options.withBackup - Create a previous backup before copy.\n */\nasync function cpDirs(\n\tsrc: string,\n\tdst: string,\n\tdirs: string[],\n\toptions = {\n\t\twithBackup: false,\n\t},\n) {\n\tconst { withBackup } = options;\n\tfor (const dir of dirs) {\n\t\tconst srcCompose = path.join(src, dir);\n\t\tconst dstCompose = path.join(dst, dir);\n\n\t\t// The dir we want to copy, doesn't exist.\n\t\tif (!(await pathExists(srcCompose))) {\n\t\t\tGriddoLog.info(`(Maybe first render) Source directory does not exist: ${srcCompose}`);\n\t\t\tcontinue;\n\t\t}\n\n\t\t// Create the backup\n\t\tif (withBackup) {\n\t\t\tawait createBackup(dstCompose);\n\t\t\tGriddoLog.verbose(`create backup: ${dstCompose}`);\n\t\t}\n\n\t\t// Copy directory\n\t\ttry {\n\t\t\t// First clean destination\n\t\t\tif (await pathExists(dstCompose)) {\n\t\t\t\tawait fsp.rm(dstCompose, { recursive: true, force: true });\n\t\t\t\tGriddoLog.verbose(`clean destination: ${dstCompose}`);\n\t\t\t}\n\n\t\t\t// Then copy src to dst\n\t\t\tawait fsp.cp(srcCompose, dstCompose, {\n\t\t\t\trecursive: true,\n\t\t\t\tpreserveTimestamps: true,\n\t\t\t});\n\t\t\tGriddoLog.verbose(`copy: 
${srcCompose} to ${dstCompose}`);\n\n\t\t\tif (withBackup) {\n\t\t\t\tawait deleteBackup(dstCompose);\n\t\t\t\tGriddoLog.verbose(`delete backup: ${dstCompose}`);\n\t\t\t}\n\t\t} catch (error) {\n\t\t\tif (withBackup) {\n\t\t\t\tawait restoreBackup(dstCompose);\n\t\t\t\tGriddoLog.verbose(\"Backup has been restored.\");\n\t\t\t}\n\n\t\t\tthrowError(ArtifactError, error);\n\t\t}\n\t}\n}\n\n/**\n * Move artifacts between cx-paths\n *\n * @param src - Source directory.\n * @param dst - Destination directory.\n * @param dirs - Directories to move.\n * @param options - Options.\n */\nasync function mvDirs(\n\tsrc: string,\n\tdst: string,\n\tdirs: string[],\n\toptions?: { withBackup?: boolean; override?: boolean },\n) {\n\tconst { override, withBackup } = options || {};\n\n\tfor (const dir of dirs) {\n\t\tconst srcCompose = path.join(src, dir);\n\t\tconst dstCompose = path.join(dst, dir);\n\n\t\tif (!(await pathExists(srcCompose))) {\n\t\t\tcontinue;\n\t\t}\n\n\t\tif (withBackup) {\n\t\t\tawait createBackup(dstCompose);\n\t\t}\n\n\t\ttry {\n\t\t\t// Clean destination\n\t\t\tif (override && (await pathExists(dstCompose))) {\n\t\t\t\tawait fsp.rm(dstCompose, { recursive: true, force: true });\n\t\t\t}\n\n\t\t\tawait fsp.rename(srcCompose, dstCompose);\n\t\t\tGriddoLog.verbose(`moved: ${srcCompose} to ${dstCompose}`);\n\n\t\t\tif (withBackup) {\n\t\t\t\tawait deleteBackup(dstCompose);\n\t\t\t}\n\t\t} catch (error) {\n\t\t\tif (withBackup) {\n\t\t\t\tawait restoreBackup(dstCompose);\n\t\t\t\tGriddoLog.info(\"Backup has been restored.\");\n\t\t\t}\n\n\t\t\tthrowError(ArtifactError, error);\n\t\t}\n\t}\n}\n\n/**\n * Removes multiple artifact directories.\n *\n * @param dirs - An array of directory paths.\n */\nasync function rmDirs(dirs: string[]) {\n\tfor (const dir of dirs) {\n\t\ttry {\n\t\t\tawait fsp.rm(dir, { recursive: true, force: true });\n\t\t\tGriddoLog.verbose(`artifact removed: ${dir}`);\n\t\t} catch (error) {\n\t\t\tthrowError(ArtifactError, 
error);\n\t\t}\n\t}\n}\n\nasync function restoreBackup(src: string, suffix = \"-BACKUP\") {\n\tconst dst = src + suffix;\n\ttry {\n\t\tawait fsp.rename(dst, src);\n\t\tGriddoLog.info(`Backup ${dst} has been restored`);\n\t} catch (_error) {\n\t\tthrow new Error(`Error while delete ${dst} backup`);\n\t}\n}\n\nasync function deleteBackup(src: string, suffix = \"-BACKUP\") {\n\tconst dst = src + suffix;\n\n\tif (!(await pathExists(dst))) {\n\t\treturn;\n\t}\n\n\ttry {\n\t\tawait fsp.rm(dst, { recursive: true, force: true });\n\t\tGriddoLog.verbose(`Backup ${dst} has been deleted`);\n\t} catch (_error) {\n\t\tthrow new Error(`Error while delete ${dst} backup`);\n\t}\n}\n\nasync function createBackup(src: string, suffix = \"-BACKUP\") {\n\tconst dst = src + suffix;\n\n\tif (!(await pathExists(src))) {\n\t\treturn;\n\t}\n\n\tif (await pathExists(dst)) {\n\t\tGriddoLog.warn(`Destination ${dst} already exists`);\n\t\treturn;\n\t}\n\n\ttry {\n\t\tawait fsp.rename(src, dst);\n\t\tGriddoLog.verbose(`Backup of ${src} has been created in ${dst}`);\n\t} catch (error) {\n\t\tGriddoLog.error(`Error while coping ${src} to ${dst} backup`);\n\t\tthrowError(ArtifactError, error);\n\t}\n}\n\n/**\n * Return true if the site folder is empty or only has xml files. 
(Recursively)\n */\nasync function siteIsEmpty(sitePath: string) {\n\tconst siteFiles = (\n\t\tawait fsp.readdir(sitePath, {\n\t\t\twithFileTypes: true,\n\t\t\trecursive: true,\n\t\t})\n\t).filter((file) => file.isFile() && !path.basename(file.name).startsWith(\".\"));\n\n\tconst xmlFiles = siteFiles.filter((file) => file.name.endsWith(\".xml\"));\n\n\tif (siteFiles.length === xmlFiles.length) {\n\t\treturn true;\n\t}\n}\n\n/**\n * Delete empty directories from the given directory in a recursive way.\n */\nasync function deleteEmptyDirectories(dirPath: string) {\n\ttry {\n\t\tconst stats = await fsp.stat(dirPath);\n\n\t\t// Si no es un directorio, no hacemos nada\n\t\tif (!stats.isDirectory()) {\n\t\t\treturn;\n\t\t}\n\n\t\tlet filesInDirectory: string[];\n\t\ttry {\n\t\t\tfilesInDirectory = await fsp.readdir(dirPath);\n\t\t} catch (err: any) {\n\t\t\t// Si el directorio no existe o no se puede leer (ej. permisos), lo saltamos.\n\t\t\tif (err.code === \"ENOENT\") {\n\t\t\t\tGriddoLog.warn(`The directory \"${dirPath}\" does not exist, skipping it.`);\n\t\t\t\treturn;\n\t\t\t}\n\t\t\tGriddoLog.error(`Error al leer el directorio \"${dirPath}\":`, err);\n\t\t\tthrow err; // Re-lanza el error para que sea manejado por el llamador\n\t\t}\n\n\t\t// Recorrer los contenidos del directorio\n\t\tfor (const file of filesInDirectory) {\n\t\t\tconst fullPath = path.join(dirPath, file);\n\t\t\tawait deleteEmptyDirectories(fullPath); // Llamada recursiva s\u00EDncrona\n\t\t}\n\n\t\t// Despu\u00E9s de procesar todos los subdirectorios, verifica si el directorio actual est\u00E1 vac\u00EDo\n\t\tconst remainingFiles = await fsp.readdir(dirPath);\n\n\t\tif (remainingFiles.length === 0) {\n\t\t\ttry {\n\t\t\t\tawait fsp.rmdir(dirPath);\n\t\t\t\tGriddoLog.verbose(`Remove empty directory: ${dirPath}`);\n\t\t\t} catch (err: any) {\n\t\t\t\t// Puede que haya habido un problema de concurrencia o permisos\n\t\t\t\tif (err.code === \"ENOENT\") {\n\t\t\t\t\tGriddoLog.warn(\n\t\t\t\t\t\t`El 
directorio \"${dirPath}\" ya no existe. Posiblemente fue borrado por otra operaci\u00F3n.`,\n\t\t\t\t\t);\n\t\t\t\t} else {\n\t\t\t\t\tGriddoLog.error(`Error al borrar el directorio \"${dirPath}\":`, err);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t} catch (err: any) {\n\t\tif (err.code === \"ENOENT\") {\n\t\t\t// El directorio ya no existe, no es un error para nosotros en este contexto\n\t\t\tGriddoLog.warn(`The directory \"${dirPath}\" does not exist or has already been processed.`);\n\t\t} else {\n\t\t\tGriddoLog.error(`General error general while processing \"${dirPath}\":`, err);\n\t\t\tthrow err;\n\t\t}\n\t}\n}\n\nasync function pathExists(dir: string) {\n\ttry {\n\t\tawait fsp.access(dir);\n\t\treturn true;\n\t} catch {\n\t\treturn false;\n\t}\n}\n\n/**\n * Busca recursivamente archivos que terminen con un sufijo espec\u00EDfico dentro de un directorio.\n * Esta funci\u00F3n es un generador as\u00EDncrono, lo que la hace muy eficiente en uso de memoria.\n *\n * @param dir El directorio base para comenzar la b\u00FAsqueda.\n * @param suffix El sufijo con el que deben terminar los nombres de archivo (ej: 'page-data.json').\n * @returns Un generador as\u00EDncrono que produce la ruta completa de cada archivo encontrado.\n * @throws Si el directorio inicial `dir` no existe o no se puede leer.\n */\nasync function* findFilesBySuffix(dir: string, suffix: string): AsyncGenerator<string> {\n\tconst dirHandle = await fsp.opendir(dir);\n\tfor await (const item of dirHandle) {\n\t\tconst fullPath = path.join(dir, item.name);\n\t\tif (item.isDirectory()) {\n\t\t\t// yield* para encadenar otro generator.\n\t\t\tyield* findFilesBySuffix(fullPath, suffix);\n\t\t} else if (item.isFile() && item.name.endsWith(suffix)) {\n\t\t\tyield fullPath;\n\t\t}\n\t}\n}\n\n/**\n * Walk a directory and returns the JSON file absolute paths with one level of depth.\n * /abs/.../sotre/331/158.json\n * /abs/.../sotre/114/443.json\n * /abs/.../sotre/131/217.json\n * /abs/.../sotre/191/281.json\n */\nasync 
function* walkStore(storeDir: string): AsyncGenerator<string> {\n\tconst storeDirHandle = await fsp.opendir(storeDir);\n\n\tfor await (const siteDirent of storeDirHandle) {\n\t\tif (siteDirent.isDirectory()) {\n\t\t\tconst siteDirPath = path.join(storeDir, siteDirent.name);\n\t\t\tconst siteDirHandle = await fsp.opendir(siteDirPath);\n\n\t\t\tfor await (const fileDirent of siteDirHandle) {\n\t\t\t\tconst filePath = path.join(siteDirPath, fileDirent.name);\n\n\t\t\t\tif (fileDirent.isFile() && path.extname(filePath) === \".json\") {\n\t\t\t\t\tyield filePath;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}\n\nexport {\n\tcpDirs,\n\tdeleteDisposableSiteDirs,\n\tdeleteEmptyDirectories,\n\tfindFilesBySuffix,\n\tmkDirs,\n\tmvDirs,\n\tpathExists,\n\trenamePath,\n\trmDirs,\n\twalkStore,\n};\n", "/**\n * Do you want to add a new error to the list?\n *\n * 1 - Add the new error type name to the `ErrorsType` union type.\n * 2 - Export a new ErrorData object (or a function that returns one) in this\n * file by completing the `error` (ErrosType) and `message` (string) properties\n * obligatorily.\n */\n\nimport type { SpawnSyncReturns } from \"node:child_process\";\nimport type { ErrorData } from \"../core/errors\";\n\ntype ErrorsType =\n\t| \"ArtifactError\"\n\t| \"BundlesInconsistencyError\"\n\t| \"CheckHealthError\"\n\t| \"ErrorInSSGBuildProcess\"\n\t| \"LifecycleExecutionError\"\n\t| \"LoginError\"\n\t| \"NoDomainsFoundError\"\n\t| \"NoJSConfigFileFound\"\n\t| \"ReadFromStoreError\"\n\t| \"ReferenceFieldSourcesNotFoundError\"\n\t| \"RenderUUIDError\"\n\t| \"UploadSearchError\"\n\t| \"WriteToStoreError\";\n\nconst ArtifactError: ErrorData = {\n\terror: \"ArtifactError\",\n\tmessage: \"There was a problem with an artifact\",\n\texpected:\n\t\t\"An external process may have has modified or deleted one of the artifacts (files and directories).\",\n\thint: \"Have there been any recent deployments? 
These can delete directories from the current render.\",\n};\n\nconst ErrorInSSGBuildProcess = (command: SpawnSyncReturns<string>): ErrorData => ({\n\terror: \"ErrorInSSGBuildProcess\",\n\tmessage: `Error in SSG build process: ${JSON.stringify(command)}`,\n\texpected: \"This can happen if there was a problem with the SSG build process.\",\n});\n\nconst LifecycleExecutionError = (attempts: number, name: string): ErrorData => ({\n\terror: \"LifecycleExecutionError\",\n\tmessage: `Exceeded maximum retry attempts (${attempts}) for ${name} LifeCycle`,\n});\n\nconst LoginError: ErrorData = {\n\terror: \"LoginError\",\n\tmessage: \"There was a problem logging in to the API\",\n\texpected: \"This happens if the API is currently not working or the credentials are incorrect.\",\n};\n\nconst NoDomainsFoundError: ErrorData = {\n\terror: \"NoDomainsFoundError\",\n\tmessage: \"No domains were found in this instance. The process cannot continue.\",\n\texpected:\n\t\t\"This may happen if the API is not functioning, or the site is not properly configured, or the domains are not registered.\",\n\thint: \"You can contact the instance administrator.\",\n};\n\nconst NoJSConfigFileFound: ErrorData = {\n\terror: \"NoJSConfigFileFound\",\n\tmessage: \"Could not find jsconfig.json or tsconfig.json\",\n\texpected:\n\t\t\"This can happen if the instance is not properly configured with a jsconfig.json or tsconfig.json file.\",\n};\n\nconst ReadFromStoreError: ErrorData = {\n\terror: \"ReadFromStoreError\",\n\tmessage: \"There was an error reading a file to the Store directory\",\n\thint: \"There may be an issue such as permissions preventing the file from being read.\",\n};\n\nconst ReferenceFieldSourcesNotFoundError: ErrorData = {\n\terror: \"ReferenceFieldSourcesNotFoundError\",\n\tmessage: \"The distributor has no sources defined.\",\n\texpected:\n\t\t\"It is expected to have at least one data source in the `sources` property, even if it is empty.\",\n};\n\nconst RenderUUIDError: ErrorData 
= {\n\terror: \"RenderUUIDError\",\n\tmessage: `Render sentinel file does not exist.\nThe rendering UUID cannot be read safely.\nThere was probably an instance deployment during the render, and files were deleted.\n\nThe files generated in this render will not be published.`,\n};\n\nconst WriteToStoreError: ErrorData = {\n\terror: \"WriteToStoreError\",\n\tmessage: \"There was an error writing a file to the Store directory\",\n\thint: \"There may be an issue such as lack of space or permissions preventing the file from being written.\",\n};\n\nconst UploadSearchError: ErrorData = {\n\terror: \"UploadSearchError\",\n\tmessage: \"There was an error uploading content to API for search\",\n\thint: \"This happens if the API is currently not working or the credentials are incorrect.\",\n};\n\nconst CheckHealthError: ErrorData = {\n\terror: \"CheckHealthError\",\n\tmessage: \"There was a problem with environment vars configuration.\",\n\texpected: \"Some of the required environment variables are not set correctly or are missing\",\n\thint: \"Are the environment variables correctly set?\",\n};\n\nexport {\n\tArtifactError,\n\tCheckHealthError,\n\tErrorInSSGBuildProcess,\n\tLifecycleExecutionError,\n\tLoginError,\n\tNoDomainsFoundError,\n\tNoJSConfigFileFound,\n\tReadFromStoreError,\n\tReferenceFieldSourcesNotFoundError,\n\tRenderUUIDError,\n\tUploadSearchError,\n\tWriteToStoreError,\n\ttype ErrorsType,\n};\n", "import type { RenderInfo } from \"./global\";\n\nconst RENDER_MODE = {\n\tFROM_SCRATCH: \"FROM_SCRATCH\",\n\tINCREMENTAL: \"INCREMENTAL\",\n\tIDLE: \"IDLE\",\n\tERROR: \"ERROR\",\n\tCOMPLETED: \"COMPLETED\",\n} as const;\n\ntype RenderMode = (typeof RENDER_MODE)[keyof typeof RENDER_MODE];\n\nexport interface RenderModeTuple {\n\trenderMode: RenderMode;\n\treason?: string;\n}\n\ntype DomainLike = string;\n\ntype RenderDB = {\n\tgriddoVersion: string;\n\tbuildReportFileName: string;\n\tsortedDomains: DomainLike[];\n\tneedsRollbackOnError: 
boolean;\n\tcurrentRenderingDomain: string | null;\n\tpaths: {\n\t\troot: string;\n\t\tcx: string;\n\t\tssg: string;\n\t\tcxCache: string;\n\t\tcomponents: string;\n\t\texportsDir: string;\n\t\texportsDirBackup: string;\n\t};\n\tdomains: {\n\t\t[key: DomainLike]: {\n\t\t\trenderMode?: RenderMode;\n\t\t\tshouldBeRendered?: boolean;\n\t\t\tisRendering?: boolean;\n\t\t\trenderInfo?: RenderInfo;\n\t\t\trenderModeReason?: string;\n\t\t};\n\t};\n};\n\ninterface Report {\n\tauthControl:\n\t\t| {\n\t\t\t\tAuthorization: string;\n\t\t\t\t\"Cache-Control\": string;\n\t\t\t\tlang?: string | undefined;\n\t\t }\n\t\t| undefined;\n\tsites: {\n\t\tsiteId: number;\n\t\tpublishHashes: string[];\n\t\tsiteHash: string | null;\n\t\tunpublishHashes: string[];\n\t\tpublishPagesIds: number[];\n\t}[];\n}\n\nexport { RENDER_MODE, type RenderDB, type RenderMode, type Report };\n", "const GRIDDO_API_URL = process.env.GRIDDO_API_URL;\nconst GRIDDO_PUBLIC_API_URL = process.env.GRIDDO_PUBLIC_API_URL;\n\nconst AI_EMBEDDINGS = `${GRIDDO_API_URL}/ai/embeddings`;\nconst ALERT = `${GRIDDO_PUBLIC_API_URL}/alert`;\nconst DOMAINS = `${GRIDDO_API_URL}/domains`;\nconst GET_ALL = `${GRIDDO_API_URL}/sites/all`;\nconst GET_PAGE = `${GRIDDO_API_URL}/page`;\nconst LOGIN = `${GRIDDO_API_URL}/login_check`;\nconst RESET_RENDER = `${GRIDDO_API_URL}/debug/reset-render`;\nconst ROBOTS = `${GRIDDO_API_URL}/domains/robots`;\nconst SEARCH = `${GRIDDO_API_URL}/search`;\nconst SETTINGS = `${GRIDDO_API_URL}/settings`;\n\n// Site\nconst SITE_URI = `${GRIDDO_API_URL}/site/`;\nconst BUILD_END = [SITE_URI, \"/build/end\"];\nconst BUILD_START = [SITE_URI, \"/build/start\"];\nconst GET_PAGES = [SITE_URI, \"/pages?pagination=false\"];\nconst GET_REFERENCE_FIELD_DATA = [SITE_URI, \"/distributor\"];\nconst GET_SITEMAP = [SITE_URI, \"/sitemap\"];\nconst INFO = [SITE_URI, \"/all\"];\nconst LANGUAGES = [SITE_URI, \"/languages\"];\nconst SOCIALS = [SITE_URI, \"/socials\"];\n\nexport 
{\n\tAI_EMBEDDINGS,\n\tALERT,\n\tBUILD_END,\n\tBUILD_START,\n\tDOMAINS,\n\tGET_ALL,\n\tGET_PAGE,\n\tGET_PAGES,\n\tGET_REFERENCE_FIELD_DATA,\n\tGET_SITEMAP,\n\tINFO,\n\tLANGUAGES,\n\tLOGIN,\n\tRESET_RENDER,\n\tROBOTS,\n\tSEARCH,\n\tSETTINGS,\n\tSOCIALS,\n};\n", "import type { AuthHeaders } from \"../shared/types/api\";\n\nimport { throwError } from \"../core/errors\";\nimport { LOGIN } from \"../shared/endpoints\";\nimport { GRIDDO_BOT_PASSWORD, GRIDDO_BOT_USER } from \"../shared/envs\";\nimport { LoginError } from \"../shared/errors\";\n\nclass AuthService {\n\theaders: AuthHeaders | undefined;\n\n\tasync login() {\n\t\ttry {\n\t\t\tconst response = await fetch(LOGIN, {\n\t\t\t\tmethod: \"POST\",\n\t\t\t\theaders: { \"Content-Type\": \"application/json\", Connection: \"close\" },\n\t\t\t\tbody: JSON.stringify({\n\t\t\t\t\tusername: GRIDDO_BOT_USER,\n\t\t\t\t\tpassword: GRIDDO_BOT_PASSWORD,\n\t\t\t\t}),\n\t\t\t});\n\n\t\t\tif (!response.ok) {\n\t\t\t\tthrow new Error(\"Error while login in the API\");\n\t\t\t}\n\n\t\t\tconst { token } = await response.json();\n\t\t\tthis.headers = {\n\t\t\t\tAuthorization: `bearer ${token}`,\n\t\t\t\t\"Cache-Control\": \"no-store\",\n\t\t\t};\n\n\t\t\treturn this.headers;\n\t\t} catch (e) {\n\t\t\tthrowError(LoginError, e);\n\t\t}\n\t}\n}\n\nconst authService = new AuthService();\n\nexport { authService as AuthService };\n", "import fsp from \"node:fs/promises\";\nimport path from \"node:path\";\n\nimport { readDB } from \"./db\";\nimport { pathExists } from \"./fs\";\nimport { GriddoLog } from \"./GriddoLog\";\n\nasync function distRollback(domain: string) {\n\tconst data = await readDB();\n\tconst { exportsDir, exportsDirBackup } = data.paths;\n\n\tGriddoLog.info(`Cleaning exports dir for the domain ${domain}`);\n\tGriddoLog.verbose(`Deleting ${path.join(exportsDir, domain)}...`);\n\n\t// Probar rsync en lugar de borrar y copiar\n\n\t// 1 - Borrar dist corrupto\n\tawait fsp.rm(path.join(exportsDir, domain), {\n\t\trecursive: 
true,\n\t\tforce: true,\n\t});\n\n\t// 2 - Si hay backup, restaurar\n\tif (await pathExists(path.join(exportsDirBackup, domain))) {\n\t\tawait fsp.cp(path.join(exportsDirBackup, domain), path.join(exportsDir, domain), {\n\t\t\trecursive: true,\n\t\t});\n\n\t\tGriddoLog.info(`export-backup dir for the domain ${domain} found. Restoring before exit...`);\n\t\tGriddoLog.verbose(\n\t\t\t`Copying ${path.join(exportsDirBackup, domain)} -> ${path.join(exportsDir, domain)}...`,\n\t\t);\n\t} else {\n\t\tGriddoLog.info(\n\t\t\t\"No export-backup found, skipping rollback. Next render will create a new exports dir from scratch...\",\n\t\t);\n\t}\n}\n\nexport { distRollback };\n", "import type {\n\tAPIRequest,\n\tAPIResponses,\n\tGetAPI,\n\tPostAPI,\n\tPutAPI,\n\tShowApiErrorOptions,\n} from \"../shared/types/api\";\nimport type { Petition } from \"../shared/types/global\";\n\nimport crypto from \"node:crypto\";\nimport fsp from \"node:fs/promises\";\nimport path from \"node:path\";\n\nimport { RenderError } from \"../core/errors\";\nimport { pathExists } from \"../core/fs\";\nimport { GriddoLog } from \"../core/GriddoLog\";\nimport { addLogToBuffer } from \"../core/logger\";\nimport { brush } from \"../shared/npm-modules/brush\";\nimport { AuthService } from \"./auth\";\nimport { getRenderPathsHydratedWithDomainFromDB } from \"./render\";\n\n// Envs\nconst { env } = process;\nconst { RETRY_WAIT_SECONDS = \"4\", RETRY_ATTEMPTS = \"4\" } = env;\n\n/**\n * Make a GET/PUT/POST request to the Griddo API.\n *\n * @template T Response Type returned.\n * @returns {Promise<T>} A promise that is resolved with the data from the API response.\n *\n * @example\n *\tconst response = await requestAPI<Site>(\n *\t\t{ endpoint: \"...\", cacheKey: \"...\", ... 
},\n *\t\t\"get\",\n *\t\t\"...\"\n *\t);\n */\nasync function requestAPI<T extends APIResponses>(\n\tprops: APIRequest,\n\tmethod: string,\n\tappendToLog = \"\",\n): Promise<T> {\n\tconst {\n\t\tendpoint,\n\t\tbody,\n\t\tcacheKey = \"\",\n\t\tattempt = 1,\n\t\theaders,\n\t\tuseApiCacheDir = true,\n\t\tlogToFile = true,\n\t} = props;\n\tconst cacheOptions = { endpoint, body, headers, cacheKey };\n\n\t// Cache\n\tif (cacheKey && useApiCacheDir) {\n\t\tconst start = new Date();\n\t\tconst cacheData = await searchCacheData<T>(cacheOptions);\n\n\t\tif (cacheData) {\n\t\t\tif (logToFile) {\n\t\t\t\tconst siteId = getSafeSiteId(cacheData);\n\t\t\t\tconst siteIdMsg = siteId ? `site: ${siteId}` : \"\";\n\t\t\t\tconst duration = msToSec(Date.now() - start.getTime());\n\t\t\t\taddLogToBuffer(`${method} (cache) ${siteIdMsg} ${endpoint} - ${duration}s ${appendToLog}`);\n\t\t\t}\n\t\t\treturn cacheData;\n\t\t}\n\t}\n\n\t// Network\n\ttry {\n\t\tconst start = new Date();\n\n\t\t// Prepare fetch options\n\t\tconst fetchOptions: RequestInit = {\n\t\t\tmethod: method.toUpperCase(),\n\t\t\theaders: Object.assign({}, headers, AuthService.headers) as Record<string, string>,\n\t\t};\n\n\t\t// Add body for non-GET requests\n\t\tif (method.toLowerCase() !== \"get\" && body) {\n\t\t\tfetchOptions.body = JSON.stringify(body);\n\t\t\tif (!fetchOptions.headers) fetchOptions.headers = {};\n\t\t\t(fetchOptions.headers as Record<string, string>)[\"Content-Type\"] = \"application/json\";\n\t\t}\n\n\t\tconst response = await fetch(endpoint, fetchOptions);\n\n\t\t// Handle non-2xx responses\n\t\tif (!response.ok) {\n\t\t\tif (response.status === 404) {\n\t\t\t\t// @ts-expect-error page maybe will be 404\n\t\t\t\treturn null;\n\t\t\t}\n\n\t\t\tthrow new Error(`HTTP ${response.status}: ${response.statusText}`);\n\t\t}\n\n\t\tconst data: T = await response.json();\n\n\t\tif (logToFile) {\n\t\t\tconst siteId = getSafeSiteId(data);\n\t\t\tconst siteIdMsg = siteId ? 
`site: ${siteId}` : \"\";\n\t\t\tconst duration = msToSec(Date.now() - start.getTime());\n\t\t\taddLogToBuffer(`${method} (fetch) ${siteIdMsg} ${endpoint} - ${duration}s ${appendToLog}`);\n\t\t}\n\n\t\tif (useApiCacheDir) {\n\t\t\tawait saveCache(cacheOptions, data);\n\t\t}\n\n\t\treturn data;\n\t} catch (e) {\n\t\tconst error = e as Error;\n\n\t\tif (attempt > parseInt(RETRY_ATTEMPTS)) {\n\t\t\tGriddoLog.log(`\nMax attempts ${RETRY_ATTEMPTS} reached\n--------------------------------------\n- ${method.toUpperCase()} ${endpoint}\n- BODY: ${JSON.stringify(body)}\n- HEADERS: ${JSON.stringify(headers)}\n- ERROR: ${error.message}\n--------------------------------------\n`);\n\t\t\tthrow new RenderError(error);\n\t\t}\n\n\t\tshowApiError(error, {\n\t\t\tcallInfo: { endpoint, body },\n\t\t});\n\n\t\tGriddoLog.warn(`Waiting for retry: ${method}`, endpoint);\n\n\t\tawait delay(parseInt(RETRY_WAIT_SECONDS) * 1000);\n\n\t\treturn requestAPI<T>(\n\t\t\t{\n\t\t\t\tendpoint,\n\t\t\t\tbody,\n\t\t\t\theaders,\n\t\t\t\tcacheKey,\n\t\t\t\tattempt: attempt + 1,\n\t\t\t},\n\t\t\tmethod,\n\t\t\tappendToLog,\n\t\t);\n\t}\n}\n\n/**\n * Make a GET request to the Griddo API.\n *\n * @template T Response Type returned.\n * @returns A promise that is resolved with the data from the API response.\n */\nasync function getApi<T extends APIResponses>(props: GetAPI) {\n\treturn requestAPI<T>(props, \"get\");\n}\n\n/**\n * Make a PUT request to the Griddo API.\n *\n * @template T Response Type returned.\n * @returns A promise that is resolved with the data from the API response.\n */\nasync function putApi<T extends APIResponses>(props: PutAPI) {\n\treturn requestAPI<T>(props, \"put\");\n}\n\n/**\n * Make a POST request to the Griddo API.\n *\n * @template T Response Type returned.\n * @returns A promise that is resolved with the data from the API response.\n */\nasync function postApi<T extends APIResponses>(props: PostAPI) {\n\tconst { endpoint, body, headers } = props;\n\tconst 
referenceFieldBodyParams =\n\t\tendpoint.endsWith(\"/distributor\") &&\n\t\t`# ReferenceField body: ${JSON.stringify(body)} lang: ${JSON.stringify(headers?.lang)}`;\n\n\treturn requestAPI<T>(props, \"post\", referenceFieldBodyParams || \"\");\n}\n\n/**\n * Shows an API error through the terminal.\n */\nfunction showApiError(error: Error, options: ShowApiErrorOptions) {\n\tconst { message, stack } = error;\n\tconst { callInfo } = options;\n\tconst callInfoArray = [];\n\n\tfor (const item of Object.keys(callInfo) as (keyof typeof callInfo)[]) {\n\t\tcallInfoArray.push(\n\t\t\t`${item}: ${\n\t\t\t\ttypeof callInfo[item] === \"object\" ? JSON.stringify(callInfo[item]) : callInfo[item]\n\t\t\t}`,\n\t\t);\n\t}\n\n\t// Compose the errors output\n\tconst callInfoStr = callInfoArray.join(\"\\n\");\n\tconst errorDetailsStr = `${message}\\n${stack}`;\n\n\t// Print the error\n\tGriddoLog.warn(\n\t\tbrush.red(`\n=============\n\n{ Call info }\n${callInfoStr}\n\n{ Error details }\n${errorDetailsStr}\n\n=============\n`),\n\t);\n}\n\n/**\n * Return a siteID from a response object if exist\n * @param response A response object\n */\nfunction getSafeSiteId(response: APIResponses) {\n\tif (typeof response !== \"object\" || response === null || Array.isArray(response)) {\n\t\treturn undefined;\n\t}\n\n\treturn \"site\" in response && response.site ? 
response.site : undefined;\n}\n\n/**\n * Custom delay using the \"promise hack\",\n *\n * @param ms Amount of miliseconds to be delayed\n */\nfunction delay(ms: number): Promise<void> {\n\treturn new Promise((res) => setTimeout(res, ms));\n}\n\n/**\n * Converts milliseconds to seconds with a fixed number of decimals.\n *\n * @param ms The number in milliseconds.\n * @param fixed The amount of fixed decimals.\n * @returns The converted number in seconds with the fixed number of decimals.\n */\nfunction msToSec(ms: number, decimals = 3): number {\n\treturn Number.parseFloat((ms / 1000).toFixed(decimals));\n}\n\n/**\n * Generate a filename with a hash using a Petition object\n * @param petition An object\n */\nasync function generateFilenameWithHash(petition: Petition) {\n\tconst { __root } = await getRenderPathsHydratedWithDomainFromDB();\n\tconst apiCacheDir = path.join(__root, \"apiCache\");\n\n\tconst hashSum = crypto.createHash(\"sha256\");\n\thashSum.update(JSON.stringify(petition));\n\n\treturn `${apiCacheDir}/${hashSum.digest(\"hex\")}`;\n}\n\n/**\n * Save a file using a hash name.\n *\n * @param petition An object.\n * @param content Content to be saved.\n */\nasync function saveCache<T>(petition: Petition, content: T) {\n\tconst stringContent = typeof content === \"string\" ? 
content : JSON.stringify(content);\n\tconst filename = await generateFilenameWithHash(petition);\n\tconst filepath = path.dirname(filename);\n\n\tif (!(await pathExists(filepath))) {\n\t\tawait fsp.mkdir(filepath, { recursive: true });\n\t}\n\n\tawait fsp.writeFile(filename, stringContent, \"utf8\");\n}\n\n/**\n * Search in the `apiCache` dir for a file using the petition as hash generator.\n * Return the file content if found or null if not.\n *\n * @param petition An object\n */\nasync function searchCacheData<T>(petition: Petition) {\n\ttry {\n\t\tconst file = await generateFilenameWithHash(petition);\n\t\tconst fileContent = await fsp.readFile(file, \"utf8\");\n\t\tconst jsonData = JSON.parse(fileContent) as T;\n\n\t\treturn jsonData;\n\t} catch {\n\t\treturn null;\n\t}\n}\n\nexport { getApi as get, postApi as post, putApi as put };\n", "import type { RenderDB } from \"../shared/types/render\";\n\nimport fsp from \"node:fs/promises\";\nimport path from \"node:path\";\n\nimport {\n\tGRIDDO_BUILD_LOGS,\n\tGRIDDO_BUILD_LOGS_BUFFER_SIZE,\n\t// GRIDDO_BUILD_LOGS_TO_FILE,\n} from \"../shared/envs\";\nimport { readDB } from \"./db\";\nimport { GriddoLog } from \"./GriddoLog\";\n\nconst logBuffer: string[] = [];\nlet logFilePath: string | null = null;\nlet flushPromise: Promise<void> | null = null;\nconst LOG_BUFFER_FLUSH_THRESHOLD = GRIDDO_BUILD_LOGS_BUFFER_SIZE;\nconst LOG_FILENAME = \"render-detail-log.txt\";\nconst LOG_TO_FILE_IS_DISABLED = !GRIDDO_BUILD_LOGS; // || !GRIDDO_BUILD_LOGS_TO_FILE;\n\nlet dbData: RenderDB | null = null;\n\n// Universal data cache for this module...\nasync function getDBData() {\n\tif (!dbData) {\n\t\tdbData = await readDB();\n\t}\n\treturn dbData;\n}\n\nasync function initializeLogFile() {\n\tif (LOG_TO_FILE_IS_DISABLED) {\n\t\treturn;\n\t}\n\n\tconst data = await getDBData();\n\tlogFilePath = path.join(data.paths.root, LOG_FILENAME);\n\n\tawait fsp.rm(logFilePath, { force: true });\n}\n\nfunction addLogToBuffer(lineContent: string) 
{\n\tif (LOG_TO_FILE_IS_DISABLED) {\n\t\treturn;\n\t}\n\n\tlogBuffer.push(lineContent.toString());\n\n\tif (logBuffer.length >= LOG_BUFFER_FLUSH_THRESHOLD) {\n\t\t// Fire-and-forget flush to avoid blocking the main thread.\n\t\tflushLogsToFile().catch((error) => {\n\t\t\tGriddoLog.error(\"Background log flush failed:\", error);\n\t\t});\n\t}\n}\n\n/**\n * Vuelca el contenido del buffer de logs al archivo de logs en disco.\n * Si el buffer est\u00E1 vac\u00EDo, ya se est\u00E1 volcando, o no hay ruta de archivo, no hace nada.\n * Si ocurre un error al escribir, los logs se reinsertan al buffer para reintentar en el siguiente flush.\n */\nasync function flushLogsToFile() {\n\t// Si ya hay un volcado en curso, espera a que termine.\n\tif (flushPromise) {\n\t\tawait flushPromise;\n\t}\n\n\tif (logBuffer.length === 0 || !logFilePath) {\n\t\treturn;\n\t}\n\n\tconst performFlush = async () => {\n\t\tconst logsToFlush = [...logBuffer];\n\t\tlogBuffer.length = 0;\n\n\t\ttry {\n\t\t\tawait fsp.appendFile(logFilePath!, `${logsToFlush.join(\"\\n\")}\\n`);\n\t\t} catch (error) {\n\t\t\tlogBuffer.unshift(...logsToFlush);\n\t\t\tGriddoLog.error(\"Error flushing logs:\", error);\n\t\t\t// No relanzamos el error para no detener el \"fire-and-forget\"\n\t\t}\n\t};\n\n\tflushPromise = performFlush();\n\n\ttry {\n\t\tawait flushPromise;\n\t} finally {\n\t\tflushPromise = null;\n\t}\n}\n\n/**\n * Copia el archivo de log detallado de renderizado (\"render-detail-log.txt\")\n * desde el directorio ra\u00EDz del proyecto al directorio de logs del dominio en exports,\n * usando una marca de tiempo en el nombre de destino.\n * Si el archivo no existe, ignora el error salvo que sea distinto de ENOENT.\n *\n * @param domain - Nombre del dominio para el que se guarda el log.\n */\nasync function saveDetailRenderLog(domain: string) {\n\tawait flushLogsToFile();\n\n\tconst data = await getDBData();\n\n\tconst dateString = getFormattedDateTime();\n\tconst debugDir = 
path.join(data.paths.exportsDir, domain, \"logs\");\n\n\tawait fsp.mkdir(debugDir, { recursive: true });\n\n\tconst src = path.join(data.paths.root, LOG_FILENAME);\n\tconst dst = path.join(debugDir, `${dateString}-${LOG_FILENAME}`);\n\n\t// Move log to exports\n\ttry {\n\t\tawait fsp.cp(src, dst);\n\t} catch (error) {\n\t\t// It's possible the file doesn't exist if GRIDDO_BUILD_LOGS is false\n\t\t// or if no logs were ever added.\n\t\tif (error instanceof Error && \"code\" in error && error.code !== \"ENOENT\") {\n\t\t\tthrow error;\n\t\t}\n\t}\n}\n\nfunction getFormattedDateTime() {\n\tconst now = new Date();\n\tconst year = now.getFullYear();\n\tconst month = String(now.getMonth() + 1).padStart(2, \"0\");\n\tconst day = String(now.getDate()).padStart(2, \"0\");\n\n\tconst hours = String(now.getHours()).padStart(2, \"0\");\n\tconst minutes = String(now.getMinutes()).padStart(2, \"0\");\n\tconst seconds = String(now.getSeconds()).padStart(2, \"0\");\n\n\t// Formato recomendado para ordenaci\u00F3n de archivos: YYYY-MM-DD_HH-mm-ss\n\treturn `${year}-${month}-${day}_${hours}-${minutes}-${seconds}`;\n}\n\nexport { addLogToBuffer, flushLogsToFile, initializeLogFile, saveDetailRenderLog };\n", "import type { Domains } from \"../shared/types/global\";\n\nimport { throwError } from \"../core/errors\";\nimport { GriddoLog } from \"../core/GriddoLog\";\nimport { DOMAINS } from \"../shared/endpoints\";\nimport { NoDomainsFoundError } from \"../shared/errors\";\nimport { get } from \"./api\";\n\n/**\n * Return an array of domains name (string) of the current instance.\n */\nasync function getInstanceDomains() {\n\tconst domains = await get<Domains>({\n\t\tendpoint: DOMAINS,\n\t\tuseApiCacheDir: false,\n\t});\n\n\tif (!domains.length) {\n\t\tthrowError(NoDomainsFoundError);\n\t}\n\n\tGriddoLog.verbose(`getting domains names (${domains.length})`);\n\n\tconst filteredDomains = domains\n\t\t.filter(({ slug }) => !!slug)\n\t\t.map(({ slug }) => slug.replace(\"/\", 
\"\"));\n\n\treturn [...new Set(filteredDomains)];\n}\n\nexport { getInstanceDomains };\n", "import type { Core } from \"@griddo/core\";\nimport type {\n\tAllSitesReponse,\n\tEndPageInfoResponse,\n\tEndSiteRenderBody,\n\tLanguagesResponse,\n\tPageResponse,\n\tReferenceFieldBody,\n\tReferenceFieldResponse,\n\tSitemapAPIResponse,\n\tSocialsResponse,\n\tStartPageRenderResponse,\n} from \"../shared/types/api\";\nimport type { Site } from \"../shared/types/sites\";\n\nimport {\n\tBUILD_END,\n\tBUILD_START,\n\tGET_ALL,\n\tGET_PAGE,\n\tGET_REFERENCE_FIELD_DATA,\n\tGET_SITEMAP,\n\tINFO,\n\tLANGUAGES,\n\tSOCIALS,\n} from \"../shared/endpoints\";\nimport { get, post } from \"./api\";\n\n/**\n * Get a list of site objects.\n */\nasync function getAllSites(domain: string) {\n\treturn get<AllSitesReponse>({\n\t\tendpoint: `${GET_ALL}?domainSlug=${domain}`,\n\t});\n}\n\n/**\n * Get a list of site objects from a domain.\n */\nasync function getAllSitesFromDomain(domain: string) {\n\tconst allSites = await get<AllSitesReponse>({ endpoint: GET_ALL });\n\n\tif (allSites.length) {\n\t\tfor (const site of allSites) {\n\t\t\tconst { items } = await getSiteLanguages(site.id);\n\n\t\t\t// A\u00F1adimos la prop domains con el dominio \"cocinado\" con los\n\t\t\t// idiomas y teniendo en cuenta solo el dominio actual.\n\t\t\tsite.domains = items\n\t\t\t\t.filter(\n\t\t\t\t\t(item) =>\n\t\t\t\t\t\titem.domain && (item.domain.slug === domain || item.domain.slug === `/${domain}`),\n\t\t\t\t)\n\t\t\t\t.map((item) => ({ [item.id]: `${item.domain.slug}${item.path}` }));\n\t\t}\n\t}\n\n\treturn allSites.filter((site) => site.domains.length > 0);\n}\n\n/**\n * Fetch a page object from API.\n */\nasync function getPage(id: number, cacheKey: string) {\n\treturn get<PageResponse>({\n\t\tendpoint: `${GET_PAGE}/${id}`,\n\t\tcacheKey,\n\t});\n}\n\n/**\n * Get site info\n */\nasync function getSiteInfo(id: number, cacheKey = \"\") {\n\tconst [prefix, suffix] = INFO;\n\n\treturn get<Site>({\n\t\tendpoint: 
`${prefix}${id}${suffix}`,\n\t\tcacheKey,\n\t});\n}\n\nasync function getSiteLanguages(id: number, cacheKey = \"\") {\n\tconst [prefix, suffix] = LANGUAGES;\n\n\treturn get<LanguagesResponse>({\n\t\tendpoint: `${prefix}${id}${suffix}`,\n\t\tcacheKey,\n\t});\n}\n\nasync function startSiteRender(id: number) {\n\tconst [prefix, suffix] = BUILD_START;\n\n\treturn get<StartPageRenderResponse>({\n\t\tendpoint: `${prefix}${id}${suffix}`,\n\t});\n}\n\n/**\n * Send the end signal to API for a render site.\n */\nasync function endSiteRender(id: number, body: EndSiteRenderBody) {\n\tconst [prefix, suffix] = BUILD_END;\n\n\tawait post<EndPageInfoResponse>({\n\t\tendpoint: `${prefix}${id}${suffix}`,\n\t\tbody,\n\t\tuseApiCacheDir: false,\n\t\tlogToFile: false,\n\t});\n}\n\nasync function getReferenceFieldSiteData(\n\tpage: Core.Page,\n\tbody: ReferenceFieldBody,\n\tcacheKey: string,\n\tdataSiteId?: number,\n\tdataLangID?: number,\n) {\n\tconst [prefix, suffix] = GET_REFERENCE_FIELD_DATA;\n\tconst site = dataSiteId || page.site;\n\tconst lang = dataLangID || page.language;\n\n\treturn post<ReferenceFieldResponse>({\n\t\tendpoint: `${prefix}${site}${suffix}`,\n\t\tbody,\n\t\theaders: { lang },\n\t\tcacheKey,\n\t});\n}\n\nasync function getSitemap(options: { siteId: number; headers?: Record<string, string> }) {\n\tconst { siteId, headers } = options;\n\tconst [prefix, suffix] = GET_SITEMAP;\n\n\treturn get<SitemapAPIResponse>({\n\t\tendpoint: `${prefix}${siteId}${suffix}`,\n\t\theaders,\n\t});\n}\n\nasync function getSiteSocials(id: number, cacheKey = \"\") {\n\tconst [prefix, suffix] = SOCIALS;\n\n\treturn get<SocialsResponse>({\n\t\tendpoint: `${prefix}${id}${suffix}`,\n\t\tcacheKey,\n\t});\n}\n\nexport {\n\tendSiteRender,\n\tgetAllSites,\n\tgetAllSitesFromDomain,\n\tgetPage,\n\tgetReferenceFieldSiteData,\n\tgetSiteInfo,\n\tgetSiteLanguages,\n\tgetSitemap,\n\tgetSiteSocials,\n\tstartSiteRender,\n};\n"],
5
+ "mappings": "oeAAA,IAAAA,GAAgB,+BAChBC,GAAiB,wBCCjB,IAAAC,GAAiB,wBCEjB,IAAAC,EAAiB,wBCFjB,IAAAC,EAAgB,+BAChBC,EAAiB,wBCHjB,IAAAC,EAAiB,wBCAjB,IAAAC,EAAe,sBAEf,IAAAC,EAAiB,wBACjBC,EAAoB,2BACpBC,EAA8B,oBA0BxBC,EAAUC,GACfA,aAAqB,OAAM,iBAAcA,CAAS,EAAIA,EA0ChD,SAASC,EAAWC,EAAcC,EAAmB,CAAC,EAAuB,CACnF,GAAM,CAAE,IAAAC,EAAM,EAAAC,QAAQ,IAAI,EAAG,KAAAC,EAAO,OAAQ,OAAQC,CAAa,EAAIJ,EAEjEK,EAAY,EAAAC,QAAK,QAAQC,EAAON,CAAG,GAAK,EAAE,EACxC,CAAE,KAAAO,CAAK,EAAI,EAAAF,QAAK,MAAMD,CAAS,EAC/BI,EAASL,EAAe,EAAAE,QAAK,QAAQD,EAAWE,EAAOH,CAAY,CAAE,EAAII,EACzEE,EAAiB,EAAAJ,QAAK,WAAWP,CAAI,EAE3C,OAAa,CACZ,IAAMY,EAAWD,EAAiBX,EAAO,EAAAO,QAAK,KAAKD,EAAWN,CAAI,EAClE,GAAI,CACH,IAAMa,EAAQ,EAAAC,QAAG,SAASF,EAAU,CAAE,eAAgB,EAAM,CAAC,EAC7D,GAAKR,IAAS,QAAUS,GAAO,OAAO,GAAOT,IAAS,aAAeS,GAAO,YAAY,EACvF,OAAOD,CAET,MAAQ,CAGR,CAEA,GAAIN,IAAcI,GAAUJ,IAAcG,EACzC,MAGDH,EAAY,EAAAC,QAAK,QAAQD,CAAS,CACnC,CACD,CDzFA,SAASS,EAAWC,EAAqC,CACxD,GAAM,CAAE,IAAAC,CAAI,EAAID,GAAW,CAAC,EACtBE,EAAWC,EAAW,eAAgB,CAAE,IAAAF,CAAI,CAAC,EACnD,OAAOC,GAAY,EAAAE,QAAK,QAAQF,CAAQ,CACzC,CEdA,GAAM,CAAE,IAAAG,CAAI,EAAI,QAKhB,SAASC,EAAYC,EAAyB,CAC7C,GAAI,CAACA,EAAO,MAAO,GAEnB,OAAQA,EAAM,KAAK,EAAE,YAAY,EAAG,CACnC,IAAK,IACL,IAAK,OACL,IAAK,MACL,IAAK,IACL,IAAK,KACJ,MAAO,GACR,QACC,MAAO,EACT,CACD,CAGA,IAAMC,GAAiBH,EAAI,gBAAkBA,EAAI,QAC3CI,GAAwBJ,EAAI,uBAAyBA,EAAI,eACzDK,GAAkBL,EAAI,UAAYA,EAAI,gBACtCM,GAAsBN,EAAI,aAAeA,EAAI,oBAG7CO,GAA+B,OAAO,SAASP,EAAI,8BAAgC,IAAI,EACvFQ,GAA2BP,EAAYD,EAAI,wBAAwB,EACnES,GAA2BR,EAAYD,EAAI,wBAAwB,EACnEU,EAAoBT,EAAYD,EAAI,iBAAiB,EACrDW,GAA4BV,EAAYD,EAAI,yBAAyB,EACrEY,GAAgC,OAAO,SAASZ,EAAI,+BAAiC,KAAK,EAC1Fa,GAA0BZ,EAAYD,EAAI,uBAAuB,EACjEc,GAAwBb,EAAYD,EAAI,qBAAqB,EAC7De,GAAsBf,EAAI,qBAAuBA,EAAI,aACrDgB,GAA4BhB,EAAI,2BAA6BA,EAAI,mBACjEiB,GAAuBhB,EAAYD,EAAI,oBAAoB,EAC3DkB,GAAsBjB,EAAYD,EAAI,mBAAmB,EACzDmB,GAAyBlB,EAAYD,EAAI,sBAAsB,EAC/DoB,GAA6BnB,EAAYD,EAAI,0BAA0B,ECjC7E,IAAMqB,GAAQ,UACRC,GAAQ,CACb,MAAO,WACP,IAAK,WACL,MAAO,WACP,OAAQ,WACR,KAAM,WACN,QAAS,WACT,KAAM,WACN,MAAO,WACP,KAAM,WACN,KAAM,UACN,IAAK,SACN,EAMMC,EAAQ,
CAAC,EAEf,QAAWC,KAASF,GAAO,CAC1B,IAAMG,EAAMD,EACZD,EAAME,CAAG,EAAKC,GAA0B,GAAGJ,GAAMG,CAAG,CAAC,GAAGC,CAAI,GAAGL,EAAK,EACrE,CCxBA,IAAMM,EAAN,MAAMC,CAAU,CAEP,aAAc,CAAC,CAEvB,OAAc,WAAWC,EAAsB,CAC1CC,IACH,QAAQ,IAAIC,EAAM,OAAO,SAAS,EAAGA,EAAM,IAAIF,EAAI,KAAK,GAAG,CAAC,CAAC,CAE/D,CAEA,OAAc,SAASA,EAAsB,CACxCG,GACHJ,EAAU,IAAI,GAAGC,CAAG,CAEtB,CAEA,OAAc,QAAQA,EAAsB,CAC3C,QAAQ,IAAI,GAAGE,EAAM,KAAK,MAAM,CAAC,IAAIF,EAAI,KAAK,GAAG,CAAC,EAAE,CACrD,CAEA,OAAc,WAAWA,EAAsB,CAC9C,QAAQ,IAAI,GAAGE,EAAM,MAAM,SAAS,CAAC,IAAIF,EAAI,KAAK,GAAG,CAAC,EAAE,CACzD,CAEA,OAAc,SAASA,EAAsB,CAC5C,QAAQ,MAAM,GAAGE,EAAM,IAAI,OAAO,CAAC,IAAIF,EAAI,KAAK,GAAG,CAAC,EAAE,CACvD,CAEA,OAAc,QAAQA,EAAsB,CAC3C,QAAQ,KAAK,GAAGE,EAAM,OAAO,MAAM,CAAC,IAAIF,EAAI,KAAK,GAAG,CAAC,EAAE,CACxD,CAEA,OAAc,OAAOI,EAA4C,CAChE,QAAQ,IAAI,GAAGA,CAAI,CACpB,CACD,ELlCA,IAAMC,GAAOC,EAAW,CAAE,IAAK,EAAAC,QAAK,QAAQ,UAAW,UAAU,CAAE,CAAC,GAAK,GACnEC,GAAQ,EAAAD,QAAK,KAAKF,GAAM,eAAe,EACvCI,GAAa,EAAAF,QAAK,KAAKC,GAAO,SAAS,EAE7C,eAAeE,EAAOC,EAAe,GAAI,CACxC,IAAMC,EAAOD,GAAgBF,GAC7B,GAAI,CACH,OAAO,KAAK,MAAM,MAAM,EAAAI,QAAI,SAASD,EAAM,OAAO,CAAC,CACpD,OAASE,EAAO,CACf,MAAAC,EAAU,MAAM,6BAA6BH,CAAI,IAAKE,CAAK,EACrDA,CACP,CACD,CAEA,eAAeE,EAAQC,EAAoBN,EAAe,GAAI,CAC7D,IAAMC,EAAOD,GAAgBF,GAC7B,GAAI,CACH,MAAM,EAAAI,QAAI,UAAUD,EAAM,KAAK,UAAUK,EAAU,KAAM,GAAI,CAAC,CAC/D,OAASH,EAAO,CACf,MAAAC,EAAU,MAAM,8BAA8BH,CAAI,IAAKE,CAAK,EACtDA,CACP,CACD,CM5BA,IAAAI,GAAgB,+BCyBhB,IAAMC,EAA2B,CAChC,MAAO,gBACP,QAAS,uCACT,SACC,qGACD,KAAM,+FACP,EAaA,IAAMC,GAAwB,CAC7B,MAAO,aACP,QAAS,4CACT,SAAU,oFACX,EAEMC,GAAiC,CACtC,MAAO,sBACP,QAAS,uEACT,SACC,4HACD,KAAM,6CACP,EDkQA,eAAeC,EAAWC,EAAa,CACtC,GAAI,CACH,aAAM,GAAAC,QAAI,OAAOD,CAAG,EACb,EACR,MAAQ,CACP,MAAO,EACR,CACD,CEjUA,IAAME,EAAc,CACnB,aAAc,eACd,YAAa,cACb,KAAM,OACN,MAAO,QACP,UAAW,WACZ,ECRA,IAAMC,EAAiB,QAAQ,IAAI,eAC7BC,GAAwB,QAAQ,IAAI,sBAEpCC,GAAgB,GAAGF,CAAc,iBACjCG,GAAQ,GAAGF,EAAqB,SAChCG,GAAU,GAAGJ,CAAc,WAC3BK,GAAU,GAAGL,CAAc,aAC3BM,GAAW,GAAGN,CAAc,QAC5BO,GAAQ,GAAGP,CAAc,eACzBQ,GAAe,GAAGR,CAAc,sBAChCS,GAAS,GAAGT,CAAc,kBAC1BU,GAAS,GAA
GV,CAAc,UAC1BW,GAAW,GAAGX,CAAc,YAG5BY,GAAW,GAAGZ,CAAc,SAC5Ba,GAAY,CAACD,GAAU,YAAY,ECTzC,IAAME,EAAN,KAAkB,CACjB,QAEA,MAAM,OAAQ,CACb,GAAI,CACH,IAAMC,EAAW,MAAM,MAAMC,GAAO,CACnC,OAAQ,OACR,QAAS,CAAE,eAAgB,mBAAoB,WAAY,OAAQ,EACnE,KAAM,KAAK,UAAU,CACpB,SAAUC,GACV,SAAUC,EACX,CAAC,CACF,CAAC,EAED,GAAI,CAACH,EAAS,GACb,MAAM,IAAI,MAAM,8BAA8B,EAG/C,GAAM,CAAE,MAAAI,CAAM,EAAI,MAAMJ,EAAS,KAAK,EACtC,YAAK,QAAU,CACd,cAAe,UAAUI,CAAK,GAC9B,gBAAiB,UAClB,EAEO,KAAK,OACb,OAAS,EAAG,CACXC,EAAWC,GAAY,CAAC,CACzB,CACD,CACD,EAEMC,EAAc,IAAIR,EX0GxB,eAAeS,GAAoBC,EAA0C,CAC5E,IAAMC,EAAK,MAAMC,EAAO,EAExB,GAAI,CAACD,EAAG,QAAQD,CAAM,EACrB,MAAM,IAAI,MAAMG,EAAM,IAAI,qBAAqBH,CAAM,kBAAkB,CAAC,EAGzE,GAAI,CAACC,EAAG,QAAQD,CAAM,EAAE,WACvB,MAAM,IAAI,MAAMG,EAAM,IAAI,+CAA+CH,CAAM,EAAE,CAAC,EAGnF,MAAO,CACN,WAAYC,EAAG,QAAQD,CAAM,EAAE,WAC/B,OAAQC,EAAG,QAAQD,CAAM,EAAE,gBAC5B,CACD,CAEA,eAAeI,EAAuCC,EAGnD,CACF,GAAM,CAAE,OAAAL,EAAQ,WAAAM,CAAW,EAAID,GAAW,CAAC,EAGrCE,GADK,MAAML,EAAOI,CAAU,GACjB,MAEjB,MAAO,CACN,OAAQC,EAAM,KACd,QAAS,EAAAC,QAAK,KAAKD,EAAM,QAASP,GAAU,EAAE,EAC9C,aAAcO,EAAM,WACpB,KAAMA,EAAM,GACZ,QAASA,EAAM,WACf,UAAW,EAAAC,QAAK,KAAKD,EAAM,WAAYP,GAAU,EAAE,EACnD,iBAAkB,EAAAQ,QAAK,KAAKD,EAAM,iBAAkBP,GAAU,EAAE,EAChE,MAAOO,EAAM,IACb,eAAgB,EAAAC,QAAK,KAAKD,EAAM,WAAYP,GAAU,GAAI,MAAM,CACjE,CACD,CAEA,eAAeS,IAA0B,CACxC,IAAMR,EAAK,MAAMC,EAAO,EACxB,MAAO,CACN,cAAeD,EAAG,cAClB,oBAAqBA,EAAG,mBACzB,CACD,CY7LA,IAAAS,EAAgB,+BAChBC,EAAiB,wBAMjB,eAAeC,GAAaC,EAAgB,CAC3C,IAAMC,EAAO,MAAMC,EAAO,EACpB,CAAE,WAAAC,EAAY,iBAAAC,CAAiB,EAAIH,EAAK,MAE9CI,EAAU,KAAK,uCAAuCL,CAAM,EAAE,EAC9DK,EAAU,QAAQ,YAAY,EAAAC,QAAK,KAAKH,EAAYH,CAAM,CAAC,KAAK,EAKhE,MAAM,EAAAO,QAAI,GAAG,EAAAD,QAAK,KAAKH,EAAYH,CAAM,EAAG,CAC3C,UAAW,GACX,MAAO,EACR,CAAC,EAGG,MAAMQ,EAAW,EAAAF,QAAK,KAAKF,EAAkBJ,CAAM,CAAC,GACvD,MAAM,EAAAO,QAAI,GAAG,EAAAD,QAAK,KAAKF,EAAkBJ,CAAM,EAAG,EAAAM,QAAK,KAAKH,EAAYH,CAAM,EAAG,CAChF,UAAW,EACZ,CAAC,EAEDK,EAAU,KAAK,oCAAoCL,CAAM,kCAAkC,EAC3FK,EAAU,QACT,WAAW,EAAAC,QAAK,KAAKF,EAAkBJ,CAAM,CAAC,OAAO,EAAAM,QAAK,KAAKH,EAAYH,CAAM,CAAC,KACnF,GAEAK,EAAU,KACT,sGACD
,CAEF,CbpBO,IAAMI,EAAN,cAA0B,KAAM,CACtC,YAAYC,EAAyB,CACpC,MAAMA,aAAyB,MAAQA,EAAc,QAAU,OAAOA,CAAa,CAAC,EAEpF,KAAK,KAAO,kBACZ,KAAK,MAAQA,aAAyB,MAAQA,EAAc,MAAQ,EACrE,CACD,EAKA,SAASC,EAAWC,EAAoBC,EAAwB,CAC/D,GAAM,CAAE,MAAAC,EAAO,QAAAC,EAAS,SAAAC,EAAU,KAAAC,CAAK,EAAIL,EAErCM,EAAaC,EAAU,IAAIC,EAAM,IAAI,KAAKN,CAAK,IAAI,CAAC,EACpDO,EAAY,CAACL,EAAUC,CAAI,EAAE,OAAO,OAAO,EAAE,KAAK;AAAA,CAAI,EAE5D,MAAAE,EAAU,IAAI;AAAA,EACbD,CAAU;AAAA,EACVH,CAAO;AAAA,EACPM,CAAS;AAAA;AAAA,EAETD,EAAM,IAAI,OAAO,CAAC;AAAA,EAClB,KAAK,UAAUP,EAAO,KAAM,CAAC,CAAC,EAAE,EAE3B,IAAIJ,EAAYI,CAAK,CAC5B,CAEA,eAAeS,GAAoBC,EAAsB,CACxD,GAAI,CACH,MAAMA,EAAG,CACV,OAAST,EAAO,CACXA,aAAiBL,EACpBU,EAAU,MAAM,6BAA6B,EACnCL,aAAiB,MAC3BK,EAAU,MAAML,EAAM,OAAO,EAE7BK,EAAU,MAAM,gCAAgCL,CAAK,EAAE,EAGxD,GAAI,CACH,GAAM,CAAE,OAAAU,CAAO,EAAI,MAAMC,EAAuC,EAC1DC,EAAO,MAAMC,EAAO,EACtBD,EAAK,sBACRP,EAAU,KAAK,yBAAyB,EACxCA,EAAU,QAAQ,YAAY,GAAAS,QAAK,KAAKJ,EAAQ,SAAS,CAAC,KAAK,EAE/D,MAAMK,GAAaH,EAAK,sBAAuB,GAE/CP,EAAU,KAAK,iCAAiC,CAElD,MAAa,CACZA,EAAU,KAAK,+CAA+C,CAC/D,CAEA,IAAMO,EAAO,MAAMC,EAAO,EAC1B,MAAAD,EAAK,QAAQA,EAAK,sBAAuB,EAAE,YAAc,GACzDA,EAAK,QAAQA,EAAK,sBAAuB,EAAE,WAAa,QACxD,MAAMI,EAAQJ,CAAI,EACZZ,CACP,CACD,CcrEA,IAAAiB,GAAmB,0BACnBC,EAAgB,+BAChBC,EAAiB,wBCVjB,IAAAC,GAAgB,+BAWhB,IAAMC,EAAsB,CAAC,EACzBC,GAA6B,KAC7BC,EAAqC,KACnCC,GAA6BC,GAEnC,IAAMC,GAA0B,CAACC,EAuBjC,SAASC,EAAeC,EAAqB,CACxCC,KAIJC,EAAU,KAAKF,EAAY,SAAS,CAAC,EAEjCE,EAAU,QAAUC,IAEvBC,GAAgB,EAAE,MAAOC,GAAU,CAClCC,EAAU,MAAM,+BAAgCD,CAAK,CACtD,CAAC,EAEH,CAOA,eAAeD,IAAkB,CAMhC,GAJIG,GACH,MAAMA,EAGHL,EAAU,SAAW,GAAK,CAACM,GAC9B,OAgBDD,GAbqB,SAAY,CAChC,IAAME,EAAc,CAAC,GAAGP,CAAS,EACjCA,EAAU,OAAS,EAEnB,GAAI,CACH,MAAM,GAAAQ,QAAI,WAAWF,GAAc,GAAGC,EAAY,KAAK;AAAA,CAAI,CAAC;AAAA,CAAI,CACjE,OAASJ,EAAO,CACfH,EAAU,QAAQ,GAAGO,CAAW,EAChCH,EAAU,MAAM,uBAAwBD,CAAK,CAE9C,CACD,GAE4B,EAE5B,GAAI,CACH,MAAME,CACP,QAAE,CACDA,EAAe,IAChB,CACD,CDpEA,GAAM,CAAE,IAAAI,EAAI,EAAI,QACV,CAAE,mBAAAC,GAAqB,IAAK,eAAAC,GAAiB,GAAI,EAAIF,GAe3D,eAAeG,EACdC,EACAC,EACAC,EAAc,GACD,CACb,GAAM,CACL,SAAAC,EACA,KAAAC,EA
CA,SAAAC,EAAW,GACX,QAAAC,EAAU,EACV,QAAAC,EACA,eAAAC,EAAiB,GACjB,UAAAC,EAAY,EACb,EAAIT,EACEU,EAAe,CAAE,SAAAP,EAAU,KAAAC,EAAM,QAAAG,EAAS,SAAAF,CAAS,EAGzD,GAAIA,GAAYG,EAAgB,CAC/B,IAAMG,EAAQ,IAAI,KACZC,EAAY,MAAMC,GAAmBH,CAAY,EAEvD,GAAIE,EAAW,CACd,GAAIH,EAAW,CACd,IAAMK,EAASC,GAAcH,CAAS,EAChCI,EAAYF,EAAS,SAASA,CAAM,GAAK,GACzCG,EAAWC,GAAQ,KAAK,IAAI,EAAIP,EAAM,QAAQ,CAAC,EACrDQ,EAAe,GAAGlB,CAAM,YAAYe,CAAS,IAAIb,CAAQ,MAAMc,CAAQ,KAAKf,CAAW,EAAE,CAC1F,CACA,OAAOU,CACR,CACD,CAGA,GAAI,CACH,IAAMD,EAAQ,IAAI,KAGZS,EAA4B,CACjC,OAAQnB,EAAO,YAAY,EAC3B,QAAS,OAAO,OAAO,CAAC,EAAGM,EAASc,EAAY,OAAO,CACxD,EAGIpB,EAAO,YAAY,IAAM,OAASG,IACrCgB,EAAa,KAAO,KAAK,UAAUhB,CAAI,EAClCgB,EAAa,UAASA,EAAa,QAAU,CAAC,GAClDA,EAAa,QAAmC,cAAc,EAAI,oBAGpE,IAAME,EAAW,MAAM,MAAMnB,EAAUiB,CAAY,EAGnD,GAAI,CAACE,EAAS,GAAI,CACjB,GAAIA,EAAS,SAAW,IAEvB,OAAO,KAGR,MAAM,IAAI,MAAM,QAAQA,EAAS,MAAM,KAAKA,EAAS,UAAU,EAAE,CAClE,CAEA,IAAMC,EAAU,MAAMD,EAAS,KAAK,EAEpC,GAAIb,EAAW,CACd,IAAMK,EAASC,GAAcQ,CAAI,EAC3BP,GAAYF,EAAS,SAASA,CAAM,GAAK,GACzCG,GAAWC,GAAQ,KAAK,IAAI,EAAIP,EAAM,QAAQ,CAAC,EACrDQ,EAAe,GAAGlB,CAAM,YAAYe,EAAS,IAAIb,CAAQ,MAAMc,EAAQ,KAAKf,CAAW,EAAE,CAC1F,CAEA,OAAIM,GACH,MAAMgB,GAAUd,EAAca,CAAI,EAG5BA,CACR,OAASE,EAAG,CACX,IAAMC,EAAQD,EAEd,GAAInB,EAAU,SAASR,EAAc,EACpC,MAAA6B,EAAU,IAAI;AAAA,eACF7B,EAAc;AAAA;AAAA,IAEzBG,EAAO,YAAY,CAAC,IAAIE,CAAQ;AAAA,UAC1B,KAAK,UAAUC,CAAI,CAAC;AAAA,aACjB,KAAK,UAAUG,CAAO,CAAC;AAAA,WACzBmB,EAAM,OAAO;AAAA;AAAA,CAEvB,EACQ,IAAIE,EAAYF,CAAK,EAG5B,OAAAG,GAAaH,EAAO,CACnB,SAAU,CAAE,SAAAvB,EAAU,KAAAC,CAAK,CAC5B,CAAC,EAEDuB,EAAU,KAAK,sBAAsB1B,CAAM,GAAIE,CAAQ,EAEvD,MAAM2B,GAAM,SAASjC,EAAkB,EAAI,GAAI,EAExCE,EACN,CACC,SAAAI,EACA,KAAAC,EACA,QAAAG,EACA,SAAAF,EACA,QAASC,EAAU,CACpB,EACAL,EACAC,CACD,CACD,CACD,CAQA,eAAe6B,EAA+B/B,EAAe,CAC5D,OAAOD,EAAcC,EAAO,KAAK,CAClC,CAkBA,eAAegC,GAAgCC,EAAgB,CAC9D,GAAM,CAAE,SAAAC,EAAU,KAAAC,EAAM,QAAAC,CAAQ,EAAIH,EAC9BI,EACLH,EAAS,SAAS,cAAc,GAChC,0BAA0B,KAAK,UAAUC,CAAI,CAAC,UAAU,KAAK,UAAUC,GAAS,IAAI,CAAC,GAEtF,OAAOE,EAAcL,EAAO,OAAQI,GAA4B,EAAE,CACnE,CAKA,SAASE,GAAaC,EAAcC,EAA8B,C
ACjE,GAAM,CAAE,QAAAC,EAAS,MAAAC,CAAM,EAAIH,EACrB,CAAE,SAAAI,CAAS,EAAIH,EACfI,EAAgB,CAAC,EAEvB,QAAWC,KAAQ,OAAO,KAAKF,CAAQ,EACtCC,EAAc,KACb,GAAGC,CAAI,KACN,OAAOF,EAASE,CAAI,GAAM,SAAW,KAAK,UAAUF,EAASE,CAAI,CAAC,EAAIF,EAASE,CAAI,CACpF,EACD,EAID,IAAMC,EAAcF,EAAc,KAAK;AAAA,CAAI,EACrCG,EAAkB,GAAGN,CAAO;AAAA,EAAKC,CAAK,GAG5CM,EAAU,KACTC,EAAM,IAAI;AAAA;AAAA;AAAA;AAAA,EAIVH,CAAW;AAAA;AAAA;AAAA,EAGXC,CAAe;AAAA;AAAA;AAAA,CAGhB,CACA,CACD,CAMA,SAASG,GAAcC,EAAwB,CAC9C,GAAI,SAAOA,GAAa,UAAYA,IAAa,MAAQ,MAAM,QAAQA,CAAQ,GAI/E,MAAO,SAAUA,GAAYA,EAAS,KAAOA,EAAS,KAAO,MAC9D,CAOA,SAASC,GAAMC,EAA2B,CACzC,OAAO,IAAI,QAASC,GAAQ,WAAWA,EAAKD,CAAE,CAAC,CAChD,CASA,SAASE,GAAQF,EAAYG,EAAW,EAAW,CAClD,OAAO,OAAO,YAAYH,EAAK,KAAM,QAAQG,CAAQ,CAAC,CACvD,CAMA,eAAeC,GAAyBC,EAAoB,CAC3D,GAAM,CAAE,OAAAC,CAAO,EAAI,MAAMC,EAAuC,EAC1DC,EAAc,EAAAC,QAAK,KAAKH,EAAQ,UAAU,EAE1CI,EAAU,GAAAC,QAAO,WAAW,QAAQ,EAC1C,OAAAD,EAAQ,OAAO,KAAK,UAAUL,CAAQ,CAAC,EAEhC,GAAGG,CAAW,IAAIE,EAAQ,OAAO,KAAK,CAAC,EAC/C,CAQA,eAAeE,GAAaP,EAAoBQ,EAAY,CAC3D,IAAMC,EAAgB,OAAOD,GAAY,SAAWA,EAAU,KAAK,UAAUA,CAAO,EAC9EE,EAAW,MAAMX,GAAyBC,CAAQ,EAClDW,EAAW,EAAAP,QAAK,QAAQM,CAAQ,EAEhC,MAAME,EAAWD,CAAQ,GAC9B,MAAM,EAAAE,QAAI,MAAMF,EAAU,CAAE,UAAW,EAAK,CAAC,EAG9C,MAAM,EAAAE,QAAI,UAAUH,EAAUD,EAAe,MAAM,CACpD,CAQA,eAAeK,GAAmBd,EAAoB,CACrD,GAAI,CACH,IAAMe,EAAO,MAAMhB,GAAyBC,CAAQ,EAC9CgB,EAAc,MAAM,EAAAH,QAAI,SAASE,EAAM,MAAM,EAGnD,OAFiB,KAAK,MAAMC,CAAW,CAGxC,MAAQ,CACP,OAAO,IACR,CACD,CEpSA,eAAeC,IAAqB,CACnC,IAAMC,EAAU,MAAMC,EAAa,CAClC,SAAUC,GACV,eAAgB,EACjB,CAAC,EAEIF,EAAQ,QACZG,EAAWC,EAAmB,EAG/BC,EAAU,QAAQ,0BAA0BL,EAAQ,MAAM,GAAG,EAE7D,IAAMM,EAAkBN,EACtB,OAAO,CAAC,CAAE,KAAAO,CAAK,IAAM,CAAC,CAACA,CAAI,EAC3B,IAAI,CAAC,CAAE,KAAAA,CAAK,IAAMA,EAAK,QAAQ,IAAK,EAAE,CAAC,EAEzC,MAAO,CAAC,GAAG,IAAI,IAAID,CAAe,CAAC,CACpC,CC2EA,eAAeE,GAAcC,EAAYC,EAAyB,CACjE,GAAM,CAACC,EAAQC,CAAM,EAAIC,GAEzB,MAAMC,GAA0B,CAC/B,SAAU,GAAGH,CAAM,GAAGF,CAAE,GAAGG,CAAM,GACjC,KAAAF,EACA,eAAgB,GAChB,UAAW,EACZ,CAAC,CACF,ClB9FA,eAAeK,GAAgBC,EAI5B,CACF,GAAM,CAAE,oBAAAC,EAAqB,OAAAC,EAAQ,eAAAC,CAAe,EAAIH,EAClDI,EA
AkB,GAAAC,QAAK,QAAQF,EAAgBD,EAAQ,OAAQD,CAAmB,EAExF,OAAM,MAAMK,EAAWF,CAAe,IACrCG,EAAU,KAAK,sBAAsBH,CAAe,cAAc,EAClEI,EAAWC,CAAa,GAGlB,KAAK,MAAM,MAAM,GAAAC,QAAI,SAASN,EAAiB,OAAO,CAAC,CAC/D,CAEA,eAAeO,IAAY,CAC1B,GAAM,CAAE,oBAAAV,CAAoB,EAAI,MAAMW,GAAwB,EACxD,CAAE,QAAAC,CAAQ,EAAI,MAAMC,EAAuC,EAEjE,MAAMC,EAAY,MAAM,EAExB,IAAIC,EAAU,CAAC,EAEf,GAAIC,GAA0B,CAC7B,GAAM,CAACC,CAAU,EAAI,QAAQ,KAAK,MAAM,CAAC,EACzCF,EAAQ,KAAKE,CAAU,CACxB,MACCX,EAAU,KAAK,oBAAoB,EACnCS,EAAU,MAAMG,GAAmB,EAGpC,QAAWjB,KAAUc,EAAS,CAC7B,GAAM,CAAE,WAAAI,EAAY,OAAAC,CAAO,EAAI,MAAMC,GAAoBpB,CAAM,EAE/D,GAAIkB,IAAeG,EAAY,KAAM,CACpChB,EAAU,KACT,0BAA0BL,CAAM,kEAAkEmB,CAAM,GACzG,EACA,MACD,CAEA,IAAMG,EAAS,MAAMzB,GAAgB,CACpC,OAAAG,EACA,eAAgBW,EAChB,oBAAAZ,CACD,CAAC,EAEDM,EAAU,KAAK,0BAA0BL,CAAM,QAAQ,EAEvD,QAAWuB,KAAQD,EAAO,MAAO,CAChCC,EAAK,cAAgB,CAAC,GAAG,IAAI,IAAIA,EAAK,aAAa,CAAC,EACpDA,EAAK,gBAAkB,CAAC,GAAG,IAAI,IAAIA,EAAK,eAAe,CAAC,EAExD,GAAM,CAAE,OAAAC,EAAQ,GAAGC,CAAK,EAAIF,EAC5BlB,EAAU,KAAK,SAASmB,CAAM,GAAG,EACjC,MAAME,GAAcF,EAAQC,CAAI,CACjC,CACD,CACD,CAEA,eAAeE,IAAO,CACrB,MAAMlB,GAAU,CACjB,CAEAmB,GAAiBD,EAAI",
6
+ "names": ["import_promises", "import_node_path", "import_node_path", "import_node_path", "import_promises", "import_node_path", "import_node_path", "import_node_fs", "import_node_path", "import_node_process", "import_node_url", "toPath", "urlOrPath", "findUpSync", "name", "options", "cwd", "process", "type", "stopAtOption", "directory", "path", "toPath", "root", "stopAt", "isAbsoluteName", "filePath", "stats", "fs", "pkgDirSync", "options", "cwd", "filePath", "findUpSync", "path", "env", "envIsTruthy", "value", "GRIDDO_API_URL", "GRIDDO_PUBLIC_API_URL", "GRIDDO_BOT_USER", "GRIDDO_BOT_PASSWORD", "GRIDDO_API_CONCURRENCY_COUNT", "GRIDDO_SKIP_BUILD_CHECKS", "GRIDDO_RENDER_BY_DOMAINS", "GRIDDO_BUILD_LOGS", "GRIDDO_BUILD_LOGS_TO_FILE", "GRIDDO_BUILD_LOGS_BUFFER_SIZE", "GRIDDO_SSG_VERBOSE_LOGS", "GRIDDO_SEARCH_FEATURE", "GRIDDO_ASSET_PREFIX", "GRIDDO_REACT_APP_INSTANCE", "GRIDDO_AI_EMBEDDINGS", "GRIDDO_VERBOSE_LOGS", "GRIDDO_USE_DIST_BACKUP", "GRIDDO_SSG_BUNDLE_ANALYZER", "RESET", "CODES", "brush", "color", "key", "text", "GriddoLog", "_GriddoLog", "str", "GRIDDO_VERBOSE_LOGS", "brush", "GRIDDO_BUILD_LOGS", "args", "root", "pkgDirSync", "path", "cache", "dbFilePath", "readDB", "customDBPath", "file", "fsp", "error", "GriddoLog", "writeDB", "renderDB", "import_promises", "ArtifactError", "LoginError", "NoDomainsFoundError", "pathExists", "dir", "fsp", "RENDER_MODE", "GRIDDO_API_URL", "GRIDDO_PUBLIC_API_URL", "AI_EMBEDDINGS", "ALERT", "DOMAINS", "GET_ALL", "GET_PAGE", "LOGIN", "RESET_RENDER", "ROBOTS", "SEARCH", "SETTINGS", "SITE_URI", "BUILD_END", "AuthService", "response", "LOGIN", "GRIDDO_BOT_USER", "GRIDDO_BOT_PASSWORD", "token", "throwError", "LoginError", "authService", "getRenderModeFromDB", "domain", "db", "readDB", "brush", "getRenderPathsHydratedWithDomainFromDB", "options", "dbFilePath", "paths", "path", "getRenderMetadataFromDB", "import_promises", "import_node_path", "distRollback", "domain", "data", "readDB", "exportsDir", "exportsDirBackup", "GriddoLog", 
"path", "fsp", "pathExists", "RenderError", "originalError", "throwError", "options", "stack", "error", "message", "expected", "hint", "errorColor", "GriddoLog", "brush", "extraText", "withErrorHandler", "fn", "__root", "getRenderPathsHydratedWithDomainFromDB", "data", "readDB", "path", "distRollback", "writeDB", "import_node_crypto", "import_promises", "import_node_path", "import_promises", "logBuffer", "logFilePath", "flushPromise", "LOG_BUFFER_FLUSH_THRESHOLD", "GRIDDO_BUILD_LOGS_BUFFER_SIZE", "LOG_TO_FILE_IS_DISABLED", "GRIDDO_BUILD_LOGS", "addLogToBuffer", "lineContent", "LOG_TO_FILE_IS_DISABLED", "logBuffer", "LOG_BUFFER_FLUSH_THRESHOLD", "flushLogsToFile", "error", "GriddoLog", "flushPromise", "logFilePath", "logsToFlush", "fsp", "env", "RETRY_WAIT_SECONDS", "RETRY_ATTEMPTS", "requestAPI", "props", "method", "appendToLog", "endpoint", "body", "cacheKey", "attempt", "headers", "useApiCacheDir", "logToFile", "cacheOptions", "start", "cacheData", "searchCacheData", "siteId", "getSafeSiteId", "siteIdMsg", "duration", "msToSec", "addLogToBuffer", "fetchOptions", "authService", "response", "data", "saveCache", "e", "error", "GriddoLog", "RenderError", "showApiError", "delay", "getApi", "postApi", "props", "endpoint", "body", "headers", "referenceFieldBodyParams", "requestAPI", "showApiError", "error", "options", "message", "stack", "callInfo", "callInfoArray", "item", "callInfoStr", "errorDetailsStr", "GriddoLog", "brush", "getSafeSiteId", "response", "delay", "ms", "res", "msToSec", "decimals", "generateFilenameWithHash", "petition", "__root", "getRenderPathsHydratedWithDomainFromDB", "apiCacheDir", "path", "hashSum", "crypto", "saveCache", "content", "stringContent", "filename", "filepath", "pathExists", "fsp", "searchCacheData", "file", "fileContent", "getInstanceDomains", "domains", "getApi", "DOMAINS", "throwError", "NoDomainsFoundError", "GriddoLog", "filteredDomains", "slug", "endSiteRender", "id", "body", "prefix", "suffix", "BUILD_END", "postApi", 
"getRenderReport", "options", "buildReportFileName", "domain", "exportsSiteDir", "buildReportFile", "path", "pathExists", "GriddoLog", "throwError", "ArtifactError", "fsp", "endRender", "getRenderMetadataFromDB", "__sites", "getRenderPathsHydratedWithDomainFromDB", "authService", "domains", "GRIDDO_RENDER_BY_DOMAINS", "domainName", "getInstanceDomains", "renderMode", "reason", "getRenderModeFromDB", "RENDER_MODE", "report", "site", "siteId", "body", "endSiteRender", "main", "withErrorHandler"]
7
+ }