@paroicms/site-generator-plugin 0.27.9 → 0.28.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/backend/dist/generator/fake-content-generator/content-helpers.js +13 -0
- package/backend/dist/generator/fake-content-generator/create-database-with-fake-content.js +10 -23
- package/backend/dist/generator/fake-content-generator/create-node-contents.js +57 -98
- package/backend/dist/generator/fake-content-generator/fill-lnodes.js +177 -52
- package/backend/dist/generator/fake-content-generator/fill-site-fields.js +22 -54
- package/backend/dist/generator/fake-content-generator/fill-taxonomy-fields.js +15 -24
- package/backend/dist/generator/fake-content-generator/generate-fake-content.js +4 -2
- package/backend/dist/generator/fake-content-generator/invoke-generate-fake-content.js +11 -3
- package/backend/dist/generator/lib/create-prompt.js +2 -2
- package/backend/dist/generator/lib/parse-llm-response.js +65 -10
- package/backend/dist/generator/site-generator/schema-with-authors.js +69 -0
- package/backend/dist/generator/site-generator/site-generator.js +18 -3
- package/backend/dist/generator/site-generator/theme-creator.js +4 -2
- package/backend/dist/lib/create-raw-context.js +1 -1
- package/backend/dist/lib/site-remover.js +3 -1
- package/backend/prompts/generate-fake-content-multiple-terms.md +21 -0
- package/frontend/dist/frontend.mjs +15 -15
- package/package.json +8 -9
- package/backend/prompts/0-context.md +0 -11
package/backend/dist/generator/fake-content-generator/fill-taxonomy-fields.js

@@ -1,6 +1,7 @@
+import { encodeLNodeId, } from "@paroicms/public-anywhere-lib";
 export async function updateLNodesWithTaxonomies(ctx, options) {
-    const {
-    const {
+    const { siteSchema, siteConnector } = ctx;
+    const { idPicker } = options;
     const { nodeTypes } = siteSchema;
     for (const nodeType of Object.values(nodeTypes)) {
         const labelingFields = nodeType.fields?.filter((f) => f.dataType === "labeling");
@@ -8,19 +9,20 @@ export async function updateLNodesWithTaxonomies(ctx, options) {
             continue;
         await updateLabelingFields(ctx, {
             idPicker,
-            fqdn,
             nodeType,
             labelingFields,
-        },
+        }, siteConnector);
     }
 }
-async function updateLabelingFields(ctx, options,
-    const {
+async function updateLabelingFields(ctx, options, siteConnector) {
+    const { nodeType, labelingFields, idPicker } = options;
     if (nodeType.kind !== "document" && nodeType.kind !== "part")
         return;
     const nodeIds = idPicker.pickNodeIds({ typeName: nodeType.typeName });
+    // Labeling fields are not localized, so use default language
+    const defaultLanguage = ctx.siteSchema.languages[0];
     for (const nodeId of nodeIds) {
-        const fieldValues =
+        const fieldValues = {};
         for (const field of labelingFields) {
             const taxonomyIds = idPicker.pickNodeIds({ typeName: field.taxonomy });
             if (taxonomyIds.length !== 1) {
@@ -34,24 +36,13 @@ async function updateLabelingFields(ctx, options, service) {
             const termNodeIds = idPicker.pickNodeIds({ parentNodeId: taxonomyNodeId }, max);
             if (termNodeIds.length === 0)
                 continue;
-
-
-                {
-                    dataType: "labeling",
-                    localized: false,
-                    value: { t: termNodeIds },
-                },
-            ]);
+            const labelingValue = { t: termNodeIds };
+            fieldValues[field.name] = labelingValue;
         }
-        if (fieldValues.length === 0)
+        if (Object.keys(fieldValues).length === 0)
             continue;
-
-
-
-            kind: nodeType.kind,
-            typeName: nodeType.typeName,
-            fields: Object.fromEntries(fieldValues),
-            },
-        });
+        // Use updateFields with the default language since labeling is not localized
+        const lNodeId = encodeLNodeId({ nodeId, language: defaultLanguage });
+        await siteConnector.updateFields(lNodeId, fieldValues);
     }
 }
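In practice, the change replaces the old per-field update call with a single `updateFields` write per node, addressed through an encoded lNode id. A minimal sketch of the resulting write, assuming the default language is "en" and using invented node and term ids (`encodeLNodeId` and `updateFields` are the calls visible in the diff; `siteConnector` would come from `service.getUnsafeSiteConnector({ fqdn })` as shown in site-generator.js below; everything else is illustrative):

```js
import { encodeLNodeId } from "@paroicms/public-anywhere-lib";

async function sketch(siteConnector) {
  // Hypothetical values: one article node labeled with two term nodes
  const nodeId = "article-42";
  const defaultLanguage = "en"; // first entry of siteSchema.languages
  const fieldValues = {
    // labeling values are stored as { t: [...termNodeIds] }
    category: { t: ["term-news", "term-tech"] },
  };
  // Labeling fields are not localized, so the default language is used
  const lNodeId = encodeLNodeId({ nodeId, language: defaultLanguage });
  await siteConnector.updateFields(lNodeId, fieldValues);
}
```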
package/backend/dist/generator/fake-content-generator/generate-fake-content.js

@@ -12,13 +12,14 @@ export async function generateFieldSetContent(ctx, options, report) {
     return list[0];
 }
 export async function generateMultipleFieldSetContents(ctx, options, report) {
-    const { siteSchema, nodeType, documentType, schemaI18n, count, withTitle, tolerateErrors, llmTaskName, } = options;
+    const { siteSchema, nodeType, documentType, schemaI18n, count, withTitle, tolerateErrors, llmTaskName, isTaxonomyTerm, } = options;
     if (nodeType.kind === "site")
         throw new Error("Cannot generate content for site node type");
     // for a document, the LLM is best at generating the title, so we ask for it and remove it later
     const skipTitle = nodeType.kind === "document" && !withTitle;
+    const titleTagDescription = isTaxonomyTerm ? "Term Name Here" : "Write the title here";
     const outputTags = withTitle || skipTitle
-        ? [{ tagName: "title", key: "title", format: "text", tagDescription:
+        ? [{ tagName: "title", key: "title", format: "text", tagDescription: titleTagDescription }]
         : [];
     if (nodeType.fields) {
         outputTags.push(...nodeType.fields.map(toFakeContentOutputTag).filter(isDef));
@@ -50,6 +51,7 @@ export async function generateMultipleFieldSetContents(ctx, options, report)
         documentDescription,
         siteTheme,
         language,
+        isTaxonomyTerm,
     }, outputTags, { tolerateErrors, llmTaskName })
         : undefined;
     if (skipTitle && output) {
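For a taxonomy term, the title tag is therefore described differently than for a regular document. A small sketch of the resulting `outputTags` entry (the tag object shape and both descriptions are taken from the diff; the surrounding variables are illustrative):

```js
const isTaxonomyTerm = true;
const titleTagDescription = isTaxonomyTerm ? "Term Name Here" : "Write the title here";
const outputTags = [
  { tagName: "title", key: "title", format: "text", tagDescription: titleTagDescription },
];
// The prompt then asks the LLM for: <title>Term Name Here</title>
```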
package/backend/dist/generator/fake-content-generator/invoke-generate-fake-content.js

@@ -8,13 +8,14 @@ import { buildPromptTemplate } from "../lib/prompt-template.js";
 const singlePromptTemplate = buildPromptTemplate(await readPromptFile("generate-fake-content-single.md"));
 const multipleDocumentsPromptTemplate = buildPromptTemplate(await readPromptFile("generate-fake-content-multiple-documents.md"));
 const multiplePartsPromptTemplate = buildPromptTemplate(await readPromptFile("generate-fake-content-multiple-parts.md"));
+const multipleTermsPromptTemplate = buildPromptTemplate(await readPromptFile("generate-fake-content-multiple-terms.md"));
 export async function invokeGenerateFakeContent(ctx, input, outputTags, options) {
     const { language, typeKind } = input;
     const single = input.count === 1;
     const llmTaskName = `fake-content-${options.llmTaskName}${single ? "" : `-${input.count}`}`;
     const tagAndDescriptions = outputTags
         .map(({ tagName, tagDescription }) => `<${tagName}>${tagDescription}</${tagName}>`)
-        .join("\n
+        .join("\n");
     // Call LLM in batch mode
     const maxBatchSize = 15;
     const batchInputs = [];
@@ -45,7 +46,9 @@ export async function invokeGenerateFakeContent(ctx, input, outputTags, options)
     const promptTemplate = single
         ? singlePromptTemplate
         : typeKind === "document"
-            ?
+            ? input.isTaxonomyTerm
+                ? multipleTermsPromptTemplate
+                : multipleDocumentsPromptTemplate
             : multiplePartsPromptTemplate;
     // Process batch inputs
     const messages = batchInputs.map(promptTemplate);
@@ -59,7 +62,12 @@ export async function invokeGenerateFakeContent(ctx, input, outputTags, options)
     }
     const results = [];
     for (const llmMessageContent of llmOutput.outputs) {
-        const list = parseLlmResponseAsList(llmMessageContent, outputTags,
+        const list = parseLlmResponseAsList(llmMessageContent, outputTags, {
+            ...options,
+            titleFromMarkdownHeading: outputTags.some((tag) => tag.key === "title")
+                ? "title"
+                : undefined,
+        });
         results.push(...list.map((fields) => Object.fromEntries(Object.entries(fields).map(([fieldName, value]) => [fieldName, { [language]: value }]))));
     }
     return { contents: results, llmReport: llmOutput.llmReport };
package/backend/dist/generator/lib/create-prompt.js

@@ -1,9 +1,9 @@
-import { jtDir } from "@paroicms/public-server-lib";
+import { docsDir, jtDir } from "@paroicms/public-server-lib";
 import { readFile } from "node:fs/promises";
 import { join } from "node:path";
 import { projectDir } from "../../context.js";
 import { buildPromptTemplate } from "./prompt-template.js";
-const contextContent = await
+const contextContent = await readFile(join(docsDir, "introduction-to-paroicms.md"), "utf-8");
 const siteSchemaTsDefs = await readFile(join(jtDir, "site-schema-json-types.d.ts"), "utf-8");
 const predefinedFields = JSON.parse(await readPromptFile("predefined-fields.json"));
 export async function createPromptTemplate(options) {
package/backend/dist/generator/lib/parse-llm-response.js

@@ -19,22 +19,38 @@ export function parseLlmResponseAsProperties(llmResponse, outputTags) {
     return resultObj;
 }
 export function parseLlmResponseAsList(llmResponse, outputTags, options = {}) {
-    const { tolerateErrors } = options;
+    const { tolerateErrors, titleFromMarkdownHeading } = options;
     const rawTags = parseLlmRawTags(llmResponse, outputTags.map((tag) => tag.tagName), options);
     if (rawTags.length === 0)
         return [];
     const outputTagMap = new Map(outputTags.map((tag) => [tag.tagName, tag]));
+    const titleOutputTag = titleFromMarkdownHeading
+        ? outputTags.find((tag) => tag.key === titleFromMarkdownHeading)
+        : undefined;
     const result = [];
     let current = {};
-
+    let currentItemStartPosition;
+    let previousItemEndPosition = 0;
+    for (let tagIndex = 0; tagIndex < rawTags.length; ++tagIndex) {
+        const rawTag = rawTags[tagIndex];
         const outputTag = outputTagMap.get(rawTag.tagName);
         if (!outputTag)
             throw new Error(`Unexpected output tag "${rawTag.tagName}"`); // it's a real bug
-
-
-
-
+        // Track start position of current item (first tag we see for this item)
+        if (currentItemStartPosition === undefined) {
+            currentItemStartPosition = rawTag.startPosition;
+        }
+        if (outputTag.key in current) {
+            // We're starting a new item - try to complete the previous one
+            const completedItem = tryCompleteItem(current, outputTags, titleOutputTag, llmResponse, previousItemEndPosition, currentItemStartPosition, options);
+            if (completedItem) {
+                result.push(completedItem);
             }
+            // Update end position for next gap calculation:
+            // The gap for the next item starts where this tag starts (not where the previous item started)
+            // This way, the Markdown heading before this tag can be found
+            previousItemEndPosition = currentItemStartPosition;
+            currentItemStartPosition = rawTag.startPosition;
             current = {};
         }
         if (rawTag.content.trim() === "") {
@@ -44,21 +60,37 @@ export function parseLlmResponseAsList(llmResponse, outputTags, options = {}) {
                     throw new Error(message);
                 tolerateErrors.errorMessages.push(message);
                 current = {};
+                currentItemStartPosition = undefined;
             }
             continue;
         }
         current[outputTag.key] = formatRawContent(rawTag.content, outputTag);
     }
+    // Handle the last item
     if (Object.keys(current).length > 0) {
-        const
-        if (
-            result.push(
+        const completedItem = tryCompleteItem(current, outputTags, titleOutputTag, llmResponse, previousItemEndPosition, currentItemStartPosition ?? 0, options);
+        if (completedItem) {
+            result.push(completedItem);
         }
     }
     return result;
 }
-
+/**
+ * Try to complete an item by ensuring all required properties are present.
+ * If the title is missing, attempt to extract it from a Markdown heading
+ * in the gap before the item's first tag.
+ */
+function tryCompleteItem(obj, outputTags, titleOutputTag, llmResponse, gapStart, gapEnd, options) {
     const { tolerateErrors } = options;
+    // If title is missing and we have a title tag definition, try fallback extraction
+    if (titleOutputTag && !(titleOutputTag.key in obj)) {
+        const gapText = llmResponse.substring(gapStart, gapEnd);
+        const extractedTitle = extractMarkdownTitle(gapText);
+        if (extractedTitle) {
+            obj[titleOutputTag.key] = extractedTitle;
+        }
+    }
+    // Now check all required properties
     for (const tag of outputTags) {
         if (!(tag.key in obj) && !tag.optional) {
             const message = `Missing tag <${tag.tagName}>`;
@@ -86,6 +118,28 @@ function formatRawContent(rawContent, tag) {
             throw new Error(`Unknown format "${format}"`);
     }
 }
+/**
+ * Extract the first Markdown heading from a text snippet.
+ * Handles any heading depth (#, ##, ###, etc.) and strips bold markers (**).
+ * Returns undefined if no heading is found.
+ */
+export function extractMarkdownTitle(text) {
+    // Match any Markdown heading: # Title, ## Title, ### Page 1: **Title**, etc.
+    const headingMatch = text.match(/^#{1,6}\s+(.+)$/m);
+    if (!headingMatch)
+        return undefined;
+    let title = headingMatch[1].trim();
+    // Handle patterns like "Page 1: **Actual Title**" - extract just the title part
+    const colonMatch = title.match(/^(?:Page\s+\d+\s*:\s*)?(.+)$/i);
+    if (colonMatch) {
+        title = colonMatch[1].trim();
+    }
+    // Strip bold markers **title** -> title
+    title = title.replace(/^\*\*(.+)\*\*$/, "$1");
+    // Strip any trailing ** that might be left
+    title = title.replace(/\*\*/g, "").trim();
+    return title || undefined;
+}
 export function parseLlmRawTags(llmResponse, tagNames, options = {}) {
     const { tolerateErrors } = options;
     const tagList = [];
@@ -135,6 +189,7 @@ export function parseLlmRawTags(llmResponse, tagNames, options = {}) {
         tagList.push({
             tagName: current.tagName,
             content,
+            startPosition: current.position,
         });
         // Skip to after this closing tag
         i = j;
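The fallback only matters when the model ignores the "use XML tags" instruction and emits a Markdown heading instead of a `<title>` tag. A quick sketch of what `extractMarkdownTitle` returns for typical inputs, based on the regexes above (the sample strings are invented):

```js
import { extractMarkdownTitle } from "./parse-llm-response.js";

extractMarkdownTitle("# Getting Started");               // "Getting Started"
extractMarkdownTitle("### Page 1: **Getting Started**"); // "Getting Started"
extractMarkdownTitle("no heading in this gap");          // undefined
```

`parseLlmResponseAsList` only falls back to this when `titleFromMarkdownHeading` is set and an item has no `<title>` tag: the heading is searched in the raw response text between the end of the previous item and the first tag of the current one.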
package/backend/dist/generator/site-generator/schema-with-authors.js (new file)

@@ -0,0 +1,69 @@
+// Default authors/author types (injected by site-schema-factory at runtime, but needed for template generation)
+const defaultAuthorsType = {
+    typeName: "authors",
+    kind: "document",
+    documentKind: "routing",
+    route: "authors",
+    regularChildren: ["author"],
+    regularChildrenSorting: "title asc",
+};
+const defaultAuthorType = {
+    typeName: "author",
+    kind: "document",
+    documentKind: "regular",
+    route: ":relativeId-:slug",
+    autoPublish: true,
+};
+/**
+ * Create a copy of the site schema with authors/author types injected if missing.
+ * This is needed because the site-schema-factory injects these at runtime,
+ * but we need them during template generation.
+ */
+export function createSchemaWithAuthors(siteSchema) {
+    const nodeTypes = siteSchema.nodeTypes ?? [];
+    const nodeTypeNames = new Set(nodeTypes.map((nt) => nt.typeName));
+    const hasAuthors = nodeTypeNames.has("authors");
+    const hasAuthor = nodeTypeNames.has("author");
+    // If both authors and author exist, check if home references authors
+    if (hasAuthors && hasAuthor) {
+        return ensureHomeReferencesAuthors(siteSchema);
+    }
+    const newNodeTypes = [...nodeTypes];
+    // Add author first (authors depends on it)
+    if (!hasAuthor) {
+        newNodeTypes.push(defaultAuthorType);
+    }
+    // Add authors
+    if (!hasAuthors) {
+        newNodeTypes.push(defaultAuthorsType);
+    }
+    return ensureHomeReferencesAuthors({
+        ...siteSchema,
+        nodeTypes: newNodeTypes,
+    });
+}
+/**
+ * Ensure the home document type has "authors" in its routingChildren.
+ */
+function ensureHomeReferencesAuthors(siteSchema) {
+    const nodeTypes = siteSchema.nodeTypes ?? [];
+    const homeIndex = nodeTypes.findIndex((nt) => nt.typeName === "home");
+    if (homeIndex === -1) {
+        return siteSchema;
+    }
+    const homeType = nodeTypes[homeIndex];
+    const routingChildren = homeType.routingChildren ?? [];
+    if (routingChildren.includes("authors")) {
+        return siteSchema;
+    }
+    // Create a new array with updated home type
+    const newNodeTypes = [...nodeTypes];
+    newNodeTypes[homeIndex] = {
+        ...homeType,
+        routingChildren: [...routingChildren, "authors"],
+    };
+    return {
+        ...siteSchema,
+        nodeTypes: newNodeTypes,
+    };
+}
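A short usage sketch of the new helper, with a deliberately minimal schema (real schemas carry more properties; the "home" and "blog" types here are invented):

```js
import { createSchemaWithAuthors } from "./schema-with-authors.js";

const schema = {
  nodeTypes: [
    { typeName: "home", kind: "document", documentKind: "routing", routingChildren: ["blog"] },
    { typeName: "blog", kind: "document", documentKind: "routing" },
  ],
};

const withAuthors = createSchemaWithAuthors(schema);
// withAuthors.nodeTypes now also contains the default "author" and "authors" types,
// and the "home" type's routingChildren becomes ["blog", "authors"].
// The input schema is not mutated: new arrays and objects are created instead.
```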
package/backend/dist/generator/site-generator/site-generator.js

@@ -1,4 +1,5 @@
 import { generateSlug } from "@paroicms/public-anywhere-lib";
+import { createSimpleTranslator } from "@paroicms/public-server-lib";
 import { randomUUID } from "node:crypto";
 import { mkdir, writeFile } from "node:fs/promises";
 import { join } from "node:path";
@@ -30,12 +31,13 @@ export async function generateSite(ctx, input) {
         "new-website",
     }), null, 2), "utf-8");
     await createTheme(ctx, siteDir, siteSchema);
-    const regSite = await service.
+    const regSite = await service.getServerConnector().registerNewSite({
         packName,
         siteDir,
         domain: siteId,
         version: "0.0.0",
     });
+    const siteConnector = service.getUnsafeSiteConnector({ fqdn: regSite.fqdn });
     const account = {
         kind: "local",
         email: `${siteId}@yopmail.com`,
@@ -43,7 +45,7 @@ export async function generateSite(ctx, input) {
         password: Math.random().toString(36).substring(2, 6), // 4 random lowercase characters,
         roles: ["admin"],
     };
-    await
+    await siteConnector.createAccount(account, { asContactEmail: true });
     const { siteUrl } = regSite;
     const values = {
         status: withSampleData ? "pending" : "completed",
@@ -55,7 +57,20 @@ export async function generateSite(ctx, input) {
     };
     await saveGeneratedSiteStep(ctx, stepHandle, values);
     if (withSampleData) {
-
+        const { siteSchema: loadedSiteSchema, homeRoutingCluster } = await siteConnector.loadSiteSchemaAndIds();
+        const schemaI18n = createSimpleTranslator({
+            labels: loadedSiteSchema.l10n,
+            logger: ctx.logger,
+        });
+        const generateCtx = {
+            ...ctx,
+            siteConnector,
+            fqdn: regSite.fqdn,
+            siteSchema: loadedSiteSchema,
+            schemaI18n,
+            homeRoutingCluster,
+        };
+        safeCallStep(ctx, stepHandle, () => fillSiteWithFakeContent(generateCtx, stepHandle, { homeRoutingCluster, localizedValues }));
     }
     return await loadStep(ctx, stepHandle.stepNumber);
 }
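The notable change here is that site-level operations now go through a per-site connector obtained from the fqdn returned at registration, while registration itself stays on the server connector. A condensed sketch of the flow, keeping only the calls visible in the diff (arguments are trimmed for brevity):

```js
// Server-level operation: register the generated site
const regSite = await service.getServerConnector().registerNewSite({
  packName, siteDir, domain: siteId, version: "0.0.0",
});

// Site-level operations: everything else goes through the site connector
const siteConnector = service.getUnsafeSiteConnector({ fqdn: regSite.fqdn });
await siteConnector.createAccount(account, { asContactEmail: true });
const { siteSchema, homeRoutingCluster } = await siteConnector.loadSiteSchemaAndIds();
```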
package/backend/dist/generator/site-generator/theme-creator.js

@@ -5,11 +5,13 @@ import { camelToKebabCase } from "../lib/utils.js";
 import { templateOfSiteFooter, templateOfSiteHeader } from "./common-template-creator.js";
 import { templateOfDocumentType } from "./document-template-creator.js";
 import { isMultilingual } from "./jt-site-schema-helpers.js";
+import { createSchemaWithAuthors } from "./schema-with-authors.js";
 import { createThemeCreatorContext } from "./theme-creator-context.js";
 import { getThemeCssContent } from "./theme-css.js";
 export async function createTheme(ctx, siteDir, siteSchema) {
-    const
-
+    const schemaWithAuthors = createSchemaWithAuthors(siteSchema);
+    const themeContext = createThemeCreatorContext(schemaWithAuthors);
+    for (const nodeType of schemaWithAuthors.nodeTypes ?? []) {
         if (nodeType.kind === "site")
             continue;
         if (nodeType.kind === "part")
package/backend/dist/lib/create-raw-context.js

@@ -4,7 +4,7 @@ import { readOrCreateJwtSecretSync } from "@paroicms/internal-server-lib";
 import { join } from "node:path";
 export function createRawContext(service, options) {
     const { cn, logNextQuery, pluginConf, debugDir } = options;
-    const packConf = service.
+    const packConf = service.getServerConnector().getSitePackConf(pluginConf.packName);
     const { sitesDir, packName } = packConf;
     if (!sitesDir || packConf.serveOn !== "subDomain") {
         throw new Error(`Site-generator plugin can generate sites only for sub-domain pack with "sitesDir", but pack "${packName}" doesn't have it`);
package/backend/dist/lib/site-remover.js

@@ -33,7 +33,9 @@ export async function removeExpiredSites(ctx) {
         const dirPath = join(sitesDir, entry.name);
         const st = await stat(dirPath);
         if (st.ctime < expirationTime) {
-
+            const fqdn = `${entry.name}.${packConf.parentDomain}`;
+            const siteConnector = service.getUnsafeSiteConnector({ fqdn });
+            await siteConnector.removeSite();
         }
     }
 }
package/backend/prompts/generate-fake-content-multiple-terms.md (new file)

@@ -0,0 +1,21 @@
+Generate **{{count}}** different **taxonomy terms** (like categories or tags).
+
+All the produced texts must be in **{{language}}**.
+
+The taxonomy in question is: **{{typeLabel}}** ({{typeDescription}}).
+
+For the context, the website's theme is: "{{siteTheme}}".
+
+Make sure terms are distinct from each other.
+
+## IMPORTANT RULES
+
+1. You MUST use XML tags, NOT markdown headings
+2. Each term MUST have a `<title>` tag
+3. Do NOT use ### or ## or # for titles
+
+## REQUIRED FORMAT
+
+For each term, write EXACTLY like this:
+
+{{tagAndDescriptions}}