@lowdefy/build 5.0.0 → 5.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/build/buildApi/buildRoutine/countStepTypes.js +3 -0
- package/dist/build/buildApi/buildRoutine/setStepId.js +3 -2
- package/dist/build/buildApi/buildRoutine/validateStep.js +19 -0
- package/dist/build/buildApi/validateEndpoint.js +10 -0
- package/dist/build/buildApi/validateStepReferences.js +4 -4
- package/dist/build/buildAuth/buildApiAuth.js +2 -1
- package/dist/build/buildAuth/buildPageAuth.js +2 -1
- package/dist/build/buildAuth/getApiRoles.js +12 -6
- package/dist/build/buildAuth/getPageRoles.js +12 -6
- package/dist/build/buildAuth/getProtectedApi.js +3 -2
- package/dist/build/buildAuth/getProtectedPages.js +3 -2
- package/dist/build/buildAuth/matchPattern.js +22 -0
- package/dist/build/buildConnections.js +42 -4
- package/dist/build/buildJs/jsMapParser.js +25 -12
- package/dist/build/buildJs/writeJs.js +2 -2
- package/dist/build/buildMenu.js +41 -0
- package/dist/build/buildModuleDefs.js +97 -0
- package/dist/build/buildModules.js +96 -0
- package/dist/build/buildPages/buildBlock/buildBlock.js +2 -2
- package/dist/build/buildPages/buildBlock/buildEvents.js +16 -1
- package/dist/build/buildPages/buildBlock/buildSubBlocks.js +2 -1
- package/dist/build/buildPages/buildBlock/validateBlock.js +3 -3
- package/dist/build/buildPages/buildPage.js +1 -0
- package/dist/build/buildPages/validateCallApiRefs.js +31 -0
- package/dist/build/buildRefs/getModuleRefContent.js +81 -0
- package/dist/build/buildRefs/makeRefDefinition.js +6 -0
- package/dist/build/buildRefs/walker.js +424 -44
- package/dist/build/fetchGitHubModule.js +94 -0
- package/dist/build/fetchModules.js +60 -0
- package/dist/build/full/buildPages.js +10 -1
- package/dist/build/full/writePages.js +1 -1
- package/dist/build/jit/buildPageJit.js +34 -4
- package/dist/build/jit/collectSkeletonSourceFiles.js +8 -0
- package/dist/build/jit/createPageRegistry.js +10 -1
- package/dist/build/jit/shallowBuild.js +22 -11
- package/dist/build/jit/writePageJit.js +2 -2
- package/dist/build/jit/writeSourcelessPages.js +1 -1
- package/dist/build/parseModuleSource.js +48 -0
- package/dist/build/registerModules.js +242 -0
- package/dist/build/resolveDepTarget.js +43 -0
- package/dist/build/resolveModuleDependencies.js +60 -0
- package/dist/build/resolveModuleOperators.js +27 -0
- package/dist/build/testSchema.js +22 -11
- package/dist/build/writePluginImports/writeGlobalsCss.js +30 -1
- package/dist/createContext.js +4 -0
- package/dist/defaultPackages.js +51 -0
- package/dist/defaultTypesMap.js +515 -355
- package/dist/index.js +16 -1
- package/dist/indexDev.js +3 -1
- package/dist/lowdefySchema.js +58 -0
- package/dist/scripts/generateDefaultTypes.js +1 -34
- package/package.json +46 -41
- package/dist/build/jit/stripPageContent.js +0 -29
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
/*
|
|
2
|
+
Copyright 2020-2026 Lowdefy, Inc
|
|
3
|
+
|
|
4
|
+
Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
you may not use this file except in compliance with the License.
|
|
6
|
+
You may obtain a copy of the License at
|
|
7
|
+
|
|
8
|
+
http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
|
|
10
|
+
Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
See the License for the specific language governing permissions and
|
|
14
|
+
limitations under the License.
|
|
15
|
+
*/ import { execFile } from 'node:child_process';
|
|
16
|
+
import fs from 'node:fs';
|
|
17
|
+
import path from 'node:path';
|
|
18
|
+
import { pipeline } from 'node:stream/promises';
|
|
19
|
+
import { createGunzip } from 'node:zlib';
|
|
20
|
+
import { promisify } from 'node:util';
|
|
21
|
+
import { Unpack } from 'tar';
|
|
22
|
+
import { ConfigError } from '@lowdefy/errors';
|
|
23
|
+
// execFile promisified once at module load so gh CLI calls can be awaited.
const execFileAsync = promisify(execFile);
/**
 * Heuristic: can this git ref be cached forever?
 *
 * Returns true for refs that cannot move — full or abbreviated commit
 * SHAs and semver-style tags (v1, 1.2.3, v1.0.0-beta.1, ...). Branch
 * names and anything else are treated as mutable.
 *
 * @param {string} ref - git ref string from a module source.
 * @returns {boolean} true when the ref is safe to serve from cache.
 */
function isImmutableRef(ref) {
  const looksLikeCommitSha = /^[0-9a-f]{7,40}$/.test(ref);
  const looksLikeSemverTag = /^v?\d+(\.\d+)*(-[\w.]+)?$/.test(ref);
  return looksLikeCommitSha || looksLikeSemverTag;
}
|
|
31
|
+
/**
 * Read a GitHub auth token from the gh CLI (`gh auth token`).
 *
 * Best-effort: returns null when gh is not installed, not authenticated,
 * or prints nothing — callers fall back to anonymous requests.
 *
 * @returns {Promise<string|null>} token string, or null when unavailable.
 */
async function getGhToken() {
  let stdout;
  try {
    ({ stdout } = await execFileAsync('gh', ['auth', 'token']));
  } catch {
    // gh missing or exited non-zero — treat as "no token available".
    return null;
  }
  const token = stdout.trim();
  return token === '' ? null : token;
}
|
|
42
|
+
/**
 * Stream a gzipped tarball into destDir.
 *
 * @param {ReadableStream} body - response body stream (e.g. from fetch).
 * @param {string} destDir - directory to extract into; created if missing.
 * @returns {Promise<void>}
 */
async function extractTarball(body, destDir) {
  // Make sure the destination (and any missing parents) exists first.
  fs.mkdirSync(destDir, { recursive: true });
  // GitHub tarballs wrap all content in one {owner}-{repo}-{sha}/ folder;
  // strip: 1 drops that wrapper so files land directly in destDir.
  const unpacker = new Unpack({ cwd: destDir, strip: 1 });
  await pipeline(body, createGunzip(), unpacker);
}
|
|
54
|
+
async function fetchGitHubModule(source, context) {
|
|
55
|
+
const cacheDir = path.join(context.directories.config, '.lowdefy', 'modules', 'github');
|
|
56
|
+
const repoCache = path.join(cacheDir, source.owner, source.repo, source.ref);
|
|
57
|
+
// Check cache — only skip fetch for refs we're confident are immutable
|
|
58
|
+
if (fs.existsSync(repoCache) && isImmutableRef(source.ref)) {
|
|
59
|
+
return {
|
|
60
|
+
packageRoot: repoCache
|
|
61
|
+
};
|
|
62
|
+
}
|
|
63
|
+
// Fetch tarball from GitHub API
|
|
64
|
+
const url = `https://api.github.com/repos/${source.owner}/${source.repo}/tarball/${source.ref}`;
|
|
65
|
+
const headers = {
|
|
66
|
+
Accept: 'application/vnd.github+json'
|
|
67
|
+
};
|
|
68
|
+
// Auth: GITHUB_TOKEN env var, then gh CLI token
|
|
69
|
+
const token = process.env.GITHUB_TOKEN || await getGhToken();
|
|
70
|
+
if (token) {
|
|
71
|
+
headers.Authorization = `Bearer ${token}`;
|
|
72
|
+
}
|
|
73
|
+
const response = await fetch(url, {
|
|
74
|
+
headers,
|
|
75
|
+
redirect: 'follow'
|
|
76
|
+
});
|
|
77
|
+
if (!response.ok) {
|
|
78
|
+
throw new ConfigError(`Failed to fetch module from ${url}: ${response.status} ${response.statusText}`);
|
|
79
|
+
}
|
|
80
|
+
// Clean existing cache for mutable refs before extracting
|
|
81
|
+
if (fs.existsSync(repoCache)) {
|
|
82
|
+
fs.rmSync(repoCache, {
|
|
83
|
+
recursive: true,
|
|
84
|
+
force: true
|
|
85
|
+
});
|
|
86
|
+
}
|
|
87
|
+
// Extract tarball to cache
|
|
88
|
+
await extractTarball(response.body, repoCache);
|
|
89
|
+
return {
|
|
90
|
+
packageRoot: repoCache
|
|
91
|
+
};
|
|
92
|
+
}
|
|
93
|
+
export default fetchGitHubModule;
|
|
94
|
+
export { isImmutableRef, getGhToken, extractTarball };
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
/*
|
|
2
|
+
Copyright 2020-2026 Lowdefy, Inc
|
|
3
|
+
|
|
4
|
+
Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
you may not use this file except in compliance with the License.
|
|
6
|
+
You may obtain a copy of the License at
|
|
7
|
+
|
|
8
|
+
http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
|
|
10
|
+
Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
See the License for the specific language governing permissions and
|
|
14
|
+
limitations under the License.
|
|
15
|
+
*/ import fs from 'node:fs';
|
|
16
|
+
import path from 'node:path';
|
|
17
|
+
import { ConfigError } from '@lowdefy/errors';
|
|
18
|
+
import fetchGitHubModule from './fetchGitHubModule.js';
|
|
19
|
+
import parseModuleSource from './parseModuleSource.js';
|
|
20
|
+
/**
 * Walk upward from startPath looking for a directory that contains
 * a `.git` entry.
 *
 * @param {string} startPath - directory to start searching from.
 * @returns {string|null} the repository root, or null when the
 *   filesystem root is reached without finding one.
 */
function findGitRoot(startPath) {
  for (let current = startPath; ; ) {
    if (fs.existsSync(path.join(current, '.git'))) {
      return current;
    }
    const parent = path.dirname(current);
    // dirname of the root returns the root itself — stop there.
    if (parent === current) {
      return null;
    }
    current = parent;
  }
}
|
|
31
|
+
/**
 * Resolve every configured module entry to directories on disk.
 *
 * "file:" sources point inside the app's own tree: moduleRoot is the
 * resolved directory and packageRoot is the enclosing git root when one
 * exists (falling back to the module directory itself). "github:"
 * sources are fetched/cached first via fetchGitHubModule.
 *
 * Each resolved module must contain a module.lowdefy.yaml manifest at
 * its root.
 *
 * @param {{ moduleEntries: Array<{ id: string, source: string }>, context: object }} args
 * @returns {Promise<Record<string, { packageRoot: string, moduleRoot: string, isLocal: boolean }>>}
 * @throws {ConfigError} when a module's manifest is missing.
 */
async function fetchModules({ moduleEntries, context }) {
  const resolved = {};
  for (const entry of moduleEntries) {
    const source = parseModuleSource(entry.source);
    if (source.type === 'file') {
      const moduleRoot = path.resolve(context.directories.config, source.path);
      if (!fs.existsSync(path.join(moduleRoot, 'module.lowdefy.yaml'))) {
        throw new ConfigError(`Module "${entry.id}": module.lowdefy.yaml not found at ${moduleRoot}`);
      }
      resolved[entry.id] = {
        packageRoot: findGitRoot(moduleRoot) ?? moduleRoot,
        moduleRoot,
        isLocal: true
      };
      continue;
    }
    if (source.type === 'github') {
      const { packageRoot } = await fetchGitHubModule(source, context);
      const moduleRoot = source.path ? path.join(packageRoot, source.path) : packageRoot;
      if (!fs.existsSync(path.join(moduleRoot, 'module.lowdefy.yaml'))) {
        throw new ConfigError(`Module "${entry.id}": module.lowdefy.yaml not found at path "${source.path || '/'}" in ${source.owner}/${source.repo}@${source.ref}`);
      }
      resolved[entry.id] = {
        packageRoot,
        moduleRoot,
        isLocal: false
      };
    }
  }
  return resolved;
}
export default fetchModules;
|
|
@@ -16,6 +16,7 @@
|
|
|
16
16
|
import { ConfigError, shouldSuppressBuildCheck } from '@lowdefy/errors';
|
|
17
17
|
import buildPage from '../buildPages/buildPage.js';
|
|
18
18
|
import createCheckDuplicateId from '../../utils/createCheckDuplicateId.js';
|
|
19
|
+
import validateCallApiRefs from '../buildPages/validateCallApiRefs.js';
|
|
19
20
|
import validateLinkReferences from '../buildPages/validateLinkReferences.js';
|
|
20
21
|
import validatePayloadReferences from '../buildPages/validatePayloadReferences.js';
|
|
21
22
|
import validateServerStateReferences from '../buildPages/validateServerStateReferences.js';
|
|
@@ -25,8 +26,9 @@ function buildPages({ components, context }) {
|
|
|
25
26
|
const checkDuplicatePageId = createCheckDuplicateId({
|
|
26
27
|
message: 'Duplicate pageId "{{ id }}".'
|
|
27
28
|
});
|
|
28
|
-
// Initialize
|
|
29
|
+
// Initialize action ref collections across all pages
|
|
29
30
|
context.linkActionRefs = [];
|
|
31
|
+
context.callApiActionRefs = [];
|
|
30
32
|
// Track which pages failed to build so we skip them in validation
|
|
31
33
|
const failedPageIndices = new Set();
|
|
32
34
|
// Wrap each page build to collect errors instead of stopping on first error
|
|
@@ -64,6 +66,13 @@ function buildPages({ components, context }) {
|
|
|
64
66
|
pageIds,
|
|
65
67
|
context
|
|
66
68
|
});
|
|
69
|
+
// Validate that CallAPI actions don't target InternalApi endpoints
|
|
70
|
+
const endpointConfigs = type.isArray(components.api) ? components.api : [];
|
|
71
|
+
validateCallApiRefs({
|
|
72
|
+
callApiActionRefs: context.callApiActionRefs,
|
|
73
|
+
endpointConfigs,
|
|
74
|
+
context
|
|
75
|
+
});
|
|
67
76
|
// Validate that _state references use defined block IDs
|
|
68
77
|
// and _payload references use defined payload keys
|
|
69
78
|
// Skip pages that failed to build
|
|
@@ -14,7 +14,7 @@
|
|
|
14
14
|
limitations under the License.
|
|
15
15
|
*/ import { serializer } from '@lowdefy/helpers';
|
|
16
16
|
async function writePage({ page, context }) {
|
|
17
|
-
await context.writeBuildArtifact(`pages/${page.pageId}
|
|
17
|
+
await context.writeBuildArtifact(`pages/${page.pageId}.json`, serializer.serializeToString(page ?? {}));
|
|
18
18
|
}
|
|
19
19
|
async function writePages({ components, context }) {
|
|
20
20
|
const writePromises = components.pages.map((page)=>writePage({
|
|
@@ -19,6 +19,7 @@ import { ConfigError, LowdefyInternalError } from '@lowdefy/errors';
|
|
|
19
19
|
import operators from '@lowdefy/operators-js/operators/build';
|
|
20
20
|
import addKeys from '../addKeys.js';
|
|
21
21
|
import buildPage from '../buildPages/buildPage.js';
|
|
22
|
+
import validateCallApiRefs from '../buildPages/validateCallApiRefs.js';
|
|
22
23
|
import validateLinkReferences from '../buildPages/validateLinkReferences.js';
|
|
23
24
|
import validatePayloadReferences from '../buildPages/validatePayloadReferences.js';
|
|
24
25
|
import validateServerStateReferences from '../buildPages/validateServerStateReferences.js';
|
|
@@ -83,7 +84,7 @@ async function buildPageJit({ pageId, pageRegistry, context, directories, logger
|
|
|
83
84
|
// All user pages (with refId) always JIT-resolve from source YAML so that
|
|
84
85
|
// page-only edits are picked up without a skeleton rebuild.
|
|
85
86
|
if (!pageEntry.refId) {
|
|
86
|
-
const pagePath = path.join(buildContext.directories.build, 'pages',
|
|
87
|
+
const pagePath = path.join(buildContext.directories.build, 'pages', `${pageId}.json`);
|
|
87
88
|
try {
|
|
88
89
|
const content = await fs.promises.readFile(pagePath, 'utf8');
|
|
89
90
|
const page = serializer.deserialize(JSON.parse(content));
|
|
@@ -96,6 +97,13 @@ async function buildPageJit({ pageId, pageRegistry, context, directories, logger
|
|
|
96
97
|
if (err.code !== 'ENOENT') throw err;
|
|
97
98
|
}
|
|
98
99
|
}
|
|
100
|
+
// If this is a module page, set up module context
|
|
101
|
+
let moduleDependencies = null;
|
|
102
|
+
let moduleEntry = null;
|
|
103
|
+
if (pageEntry.moduleEntryId) {
|
|
104
|
+
moduleEntry = buildContext.modules[pageEntry.moduleEntryId];
|
|
105
|
+
moduleDependencies = moduleEntry?.moduleDependencies ?? null;
|
|
106
|
+
}
|
|
99
107
|
// Resolve the page file from scratch using the source file path determined
|
|
100
108
|
// by createPageRegistry's parent chain walk.
|
|
101
109
|
if (!pageEntry.refPath && !pageEntry.resolverOriginal) {
|
|
@@ -113,6 +121,10 @@ async function buildPageJit({ pageId, pageRegistry, context, directories, logger
|
|
|
113
121
|
refId: varRefDef.id,
|
|
114
122
|
sourceRefId: null,
|
|
115
123
|
vars: {},
|
|
124
|
+
moduleDependencies,
|
|
125
|
+
moduleEntry: moduleEntry ?? null,
|
|
126
|
+
moduleRoot: moduleEntry?.moduleRoot ?? null,
|
|
127
|
+
packageRoot: moduleEntry?.packageRoot ?? null,
|
|
116
128
|
path: '',
|
|
117
129
|
currentFile: pageEntry.refPath ?? pageEntry.resolverOriginal?.resolver ?? '',
|
|
118
130
|
refChain: new Set(),
|
|
@@ -149,6 +161,10 @@ async function buildPageJit({ pageId, pageRegistry, context, directories, logger
|
|
|
149
161
|
refId: refDef.id,
|
|
150
162
|
sourceRefId: null,
|
|
151
163
|
vars: refDef.vars ?? {},
|
|
164
|
+
moduleDependencies,
|
|
165
|
+
moduleEntry: moduleEntry ?? null,
|
|
166
|
+
moduleRoot: moduleEntry?.moduleRoot ?? null,
|
|
167
|
+
packageRoot: moduleEntry?.packageRoot ?? null,
|
|
152
168
|
path: '',
|
|
153
169
|
currentFile: refDef.path ?? '',
|
|
154
170
|
refChain: new Set(),
|
|
@@ -164,13 +180,18 @@ async function buildPageJit({ pageId, pageRegistry, context, directories, logger
|
|
|
164
180
|
refDef
|
|
165
181
|
});
|
|
166
182
|
// When resolving from a collection file (with vars), the result is an array of pages.
|
|
167
|
-
// Find the specific page by ID.
|
|
183
|
+
// Find the specific page by ID. For module pages, source IDs are unscoped.
|
|
168
184
|
if (type.isArray(processed)) {
|
|
169
|
-
|
|
185
|
+
const unscopedId = moduleEntry ? pageId.slice(`${moduleEntry.id}/`.length) : pageId;
|
|
186
|
+
processed = processed.find((p)=>type.isObject(p) && p.id === unscopedId);
|
|
170
187
|
if (!processed) {
|
|
171
188
|
throw new ConfigError(`Page "${pageId}" not found in resolved page source file.`);
|
|
172
189
|
}
|
|
173
190
|
}
|
|
191
|
+
// JIT builds resolve from source YAML — the page ID is unscoped for module pages
|
|
192
|
+
if (moduleEntry && type.isObject(processed) && processed.id) {
|
|
193
|
+
processed.id = `${moduleEntry.id}/${processed.id}`;
|
|
194
|
+
}
|
|
174
195
|
// Tag all objects with ~r for ref provenance (normally done inside _ref
|
|
175
196
|
// resolution by the walker; JIT resolves the page file directly).
|
|
176
197
|
tagRefDeep(processed, refDef.id);
|
|
@@ -186,10 +207,13 @@ async function buildPageJit({ pageId, pageRegistry, context, directories, logger
|
|
|
186
207
|
await writeMaps({
|
|
187
208
|
context: buildContext
|
|
188
209
|
});
|
|
189
|
-
// Initialize
|
|
210
|
+
// Initialize action ref collections for buildPage (normally done by buildPages)
|
|
190
211
|
if (!buildContext.linkActionRefs) {
|
|
191
212
|
buildContext.linkActionRefs = [];
|
|
192
213
|
}
|
|
214
|
+
if (!buildContext.callApiActionRefs) {
|
|
215
|
+
buildContext.callApiActionRefs = [];
|
|
216
|
+
}
|
|
193
217
|
// Build the page (validation, block processing)
|
|
194
218
|
const checkDuplicatePageId = createCheckDuplicateId({
|
|
195
219
|
message: 'Duplicate pageId "{{ id }}".'
|
|
@@ -237,6 +261,12 @@ async function buildPageJit({ pageId, pageRegistry, context, directories, logger
|
|
|
237
261
|
pageIds,
|
|
238
262
|
context: buildContext
|
|
239
263
|
});
|
|
264
|
+
const endpointConfigs = type.isArray(buildContext.components?.api) ? buildContext.components.api : [];
|
|
265
|
+
validateCallApiRefs({
|
|
266
|
+
callApiActionRefs: buildContext.callApiActionRefs,
|
|
267
|
+
endpointConfigs,
|
|
268
|
+
context: buildContext
|
|
269
|
+
});
|
|
240
270
|
validateStateReferences({
|
|
241
271
|
page: processed,
|
|
242
272
|
context: buildContext
|
|
@@ -39,6 +39,14 @@ function collectSkeletonSourceFiles({ components, context }) {
|
|
|
39
39
|
if (key === 'pages') continue;
|
|
40
40
|
walkRefIds(components[key], refIds);
|
|
41
41
|
}
|
|
42
|
+
// Module consumerVars contribute to skeleton state via modules.json.
|
|
43
|
+
// Their ~r markers may only appear under pages (when consumed via
|
|
44
|
+
// _module.var inside page-referenced components), so the non-page
|
|
45
|
+
// walk above can miss them. Walk here so changes to per-app module
|
|
46
|
+
// vars files trigger a skeleton rebuild.
|
|
47
|
+
for (const moduleEntry of Object.values(context.modules ?? {})){
|
|
48
|
+
walkRefIds(moduleEntry.consumerVars, refIds);
|
|
49
|
+
}
|
|
42
50
|
const sourceFiles = new Set();
|
|
43
51
|
// Walk parent chains for each collected ref ID
|
|
44
52
|
for (const refId of refIds){
|
|
@@ -58,6 +58,14 @@ function findPageSourceRef(refId, refMap, unresolvedRefVars) {
|
|
|
58
58
|
}
|
|
59
59
|
return firstChildOfRoot;
|
|
60
60
|
}
|
|
61
|
+
function getModuleEntryId(pageId, context) {
|
|
62
|
+
for (const entryId of Object.keys(context.modules ?? {})){
|
|
63
|
+
if (pageId.startsWith(`${entryId}/`)) {
|
|
64
|
+
return entryId;
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
return null;
|
|
68
|
+
}
|
|
61
69
|
function createPageRegistry({ components, context }) {
|
|
62
70
|
const registry = new Map();
|
|
63
71
|
const unresolvedRefVars = context.unresolvedRefVars ?? {};
|
|
@@ -77,7 +85,8 @@ function createPageRegistry({ components, context }) {
|
|
|
77
85
|
refId: isInline ? null : refId,
|
|
78
86
|
refPath: sourceRef?.path ?? null,
|
|
79
87
|
unresolvedVars: sourceRef?.unresolvedVars ?? null,
|
|
80
|
-
resolverOriginal: sourceRef?.original ?? null
|
|
88
|
+
resolverOriginal: sourceRef?.original ?? null,
|
|
89
|
+
moduleEntryId: getModuleEntryId(page.id, context)
|
|
81
90
|
});
|
|
82
91
|
});
|
|
83
92
|
return registry;
|
|
@@ -12,7 +12,8 @@
|
|
|
12
12
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
13
|
See the License for the specific language governing permissions and
|
|
14
14
|
limitations under the License.
|
|
15
|
-
*/ import {
|
|
15
|
+
*/ import { serializer } from '@lowdefy/helpers';
|
|
16
|
+
import { BuildError, LowdefyInternalError } from '@lowdefy/errors';
|
|
16
17
|
import createContext from '../../createContext.js';
|
|
17
18
|
import logCollectedErrors from '../../utils/logCollectedErrors.js';
|
|
18
19
|
import makeId from '../../utils/makeId.js';
|
|
@@ -26,6 +27,8 @@ import buildApi from '../buildApi/buildApi.js';
|
|
|
26
27
|
import buildLogger from '../buildLogger.js';
|
|
27
28
|
import buildImports from '../buildImports/buildImports.js';
|
|
28
29
|
import buildMenu from '../buildMenu.js';
|
|
30
|
+
import buildModuleDefs from '../buildModuleDefs.js';
|
|
31
|
+
import buildModules from '../buildModules.js';
|
|
29
32
|
import buildRefs from '../buildRefs/buildRefs.js';
|
|
30
33
|
import buildTypes from '../buildTypes.js';
|
|
31
34
|
import cleanBuildDirectory from '../cleanBuildDirectory.js';
|
|
@@ -51,15 +54,19 @@ import buildJsShallow from './buildJsShallow.js';
|
|
|
51
54
|
import buildShallowPages from './buildShallowPages.js';
|
|
52
55
|
import collectPageContent from '../collectPageContent.js';
|
|
53
56
|
import collectSkeletonSourceFiles from './collectSkeletonSourceFiles.js';
|
|
54
|
-
import stripPageContent from './stripPageContent.js';
|
|
55
57
|
import writeSourcelessPages from './writeSourcelessPages.js';
|
|
56
58
|
async function shallowBuild(options) {
|
|
57
59
|
makeId.reset();
|
|
58
60
|
let context;
|
|
59
61
|
try {
|
|
60
62
|
context = createContext(options);
|
|
63
|
+
// Phase 1: Build module definitions
|
|
64
|
+
await buildModuleDefs({
|
|
65
|
+
context
|
|
66
|
+
});
|
|
61
67
|
let components;
|
|
62
68
|
try {
|
|
69
|
+
// Phase 2: Ref resolution (with shallow options)
|
|
63
70
|
components = await buildRefs({
|
|
64
71
|
context,
|
|
65
72
|
shallowOptions: true
|
|
@@ -75,6 +82,11 @@ async function shallowBuild(options) {
|
|
|
75
82
|
// Failed _ref resolutions leave null entries in arrays — logging now
|
|
76
83
|
// surfaces the real error before downstream code crashes on nulls.
|
|
77
84
|
logCollectedErrors(context);
|
|
85
|
+
// Phase 3: Process modules — scopes IDs, merges into components
|
|
86
|
+
buildModules({
|
|
87
|
+
components,
|
|
88
|
+
context
|
|
89
|
+
});
|
|
78
90
|
// Collect skeleton source files while ~r markers still exist on objects.
|
|
79
91
|
const skeletonSourceFiles = collectSkeletonSourceFiles({
|
|
80
92
|
components,
|
|
@@ -85,6 +97,13 @@ async function shallowBuild(options) {
|
|
|
85
97
|
components,
|
|
86
98
|
context
|
|
87
99
|
});
|
|
100
|
+
tryBuildStep(testSchema, 'testSchema', {
|
|
101
|
+
components,
|
|
102
|
+
context
|
|
103
|
+
});
|
|
104
|
+
logCollectedErrors(context);
|
|
105
|
+
// Collect page content strings for Tailwind to scan.
|
|
106
|
+
// Runs after testSchema so null block entries are caught before walking.
|
|
88
107
|
context.tailwindContentMap = new Map();
|
|
89
108
|
for (const page of components.pages ?? []){
|
|
90
109
|
const content = collectPageContent([
|
|
@@ -94,15 +113,6 @@ async function shallowBuild(options) {
|
|
|
94
113
|
context.tailwindContentMap.set(page.id, content);
|
|
95
114
|
}
|
|
96
115
|
}
|
|
97
|
-
stripPageContent({
|
|
98
|
-
components,
|
|
99
|
-
context
|
|
100
|
-
});
|
|
101
|
-
tryBuildStep(testSchema, 'testSchema', {
|
|
102
|
-
components,
|
|
103
|
-
context
|
|
104
|
-
});
|
|
105
|
-
logCollectedErrors(context);
|
|
106
116
|
// Build skeleton steps (everything except page content)
|
|
107
117
|
tryBuildStep(buildApp, 'buildApp', {
|
|
108
118
|
components,
|
|
@@ -238,6 +248,7 @@ async function shallowBuild(options) {
|
|
|
238
248
|
await context.writeBuildArtifact('installedPluginPackages.json', JSON.stringify([
|
|
239
249
|
...context.installedPackages ?? []
|
|
240
250
|
]));
|
|
251
|
+
await context.writeBuildArtifact('modules.json', serializer.serializeToString(context.modules ?? {}));
|
|
241
252
|
await writePluginImports({
|
|
242
253
|
components,
|
|
243
254
|
context
|
|
@@ -19,7 +19,7 @@ import collectPageContent from '../collectPageContent.js';
|
|
|
19
19
|
import writeJs from '../buildJs/writeJs.js';
|
|
20
20
|
async function writePageJit({ page, context }) {
|
|
21
21
|
// Write page JSON
|
|
22
|
-
await context.writeBuildArtifact(`pages/${page.pageId}
|
|
22
|
+
await context.writeBuildArtifact(`pages/${page.pageId}.json`, serializer.serializeToString(page ?? {}));
|
|
23
23
|
// Write page request JSONs
|
|
24
24
|
const requests = page.requests ?? [];
|
|
25
25
|
for (const request of requests){
|
|
@@ -47,6 +47,6 @@ async function writePageJit({ page, context }) {
|
|
|
47
47
|
const pageContent = collectPageContent([
|
|
48
48
|
page
|
|
49
49
|
]);
|
|
50
|
-
await writeFile(path.join(context.directories.server, 'lowdefy-build', 'tailwind', `${page.pageId}.html`), '<!-- Generated by Lowdefy build -->\n' + (pageContent ?? ''));
|
|
50
|
+
await writeFile(path.join(context.directories.server, 'lowdefy-build', 'tailwind', `${encodeURIComponent(page.pageId)}.html`), '<!-- Generated by Lowdefy build -->\n' + (pageContent ?? ''));
|
|
51
51
|
}
|
|
52
52
|
export default writePageJit;
|
|
@@ -14,7 +14,7 @@
|
|
|
14
14
|
limitations under the License.
|
|
15
15
|
*/ async function writeSourcelessPages({ sourcelessPageArtifacts, context }) {
|
|
16
16
|
for (const artifact of sourcelessPageArtifacts){
|
|
17
|
-
await context.writeBuildArtifact(`pages/${artifact.pageId}
|
|
17
|
+
await context.writeBuildArtifact(`pages/${artifact.pageId}.json`, artifact.pageJson);
|
|
18
18
|
for (const request of artifact.requests){
|
|
19
19
|
await context.writeBuildArtifact(`pages/${artifact.pageId}/requests/${request.requestId}.json`, request.requestJson);
|
|
20
20
|
}
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
/*
|
|
2
|
+
Copyright 2020-2026 Lowdefy, Inc
|
|
3
|
+
|
|
4
|
+
Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
you may not use this file except in compliance with the License.
|
|
6
|
+
You may obtain a copy of the License at
|
|
7
|
+
|
|
8
|
+
http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
|
|
10
|
+
Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
See the License for the specific language governing permissions and
|
|
14
|
+
limitations under the License.
|
|
15
|
+
*/ import { ConfigError } from '@lowdefy/errors';
|
|
16
|
+
/**
 * Parse a module `source` string from lowdefy.yaml into a structured form.
 *
 * Supported forms:
 *   - "file:<relative-path>"              → { type: 'file', path }
 *   - "github:owner/repo[/sub/path]@ref"  → { type: 'github', owner, repo, path, ref }
 *
 * @param {string} source - raw module source string.
 * @returns {{ type: 'file', path: string } | { type: 'github', owner: string, repo: string, path: string|null, ref: string }}
 * @throws {ConfigError} for unknown prefixes or malformed github sources.
 */
function parseModuleSource(source) {
  if (source.startsWith('file:')) {
    return {
      type: 'file',
      path: source.slice(5)
    };
  }
  if (source.startsWith('github:')) {
    const rest = source.slice(7);
    // Use the LAST '@' so sub-paths containing '@' still parse; the ref
    // itself cannot contain '@'.
    const atIndex = rest.lastIndexOf('@');
    if (atIndex === -1) {
      throw new ConfigError(`Module source "${source}" is missing @ref (e.g., @v1.0.0).`);
    }
    const ref = rest.slice(atIndex + 1);
    // "github:owner/repo@" would otherwise produce an empty ref and a
    // malformed GitHub tarball URL downstream.
    if (ref === '') {
      throw new ConfigError(`Module source "${source}" is missing @ref (e.g., @v1.0.0).`);
    }
    const fullPath = rest.slice(0, atIndex);
    const segments = fullPath.split('/');
    // Reject empty owner/repo segments ("github:/repo@v1", "github:owner/@v1")
    // in addition to sources with too few segments.
    if (segments.length < 2 || segments[0] === '' || segments[1] === '') {
      throw new ConfigError(`Module source "${source}" must include owner/repo.`);
    }
    const owner = segments[0];
    const repo = segments[1];
    const path = segments.length > 2 ? segments.slice(2).join('/') : null;
    return {
      type: 'github',
      owner,
      repo,
      path,
      ref
    };
  }
  throw new ConfigError(`Unknown module source type: "${source}". Expected "github:" or "file:".`);
}
export default parseModuleSource;
|