nextjs-studio 1.0.5 → 1.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin/nextjs-studio.js +6 -3
- package/dist/cli/ui/standalone.tar.gz +0 -0
- package/dist/core/index.d.ts +117 -0
- package/dist/core/server.d.ts +111 -0
- package/dist/core/server.js +670 -0
- package/dist/{query-builder-BOu-D7a1.d.ts → query-builder-CRcrmrbR.d.ts} +1 -1
- package/package.json +6 -3
|
@@ -6870,7 +6870,7 @@ async function loadConfigFromPath(configPath2) {
|
|
|
6870
6870
|
// package.json
|
|
6871
6871
|
var package_default = {
|
|
6872
6872
|
name: "nextjs-studio",
|
|
6873
|
-
version: "1.0.
|
|
6873
|
+
version: "1.0.6",
|
|
6874
6874
|
description: "A Git-based, local-first CMS for Next.js projects",
|
|
6875
6875
|
keywords: [
|
|
6876
6876
|
"nextjs",
|
|
@@ -6903,8 +6903,11 @@ var package_default = {
|
|
|
6903
6903
|
types: "./dist/core/index.d.ts",
|
|
6904
6904
|
bin: "dist/bin/nextjs-studio.js",
|
|
6905
6905
|
files: [
|
|
6906
|
-
"dist/bin",
|
|
6907
|
-
"dist/core",
|
|
6906
|
+
"dist/bin/nextjs-studio.js",
|
|
6907
|
+
"dist/core/index.js",
|
|
6908
|
+
"dist/core/index.d.ts",
|
|
6909
|
+
"dist/core/server.js",
|
|
6910
|
+
"dist/core/server.d.ts",
|
|
6908
6911
|
"dist/*.d.ts",
|
|
6909
6912
|
"dist/cli/ui/standalone.tar.gz",
|
|
6910
6913
|
"README.md",
|
|
Binary file
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
import { C as ContentEntry, e as FieldDefinition, T as TextField, L as LongTextField, N as NumberField, B as BooleanField, f as DateField, I as ISODate, g as SelectField, M as MultiSelectField, U as UrlField, H as HttpUrl, E as EmailField, h as Email, i as MediaField, j as MediaPath, O as ObjectField, A as ArrayField, k as IdField, l as ID, m as SlugField, n as Slug, R as RelationField, o as FormulaField, p as StatusField, r as CreatedTimeField, s as UpdatedTimeField, t as CollectionSchema, u as BaseField } from '../query-builder-CRcrmrbR.js';
|
|
2
|
+
export { a as Collection, b as CollectionConfig, c as CollectionTypeMap, v as EntryResult, w as FieldType, Q as QueryOptions, d as QueryResult, S as StudioConfig, q as queryCollection } from '../query-builder-CRcrmrbR.js';
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* @context Core layer — draft filter at src/core/draft-filter.ts
|
|
6
|
+
* @does Provides utilities to detect and filter draft content entries
|
|
7
|
+
* @depends src/shared/types.ts
|
|
8
|
+
* @do Add new draft detection heuristics here
|
|
9
|
+
* @dont Import from CLI or UI; access filesystem
|
|
10
|
+
*/
|
|
11
|
+
|
|
12
|
+
declare function isDraft(entry: ContentEntry): boolean;
|
|
13
|
+
declare function filterDrafts(entries: ContentEntry[]): ContentEntry[];
|
|
14
|
+
|
|
15
|
+
/**
|
|
16
|
+
* @context Core layer — frontmatter binder at src/core/frontmatter-binder.ts
|
|
17
|
+
* @does Replaces {frontmatter.X} tokens in MDX body with actual frontmatter values
|
|
18
|
+
* @depends none
|
|
19
|
+
* @do Add new token patterns or transformation rules here
|
|
20
|
+
* @dont Import from CLI or UI; access filesystem
|
|
21
|
+
*/
|
|
22
|
+
/**
|
|
23
|
+
* Replaces `{frontmatter.X}` tokens in the body with values from the data object.
|
|
24
|
+
* Supports dot-notation for nested values (e.g. `{frontmatter.author.name}`).
|
|
25
|
+
*/
|
|
26
|
+
declare function bindFrontmatter(body: string, data: Record<string, unknown>): string;
|
|
27
|
+
/**
|
|
28
|
+
* Extracts all frontmatter token paths from the body.
|
|
29
|
+
*/
|
|
30
|
+
declare function extractFrontmatterTokens(body: string): string[];
|
|
31
|
+
|
|
32
|
+
/**
|
|
33
|
+
* @context Core layer — locale parser at src/core/locale-parser.ts
|
|
34
|
+
* @does Extracts locale codes from filenames using the convention `slug.locale.mdx`
|
|
35
|
+
* @depends none
|
|
36
|
+
* @do Add new locale detection strategies here
|
|
37
|
+
* @dont Import from CLI or UI; access filesystem
|
|
38
|
+
*/
|
|
39
|
+
/**
|
|
40
|
+
* Parses locale from a filename.
|
|
41
|
+
* Supports `post.pt.mdx`, `post.en-US.mdx` patterns.
|
|
42
|
+
* Returns undefined for files without a locale suffix.
|
|
43
|
+
*/
|
|
44
|
+
declare function parseLocaleFromFilename(filename: string): string | undefined;
|
|
45
|
+
/**
|
|
46
|
+
* Removes the locale suffix from a slug.
|
|
47
|
+
* Handles both pre-slugify (`.pt`) and post-slugify (`-pt`) formats,
|
|
48
|
+
* since `@sindresorhus/slugify` converts dots to dashes.
|
|
49
|
+
* `post.pt` → `post`, `post-pt` → `post`, `post` → `post`
|
|
50
|
+
*/
|
|
51
|
+
declare function stripLocaleFromSlug(slug: string, locale?: string): string;
|
|
52
|
+
|
|
53
|
+
/**
|
|
54
|
+
* @context Shared layer — schema inference types at src/shared/schema-types.ts
|
|
55
|
+
* @does Provides TypeScript utility types to infer typed data shapes from CollectionSchema
|
|
56
|
+
* @depends src/shared/fields.ts
|
|
57
|
+
* @do Add new schema-level inference utilities here
|
|
58
|
+
* @dont Import from CLI or UI; contain runtime logic or field definitions
|
|
59
|
+
*/
|
|
60
|
+
|
|
61
|
+
/** Infer the TypeScript value type for a single field definition. */
|
|
62
|
+
type InferFieldValue<F extends FieldDefinition> = F extends TextField ? string : F extends LongTextField ? string : F extends NumberField ? number : F extends BooleanField ? boolean : F extends DateField ? F["includeTime"] extends true ? Date : ISODate : F extends SelectField ? F["options"][number]["value"] : F extends MultiSelectField ? Array<F["options"][number]["value"]> : F extends UrlField ? HttpUrl : F extends EmailField ? Email : F extends MediaField ? MediaPath : F extends ObjectField ? InferObjectFields<F["fields"]> : F extends ArrayField ? Array<InferObjectFields<F["itemFields"]>> : F extends IdField ? ID : F extends SlugField ? Slug : F extends RelationField ? F["multiple"] extends true ? ID[] : ID : F extends FormulaField ? F["resultType"] extends "number" ? number : F["resultType"] extends "boolean" ? boolean : string : F extends StatusField ? F["options"][number]["value"] : F extends CreatedTimeField ? Date : F extends UpdatedTimeField ? Date : never;
|
|
63
|
+
/**
|
|
64
|
+
* Infer a record type from an array of field definitions.
|
|
65
|
+
* Fields marked `required: false` become optional (`T | undefined`).
|
|
66
|
+
*/
|
|
67
|
+
type InferObjectFields<Fields extends FieldDefinition[]> = {
|
|
68
|
+
[F in Fields[number] as F["name"]]: F extends {
|
|
69
|
+
required: false;
|
|
70
|
+
} ? InferFieldValue<F> | undefined : InferFieldValue<F>;
|
|
71
|
+
};
|
|
72
|
+
/**
|
|
73
|
+
* Infer the full data shape of a collection from its schema.
|
|
74
|
+
*
|
|
75
|
+
* @example
|
|
76
|
+
* ```ts
|
|
77
|
+
* const blogSchema = {
|
|
78
|
+
* collection: "blog",
|
|
79
|
+
* fields: [
|
|
80
|
+
* { name: "title", type: "text", required: true },
|
|
81
|
+
* { name: "published", type: "boolean" },
|
|
82
|
+
* ],
|
|
83
|
+
* } satisfies CollectionSchema;
|
|
84
|
+
*
|
|
85
|
+
* type BlogData = InferSchemaData<typeof blogSchema>;
|
|
86
|
+
* // => { title: string; published: boolean }
|
|
87
|
+
* ```
|
|
88
|
+
*/
|
|
89
|
+
type InferSchemaData<S extends CollectionSchema> = InferObjectFields<S["fields"]>;
|
|
90
|
+
|
|
91
|
+
/**
|
|
92
|
+
* @context Shared layer — field label utilities at src/shared/field-utils.ts
|
|
93
|
+
* @does Resolves human-readable labels for field definitions and raw key strings
|
|
94
|
+
* @depends src/shared/fields.ts
|
|
95
|
+
* @do Add field-related utility functions here
|
|
96
|
+
* @dont Import from CLI or UI; contain field type definitions or schema logic
|
|
97
|
+
*/
|
|
98
|
+
|
|
99
|
+
/**
|
|
100
|
+
* Resolve the human-readable label for a field.
|
|
101
|
+
*
|
|
102
|
+
* When the field definition has an explicit `label`, that is returned as-is.
|
|
103
|
+
* Otherwise the `name` (camelCase / kebab-case / snake_case) is converted to Title Case:
|
|
104
|
+
*
|
|
105
|
+
* @example
|
|
106
|
+
* fieldLabel({ name: "siteName", type: "text" }) // "Site Name"
|
|
107
|
+
* fieldLabel({ name: "created_at", type: "date" }) // "Created At"
|
|
108
|
+
* fieldLabel({ name: "bio", type: "long-text", label: "About" }) // "About"
|
|
109
|
+
*/
|
|
110
|
+
declare function fieldLabel(field: Pick<BaseField, "name" | "label">): string;
|
|
111
|
+
/**
|
|
112
|
+
* Resolve the label for a raw key string (no field definition available).
|
|
113
|
+
* Useful for dynamic keys that have no schema entry.
|
|
114
|
+
*/
|
|
115
|
+
declare function keyLabel(name: string): string;
|
|
116
|
+
|
|
117
|
+
export { CollectionSchema, ContentEntry, FieldDefinition, type InferFieldValue, type InferSchemaData, bindFrontmatter, extractFrontmatterTokens, fieldLabel, filterDrafts, isDraft, keyLabel, parseLocaleFromFilename, stripLocaleFromSlug };
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
import { F as FileInfo, D as DirectoryFileEntry, S as StudioConfig, C as ContentEntry, a as Collection } from '../query-builder-CRcrmrbR.js';
|
|
2
|
+
export { b as CollectionConfig, c as CollectionTypeMap, Q as QueryOptions, d as QueryResult, q as queryCollection } from '../query-builder-CRcrmrbR.js';
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* @context Shared layer — FS adapter interface at src/shared/fs-adapter.interface.ts
|
|
6
|
+
* @does Defines the IFsAdapter contract so Core can perform I/O without depending on CLI
|
|
7
|
+
* @depends src/shared/types.ts
|
|
8
|
+
* @do Add new I/O methods here when Core needs them; keep the interface minimal
|
|
9
|
+
* @dont Import from CLI or UI; contain implementation logic
|
|
10
|
+
*/
|
|
11
|
+
|
|
12
|
+
interface IFsAdapter {
|
|
13
|
+
readFile(filePath: string): Promise<string>;
|
|
14
|
+
writeFile(filePath: string, content: string): Promise<void>;
|
|
15
|
+
deleteFile(filePath: string): Promise<void>;
|
|
16
|
+
exists(filePath: string): Promise<boolean>;
|
|
17
|
+
getStats(filePath: string): Promise<FileInfo>;
|
|
18
|
+
listFiles(dirPath: string, extensions?: readonly string[]): Promise<string[]>;
|
|
19
|
+
listDirectories(dirPath: string): Promise<string[]>;
|
|
20
|
+
readBuffer(filePath: string): Promise<Buffer>;
|
|
21
|
+
writeBuffer(filePath: string, data: Buffer): Promise<void>;
|
|
22
|
+
listAllFiles(dirPath: string): Promise<DirectoryFileEntry[]>;
|
|
23
|
+
join(...segments: string[]): string;
|
|
24
|
+
basename(filePath: string): string;
|
|
25
|
+
extname(filePath: string): string;
|
|
26
|
+
relative(from: string, to: string): string;
|
|
27
|
+
normalizeSlug(relativePath: string, ext: string): string;
|
|
28
|
+
readFileSync(filePath: string): string;
|
|
29
|
+
existsSync(filePath: string): boolean;
|
|
30
|
+
listFilesSync(dirPath: string, extensions?: readonly string[]): string[];
|
|
31
|
+
listDirectoriesSync(dirPath: string): string[];
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
/**
|
|
35
|
+
* @context Core layer — filesystem adapter at src/core/fs-adapter.ts
|
|
36
|
+
* @does Implements IFsAdapter; abstracts all file read/write/list operations behind a single interface
|
|
37
|
+
* @depends src/shared/types.ts, src/shared/constants.ts, src/shared/fs-adapter.interface.ts
|
|
38
|
+
* @do Add new I/O operations here; all file access must go through this adapter
|
|
39
|
+
* @dont Import UI components, run HTTP requests, or contain business logic
|
|
40
|
+
*/
|
|
41
|
+
|
|
42
|
+
declare class FsAdapter implements IFsAdapter {
|
|
43
|
+
private readonly basePath;
|
|
44
|
+
constructor(basePath: string);
|
|
45
|
+
private resolve;
|
|
46
|
+
readFile(filePath: string): Promise<string>;
|
|
47
|
+
writeFile(filePath: string, content: string): Promise<void>;
|
|
48
|
+
deleteFile(filePath: string): Promise<void>;
|
|
49
|
+
exists(filePath: string): Promise<boolean>;
|
|
50
|
+
getStats(filePath: string): Promise<FileInfo>;
|
|
51
|
+
listFiles(dirPath: string, extensions?: readonly string[]): Promise<string[]>;
|
|
52
|
+
listDirectories(dirPath: string): Promise<string[]>;
|
|
53
|
+
readBuffer(filePath: string): Promise<Buffer>;
|
|
54
|
+
writeBuffer(filePath: string, data: Buffer): Promise<void>;
|
|
55
|
+
listAllFiles(dirPath: string): Promise<DirectoryFileEntry[]>;
|
|
56
|
+
join(...segments: string[]): string;
|
|
57
|
+
basename(filePath: string): string;
|
|
58
|
+
extname(filePath: string): string;
|
|
59
|
+
relative(from: string, to: string): string;
|
|
60
|
+
normalizeSlug(relativePath: string, ext: string): string;
|
|
61
|
+
readFileSync(filePath: string): string;
|
|
62
|
+
existsSync(filePath: string): boolean;
|
|
63
|
+
listFilesSync(dirPath: string, extensions?: readonly string[]): string[];
|
|
64
|
+
listDirectoriesSync(dirPath: string): string[];
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
/**
|
|
68
|
+
* @context Core layer — content indexer at src/core/indexer.ts
|
|
69
|
+
* @does Scans the contents directory, parses MDX/JSON files, and builds an in-memory index
|
|
70
|
+
* @depends src/shared/types.ts, src/shared/constants.ts, src/shared/fs-adapter.interface.ts, src/core/parsers/, src/core/schema-inferrer.ts
|
|
71
|
+
* @do Add new file type handling here; extend indexCollection for new collection behaviors
|
|
72
|
+
* @dont Import from CLI or UI; instantiate FsAdapter; access the filesystem directly
|
|
73
|
+
*/
|
|
74
|
+
|
|
75
|
+
declare class ContentIndex {
|
|
76
|
+
private readonly entries;
|
|
77
|
+
private readonly collections;
|
|
78
|
+
private readonly fs;
|
|
79
|
+
constructor(fsAdapter: IFsAdapter);
|
|
80
|
+
build(config?: StudioConfig): Promise<void>;
|
|
81
|
+
buildSync(config?: StudioConfig): void;
|
|
82
|
+
getCollection(name: string): ContentEntry[];
|
|
83
|
+
getCollections(): Collection[];
|
|
84
|
+
clear(): void;
|
|
85
|
+
updateEntry(collectionName: string, entry: ContentEntry): void;
|
|
86
|
+
removeEntry(collectionName: string, slug: string): void;
|
|
87
|
+
private updateCollectionMeta;
|
|
88
|
+
private indexCollection;
|
|
89
|
+
private indexCollectionSync;
|
|
90
|
+
private scanDir;
|
|
91
|
+
private scanDirSync;
|
|
92
|
+
private buildMdxEntry;
|
|
93
|
+
private buildJsonEntries;
|
|
94
|
+
private readOrdering;
|
|
95
|
+
private readOrderingSync;
|
|
96
|
+
private applyOrdering;
|
|
97
|
+
private detectCollectionType;
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
/**
|
|
101
|
+
* @context Core layer — content store at src/core/content-store.ts
|
|
102
|
+
* @does Manages a singleton ContentIndex; exposes loadContent() and getStore() for consumers
|
|
103
|
+
* @depends src/core/indexer.ts, src/shared/types.ts
|
|
104
|
+
* @do Use this as the single access point for in-memory indexed content
|
|
105
|
+
* @dont Import from CLI or UI; contain parsing or I/O logic; import fs-adapter at top level
|
|
106
|
+
*/
|
|
107
|
+
|
|
108
|
+
declare function loadContent(fsAdapter: IFsAdapter, config?: StudioConfig): Promise<ContentIndex>;
|
|
109
|
+
declare function loadContentSync(fsAdapter: IFsAdapter, config?: StudioConfig): ContentIndex;
|
|
110
|
+
|
|
111
|
+
export { Collection, ContentEntry, ContentIndex, FsAdapter, StudioConfig, loadContent, loadContentSync };
|
|
@@ -0,0 +1,670 @@
|
|
|
1
|
+
// src/core/server.ts
|
|
2
|
+
import path2 from "path";
|
|
3
|
+
|
|
4
|
+
// src/shared/constants.ts
|
|
5
|
+
var CONTENTS_DIR = "contents";
|
|
6
|
+
var SUPPORTED_EXTENSIONS = [".mdx", ".json"];
|
|
7
|
+
var COLLECTION_ORDER_FILE = "collection.json";
|
|
8
|
+
var IMAGE_MIME_TYPES = [
|
|
9
|
+
"image/png",
|
|
10
|
+
"image/jpeg",
|
|
11
|
+
"image/gif",
|
|
12
|
+
"image/webp",
|
|
13
|
+
"image/svg+xml",
|
|
14
|
+
"image/avif"
|
|
15
|
+
];
|
|
16
|
+
var VIDEO_MIME_TYPES = ["video/mp4", "video/webm", "video/ogg"];
|
|
17
|
+
var AUDIO_MIME_TYPES = [
|
|
18
|
+
"audio/mpeg",
|
|
19
|
+
"audio/ogg",
|
|
20
|
+
"audio/wav",
|
|
21
|
+
"audio/webm",
|
|
22
|
+
"audio/aac",
|
|
23
|
+
"audio/flac"
|
|
24
|
+
];
|
|
25
|
+
var MEDIA_MIME_TYPES = [...IMAGE_MIME_TYPES, ...VIDEO_MIME_TYPES, ...AUDIO_MIME_TYPES];
|
|
26
|
+
|
|
27
|
+
// src/core/fs-adapter.ts
|
|
28
|
+
import fs from "fs/promises";
|
|
29
|
+
import fsSync from "fs";
|
|
30
|
+
import path from "path";
|
|
31
|
+
var FsAdapter = class {
|
|
32
|
+
basePath;
|
|
33
|
+
constructor(basePath) {
|
|
34
|
+
this.basePath = path.resolve(basePath);
|
|
35
|
+
}
|
|
36
|
+
resolve(...segments) {
|
|
37
|
+
return path.resolve(this.basePath, ...segments);
|
|
38
|
+
}
|
|
39
|
+
async readFile(filePath) {
|
|
40
|
+
return fs.readFile(this.resolve(filePath), "utf-8");
|
|
41
|
+
}
|
|
42
|
+
async writeFile(filePath, content) {
|
|
43
|
+
const fullPath = this.resolve(filePath);
|
|
44
|
+
await fs.mkdir(path.dirname(fullPath), { recursive: true });
|
|
45
|
+
await fs.writeFile(fullPath, content, "utf-8");
|
|
46
|
+
}
|
|
47
|
+
async deleteFile(filePath) {
|
|
48
|
+
await fs.unlink(this.resolve(filePath));
|
|
49
|
+
}
|
|
50
|
+
async exists(filePath) {
|
|
51
|
+
try {
|
|
52
|
+
await fs.access(this.resolve(filePath));
|
|
53
|
+
return true;
|
|
54
|
+
} catch {
|
|
55
|
+
return false;
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
async getStats(filePath) {
|
|
59
|
+
const fullPath = this.resolve(filePath);
|
|
60
|
+
const stats = await fs.stat(fullPath);
|
|
61
|
+
return { path: filePath, size: stats.size, modifiedAt: stats.mtime };
|
|
62
|
+
}
|
|
63
|
+
async listFiles(dirPath, extensions) {
|
|
64
|
+
const fullPath = this.resolve(dirPath);
|
|
65
|
+
const filterExts = extensions ?? SUPPORTED_EXTENSIONS;
|
|
66
|
+
let entries;
|
|
67
|
+
try {
|
|
68
|
+
entries = await fs.readdir(fullPath, { withFileTypes: true });
|
|
69
|
+
} catch {
|
|
70
|
+
return [];
|
|
71
|
+
}
|
|
72
|
+
return entries.filter((entry) => entry.isFile() && filterExts.some((ext) => entry.name.endsWith(ext))).map((entry) => this.join(dirPath, entry.name));
|
|
73
|
+
}
|
|
74
|
+
async listDirectories(dirPath) {
|
|
75
|
+
const fullPath = this.resolve(dirPath);
|
|
76
|
+
let entries;
|
|
77
|
+
try {
|
|
78
|
+
entries = await fs.readdir(fullPath, { withFileTypes: true });
|
|
79
|
+
} catch {
|
|
80
|
+
return [];
|
|
81
|
+
}
|
|
82
|
+
return entries.filter((entry) => entry.isDirectory()).map((entry) => this.join(dirPath, entry.name));
|
|
83
|
+
}
|
|
84
|
+
async readBuffer(filePath) {
|
|
85
|
+
return fs.readFile(this.resolve(filePath));
|
|
86
|
+
}
|
|
87
|
+
async writeBuffer(filePath, data) {
|
|
88
|
+
const fullPath = this.resolve(filePath);
|
|
89
|
+
await fs.mkdir(path.dirname(fullPath), { recursive: true });
|
|
90
|
+
await fs.writeFile(fullPath, data);
|
|
91
|
+
}
|
|
92
|
+
async listAllFiles(dirPath) {
|
|
93
|
+
const fullPath = this.resolve(dirPath);
|
|
94
|
+
let entries;
|
|
95
|
+
try {
|
|
96
|
+
entries = await fs.readdir(fullPath, { withFileTypes: true });
|
|
97
|
+
} catch {
|
|
98
|
+
return [];
|
|
99
|
+
}
|
|
100
|
+
const results = [];
|
|
101
|
+
for (const entry of entries) {
|
|
102
|
+
if (!entry.isFile()) continue;
|
|
103
|
+
const relativePath = this.join(dirPath, entry.name);
|
|
104
|
+
const stats = await fs.stat(this.resolve(relativePath));
|
|
105
|
+
results.push({ name: entry.name, relativePath, size: stats.size, modifiedAt: stats.mtime });
|
|
106
|
+
}
|
|
107
|
+
return results;
|
|
108
|
+
}
|
|
109
|
+
join(...segments) {
|
|
110
|
+
return path.join(...segments);
|
|
111
|
+
}
|
|
112
|
+
basename(filePath) {
|
|
113
|
+
return path.basename(filePath);
|
|
114
|
+
}
|
|
115
|
+
extname(filePath) {
|
|
116
|
+
return path.extname(filePath);
|
|
117
|
+
}
|
|
118
|
+
relative(from, to) {
|
|
119
|
+
return path.relative(from, to);
|
|
120
|
+
}
|
|
121
|
+
normalizeSlug(relativePath, ext) {
|
|
122
|
+
return relativePath.replace(ext, "").split(path.sep).join("/");
|
|
123
|
+
}
|
|
124
|
+
readFileSync(filePath) {
|
|
125
|
+
return fsSync.readFileSync(this.resolve(filePath), "utf-8");
|
|
126
|
+
}
|
|
127
|
+
existsSync(filePath) {
|
|
128
|
+
return fsSync.existsSync(this.resolve(filePath));
|
|
129
|
+
}
|
|
130
|
+
listFilesSync(dirPath, extensions) {
|
|
131
|
+
const fullPath = this.resolve(dirPath);
|
|
132
|
+
const filterExts = extensions ?? SUPPORTED_EXTENSIONS;
|
|
133
|
+
let entries;
|
|
134
|
+
try {
|
|
135
|
+
entries = fsSync.readdirSync(fullPath, { withFileTypes: true });
|
|
136
|
+
} catch {
|
|
137
|
+
return [];
|
|
138
|
+
}
|
|
139
|
+
return entries.filter((entry) => entry.isFile() && filterExts.some((ext) => entry.name.endsWith(ext))).map((entry) => this.join(dirPath, entry.name));
|
|
140
|
+
}
|
|
141
|
+
listDirectoriesSync(dirPath) {
|
|
142
|
+
const fullPath = this.resolve(dirPath);
|
|
143
|
+
let entries;
|
|
144
|
+
try {
|
|
145
|
+
entries = fsSync.readdirSync(fullPath, { withFileTypes: true });
|
|
146
|
+
} catch {
|
|
147
|
+
return [];
|
|
148
|
+
}
|
|
149
|
+
return entries.filter((entry) => entry.isDirectory()).map((entry) => this.join(dirPath, entry.name));
|
|
150
|
+
}
|
|
151
|
+
};
|
|
152
|
+
|
|
153
|
+
// src/core/indexer.ts
|
|
154
|
+
import slugify from "@sindresorhus/slugify";
|
|
155
|
+
|
|
156
|
+
// src/core/parsers/parser-mdx.ts
|
|
157
|
+
import matter from "gray-matter";
|
|
158
|
+
|
|
159
|
+
// src/core/frontmatter-binder.ts
|
|
160
|
+
var TOKEN_REGEX = /\{frontmatter\.([a-zA-Z0-9_.]+)\}/g;
|
|
161
|
+
function bindFrontmatter(body, data) {
|
|
162
|
+
return body.replace(TOKEN_REGEX, (_match, path3) => {
|
|
163
|
+
const value = resolvePath(data, path3);
|
|
164
|
+
if (value === void 0 || value === null) return _match;
|
|
165
|
+
if (typeof value === "object") return JSON.stringify(value);
|
|
166
|
+
return String(value);
|
|
167
|
+
});
|
|
168
|
+
}
|
|
169
|
+
function resolvePath(obj, path3) {
|
|
170
|
+
const keys = path3.split(".");
|
|
171
|
+
let current = obj;
|
|
172
|
+
for (const key of keys) {
|
|
173
|
+
if (typeof current !== "object" || current === null) return void 0;
|
|
174
|
+
current = current[key];
|
|
175
|
+
}
|
|
176
|
+
return current;
|
|
177
|
+
}
|
|
178
|
+
|
|
179
|
+
// src/core/parsers/parser-mdx.ts
|
|
180
|
+
function normalizeDates(data) {
|
|
181
|
+
const result = {};
|
|
182
|
+
for (const [key, value] of Object.entries(data)) {
|
|
183
|
+
if (value instanceof Date) {
|
|
184
|
+
result[key] = value.toISOString().split("T")[0];
|
|
185
|
+
} else if (typeof value === "object" && value !== null && !Array.isArray(value)) {
|
|
186
|
+
result[key] = normalizeDates(value);
|
|
187
|
+
} else {
|
|
188
|
+
result[key] = value;
|
|
189
|
+
}
|
|
190
|
+
}
|
|
191
|
+
return result;
|
|
192
|
+
}
|
|
193
|
+
function parseMdx(content, options) {
|
|
194
|
+
const { data: rawData, content: body } = matter(content);
|
|
195
|
+
const data = normalizeDates(rawData);
|
|
196
|
+
const trimmed = body.trim();
|
|
197
|
+
return {
|
|
198
|
+
data,
|
|
199
|
+
body: options?.bindTokens ? bindFrontmatter(trimmed, data) : trimmed
|
|
200
|
+
};
|
|
201
|
+
}
|
|
202
|
+
|
|
203
|
+
// src/core/parsers/parser-json.ts
|
|
204
|
+
function parseJson(content) {
|
|
205
|
+
const parsed = JSON.parse(content);
|
|
206
|
+
if (Array.isArray(parsed)) {
|
|
207
|
+
return {
|
|
208
|
+
type: "json-array",
|
|
209
|
+
entries: parsed
|
|
210
|
+
};
|
|
211
|
+
}
|
|
212
|
+
if (typeof parsed === "object" && parsed !== null) {
|
|
213
|
+
return {
|
|
214
|
+
type: "json-object",
|
|
215
|
+
data: parsed
|
|
216
|
+
};
|
|
217
|
+
}
|
|
218
|
+
throw new Error("JSON content must be an array or object");
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
// src/core/schema-inferrer.ts
|
|
222
|
+
var RE_ISO_DATE = /^\d{4}-\d{2}-\d{2}$/;
|
|
223
|
+
var RE_ISO_DATETIME = /^\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2}(:\d{2}(\.\d+)?)?(Z|[+-]\d{2}:?\d{2})?$/;
|
|
224
|
+
var RE_EMAIL = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
|
|
225
|
+
var RE_URL = /^https?:\/\/.+/;
|
|
226
|
+
var LONG_TEXT_THRESHOLD = 200;
|
|
227
|
+
var RICH_TEXT_FIELD_NAMES = ["description", "descriptions", "text", "content"];
|
|
228
|
+
function isISODate(value) {
|
|
229
|
+
return RE_ISO_DATE.test(value);
|
|
230
|
+
}
|
|
231
|
+
function isISODateTime(value) {
|
|
232
|
+
return RE_ISO_DATETIME.test(value);
|
|
233
|
+
}
|
|
234
|
+
function isEmail(value) {
|
|
235
|
+
return RE_EMAIL.test(value);
|
|
236
|
+
}
|
|
237
|
+
function isUrl(value) {
|
|
238
|
+
return RE_URL.test(value);
|
|
239
|
+
}
|
|
240
|
+
function inferStringField(name, strings) {
|
|
241
|
+
if (strings.every(isEmail)) return { name, type: "email" };
|
|
242
|
+
if (strings.every(isUrl)) return { name, type: "url" };
|
|
243
|
+
if (strings.every(isISODateTime)) return { name, type: "date", includeTime: true };
|
|
244
|
+
if (strings.every(isISODate)) return { name, type: "date" };
|
|
245
|
+
if (RICH_TEXT_FIELD_NAMES.includes(name.toLowerCase())) return { name, type: "long-text" };
|
|
246
|
+
const isLong = strings.some((s) => s.length > LONG_TEXT_THRESHOLD || s.includes("\n"));
|
|
247
|
+
return { name, type: isLong ? "long-text" : "text" };
|
|
248
|
+
}
|
|
249
|
+
function inferArrayField(name, items) {
|
|
250
|
+
if (items.length === 0) return { name, type: "array", itemFields: [] };
|
|
251
|
+
if (items.every((item) => typeof item === "string")) {
|
|
252
|
+
const unique = [...new Set(items)].slice(0, 50);
|
|
253
|
+
const options = unique.map((v) => ({ label: v, value: v }));
|
|
254
|
+
return { name, type: "multi-select", options };
|
|
255
|
+
}
|
|
256
|
+
if (items.every((item) => typeof item === "object" && item !== null && !Array.isArray(item))) {
|
|
257
|
+
return { name, type: "array", itemFields: inferFields(items) };
|
|
258
|
+
}
|
|
259
|
+
return { name, type: "array", itemFields: [] };
|
|
260
|
+
}
|
|
261
|
+
function inferFieldDefinition(name, values) {
|
|
262
|
+
const present = values.filter((v) => v !== null && v !== void 0);
|
|
263
|
+
if (present.length === 0) return { name, type: "text" };
|
|
264
|
+
if (present.every((v) => typeof v === "boolean")) return { name, type: "boolean" };
|
|
265
|
+
if (present.every((v) => typeof v === "number")) {
|
|
266
|
+
const format = present.every((v) => Number.isInteger(v)) ? "integer" : "decimal";
|
|
267
|
+
return { name, type: "number", format };
|
|
268
|
+
}
|
|
269
|
+
if (present.every((v) => typeof v === "string")) {
|
|
270
|
+
return inferStringField(name, present);
|
|
271
|
+
}
|
|
272
|
+
if (present.every((v) => Array.isArray(v))) {
|
|
273
|
+
return inferArrayField(name, present.flat());
|
|
274
|
+
}
|
|
275
|
+
if (present.every((v) => typeof v === "object" && v !== null && !Array.isArray(v))) {
|
|
276
|
+
return { name, type: "object", fields: inferFields(present) };
|
|
277
|
+
}
|
|
278
|
+
return { name, type: "text" };
|
|
279
|
+
}
|
|
280
|
+
function inferFields(rows) {
|
|
281
|
+
const keySet = new Set(rows.flatMap((row) => Object.keys(row)));
|
|
282
|
+
return Array.from(keySet).map((key) => inferFieldDefinition(key, rows.map((row) => row[key])));
|
|
283
|
+
}
|
|
284
|
+
function inferSchema(entries, collectionName) {
|
|
285
|
+
const rows = entries.map((entry) => entry.data);
|
|
286
|
+
return { collection: collectionName, fields: inferFields(rows) };
|
|
287
|
+
}
|
|
288
|
+
|
|
289
|
+
// src/core/locale-parser.ts
|
|
290
|
+
var LOCALE_REGEX = /\.([a-z]{2}(?:-[A-Z]{2})?)\.mdx$/;
|
|
291
|
+
function parseLocaleFromFilename(filename) {
|
|
292
|
+
const match = LOCALE_REGEX.exec(filename);
|
|
293
|
+
return match?.[1];
|
|
294
|
+
}
|
|
295
|
+
function stripLocaleFromSlug(slug, locale) {
|
|
296
|
+
if (!locale) return slug;
|
|
297
|
+
const dotSuffix = `.${locale}`;
|
|
298
|
+
if (slug.endsWith(dotSuffix)) return slug.slice(0, -dotSuffix.length);
|
|
299
|
+
const dashSuffix = `-${locale}`;
|
|
300
|
+
if (slug.endsWith(dashSuffix)) return slug.slice(0, -dashSuffix.length);
|
|
301
|
+
return slug;
|
|
302
|
+
}
|
|
303
|
+
|
|
304
|
+
// src/core/indexer.ts
|
|
305
|
+
var ContentIndex = class {
|
|
306
|
+
entries = /* @__PURE__ */ new Map();
|
|
307
|
+
collections = /* @__PURE__ */ new Map();
|
|
308
|
+
fs;
|
|
309
|
+
constructor(fsAdapter) {
|
|
310
|
+
this.fs = fsAdapter;
|
|
311
|
+
}
|
|
312
|
+
async build(config) {
|
|
313
|
+
this.clear();
|
|
314
|
+
const dirs = await this.fs.listDirectories(".");
|
|
315
|
+
for (const dir of dirs) {
|
|
316
|
+
const dirName = this.fs.basename(dir);
|
|
317
|
+
const collectionName = slugify(dirName);
|
|
318
|
+
const collectionConfig = config?.collections?.[collectionName];
|
|
319
|
+
await this.indexCollection(dirName, collectionName, collectionConfig?.schema);
|
|
320
|
+
}
|
|
321
|
+
}
|
|
322
|
+
buildSync(config) {
|
|
323
|
+
this.clear();
|
|
324
|
+
const dirs = this.fs.listDirectoriesSync(".");
|
|
325
|
+
for (const dir of dirs) {
|
|
326
|
+
const dirName = this.fs.basename(dir);
|
|
327
|
+
const collectionName = slugify(dirName);
|
|
328
|
+
const collectionConfig = config?.collections?.[collectionName];
|
|
329
|
+
this.indexCollectionSync(dirName, collectionName, collectionConfig?.schema);
|
|
330
|
+
}
|
|
331
|
+
}
|
|
332
|
+
getCollection(name) {
|
|
333
|
+
return this.entries.get(name) ?? [];
|
|
334
|
+
}
|
|
335
|
+
getCollections() {
|
|
336
|
+
return Array.from(this.collections.values());
|
|
337
|
+
}
|
|
338
|
+
clear() {
|
|
339
|
+
this.entries.clear();
|
|
340
|
+
this.collections.clear();
|
|
341
|
+
}
|
|
342
|
+
updateEntry(collectionName, entry) {
|
|
343
|
+
const entries = this.entries.get(collectionName) ?? [];
|
|
344
|
+
const idx = entries.findIndex((e) => e.slug === entry.slug);
|
|
345
|
+
if (idx >= 0) {
|
|
346
|
+
entries[idx] = entry;
|
|
347
|
+
} else {
|
|
348
|
+
entries.push(entry);
|
|
349
|
+
}
|
|
350
|
+
this.entries.set(collectionName, entries);
|
|
351
|
+
this.updateCollectionMeta(collectionName);
|
|
352
|
+
}
|
|
353
|
+
removeEntry(collectionName, slug) {
|
|
354
|
+
const entries = this.entries.get(collectionName);
|
|
355
|
+
if (!entries) return;
|
|
356
|
+
const filtered = entries.filter((e) => e.slug !== slug);
|
|
357
|
+
this.entries.set(collectionName, filtered);
|
|
358
|
+
this.updateCollectionMeta(collectionName);
|
|
359
|
+
}
|
|
360
|
+
updateCollectionMeta(collectionName) {
|
|
361
|
+
const col = this.collections.get(collectionName);
|
|
362
|
+
const entries = this.entries.get(collectionName) ?? [];
|
|
363
|
+
if (col) {
|
|
364
|
+
this.collections.set(collectionName, {
|
|
365
|
+
...col,
|
|
366
|
+
count: entries.length,
|
|
367
|
+
type: this.detectCollectionType(entries)
|
|
368
|
+
});
|
|
369
|
+
}
|
|
370
|
+
}
|
|
371
|
+
async indexCollection(dirName, collectionName, manualSchema) {
|
|
372
|
+
const entries = [];
|
|
373
|
+
await this.scanDir(dirName, collectionName, dirName, entries);
|
|
374
|
+
const orderPath = this.fs.join(dirName, COLLECTION_ORDER_FILE);
|
|
375
|
+
const ordering = await this.readOrdering(orderPath);
|
|
376
|
+
if (ordering) {
|
|
377
|
+
this.applyOrdering(entries, ordering);
|
|
378
|
+
}
|
|
379
|
+
const schema = manualSchema ?? inferSchema(entries, collectionName);
|
|
380
|
+
this.entries.set(collectionName, entries);
|
|
381
|
+
this.collections.set(collectionName, {
|
|
382
|
+
name: collectionName,
|
|
383
|
+
type: this.detectCollectionType(entries),
|
|
384
|
+
count: entries.length,
|
|
385
|
+
basePath: dirName,
|
|
386
|
+
schema
|
|
387
|
+
});
|
|
388
|
+
}
|
|
389
|
+
indexCollectionSync(dirName, collectionName, manualSchema) {
|
|
390
|
+
const entries = [];
|
|
391
|
+
this.scanDirSync(dirName, collectionName, dirName, entries);
|
|
392
|
+
const orderPath = this.fs.join(dirName, COLLECTION_ORDER_FILE);
|
|
393
|
+
const ordering = this.readOrderingSync(orderPath);
|
|
394
|
+
if (ordering) {
|
|
395
|
+
this.applyOrdering(entries, ordering);
|
|
396
|
+
}
|
|
397
|
+
const schema = manualSchema ?? inferSchema(entries, collectionName);
|
|
398
|
+
this.entries.set(collectionName, entries);
|
|
399
|
+
this.collections.set(collectionName, {
|
|
400
|
+
name: collectionName,
|
|
401
|
+
type: this.detectCollectionType(entries),
|
|
402
|
+
count: entries.length,
|
|
403
|
+
basePath: dirName,
|
|
404
|
+
schema
|
|
405
|
+
});
|
|
406
|
+
}
|
|
407
|
+
async scanDir(dirName, collectionName, dirPath, entries) {
|
|
408
|
+
const subDirs = await this.fs.listDirectories(dirPath);
|
|
409
|
+
for (const subDir of subDirs) {
|
|
410
|
+
await this.scanDir(dirName, collectionName, subDir, entries);
|
|
411
|
+
}
|
|
412
|
+
const files = await this.fs.listFiles(dirPath);
|
|
413
|
+
for (const filePath of files) {
|
|
414
|
+
const fileName = this.fs.basename(filePath);
|
|
415
|
+
if (fileName === COLLECTION_ORDER_FILE) continue;
|
|
416
|
+
const ext = this.fs.extname(fileName);
|
|
417
|
+
const content = await this.fs.readFile(filePath);
|
|
418
|
+
const relativePath = this.fs.relative(dirName, filePath);
|
|
419
|
+
const slug = this.fs.normalizeSlug(relativePath, ext).split("/").map((segment) => slugify(segment)).join("/");
|
|
420
|
+
if (ext === ".mdx") {
|
|
421
|
+
entries.push(this.buildMdxEntry(collectionName, slug, fileName, content));
|
|
422
|
+
} else if (ext === ".json") {
|
|
423
|
+
entries.push(...this.buildJsonEntries(collectionName, slug, content));
|
|
424
|
+
}
|
|
425
|
+
}
|
|
426
|
+
}
|
|
427
|
+
scanDirSync(dirName, collectionName, dirPath, entries) {
|
|
428
|
+
const subDirs = this.fs.listDirectoriesSync(dirPath);
|
|
429
|
+
for (const subDir of subDirs) {
|
|
430
|
+
this.scanDirSync(dirName, collectionName, subDir, entries);
|
|
431
|
+
}
|
|
432
|
+
const files = this.fs.listFilesSync(dirPath);
|
|
433
|
+
for (const filePath of files) {
|
|
434
|
+
const fileName = this.fs.basename(filePath);
|
|
435
|
+
if (fileName === COLLECTION_ORDER_FILE) continue;
|
|
436
|
+
const ext = this.fs.extname(fileName);
|
|
437
|
+
const content = this.fs.readFileSync(filePath);
|
|
438
|
+
const relativePath = this.fs.relative(dirName, filePath);
|
|
439
|
+
const slug = this.fs.normalizeSlug(relativePath, ext).split("/").map((segment) => slugify(segment)).join("/");
|
|
440
|
+
if (ext === ".mdx") {
|
|
441
|
+
entries.push(this.buildMdxEntry(collectionName, slug, fileName, content));
|
|
442
|
+
} else if (ext === ".json") {
|
|
443
|
+
entries.push(...this.buildJsonEntries(collectionName, slug, content));
|
|
444
|
+
}
|
|
445
|
+
}
|
|
446
|
+
}
|
|
447
|
+
buildMdxEntry(collectionName, slug, fileName, content) {
|
|
448
|
+
const parsed = parseMdx(content);
|
|
449
|
+
const locale = parseLocaleFromFilename(fileName);
|
|
450
|
+
const normalizedSlug = stripLocaleFromSlug(slug, locale);
|
|
451
|
+
return {
|
|
452
|
+
collection: collectionName,
|
|
453
|
+
slug: normalizedSlug,
|
|
454
|
+
path: `/${collectionName}/${normalizedSlug}`,
|
|
455
|
+
body: parsed.body,
|
|
456
|
+
data: parsed.data,
|
|
457
|
+
...locale ? { locale } : {}
|
|
458
|
+
};
|
|
459
|
+
}
|
|
460
|
+
buildJsonEntries(collectionName, slug, content) {
|
|
461
|
+
const parsed = parseJson(content);
|
|
462
|
+
if (parsed.type === "json-array") {
|
|
463
|
+
return parsed.entries.map((data, index) => {
|
|
464
|
+
const entrySlug = typeof data["slug"] === "string" ? slugify(data["slug"]) : `${slug}/${index}`;
|
|
465
|
+
return {
|
|
466
|
+
collection: collectionName,
|
|
467
|
+
slug: entrySlug,
|
|
468
|
+
path: `/${collectionName}/${entrySlug}`,
|
|
469
|
+
data
|
|
470
|
+
};
|
|
471
|
+
});
|
|
472
|
+
}
|
|
473
|
+
return [{ collection: collectionName, slug, path: `/${collectionName}/${slug}`, data: parsed.data }];
|
|
474
|
+
}
|
|
475
|
+
async readOrdering(orderPath) {
|
|
476
|
+
if (!await this.fs.exists(orderPath)) return null;
|
|
477
|
+
try {
|
|
478
|
+
const content = await this.fs.readFile(orderPath);
|
|
479
|
+
const parsed = JSON.parse(content);
|
|
480
|
+
if (Array.isArray(parsed)) return parsed;
|
|
481
|
+
} catch (error) {
|
|
482
|
+
console.warn(`[Nextjs Studio] Failed to parse ordering file: ${orderPath}`, error);
|
|
483
|
+
}
|
|
484
|
+
return null;
|
|
485
|
+
}
|
|
486
|
+
readOrderingSync(orderPath) {
|
|
487
|
+
if (!this.fs.existsSync(orderPath)) return null;
|
|
488
|
+
try {
|
|
489
|
+
const content = this.fs.readFileSync(orderPath);
|
|
490
|
+
const parsed = JSON.parse(content);
|
|
491
|
+
if (Array.isArray(parsed)) return parsed;
|
|
492
|
+
} catch (error) {
|
|
493
|
+
console.warn(`[Nextjs Studio] Failed to parse ordering file: ${orderPath}`, error);
|
|
494
|
+
}
|
|
495
|
+
return null;
|
|
496
|
+
}
|
|
497
|
+
applyOrdering(entries, ordering) {
|
|
498
|
+
const orderMap = new Map(ordering.map((slug, index) => [slug, index]));
|
|
499
|
+
entries.sort((a, b) => {
|
|
500
|
+
const aIndex = orderMap.get(a.slug) ?? Infinity;
|
|
501
|
+
const bIndex = orderMap.get(b.slug) ?? Infinity;
|
|
502
|
+
return aIndex - bIndex;
|
|
503
|
+
});
|
|
504
|
+
}
|
|
505
|
+
detectCollectionType(entries) {
|
|
506
|
+
if (entries.length === 0) return "mdx";
|
|
507
|
+
const first = entries[0];
|
|
508
|
+
if (first.body !== void 0) return "mdx";
|
|
509
|
+
if (entries.length === 1 && !first.slug.includes("/")) return "json-object";
|
|
510
|
+
return "json-array";
|
|
511
|
+
}
|
|
512
|
+
};
|
|
513
|
+
|
|
514
|
+
// src/core/content-store.ts
|
|
515
|
+
var store = null;
|
|
516
|
+
function getStore() {
|
|
517
|
+
if (!store) {
|
|
518
|
+
throw new Error(
|
|
519
|
+
"Content not loaded. Auto-init requires 'nextjs-studio/server' \u2014 use loadContentSync() in a server context, or queryCollection() will auto-init when imported from 'nextjs-studio/server'."
|
|
520
|
+
);
|
|
521
|
+
}
|
|
522
|
+
return store;
|
|
523
|
+
}
|
|
524
|
+
function setStore(index) {
|
|
525
|
+
store = index;
|
|
526
|
+
}
|
|
527
|
+
function hasStore() {
|
|
528
|
+
return store !== null;
|
|
529
|
+
}
|
|
530
|
+
/**
 * Build a ContentIndex asynchronously from the given fs adapter and config,
 * install it as the module-level store, and return it.
 */
async function loadContent(fsAdapter, config) {
  const built = new ContentIndex(fsAdapter);
  await built.build(config);
  store = built;
  return built;
}
|
|
536
|
+
/**
 * Synchronous twin of `loadContent`: build the index, install it as the
 * module-level store, and return it.
 */
function loadContentSync(fsAdapter, config) {
  const built = new ContentIndex(fsAdapter);
  built.buildSync(config);
  store = built;
  return built;
}
|
|
542
|
+
|
|
543
|
+
// src/core/query-builder.ts
|
|
544
|
+
import { filter, orderBy, get, slice } from "lodash-es";
|
|
545
|
+
|
|
546
|
+
// src/core/draft-filter.ts
|
|
547
|
+
function isDraft(entry) {
|
|
548
|
+
return entry.data.draft === true;
|
|
549
|
+
}
|
|
550
|
+
|
|
551
|
+
// src/core/query-builder.ts
|
|
552
|
+
// Fluent, chainable query over one collection's entries. Terminal methods
// (all/first/one/count) read from the module-level store via getStore(), so
// content must be loaded before they run. Instances are normally wrapped by
// wrapWithArrayProxy(), which relies on these exact field/method names.
var QueryBuilder = class {
  collectionName;
  // Accumulated where/sort/limit/offset settings, applied in all().
  options = {};
  _excludeDrafts = false;
  _locale;
  constructor(collection) {
    this.collectionName = collection;
  }
  // Shallow-merges equality conditions; repeated calls accumulate. Keys are
  // resolved against entry.data with lodash `get`, so dot-paths work.
  where(conditions) {
    this.options.where = { ...this.options.where, ...conditions };
    return this;
  }
  // Sort by a (dot-path) field of entry.data; last call wins.
  sort(field, order = "asc") {
    this.options.sort = { field, order };
    return this;
  }
  // NOTE: a limit of 0 is falsy and is treated as "no limit" in all().
  limit(count) {
    this.options.limit = count;
    return this;
  }
  offset(count) {
    this.options.offset = count;
    return this;
  }
  // Exclude entries whose data.draft === true.
  excludeDrafts() {
    this._excludeDrafts = true;
    return this;
  }
  // Keep only entries tagged with this locale code.
  locale(code) {
    this._locale = code;
    return this;
  }
  // Execute the query: draft filter, locale filter, where-equality filter,
  // sort, then offset/limit slicing. Each result object flattens entry.data
  // into the top level AFTER the metadata keys, so data keys can shadow
  // collection/slug/path/body/locale.
  all() {
    let entries = [...getStore().getCollection(this.collectionName)];
    if (this._excludeDrafts) {
      entries = entries.filter((entry) => !isDraft(entry));
    }
    if (this._locale) {
      const loc = this._locale;
      entries = entries.filter((entry) => entry.locale === loc);
    }
    if (this.options.where) {
      const conditions = this.options.where;
      entries = filter(
        entries,
        (entry) => Object.entries(conditions).every(([key, value]) => get(entry.data, key) === value)
      );
    }
    if (this.options.sort) {
      const { field, order } = this.options.sort;
      entries = orderBy(entries, [(entry) => get(entry.data, field)], [order]);
    }
    const start = this.options.offset ?? 0;
    const end = this.options.limit ? start + this.options.limit : void 0;
    return slice(entries, start, end).map((e) => ({
      collection: e.collection,
      slug: e.slug,
      path: e.path,
      body: e.body,
      locale: e.locale,
      ...e.data
    }));
  }
  // First matching entry or undefined. NOTE: mutates this builder's limit.
  first() {
    return this.limit(1).all()[0];
  }
  // Like first(), but throws when nothing matches. Also mutates limit.
  one() {
    const result = this.limit(1).all()[0];
    if (result === void 0) {
      throw new Error(`Collection "${this.collectionName}" is empty \u2014 expected exactly one entry.`);
    }
    return result;
  }
  // Number of matching entries; respects any limit/offset already set.
  count() {
    return this.all().length;
  }
};
|
|
629
|
+
// Builder methods that return `this`; invoking any of them through the proxy
// invalidates the cached result array below.
var FLUENT_METHODS = /* @__PURE__ */ new Set(["where", "sort", "limit", "offset", "excludeDrafts", "locale"]);
// Wrap a QueryBuilder in a Proxy so it can be used both fluently
// (q.where(...).sort(...)) and as an array (q.length, q.map(...), q[0]):
// any property not found on the builder is forwarded to a lazily computed,
// cached result of builder.all().
function wrapWithArrayProxy(builder) {
  let cache = null;
  return new Proxy(builder, {
    get(target, prop, receiver) {
      if (FLUENT_METHODS.has(String(prop))) {
        const method = Reflect.get(target, prop);
        return (...args) => {
          cache = null; // query settings changed — drop stale results
          method.apply(target, args);
          return receiver; // keep chaining on the proxy, not the raw builder
        };
      }
      if (prop in target) {
        const value2 = Reflect.get(target, prop, receiver);
        // Bind builder methods (all/first/one/count) to the real target.
        return typeof value2 === "function" ? value2.bind(target) : value2;
      }
      // Unknown property: treat as array access on the (cached) results.
      if (!cache) cache = target.all();
      const value = Reflect.get(cache, prop);
      return typeof value === "function" ? value.bind(cache) : value;
    }
  });
}
|
|
652
|
+
/**
 * Public entry point for querying a collection by name. Returns a
 * QueryBuilder proxied so it also behaves like the resulting entry array.
 */
function queryCollection(name) {
  const builder = new QueryBuilder(name);
  return wrapWithArrayProxy(builder);
}
|
|
655
|
+
|
|
656
|
+
// src/core/server.ts
|
|
657
|
+
// Import-time side effect: if no content store has been loaded yet, build
// one synchronously from <cwd>/<CONTENTS_DIR> so queryCollection() works
// without an explicit loadContentSync() call.
if (!hasStore()) {
  const dir = path2.join(process.cwd(), CONTENTS_DIR);
  const index = new ContentIndex(new FsAdapter(dir));
  index.buildSync();
  setStore(index);
}
|
|
663
|
+
// Public API of the nextjs-studio/server entry point.
export {
  ContentIndex,
  FsAdapter,
  loadContent,
  loadContentSync,
  queryCollection
};
|
|
670
|
+
//# sourceMappingURL=server.js.map
|
|
@@ -337,4 +337,4 @@ type QueryResult<T> = QueryBuilder<T> & T[];
|
|
|
337
337
|
declare function queryCollection<K extends keyof CollectionTypeMap>(name: K): QueryResult<EntryResult<CollectionTypeMap[K]>>;
|
|
338
338
|
declare function queryCollection(name: string): QueryResult<EntryResult>;
|
|
339
339
|
|
|
340
|
-
export { type ArrayField as A, type BooleanField as B, type ContentEntry as C, type
|
|
340
|
+
export { type ArrayField as A, type BooleanField as B, type ContentEntry as C, type DirectoryFileEntry as D, type EmailField as E, type FileInfo as F, type HttpUrl as H, type ISODate as I, type LongTextField as L, type MultiSelectField as M, type NumberField as N, type ObjectField as O, type QueryOptions as Q, type RelationField as R, type StudioConfig as S, type TextField as T, type UrlField as U, type Collection as a, type CollectionConfig as b, type CollectionTypeMap as c, type QueryResult as d, type FieldDefinition as e, type DateField as f, type SelectField as g, type Email as h, type MediaField as i, type MediaPath as j, type IdField as k, type ID as l, type SlugField as m, type Slug as n, type FormulaField as o, type StatusField as p, queryCollection as q, type CreatedTimeField as r, type UpdatedTimeField as s, type CollectionSchema as t, type BaseField as u, type EntryResult as v, type FieldType as w };
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "nextjs-studio",
|
|
3
|
-
"version": "1.0.
|
|
3
|
+
"version": "1.0.7",
|
|
4
4
|
"description": "A Git-based, local-first CMS for Next.js projects",
|
|
5
5
|
"keywords": [
|
|
6
6
|
"nextjs",
|
|
@@ -33,8 +33,11 @@
|
|
|
33
33
|
"types": "./dist/core/index.d.ts",
|
|
34
34
|
"bin": "dist/bin/nextjs-studio.js",
|
|
35
35
|
"files": [
|
|
36
|
-
"dist/bin",
|
|
37
|
-
"dist/core",
|
|
36
|
+
"dist/bin/nextjs-studio.js",
|
|
37
|
+
"dist/core/index.js",
|
|
38
|
+
"dist/core/index.d.ts",
|
|
39
|
+
"dist/core/server.js",
|
|
40
|
+
"dist/core/server.d.ts",
|
|
38
41
|
"dist/*.d.ts",
|
|
39
42
|
"dist/cli/ui/standalone.tar.gz",
|
|
40
43
|
"README.md",
|