@studiocms/s3-storage 0.0.0-beta.0 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025-present StudioCMS - withstudiocms (https://github.com/withstudiocms)
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,97 @@
1
+ # @studiocms/s3-storage Plugin
2
+
3
+ ## Usage
4
+
5
+ Add this plugin in your StudioCMS config. (`studiocms.config.mjs`)
6
+
7
+ ```ts
8
+ import { defineStudioCMSConfig } from 'studiocms/config';
9
+ import s3Storage from '@studiocms/s3-storage';
10
+
11
+ export default defineStudioCMSConfig({
12
+ // other options here
13
+ storageManager: s3Storage()
14
+ });
15
+ ```
16
+
17
+ ## Required Environment variables
18
+
19
+ ```env
20
+ # ============================================
21
+ # CONFIGURATION FOR DIFFERENT PROVIDERS
22
+ # ============================================
23
+
24
+ # -------------------- AWS S3 --------------------
25
+ CMS_S3_PROVIDER=AWS
26
+ CMS_S3_REGION=us-east-1
27
+ CMS_S3_ENDPOINT= # Leave empty for AWS
28
+ CMS_S3_ACCESS_KEY_ID=your_access_key
29
+ CMS_S3_SECRET_ACCESS_KEY=your_secret_key
30
+ CMS_S3_BUCKET_NAME=your_bucket_name
31
+ CMS_S3_FORCE_PATH_STYLE=false
32
+ CMS_S3_PUBLIC_ENDPOINT= # Optional: Custom public endpoint for accessing files
33
+
34
+ # ------------ Cloudflare R2 --------------------
35
+ # CMS_S3_PROVIDER=Cloudflare R2
36
+ # CMS_S3_REGION=auto
37
+ # CMS_S3_ENDPOINT=https://your-account-id.r2.cloudflarestorage.com
38
+ # CMS_S3_ACCESS_KEY_ID=your_r2_access_key
39
+ # CMS_S3_SECRET_ACCESS_KEY=your_r2_secret_key
40
+ # CMS_S3_BUCKET_NAME=your_bucket_name
41
+ # CMS_S3_FORCE_PATH_STYLE=false
42
+ # CMS_S3_PUBLIC_ENDPOINT= # Optional: Custom public endpoint for accessing files
43
+
44
+ # ---------- DigitalOcean Spaces ----------------
45
+ # CMS_S3_PROVIDER=DigitalOcean Spaces
46
+ # CMS_S3_REGION=nyc3
47
+ # CMS_S3_ENDPOINT=https://nyc3.digitaloceanspaces.com
48
+ # CMS_S3_ACCESS_KEY_ID=your_spaces_key
49
+ # CMS_S3_SECRET_ACCESS_KEY=your_spaces_secret
50
+ # CMS_S3_BUCKET_NAME=your_space_name
51
+ # CMS_S3_FORCE_PATH_STYLE=false
52
+ # CMS_S3_PUBLIC_ENDPOINT= # Optional: Custom public endpoint for accessing files
53
+
54
+ # ------------- Backblaze B2 --------------------
55
+ # CMS_S3_PROVIDER=Backblaze B2
56
+ # CMS_S3_REGION=us-west-004
57
+ # CMS_S3_ENDPOINT=https://s3.us-west-004.backblazeb2.com
58
+ # CMS_S3_ACCESS_KEY_ID=your_b2_key_id
59
+ # CMS_S3_SECRET_ACCESS_KEY=your_b2_application_key
60
+ # CMS_S3_BUCKET_NAME=your_bucket_name
61
+ # CMS_S3_FORCE_PATH_STYLE=false
62
+ # CMS_S3_PUBLIC_ENDPOINT= # Optional: Custom public endpoint for accessing files
63
+
64
+ # --------------- MinIO -------------------------
65
+ # CMS_S3_PROVIDER=MinIO
66
+ # CMS_S3_REGION=us-east-1
67
+ # CMS_S3_ENDPOINT=http://localhost:9000
68
+ # CMS_S3_ACCESS_KEY_ID=minioadmin
69
+ # CMS_S3_SECRET_ACCESS_KEY=minioadmin
70
+ # CMS_S3_BUCKET_NAME=your_bucket_name
71
+ # CMS_S3_FORCE_PATH_STYLE=true # Required for MinIO
72
+ # CMS_S3_PUBLIC_ENDPOINT= # Optional: Custom public endpoint for accessing files
73
+
74
+ # ------------- Wasabi --------------------------
75
+ # CMS_S3_PROVIDER=Wasabi
76
+ # CMS_S3_REGION=us-east-1
77
+ # CMS_S3_ENDPOINT=https://s3.us-east-1.wasabisys.com
78
+ # CMS_S3_ACCESS_KEY_ID=your_wasabi_key
79
+ # CMS_S3_SECRET_ACCESS_KEY=your_wasabi_secret
80
+ # CMS_S3_BUCKET_NAME=your_bucket_name
81
+ # CMS_S3_FORCE_PATH_STYLE=false
82
+ # CMS_S3_PUBLIC_ENDPOINT= # Optional: Custom public endpoint for accessing files
83
+
84
+ # ---------- Linode Object Storage --------------
85
+ # CMS_S3_PROVIDER=Linode
86
+ # CMS_S3_REGION=us-east-1
87
+ # CMS_S3_ENDPOINT=https://us-east-1.linodeobjects.com
88
+ # CMS_S3_ACCESS_KEY_ID=your_linode_key
89
+ # CMS_S3_SECRET_ACCESS_KEY=your_linode_secret
90
+ # CMS_S3_BUCKET_NAME=your_bucket_name
91
+ # CMS_S3_FORCE_PATH_STYLE=false
92
+ # CMS_S3_PUBLIC_ENDPOINT= # Optional: Custom public endpoint for accessing files
93
+ ```
94
+
95
+ ## License
96
+
97
+ [MIT Licensed](./LICENSE).
@@ -0,0 +1,29 @@
1
+ /**
2
+ * These triple-slash directives define dependencies on various declaration files that will be
3
+ * loaded when a user imports the StudioCMS plugin in their Astro configuration file. These
4
+ * directives must be first at the top of the file and can only be preceded by this comment.
5
+ */
6
+ import { type StudioCMSStorageManager } from 'studiocms/plugins';
7
+ /**
8
+ * Creates and configures a StudioCMS S3 Storage Manager plugin.
9
+ *
10
+ * This function initializes the S3 storage integration for StudioCMS by defining
11
+ * a storage manager plugin with the necessary configuration and hooks.
12
+ *
13
+ * @returns {StudioCMSStorageManager} A configured storage manager instance that integrates
14
+ * S3 storage capabilities with StudioCMS.
15
+ *
16
+ * @remarks
17
+ * The storage manager registers a hook that sets up the S3 storage manager by resolving
18
+ * the path to the storage manager implementation file. It requires StudioCMS version
19
+ * 0.1.0-beta.31 or higher.
20
+ *
21
+ * @example
22
+ * ```typescript
23
+ * import { studiocmsS3Storage } from '@studiocms/s3-storage';
24
+ *
25
+ * const s3Storage = studiocmsS3Storage();
26
+ * ```
27
+ */
28
+ export declare function studiocmsS3Storage(): StudioCMSStorageManager;
29
+ export default studiocmsS3Storage;
package/dist/index.js ADDED
@@ -0,0 +1,24 @@
1
+ import { createResolver } from "astro-integration-kit";
2
+ import { defineStorageManager } from "studiocms/plugins";
3
+ function studiocmsS3Storage() {
4
+ const { resolve } = createResolver(import.meta.url);
5
+ const packageIdentifier = "@studiocms/s3-storage";
6
+ return defineStorageManager({
7
+ identifier: packageIdentifier,
8
+ name: "StudioCMS S3 Storage",
9
+ studiocmsMinimumVersion: "0.1.0-beta.31",
10
+ hooks: {
11
+ "studiocms:storage-manager": ({ setStorageManager, logger }) => {
12
+ logger.info("StudioCMS S3 Storage initialized.");
13
+ setStorageManager({
14
+ managerPath: resolve("./s3-storage-manager.js")
15
+ });
16
+ }
17
+ }
18
+ });
19
+ }
20
+ var index_default = studiocmsS3Storage;
21
+ export {
22
+ index_default as default,
23
+ studiocmsS3Storage
24
+ };
@@ -0,0 +1,41 @@
1
+ import type { AuthorizationType, ContextDriverDefinition, StorageAPIEndpointFn, StorageApiBuilderDefinition, UrlMappingServiceDefinition } from 'studiocms/storage-manager/definitions';
2
+ /**
3
+ * S3-compatible storage API service that provides endpoints for managing files in S3-compatible storage.
4
+ *
5
+ * Implements the StorageApiBuilderDefinition interface to handle various storage operations including
6
+ * file uploads, downloads, deletions, renames, and URL mapping management.
7
+ *
8
+ * @template C - The context type used by the underlying driver
9
+ * @template R - The response type returned by the driver
10
+ *
11
+ * @remarks
12
+ * This service supports the following operations through POST requests:
13
+ * - `resolveUrl`: Resolve a file identifier to its URL metadata
14
+ * - `publicUrl`: Generate and register a public URL for a file
15
+ * - `upload`: Generate a presigned URL for uploading files
16
+ * - `list`: List objects in the bucket with optional prefix filtering
17
+ * - `delete`: Delete a file and its URL mapping
18
+ * - `rename`: Rename/move a file and update its URL mapping
19
+ * - `download`: Generate a presigned URL for downloading files
20
+ * - `cleanup`: Remove expired URL mappings
21
+ * - `mappings`: Retrieve all URL mappings (debugging)
22
+ * - `test`: Test the S3 connection
23
+ *
24
+ * PUT requests are used for direct file uploads with the file key specified in the `x-storage-key` header.
25
+ *
26
+ * Most operations require authorization except for `resolveUrl` and `publicUrl` which are publicly accessible.
27
+ *
28
+ * @example
29
+ * ```typescript
30
+ * const service = new S3ApiService(driver, urlMappingService);
31
+ * const postEndpoint = service.getPOST('locals');
32
+ * const putEndpoint = service.getPUT('locals');
33
+ * ```
34
+ */
35
+ export default class S3ApiService<C, R> implements StorageApiBuilderDefinition<C, R> {
36
+ driver: ContextDriverDefinition<C, R>;
37
+ urlMappingService: UrlMappingServiceDefinition;
38
+ constructor(driver: ContextDriverDefinition<C, R>, urlMappingService: UrlMappingServiceDefinition);
39
+ getPOST(type?: AuthorizationType): StorageAPIEndpointFn<C, R>;
40
+ getPUT(type?: AuthorizationType): StorageAPIEndpointFn<C, R>;
41
+ }
@@ -0,0 +1,247 @@
1
+ import {
2
+ CopyObjectCommand,
3
+ DeleteObjectCommand,
4
+ GetObjectCommand,
5
+ ListObjectsV2Command,
6
+ PutObjectCommand,
7
+ S3Client
8
+ } from "@aws-sdk/client-s3";
9
+ import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
10
+ import { Config, ConfigProvider, Effect, Redacted } from "effect";
11
+ const s3ClientBuilder = Effect.gen(function* () {
12
+ const [
13
+ // With default values for optional config
14
+ region,
15
+ endpoint,
16
+ forcePathStyle,
17
+ provider,
18
+ // Redacted config values
19
+ accessKeyId,
20
+ secretAccessKey,
21
+ // Required config values
22
+ bucketName,
23
+ publicEndpoint
24
+ ] = yield* Effect.all([
25
+ Config.withDefault(Config.string("REGION"), "auto"),
26
+ Config.withDefault(Config.string("ENDPOINT"), void 0),
27
+ Config.withDefault(Config.boolean("FORCE_PATH_STYLE"), false),
28
+ Config.withDefault(Config.string("PROVIDER"), "Unknown"),
29
+ Config.redacted("ACCESS_KEY_ID"),
30
+ Config.redacted("SECRET_ACCESS_KEY"),
31
+ Config.string("BUCKET_NAME"),
32
+ Config.string("PUBLIC_ENDPOINT")
33
+ ]);
34
+ const credentials = {
35
+ accessKeyId: Redacted.value(accessKeyId),
36
+ secretAccessKey: Redacted.value(secretAccessKey)
37
+ };
38
+ const client = new S3Client({
39
+ region,
40
+ endpoint,
41
+ credentials,
42
+ forcePathStyle
43
+ });
44
+ return {
45
+ client,
46
+ bucketName,
47
+ publicEndpoint,
48
+ provider
49
+ };
50
+ }).pipe(Effect.withConfigProvider(ConfigProvider.fromEnv().pipe(ConfigProvider.nested("CMS_S3"))));
51
+ let s3ClientInterface = null;
52
+ const getS3Client = async () => {
53
+ if (!s3ClientInterface) {
54
+ s3ClientInterface = await Effect.runPromise(s3ClientBuilder);
55
+ }
56
+ return s3ClientInterface;
57
+ };
58
+ const generateUrlMetadata = ({ publicEndpoint, bucketName, client }) => async function generateUrlMetadata2(key) {
59
+ if (publicEndpoint) {
60
+ const publicUrl = publicEndpoint.endsWith("/") ? `${publicEndpoint}${key}` : `${publicEndpoint}/${key}`;
61
+ return { url: publicUrl, isPermanent: true };
62
+ }
63
+ const command = new GetObjectCommand({
64
+ Bucket: bucketName,
65
+ Key: key
66
+ });
67
+ const SevenDaysInSeconds = 7 * 24 * 60 * 60;
68
+ const inSevenDays = /* @__PURE__ */ new Date();
69
+ const url = await getSignedUrl(client, command, { expiresIn: SevenDaysInSeconds });
70
+ inSevenDays.setSeconds(inSevenDays.getSeconds() + SevenDaysInSeconds);
71
+ return { url, isPermanent: false, expiresAt: inSevenDays.getTime() };
72
+ };
73
+ class S3ApiService {
74
+ driver;
75
+ urlMappingService;
76
+ constructor(driver, urlMappingService) {
77
+ this.driver = driver;
78
+ this.urlMappingService = urlMappingService;
79
+ }
80
+ getPOST(type) {
81
+ return this.driver.handleEndpoint(async ({ getJson, isAuthorized }) => {
82
+ const jsonBody = await getJson();
83
+ const s3Interface = await getS3Client();
84
+ const { client: s3Client, bucketName: BUCKET_NAME, provider } = s3Interface;
85
+ const metaGenerator = generateUrlMetadata(s3Interface);
86
+ const authRequiredActions = [
87
+ "upload",
88
+ "delete",
89
+ "rename",
90
+ "cleanup",
91
+ "mappings",
92
+ "test",
93
+ "list"
94
+ ];
95
+ if (authRequiredActions.includes(jsonBody.action) && !isAuthorized(type)) {
96
+ return { data: { error: "Unauthorized" }, status: 401 };
97
+ }
98
+ switch (jsonBody.action) {
99
+ case "resolveUrl": {
100
+ const metadata = await this.urlMappingService.resolve(jsonBody.identifier, metaGenerator);
101
+ return { data: metadata, status: 200 };
102
+ }
103
+ case "publicUrl": {
104
+ const metadata = await metaGenerator(jsonBody.key);
105
+ const mappingIdentifier = this.urlMappingService.createIdentifier(jsonBody.key);
106
+ await this.urlMappingService.register(mappingIdentifier, metadata);
107
+ return {
108
+ data: {
109
+ ...metadata,
110
+ identifier: mappingIdentifier
111
+ },
112
+ status: 200
113
+ };
114
+ }
115
+ case "upload": {
116
+ const command = new PutObjectCommand({
117
+ Bucket: BUCKET_NAME,
118
+ Key: jsonBody.key,
119
+ ContentType: jsonBody.contentType
120
+ });
121
+ const url = await getSignedUrl(s3Client, command, { expiresIn: 3600 });
122
+ return { data: { url, key: jsonBody.key }, status: 200 };
123
+ }
124
+ case "list": {
125
+ const command = new ListObjectsV2Command({
126
+ Bucket: BUCKET_NAME,
127
+ Prefix: jsonBody.prefix || jsonBody.key || ""
128
+ });
129
+ const response = await s3Client.send(command);
130
+ const files = response.Contents?.map((item) => ({
131
+ key: item.Key,
132
+ size: item.Size,
133
+ lastModified: item.LastModified
134
+ })) || [];
135
+ return { data: { files }, status: 200 };
136
+ }
137
+ case "delete": {
138
+ const command = new DeleteObjectCommand({
139
+ Bucket: BUCKET_NAME,
140
+ Key: jsonBody.key
141
+ });
142
+ await s3Client.send(command);
143
+ const mappingIdentifier = this.urlMappingService.createIdentifier(jsonBody.key);
144
+ await this.urlMappingService.delete(mappingIdentifier);
145
+ return { data: { success: true }, status: 200 };
146
+ }
147
+ case "rename": {
148
+ if (!jsonBody.newKey) {
149
+ return { data: { error: "newKey is required for rename action" }, status: 400 };
150
+ }
151
+ const copyCommand = new CopyObjectCommand({
152
+ Bucket: BUCKET_NAME,
153
+ CopySource: `${BUCKET_NAME}/${jsonBody.key}`,
154
+ Key: jsonBody.newKey
155
+ });
156
+ await s3Client.send(copyCommand);
157
+ const deleteCommand = new DeleteObjectCommand({
158
+ Bucket: BUCKET_NAME,
159
+ Key: jsonBody.key
160
+ });
161
+ await s3Client.send(deleteCommand);
162
+ const oldMappingIdentifier = this.urlMappingService.createIdentifier(jsonBody.key);
163
+ await this.urlMappingService.delete(oldMappingIdentifier);
164
+ const newMappingIdentifier = this.urlMappingService.createIdentifier(jsonBody.newKey);
165
+ const urlMetadata = await metaGenerator(jsonBody.newKey);
166
+ await this.urlMappingService.register(newMappingIdentifier, urlMetadata);
167
+ return { data: { success: true, newKey: jsonBody.newKey }, status: 200 };
168
+ }
169
+ case "download": {
170
+ const command = new GetObjectCommand({
171
+ Bucket: BUCKET_NAME,
172
+ Key: jsonBody.key
173
+ });
174
+ const url = await getSignedUrl(s3Client, command, { expiresIn: 3600 });
175
+ return { data: { url }, status: 200 };
176
+ }
177
+ case "cleanup": {
178
+ const deletedCount = await this.urlMappingService.cleanup();
179
+ return { data: { deletedCount }, status: 200 };
180
+ }
181
+ case "mappings": {
182
+ const mappings = await this.urlMappingService.getAll();
183
+ return { data: { mappings }, status: 200 };
184
+ }
185
+ case "test": {
186
+ try {
187
+ const command = new ListObjectsV2Command({
188
+ Bucket: BUCKET_NAME,
189
+ MaxKeys: 1
190
+ });
191
+ await s3Client.send(command);
192
+ return {
193
+ data: {
194
+ success: true,
195
+ message: "Successfully connected to S3-compatible storage",
196
+ provider
197
+ },
198
+ status: 200
199
+ };
200
+ } catch (error) {
201
+ return {
202
+ data: {
203
+ success: false,
204
+ error: error instanceof Error ? error.message : "Connection failed"
205
+ },
206
+ status: 500
207
+ };
208
+ }
209
+ }
210
+ default:
211
+ return { data: { error: "Invalid action" }, status: 400 };
212
+ }
213
+ });
214
+ }
215
+ getPUT(type) {
216
+ return this.driver.handleEndpoint(async ({ getArrayBuffer, getHeader, isAuthorized }) => {
217
+ if (!isAuthorized(type)) {
218
+ return { data: { error: "Unauthorized" }, status: 401 };
219
+ }
220
+ const s3Interface = await getS3Client();
221
+ const { client: s3Client, bucketName: BUCKET_NAME } = s3Interface;
222
+ try {
223
+ const contentType = getHeader("Content-Type") || "application/octet-stream";
224
+ const key = getHeader("x-storage-key");
225
+ if (!key) {
226
+ return { data: { error: "Missing x-storage-key header" }, status: 400 };
227
+ }
228
+ const fileData = await getArrayBuffer();
229
+ const command = new PutObjectCommand({
230
+ Bucket: BUCKET_NAME,
231
+ Key: key,
232
+ Body: new Uint8Array(fileData),
233
+ ContentType: contentType
234
+ });
235
+ await s3Client.send(command);
236
+ console.log(`Successfully uploaded file to S3: ${key}`);
237
+ return { data: { message: "File uploaded successfully", key }, status: 200 };
238
+ } catch (error) {
239
+ console.error("S3 PUT Error:", error);
240
+ return { data: { error: error.message }, status: 500 };
241
+ }
242
+ });
243
+ }
244
+ }
245
+ export {
246
+ S3ApiService as default
247
+ };
package/package.json CHANGED
@@ -1,14 +1,70 @@
1
1
  {
2
2
  "name": "@studiocms/s3-storage",
3
- "version": "0.0.0-beta.0",
4
- "description": "",
5
- "publishConfig": {
6
- "access": "public"
3
+ "version": "0.1.1",
4
+ "description": "Add S3 Storage Support into your StudioCMS project.",
5
+ "author": {
6
+ "name": "withstudiocms",
7
+ "url": "https://studiocms.dev"
8
+ },
9
+ "repository": {
10
+ "type": "git",
11
+ "url": "git+https://github.com/withstudiocms/studiocms.git",
12
+ "directory": "packages/@studiocms/s3-storage"
7
13
  },
8
- "keywords": [],
9
- "author": "",
14
+ "contributors": [
15
+ "Adammatthiesen",
16
+ "jdtjenkins",
17
+ "dreyfus92",
18
+ "code.spirit"
19
+ ],
10
20
  "license": "MIT",
21
+ "keywords": [
22
+ "astro",
23
+ "astrocms",
24
+ "astrodb",
25
+ "astrostudio",
26
+ "astro-studio",
27
+ "astro-studiocms",
28
+ "cms",
29
+ "studiocms",
30
+ "studiocms-storage-manager"
31
+ ],
32
+ "homepage": "https://studiocms.dev",
33
+ "publishConfig": {
34
+ "access": "public",
35
+ "provenance": true
36
+ },
37
+ "sideEffects": false,
38
+ "files": [
39
+ "dist"
40
+ ],
41
+ "exports": {
42
+ ".": {
43
+ "types": "./dist/index.d.ts",
44
+ "default": "./dist/index.js"
45
+ }
46
+ },
47
+ "type": "module",
48
+ "dependencies": {
49
+ "@aws-sdk/client-s3": "^3.958.0",
50
+ "@aws-sdk/s3-request-presigner": "^3.958.0",
51
+ "astro-integration-kit": "^0.19.1"
52
+ },
53
+ "devDependencies": {
54
+ "@types/node": "^22.0.0"
55
+ },
56
+ "peerDependencies": {
57
+ "astro": "^5.12.9",
58
+ "effect": "^3.19.14",
59
+ "vite": "^6.3.4",
60
+ "studiocms": "^0.1.1"
61
+ },
11
62
  "scripts": {
12
- "pre": "pnpm publish --tag beta"
63
+ "build": "buildkit build 'src/**/*.{ts,astro,css,json,png}'",
64
+ "dev": "buildkit dev 'src/**/*.{ts,astro,css,json,png}'",
65
+ "typecheck": "tspc -p tsconfig.tspc.json",
66
+ "effect-check": "pnpm effect-language-service diagnostics --project tsconfig.tspc.json",
67
+ "ci:effect-check": "pnpm effect-check --format github-actions",
68
+ "test": "vitest"
13
69
  }
14
70
  }