graphile-bucket-provisioner-plugin 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/esm/plugin.js ADDED
@@ -0,0 +1,417 @@
1
+ /**
2
+ * Bucket Provisioner Plugin for PostGraphile v5
3
+ *
4
+ * Adds S3 bucket provisioning support to PostGraphile v5:
5
+ *
6
+ * 1. `provisionBucket` mutation — explicitly provision an S3 bucket for a
7
+ * logical bucket row in the database. Reads the bucket config via RLS,
8
+ * then calls BucketProvisioner to create and configure the S3 bucket.
9
+ *
10
+ * 2. Auto-provisioning hook — wraps `create*` mutations on tables tagged
11
+ * with `@storageBuckets` to automatically provision the S3 bucket after
12
+ * the database row is created.
13
+ *
14
+ * 3. CORS update hook — wraps `update*` mutations on `@storageBuckets` tables
15
+ * to detect changes to `allowed_origins` and re-apply CORS rules to the
16
+ * S3 bucket.
17
+ *
18
+ * CORS resolution hierarchy (most specific wins):
19
+ * 1. Bucket-level `allowed_origins` column (per-bucket override)
20
+ * 2. Storage-module-level `allowed_origins` column (per-database default)
21
+ * 3. Plugin config `allowedOrigins` (global fallback)
22
+ * Supports `['*']` for open/CDN mode (wildcard CORS).
23
+ *
24
+ * Both pathways use `@constructive-io/bucket-provisioner` for the actual
25
+ * S3 operations (bucket creation, Block Public Access, CORS, policies,
26
+ * versioning, lifecycle rules).
27
+ *
28
+ * Detection: Uses the `@storageBuckets` smart tag on the codec (table).
29
+ * The storage module generator in constructive-db sets this tag on the
30
+ * generated buckets table via a smart comment:
31
+ * COMMENT ON TABLE buckets IS E'@storageBuckets\nStorage buckets table';
32
+ */
33
+ import { context as grafastContext, lambda, object } from 'grafast';
34
+ import { extendSchema, gql } from 'graphile-utils';
35
+ import { Logger } from '@pgpmjs/logger';
36
+ import { BucketProvisioner, } from '@constructive-io/bucket-provisioner';
37
+ const log = new Logger('graphile-bucket-provisioner:plugin');
38
// --- Storage module query (same as presigned-url-plugin) ---
// Resolves the storage module row for one database ($1 = metaschema database
// UUID), joining through metaschema to find the schema + table name of the
// logical buckets table. LIMIT 1: at most one storage module per database is
// expected — TODO confirm that invariant is enforced upstream.
// Columns consumed elsewhere in this file: buckets_schema/buckets_table (to
// query bucket rows), endpoint/provider (connection overrides),
// public_url_prefix (passed to provision()), allowed_origins (CORS tier 2).
const STORAGE_MODULE_QUERY = `
  SELECT
    sm.id,
    bs.schema_name AS buckets_schema,
    bt.name AS buckets_table,
    sm.endpoint,
    sm.public_url_prefix,
    sm.provider,
    sm.allowed_origins
  FROM metaschema_modules_public.storage_module sm
  JOIN metaschema_public.table bt ON bt.id = sm.buckets_table_id
  JOIN metaschema_public.schema bs ON bs.id = bt.schema_id
  WHERE sm.database_id = $1
  LIMIT 1
`;
54
// --- Helpers ---
/**
 * Return the S3 connection config from plugin options.
 *
 * `options.connection` may be a concrete config object or a zero-argument
 * getter (the lazy form lets env vars be read at first use instead of at
 * import time). When it is a getter, it is invoked exactly once and the
 * result is written back onto `options.connection`, so every later call
 * returns the cached object without re-evaluating the getter.
 */
function resolveConnection(options) {
    const candidate = options.connection;
    if (typeof candidate !== 'function') {
        return candidate;
    }
    const resolved = candidate();
    // Memoize: replace the getter with its resolved value on the options
    // object so subsequent calls skip re-evaluation.
    options.connection = resolved;
    return resolved;
}
68
/**
 * Map a logical bucket key to the concrete S3 bucket name.
 *
 * Precedence: custom `resolveBucketName` callback > `bucketNamePrefix`
 * (producing `{prefix}-{key}`) > the raw key unchanged.
 *
 * @param bucketKey - Logical bucket key from the database row
 * @param databaseId - Metaschema database UUID (forwarded to the resolver)
 * @param options - Plugin options carrying the naming configuration
 * @returns The S3 bucket name to create/configure
 */
function resolveBucketName(bucketKey, databaseId, options) {
    const { resolveBucketName: customResolver, bucketNamePrefix: prefix } = options;
    if (customResolver) {
        return customResolver(bucketKey, databaseId);
    }
    return prefix ? `${prefix}-${bucketKey}` : bucketKey;
}
80
/**
 * Look up the current database_id exposed by the JWT session context.
 *
 * @param pgClient - RLS-scoped Postgres client
 * @returns The database UUID, or null when the session carries no claim
 *   (empty result or NULL id).
 */
async function resolveDatabaseId(pgClient) {
    const { rows } = await pgClient.query(`SELECT jwt_private.current_database_id() AS id`);
    const first = rows[0];
    return first?.id ?? null;
}
87
/**
 * Pick the effective CORS allowed origins, most specific tier first:
 *
 *   1. Bucket-level allowed_origins (per-bucket override)
 *   2. Storage-module-level allowed_origins (per-database default)
 *   3. Plugin config allowedOrigins (global fallback)
 *
 * A tier wins only when it is a non-empty array; null/undefined/[] fall
 * through to the next tier.
 */
function resolveAllowedOrigins(bucketOrigins, storageModuleOrigins, pluginOrigins) {
    for (const candidate of [bucketOrigins, storageModuleOrigins]) {
        if (candidate && candidate.length > 0) {
            return candidate;
        }
    }
    return pluginOrigins;
}
102
/**
 * Construct a BucketProvisioner whose connection merges the plugin-level
 * config with any per-database overrides found on the storage module row.
 *
 * Only `endpoint` and `provider` can be overridden by the storage module,
 * and only when those columns are truthy; everything else comes from the
 * plugin's (possibly lazily resolved) connection config.
 */
function buildProvisioner(options, storageModule, effectiveOrigins) {
    const base = resolveConnection(options);
    const overrides = {};
    if (storageModule?.endpoint) {
        overrides.endpoint = storageModule.endpoint;
    }
    if (storageModule?.provider) {
        overrides.provider = storageModule.provider;
    }
    return new BucketProvisioner({
        connection: { ...base, ...overrides },
        allowedOrigins: effectiveOrigins,
    });
}
119
/**
 * Shared provisioning routine used by both the explicit provisionBucket
 * mutation and the auto-provisioning create-mutation hook.
 *
 * Reads the storage module row (endpoint/provider/CORS overrides), resolves
 * the effective origins via the 3-tier hierarchy and the S3 bucket name,
 * then delegates the S3 work to BucketProvisioner.provision().
 *
 * @returns The provision result from BucketProvisioner (provider, bucket
 *   name, access type, endpoint, blockPublicAccess, ...).
 */
async function provisionBucketForRow(pgClient, databaseId, bucketKey, bucketType, bucketAllowedOrigins, options) {
    const s3BucketName = resolveBucketName(bucketKey, databaseId, options);
    const accessType = bucketType;
    // Per-database overrides (endpoint/provider/CORS) live on the storage
    // module row; absence is tolerated (falls back to plugin config).
    const smQuery = await pgClient.query(STORAGE_MODULE_QUERY, [databaseId]);
    const storageModule = smQuery.rows[0] ?? null;
    const effectiveOrigins = resolveAllowedOrigins(bucketAllowedOrigins, storageModule?.allowed_origins, options.allowedOrigins);
    const provisioner = buildProvisioner(options, storageModule, effectiveOrigins);
    log.info(`Provisioning S3 bucket "${s3BucketName}" (key="${bucketKey}", type="${accessType}", ` +
        `origins=${JSON.stringify(effectiveOrigins)}) for database ${databaseId}`);
    const result = await provisioner.provision({
        bucketName: s3BucketName,
        accessType,
        versioning: options.versioning ?? false,
        publicUrlPrefix: storageModule?.public_url_prefix ?? undefined,
        allowedOrigins: effectiveOrigins,
    });
    log.info(`Successfully provisioned S3 bucket "${s3BucketName}" ` +
        `(provider=${result.provider}, blockPublicAccess=${result.blockPublicAccess})`);
    return result;
}
145
/**
 * Re-apply CORS rules to an existing S3 bucket after allowed_origins changes.
 *
 * Resolves the effective origins with the same 3-tier hierarchy as
 * provisioning, then calls BucketProvisioner.updateCors() — no bucket
 * creation or policy changes happen here.
 */
async function updateBucketCors(pgClient, databaseId, bucketKey, bucketType, bucketAllowedOrigins, options) {
    const s3BucketName = resolveBucketName(bucketKey, databaseId, options);
    const accessType = bucketType;
    const smQuery = await pgClient.query(STORAGE_MODULE_QUERY, [databaseId]);
    const storageModule = smQuery.rows[0] ?? null;
    const effectiveOrigins = resolveAllowedOrigins(bucketAllowedOrigins, storageModule?.allowed_origins, options.allowedOrigins);
    const provisioner = buildProvisioner(options, storageModule, effectiveOrigins);
    log.info(`Updating CORS on S3 bucket "${s3BucketName}" ` +
        `(origins=${JSON.stringify(effectiveOrigins)}) for database ${databaseId}`);
    await provisioner.updateCors({
        bucketName: s3BucketName,
        accessType,
        allowedOrigins: effectiveOrigins,
    });
    log.info(`Successfully updated CORS on S3 bucket "${s3BucketName}"`);
}
164
// --- Plugin factory ---
/**
 * Creates the bucket provisioner plugin.
 *
 * This plugin provides two provisioning pathways:
 *
 * 1. **Explicit `provisionBucket` mutation** — Call this mutation with a
 *    bucket key to provision (or re-provision) the S3 bucket. Protected
 *    by RLS on the buckets table.
 *
 * 2. **Auto-provisioning hook** — When `autoProvision` is true (default),
 *    wraps `create*` mutation resolvers on tables tagged with `@storageBuckets`
 *    to automatically provision the S3 bucket after the row is created, and
 *    `update*` resolvers to re-apply CORS when allowed_origins changes.
 *
 * @param options - Plugin configuration (S3 credentials, CORS origins, naming)
 * @returns A graphile-config plugin object (the extendSchema plugin alone
 *   when autoProvision is false, otherwise a composite that also installs
 *   the mutation-wrapping schema hook).
 */
export function createBucketProvisionerPlugin(options) {
    const autoProvision = options.autoProvision ?? true;
    // The extendSchema plugin adds the explicit provisionBucket mutation
    const mutationPlugin = extendSchema(() => ({
        typeDefs: gql `
            input ProvisionBucketInput {
                """The logical bucket key (e.g., "public", "private")"""
                bucketKey: String!
            }

            type ProvisionBucketPayload {
                """Whether provisioning succeeded"""
                success: Boolean!
                """The S3 bucket name that was provisioned"""
                bucketName: String!
                """The access type applied"""
                accessType: String!
                """The storage provider used"""
                provider: String!
                """The S3 endpoint (null for AWS S3 default)"""
                endpoint: String
                """Error message if provisioning failed"""
                error: String
            }

            extend type Mutation {
                """
                Provision an S3 bucket for a logical bucket in the database.
                Reads the bucket config via RLS, then creates and configures
                the S3 bucket with the appropriate privacy policies, CORS rules,
                and lifecycle settings.
                """
                provisionBucket(
                    input: ProvisionBucketInput!
                ): ProvisionBucketPayload
            }
        `,
        plans: {
            Mutation: {
                // Grafast plan resolver: gather the raw input plus the
                // withPgClient/pgSettings context entries into a single step,
                // then perform the side-effecting work inside lambda() at
                // execution time.
                provisionBucket(_$mutation, fieldArgs) {
                    const $input = fieldArgs.getRaw('input');
                    const $withPgClient = grafastContext().get('withPgClient');
                    const $pgSettings = grafastContext().get('pgSettings');
                    const $combined = object({
                        input: $input,
                        withPgClient: $withPgClient,
                        pgSettings: $pgSettings,
                    });
                    return lambda($combined, async ({ input, withPgClient, pgSettings }) => {
                        const { bucketKey } = input;
                        if (!bucketKey || typeof bucketKey !== 'string') {
                            throw new Error('INVALID_BUCKET_KEY');
                        }
                        // pgSettings carries the caller's session claims, so every
                        // query through this client is subject to RLS.
                        return withPgClient(pgSettings, async (pgClient) => {
                            // Resolve database ID from JWT context
                            const databaseId = await resolveDatabaseId(pgClient);
                            if (!databaseId) {
                                throw new Error('DATABASE_NOT_FOUND');
                            }
                            // Read storage module config
                            const smResult = await pgClient.query(STORAGE_MODULE_QUERY, [databaseId]);
                            if (smResult.rows.length === 0) {
                                throw new Error('STORAGE_MODULE_NOT_PROVISIONED');
                            }
                            const storageModule = smResult.rows[0];
                            // Look up the bucket row (RLS enforced via pgSettings).
                            // Schema/table identifiers are interpolated from the trusted
                            // storage module row (metaschema), not from user input; the
                            // user-supplied key is parameterized.
                            const bucketResult = await pgClient.query(`SELECT id, key, type, is_public, allowed_origins
                 FROM "${storageModule.buckets_schema}"."${storageModule.buckets_table}"
                 WHERE key = $1
                 LIMIT 1`, [bucketKey]);
                            if (bucketResult.rows.length === 0) {
                                throw new Error('BUCKET_NOT_FOUND');
                            }
                            const bucket = bucketResult.rows[0];
                            try {
                                const result = await provisionBucketForRow(pgClient, databaseId, bucket.key, bucket.type, bucket.allowed_origins, options);
                                return {
                                    success: true,
                                    bucketName: result.bucketName,
                                    accessType: result.accessType,
                                    provider: result.provider,
                                    endpoint: result.endpoint,
                                    error: null,
                                };
                            }
                            catch (err) {
                                log.error(`Failed to provision bucket "${bucketKey}": ${err.message}`);
                                // NOTE(review): ProvisionBucketPayload declares provider as
                                // String! — if the connection config has no provider set,
                                // this error payload would violate non-null. Confirm the
                                // connection config always includes provider.
                                return {
                                    success: false,
                                    bucketName: resolveBucketName(bucket.key, databaseId, options),
                                    accessType: bucket.type,
                                    provider: resolveConnection(options).provider,
                                    endpoint: resolveConnection(options).endpoint ?? null,
                                    error: err.message,
                                };
                            }
                        });
                    });
                },
            },
        },
    }));
    // If autoProvision is disabled, return only the mutation plugin
    if (!autoProvision) {
        return mutationPlugin;
    }
    // Build a composite plugin that includes both the mutation and the hook
    return {
        ...mutationPlugin,
        name: 'BucketProvisionerPlugin',
        version: '0.1.0',
        description: 'Auto-provisions S3 buckets when bucket rows are created, ' +
            'updates CORS when allowed_origins changes on update, ' +
            'and provides a provisionBucket mutation for explicit provisioning',
        // Run after the plugins that create the mutation fields we wrap.
        after: ['PgAttributesPlugin', 'PgMutationCreatePlugin', 'PgMutationUpdateDeletePlugin'],
        schema: {
            ...mutationPlugin.schema,
            hooks: {
                // Preserve any hooks the extendSchema plugin registered.
                ...(mutationPlugin.schema?.hooks ?? {}),
                /**
                 * Wrap create and update mutation resolvers on tables tagged with @storageBuckets.
                 *
                 * - create*: After the row is created, provision the S3 bucket.
                 * - update*: After the row is updated, re-apply CORS if allowed_origins changed.
                 *
                 * If provisioning/CORS update fails, the DB row still exists (the mutation
                 * already committed), and the error is logged. Admin can retry via provisionBucket.
                 */
                GraphQLObjectType_fields_field(field, build, context) {
                    const { scope: { isRootMutation, fieldName, pgCodec }, } = context;
                    // Only wrap root mutation fields backed by a table codec.
                    if (!isRootMutation || !pgCodec || !pgCodec.attributes) {
                        return field;
                    }
                    // Check for @storageBuckets smart tag
                    const tags = pgCodec.extensions?.tags;
                    if (!tags?.storageBuckets) {
                        return field;
                    }
                    const isCreate = fieldName.startsWith('create');
                    const isUpdate = fieldName.startsWith('update');
                    // Only wrap create and update mutations (not delete)
                    if (!isCreate && !isUpdate) {
                        return field;
                    }
                    log.debug(`Wrapping mutation "${fieldName}" for ${isCreate ? 'auto-provisioning' : 'CORS update'} (codec: ${pgCodec.name})`);
                    // If the field has no resolver, fall back to plain property
                    // access so the wrapper below always has something to call.
                    const defaultResolver = (obj) => obj[fieldName];
                    const { resolve: oldResolve = defaultResolver, ...rest } = field;
                    return {
                        ...rest,
                        async resolve(source, args, graphqlContext, info) {
                            // Call the original resolver first (creates/updates the DB row)
                            const result = await oldResolve(source, args, graphqlContext, info);
                            try {
                                // The single non-clientMutationId input key holds the row
                                // payload (e.g. the new row for create, the patch for update).
                                const inputKey = Object.keys(args.input || {}).find((k) => k !== 'clientMutationId');
                                const bucketInput = inputKey ? args.input[inputKey] : null;
                                const withPgClient = graphqlContext.withPgClient;
                                const pgSettings = graphqlContext.pgSettings;
                                if (!withPgClient) {
                                    log.warn(`${isCreate ? 'Auto-provision' : 'CORS update'} skipped: withPgClient not available in context`);
                                    return result;
                                }
                                if (isCreate) {
                                    // --- CREATE: full provisioning ---
                                    if (!bucketInput?.key || !bucketInput?.type) {
                                        log.warn(`Auto-provision skipped for "${fieldName}": ` +
                                            `could not extract key/type from mutation input`);
                                        return result;
                                    }
                                    await withPgClient(pgSettings, async (pgClient) => {
                                        const databaseId = await resolveDatabaseId(pgClient);
                                        if (!databaseId) {
                                            log.warn('Auto-provision skipped: could not resolve database_id');
                                            return;
                                        }
                                        // Accept both camelCase (GraphQL) and snake_case spellings.
                                        await provisionBucketForRow(pgClient, databaseId, bucketInput.key, bucketInput.type, bucketInput.allowedOrigins ?? bucketInput.allowed_origins ?? null, options);
                                    });
                                }
                                else {
                                    // --- UPDATE: re-apply CORS if allowed_origins is in the patch ---
                                    const hasOriginsUpdate = bucketInput &&
                                        ('allowedOrigins' in bucketInput || 'allowed_origins' in bucketInput);
                                    if (!hasOriginsUpdate) {
                                        // allowed_origins not being changed, nothing to do
                                        return result;
                                    }
                                    await withPgClient(pgSettings, async (pgClient) => {
                                        const databaseId = await resolveDatabaseId(pgClient);
                                        if (!databaseId) {
                                            log.warn('CORS update skipped: could not resolve database_id');
                                            return;
                                        }
                                        // Read the updated bucket row to get full state
                                        const smResult = await pgClient.query(STORAGE_MODULE_QUERY, [databaseId]);
                                        if (smResult.rows.length === 0) {
                                            log.warn('CORS update skipped: storage module not provisioned');
                                            return;
                                        }
                                        const storageModule = smResult.rows[0];
                                        // We need the bucket key — it may come from input or patch
                                        // For updates, PostGraphile uses nodeId or the row's PK, so
                                        // we read the bucket from the patch's key or from the nodeId
                                        // NOTE(review): only the patch's `key` is consulted here — an
                                        // update that changes allowed_origins without including `key`
                                        // in the patch is skipped; confirm this is acceptable.
                                        const patchKey = bucketInput?.key;
                                        if (!patchKey) {
                                            log.warn(`CORS update skipped for "${fieldName}": ` +
                                                `could not determine bucket key from mutation input`);
                                            return;
                                        }
                                        // Read the full bucket row (post-update) to get type + origins
                                        const bucketResult = await pgClient.query(`SELECT id, key, type, is_public, allowed_origins
                 FROM "${storageModule.buckets_schema}"."${storageModule.buckets_table}"
                 WHERE key = $1
                 LIMIT 1`, [patchKey]);
                                        if (bucketResult.rows.length === 0) {
                                            log.warn(`CORS update skipped: bucket "${patchKey}" not found`);
                                            return;
                                        }
                                        const bucket = bucketResult.rows[0];
                                        await updateBucketCors(pgClient, databaseId, bucket.key, bucket.type, bucket.allowed_origins, options);
                                    });
                                }
                            }
                            catch (err) {
                                // Provisioning/CORS failure must not fail the GraphQL
                                // mutation: the DB row is already committed. Log and let
                                // the admin retry via the provisionBucket mutation.
                                log.error(`${isCreate ? 'Auto-provision' : 'CORS update'} failed for "${fieldName}": ${err.message}. ` +
                                    (isCreate
                                        ? `The bucket row was created but the S3 bucket was not provisioned. Use the provisionBucket mutation to retry.`
                                        : `The bucket row was updated but CORS was not applied to the S3 bucket. Use the provisionBucket mutation to retry.`));
                            }
                            return result;
                        },
                    };
                },
            },
        },
    };
}
416
// Public aliases: `BucketProvisionerPlugin` is the factory function itself
// (call it with options to get a plugin); it is also the default export.
export const BucketProvisionerPlugin = createBucketProvisionerPlugin;
export default BucketProvisionerPlugin;
@@ -0,0 +1,41 @@
1
+ /**
2
+ * PostGraphile v5 Bucket Provisioner Preset
3
+ *
4
+ * Provides a convenient preset for including bucket provisioning support
5
+ * in PostGraphile. Wraps the main plugin with sensible defaults.
6
+ */
7
+ import type { GraphileConfig } from 'graphile-config';
8
+ import type { BucketProvisionerPluginOptions } from './types';
9
/**
 * Creates a preset that includes the bucket provisioner plugin with the given options.
 *
 * @param options - Plugin configuration (connection, CORS origins, naming).
 * @returns A GraphileConfig.Preset whose `plugins` contains the configured
 *   bucket provisioner plugin.
 *
 * @example
 * ```typescript
 * import { BucketProvisionerPreset } from 'graphile-bucket-provisioner-plugin';
 * import { getEnvOptions } from '@constructive-io/graphql-env';
 *
 * // Use a lazy getter so env vars are read at runtime, not import time
 * function getConnection() {
 *   const { cdn } = getEnvOptions();
 *   return {
 *     provider: cdn?.provider || 'minio',
 *     region: cdn?.awsRegion || 'us-east-1',
 *     endpoint: cdn?.endpoint || 'http://minio:9000',
 *     accessKeyId: cdn?.awsAccessKey!,
 *     secretAccessKey: cdn?.awsSecretKey!,
 *   };
 * }
 *
 * const preset = {
 *   extends: [
 *     BucketProvisionerPreset({
 *       connection: getConnection, // pass function ref, NOT getConnection()
 *       allowedOrigins: ['https://app.example.com'],
 *       bucketNamePrefix: 'myapp',
 *     }),
 *   ],
 * };
 * ```
 */
export declare function BucketProvisionerPreset(options: BucketProvisionerPluginOptions): GraphileConfig.Preset;
export default BucketProvisionerPreset;
package/esm/preset.js ADDED
@@ -0,0 +1,44 @@
1
+ /**
2
+ * PostGraphile v5 Bucket Provisioner Preset
3
+ *
4
+ * Provides a convenient preset for including bucket provisioning support
5
+ * in PostGraphile. Wraps the main plugin with sensible defaults.
6
+ */
7
+ import { createBucketProvisionerPlugin } from './plugin';
8
/**
 * Build a GraphileConfig preset that registers the bucket provisioner plugin
 * configured with the given options.
 *
 * Tip: pass `connection` as a function reference (a lazy getter), not its
 * call result, so environment variables are read at runtime rather than at
 * import time.
 *
 * @example
 * ```typescript
 * const preset = {
 *   extends: [
 *     BucketProvisionerPreset({
 *       connection: getConnection, // function ref, NOT getConnection()
 *       allowedOrigins: ['https://app.example.com'],
 *       bucketNamePrefix: 'myapp',
 *     }),
 *   ],
 * };
 * ```
 *
 * @param options - Plugin configuration (connection, CORS origins, naming)
 * @returns A preset containing only the configured plugin
 */
export function BucketProvisionerPreset(options) {
    const plugin = createBucketProvisionerPlugin(options);
    return { plugins: [plugin] };
}
export default BucketProvisionerPreset;
package/esm/types.d.ts ADDED
@@ -0,0 +1,90 @@
1
+ /**
2
+ * Types for the bucket provisioner plugin.
3
+ *
4
+ * Defines plugin options, connection configuration, and provisioning result
5
+ * types used by the Graphile plugin to auto-provision S3 buckets when
6
+ * bucket rows are created via GraphQL mutations.
7
+ */
8
+ import type { StorageConnectionConfig, StorageProvider, BucketAccessType, ProvisionResult } from '@constructive-io/bucket-provisioner';
9
+ export type { StorageConnectionConfig, StorageProvider, BucketAccessType, ProvisionResult };
10
/**
 * S3 connection configuration or a lazy getter that returns it on first use.
 *
 * When a function is provided, it will only be called when the first
 * provisioning operation actually needs the S3 client — avoiding eager
 * env-var reads and S3Client creation at module import time. The resolved
 * value is cached after the first call.
 */
export type ConnectionConfigOrGetter = StorageConnectionConfig | (() => StorageConnectionConfig);
/**
 * Function to derive the actual S3 bucket name from a logical bucket key.
 *
 * @param bucketKey - The logical bucket key from the database (e.g., "public", "private")
 * @param databaseId - The metaschema database UUID
 * @returns The S3 bucket name to create/configure
 */
export type BucketNameResolver = (bucketKey: string, databaseId: string) => string;
/**
 * Plugin options for the bucket provisioner plugin.
 */
export interface BucketProvisionerPluginOptions {
    /**
     * S3 connection configuration (credentials, endpoint, provider).
     * Can be a concrete object or a lazy getter function.
     */
    connection: ConnectionConfigOrGetter;
    /**
     * Allowed origins for CORS rules on provisioned buckets.
     * These are the domains where your app runs (e.g., ["https://app.example.com"]).
     * Required for browser-based presigned URL uploads.
     *
     * This is the global fallback (tier 3) — bucket-level and
     * storage-module-level allowed_origins columns take precedence.
     */
    allowedOrigins: string[];
    /**
     * Optional prefix for S3 bucket names.
     * When set, the S3 bucket name becomes `{prefix}-{bucketKey}`.
     * Example: prefix "myapp" + key "public" → S3 bucket "myapp-public"
     */
    bucketNamePrefix?: string;
    /**
     * Optional custom function to derive S3 bucket names from logical bucket keys.
     * Takes precedence over `bucketNamePrefix` when provided.
     */
    resolveBucketName?: BucketNameResolver;
    /**
     * Whether to enable versioning on provisioned buckets.
     * Default: false
     */
    versioning?: boolean;
    /**
     * Whether to auto-provision S3 buckets when bucket rows are created
     * via GraphQL mutations. When true, the plugin wraps create mutations
     * on tables tagged with `@storageBuckets` to trigger provisioning
     * after the mutation succeeds (and update mutations to re-apply CORS).
     *
     * Default: true
     */
    autoProvision?: boolean;
}
/**
 * Input for the provisionBucket mutation.
 */
export interface ProvisionBucketInput {
    /** The logical bucket key (e.g., "public", "private") */
    bucketKey: string;
}
/**
 * Result of the provisionBucket mutation.
 */
export interface ProvisionBucketPayload {
    /** Whether provisioning succeeded */
    success: boolean;
    /** The S3 bucket name that was provisioned */
    bucketName: string;
    /** The access type applied */
    accessType: string;
    /** The storage provider used */
    provider: string;
    /** The S3 endpoint (null for AWS S3 default) */
    endpoint: string | null;
    /** Error message if provisioning failed */
    error: string | null;
}
package/esm/types.js ADDED
@@ -0,0 +1,8 @@
1
/**
 * Types for the bucket provisioner plugin.
 *
 * Defines plugin options, connection configuration, and provisioning result
 * types used by the Graphile plugin to auto-provision S3 buckets when
 * bucket rows are created via GraphQL mutations.
 */
// Compiled output of a types-only module: the empty export keeps this file
// a valid ES module at runtime (all declarations live in types.d.ts).
export {};
package/index.d.ts ADDED
@@ -0,0 +1,42 @@
1
+ /**
2
+ * Bucket Provisioner Plugin for PostGraphile v5
3
+ *
4
+ * Provides automatic S3 bucket provisioning for PostGraphile v5.
5
+ * When bucket rows are created via GraphQL mutations, this plugin
6
+ * automatically provisions the corresponding S3 bucket with the
7
+ * correct privacy policies, CORS rules, and lifecycle settings.
8
+ *
9
+ * Also provides an explicit `provisionBucket` mutation for manual
10
+ * provisioning or re-provisioning of S3 buckets.
11
+ *
12
+ * @example
13
+ * ```typescript
14
+ * import { BucketProvisionerPreset } from 'graphile-bucket-provisioner-plugin';
15
+ * import { getEnvOptions } from '@constructive-io/graphql-env';
16
+ *
17
+ * // Use a lazy getter so env vars are read at runtime, not import time
18
+ * function getConnection() {
19
+ * const { cdn } = getEnvOptions();
20
+ * return {
21
+ * provider: cdn?.provider || 'minio',
22
+ * region: cdn?.awsRegion || 'us-east-1',
23
+ * endpoint: cdn?.endpoint || 'http://minio:9000',
24
+ * accessKeyId: cdn?.awsAccessKey!,
25
+ * secretAccessKey: cdn?.awsSecretKey!,
26
+ * };
27
+ * }
28
+ *
29
+ * const preset = {
30
+ * extends: [
31
+ * BucketProvisionerPreset({
32
+ * connection: getConnection, // pass function ref, NOT getConnection()
33
+ * allowedOrigins: ['https://app.example.com'],
34
+ * bucketNamePrefix: 'myapp',
35
+ * }),
36
+ * ],
37
+ * };
38
+ * ```
39
+ */
40
// Single public entry point: re-export the plugin factory (and its alias),
// the preset helper, and all public option/result types.
export { BucketProvisionerPlugin, createBucketProvisionerPlugin } from './plugin';
export { BucketProvisionerPreset } from './preset';
export type { BucketProvisionerPluginOptions, ConnectionConfigOrGetter, BucketNameResolver, ProvisionBucketInput, ProvisionBucketPayload, StorageConnectionConfig, StorageProvider, BucketAccessType, ProvisionResult, } from './types';