graphile-presigned-url-plugin 0.11.0 → 0.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/esm/plugin.d.ts CHANGED
@@ -9,11 +9,14 @@
9
9
  * 2. Upload fields — adds `requestUploadUrl` and `requestBulkUploadUrls` fields
10
10
  * on `@storageBuckets`-tagged types, so clients upload via the typed bucket API.
11
11
  *
12
- * 3. downloadUrl — handled by download-url-field.ts (separate plugin).
12
+ * 3. Mutation entry points — adds per-bucket mutation fields on the root Mutation
13
+ * type (e.g., `appBucket(key: "public"): AppBucket`), so upload operations
14
+ * can be accessed as proper GraphQL mutations instead of queries.
13
15
  *
14
- * No global mutations — all S3 operations are scoped to the per-table types that
15
- * PostGraphile already generates. Scope resolution uses the codec's schema/table
16
- * name matched against cached storage module configs.
16
+ * 4. downloadUrl — handled by download-url-field.ts (separate plugin).
17
+ *
18
+ * Scope resolution uses the codec's schema/table name matched against
19
+ * cached storage module configs.
17
20
  */
18
21
  import type { GraphileConfig } from 'graphile-config';
19
22
  import 'graphile-build';
package/esm/plugin.js CHANGED
@@ -9,16 +9,19 @@
9
9
  * 2. Upload fields — adds `requestUploadUrl` and `requestBulkUploadUrls` fields
10
10
  * on `@storageBuckets`-tagged types, so clients upload via the typed bucket API.
11
11
  *
12
- * 3. downloadUrl — handled by download-url-field.ts (separate plugin).
12
+ * 3. Mutation entry points — adds per-bucket mutation fields on the root Mutation
13
+ * type (e.g., `appBucket(key: "public"): AppBucket`), so upload operations
14
+ * can be accessed as proper GraphQL mutations instead of queries.
13
15
  *
14
- * No global mutations — all S3 operations are scoped to the per-table types that
15
- * PostGraphile already generates. Scope resolution uses the codec's schema/table
16
- * name matched against cached storage module configs.
16
+ * 4. downloadUrl — handled by download-url-field.ts (separate plugin).
17
+ *
18
+ * Scope resolution uses the codec's schema/table name matched against
19
+ * cached storage module configs.
17
20
  */
18
- import { context as grafastContext, lambda, object } from 'grafast';
21
+ import { access, context as grafastContext, lambda, object } from 'grafast';
19
22
  import 'graphile-build';
20
23
  import { Logger } from '@pgpmjs/logger';
21
- import { loadAllStorageModules, resolveStorageConfigFromCodec, isS3BucketProvisioned, markS3BucketProvisioned } from './storage-module-cache';
24
+ import { loadAllStorageModules, resolveStorageConfigFromCodec, getBucketConfig, isS3BucketProvisioned, markS3BucketProvisioned } from './storage-module-cache';
22
25
  import { generatePresignedPutUrl, deleteS3Object } from './s3-signer';
23
26
  const log = new Logger('graphile-presigned-url:plugin');
24
27
  // --- Protocol-level constants (not configurable) ---
@@ -111,7 +114,71 @@ export function createPresignedUrlPlugin(options) {
111
114
  * Add requestUploadUrl and requestBulkUploadUrls fields on @storageBuckets types.
112
115
  */
113
116
  GraphQLObjectType_fields(fields, build, context) {
114
- const { scope: { pgCodec, isPgClassType }, } = context;
117
+ const { scope: { pgCodec, isPgClassType, isRootMutation }, } = context;
118
+ // --- Path 1: Add per-bucket mutation entry points on root Mutation ---
119
+ if (isRootMutation) {
120
+ const { graphql: { GraphQLString, GraphQLNonNull }, } = build;
121
+ const bucketCodecs = Object.values(build.input.pgRegistry.pgCodecs).filter((codec) => codec.attributes && codec.extensions?.tags?.storageBuckets);
122
+ if (bucketCodecs.length === 0)
123
+ return fields;
124
+ const newFields = {};
125
+ for (const codec of bucketCodecs) {
126
+ const typeName = build.inflection.tableType(codec);
127
+ const bucketType = build.getTypeByName(typeName);
128
+ if (!bucketType) {
129
+ log.debug(`Skipping mutation entry point for ${codec.name}: type ${typeName} not found`);
130
+ continue;
131
+ }
132
+ const fieldName = typeName.charAt(0).toLowerCase() + typeName.slice(1);
133
+ const hasOwnerId = !!codec.attributes.owner_id;
134
+ const capturedCodec = codec;
135
+ log.debug(`Adding mutation entry point "${fieldName}" for bucket type ${typeName} (entity-scoped=${hasOwnerId})`);
136
+ newFields[fieldName] = context.fieldWithHooks({ fieldName }, {
137
+ description: `Look up a ${typeName} by key for mutation operations (upload, etc.).`,
138
+ type: bucketType,
139
+ args: {
140
+ key: { type: new GraphQLNonNull(GraphQLString), description: 'Bucket key (e.g., "public", "private")' },
141
+ ...(hasOwnerId
142
+ ? { ownerId: { type: new GraphQLNonNull(GraphQLString), description: 'Owner entity ID (required for entity-scoped buckets)' } }
143
+ : {}),
144
+ },
145
+ plan(_$mutation, fieldArgs) {
146
+ const $key = fieldArgs.getRaw('key');
147
+ const $ownerId = hasOwnerId ? fieldArgs.getRaw('ownerId') : lambda(null, () => null);
148
+ const $withPgClient = grafastContext().get('withPgClient');
149
+ const $pgSettings = grafastContext().get('pgSettings');
150
+ const $combined = object({
151
+ key: $key,
152
+ ownerId: $ownerId,
153
+ withPgClient: $withPgClient,
154
+ pgSettings: $pgSettings,
155
+ });
156
+ const $row = lambda($combined, async (vals) => {
157
+ return vals.withPgClient(vals.pgSettings, async (pgClient) => {
158
+ const databaseId = await resolveDatabaseId(pgClient);
159
+ if (!databaseId)
160
+ throw new Error('DATABASE_NOT_FOUND');
161
+ const allConfigs = await loadAllStorageModules(pgClient, databaseId);
162
+ const storageConfig = resolveStorageConfigFromCodec(capturedCodec, allConfigs);
163
+ if (!storageConfig)
164
+ throw new Error('STORAGE_MODULE_NOT_FOUND');
165
+ const bucket = await getBucketConfig(pgClient, storageConfig, databaseId, vals.key, vals.ownerId ?? undefined);
166
+ if (!bucket)
167
+ throw new Error('BUCKET_NOT_FOUND');
168
+ return bucket;
169
+ });
170
+ });
171
+ const columnEntries = {};
172
+ for (const col of Object.keys(capturedCodec.attributes)) {
173
+ columnEntries[col] = access($row, col);
174
+ }
175
+ return object(columnEntries);
176
+ },
177
+ });
178
+ }
179
+ return build.extend(fields, newFields, 'PresignedUrlPlugin adding per-bucket mutation entry points');
180
+ }
181
+ // --- Path 2: Add upload fields on @storageBuckets types ---
115
182
  if (!isPgClassType || !pgCodec || !pgCodec.attributes) {
116
183
  return fields;
117
184
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "graphile-presigned-url-plugin",
3
- "version": "0.11.0",
3
+ "version": "0.12.0",
4
4
  "description": "Presigned URL upload plugin for PostGraphile v5 — requestUploadUrl mutation and downloadUrl computed field",
5
5
  "author": "Constructive <developers@constructive.io>",
6
6
  "homepage": "https://github.com/constructive-io/constructive",
@@ -60,5 +60,5 @@
60
60
  "@types/node": "^22.19.11",
61
61
  "makage": "^0.1.10"
62
62
  },
63
- "gitHead": "7409479a981ff63a0937b5406a1c206a07b264ad"
63
+ "gitHead": "31a008c2064e5fea05ae178a7b4e2ecbc1beb6cb"
64
64
  }
package/plugin.d.ts CHANGED
@@ -9,11 +9,14 @@
9
9
  * 2. Upload fields — adds `requestUploadUrl` and `requestBulkUploadUrls` fields
10
10
  * on `@storageBuckets`-tagged types, so clients upload via the typed bucket API.
11
11
  *
12
- * 3. downloadUrl — handled by download-url-field.ts (separate plugin).
12
+ * 3. Mutation entry points — adds per-bucket mutation fields on the root Mutation
13
+ * type (e.g., `appBucket(key: "public"): AppBucket`), so upload operations
14
+ * can be accessed as proper GraphQL mutations instead of queries.
13
15
  *
14
- * No global mutations — all S3 operations are scoped to the per-table types that
15
- * PostGraphile already generates. Scope resolution uses the codec's schema/table
16
- * name matched against cached storage module configs.
16
+ * 4. downloadUrl — handled by download-url-field.ts (separate plugin).
17
+ *
18
+ * Scope resolution uses the codec's schema/table name matched against
19
+ * cached storage module configs.
17
20
  */
18
21
  import type { GraphileConfig } from 'graphile-config';
19
22
  import 'graphile-build';
package/plugin.js CHANGED
@@ -10,11 +10,14 @@
10
10
  * 2. Upload fields — adds `requestUploadUrl` and `requestBulkUploadUrls` fields
11
11
  * on `@storageBuckets`-tagged types, so clients upload via the typed bucket API.
12
12
  *
13
- * 3. downloadUrl — handled by download-url-field.ts (separate plugin).
13
+ * 3. Mutation entry points — adds per-bucket mutation fields on the root Mutation
14
+ * type (e.g., `appBucket(key: "public"): AppBucket`), so upload operations
15
+ * can be accessed as proper GraphQL mutations instead of queries.
14
16
  *
15
- * No global mutations — all S3 operations are scoped to the per-table types that
16
- * PostGraphile already generates. Scope resolution uses the codec's schema/table
17
- * name matched against cached storage module configs.
17
+ * 4. downloadUrl — handled by download-url-field.ts (separate plugin).
18
+ *
19
+ * Scope resolution uses the codec's schema/table name matched against
20
+ * cached storage module configs.
18
21
  */
19
22
  Object.defineProperty(exports, "__esModule", { value: true });
20
23
  exports.PresignedUrlPlugin = void 0;
@@ -115,7 +118,71 @@ function createPresignedUrlPlugin(options) {
115
118
  * Add requestUploadUrl and requestBulkUploadUrls fields on @storageBuckets types.
116
119
  */
117
120
  GraphQLObjectType_fields(fields, build, context) {
118
- const { scope: { pgCodec, isPgClassType }, } = context;
121
+ const { scope: { pgCodec, isPgClassType, isRootMutation }, } = context;
122
+ // --- Path 1: Add per-bucket mutation entry points on root Mutation ---
123
+ if (isRootMutation) {
124
+ const { graphql: { GraphQLString, GraphQLNonNull }, } = build;
125
+ const bucketCodecs = Object.values(build.input.pgRegistry.pgCodecs).filter((codec) => codec.attributes && codec.extensions?.tags?.storageBuckets);
126
+ if (bucketCodecs.length === 0)
127
+ return fields;
128
+ const newFields = {};
129
+ for (const codec of bucketCodecs) {
130
+ const typeName = build.inflection.tableType(codec);
131
+ const bucketType = build.getTypeByName(typeName);
132
+ if (!bucketType) {
133
+ log.debug(`Skipping mutation entry point for ${codec.name}: type ${typeName} not found`);
134
+ continue;
135
+ }
136
+ const fieldName = typeName.charAt(0).toLowerCase() + typeName.slice(1);
137
+ const hasOwnerId = !!codec.attributes.owner_id;
138
+ const capturedCodec = codec;
139
+ log.debug(`Adding mutation entry point "${fieldName}" for bucket type ${typeName} (entity-scoped=${hasOwnerId})`);
140
+ newFields[fieldName] = context.fieldWithHooks({ fieldName }, {
141
+ description: `Look up a ${typeName} by key for mutation operations (upload, etc.).`,
142
+ type: bucketType,
143
+ args: {
144
+ key: { type: new GraphQLNonNull(GraphQLString), description: 'Bucket key (e.g., "public", "private")' },
145
+ ...(hasOwnerId
146
+ ? { ownerId: { type: new GraphQLNonNull(GraphQLString), description: 'Owner entity ID (required for entity-scoped buckets)' } }
147
+ : {}),
148
+ },
149
+ plan(_$mutation, fieldArgs) {
150
+ const $key = fieldArgs.getRaw('key');
151
+ const $ownerId = hasOwnerId ? fieldArgs.getRaw('ownerId') : (0, grafast_1.lambda)(null, () => null);
152
+ const $withPgClient = (0, grafast_1.context)().get('withPgClient');
153
+ const $pgSettings = (0, grafast_1.context)().get('pgSettings');
154
+ const $combined = (0, grafast_1.object)({
155
+ key: $key,
156
+ ownerId: $ownerId,
157
+ withPgClient: $withPgClient,
158
+ pgSettings: $pgSettings,
159
+ });
160
+ const $row = (0, grafast_1.lambda)($combined, async (vals) => {
161
+ return vals.withPgClient(vals.pgSettings, async (pgClient) => {
162
+ const databaseId = await resolveDatabaseId(pgClient);
163
+ if (!databaseId)
164
+ throw new Error('DATABASE_NOT_FOUND');
165
+ const allConfigs = await (0, storage_module_cache_1.loadAllStorageModules)(pgClient, databaseId);
166
+ const storageConfig = (0, storage_module_cache_1.resolveStorageConfigFromCodec)(capturedCodec, allConfigs);
167
+ if (!storageConfig)
168
+ throw new Error('STORAGE_MODULE_NOT_FOUND');
169
+ const bucket = await (0, storage_module_cache_1.getBucketConfig)(pgClient, storageConfig, databaseId, vals.key, vals.ownerId ?? undefined);
170
+ if (!bucket)
171
+ throw new Error('BUCKET_NOT_FOUND');
172
+ return bucket;
173
+ });
174
+ });
175
+ const columnEntries = {};
176
+ for (const col of Object.keys(capturedCodec.attributes)) {
177
+ columnEntries[col] = (0, grafast_1.access)($row, col);
178
+ }
179
+ return (0, grafast_1.object)(columnEntries);
180
+ },
181
+ });
182
+ }
183
+ return build.extend(fields, newFields, 'PresignedUrlPlugin adding per-bucket mutation entry points');
184
+ }
185
+ // --- Path 2: Add upload fields on @storageBuckets types ---
119
186
  if (!isPgClassType || !pgCodec || !pgCodec.attributes) {
120
187
  return fields;
121
188
  }