graphile-presigned-url-plugin 0.12.0 → 0.12.2

This diff shows the changes between publicly released versions of this package as they appear in their respective public registries. It is provided for informational purposes only.
package/esm/plugin.js CHANGED
@@ -18,10 +18,10 @@
18
18
  * Scope resolution uses the codec's schema/table name matched against
19
19
  * cached storage module configs.
20
20
  */
21
- import { access, context as grafastContext, lambda, object } from 'grafast';
21
+ import { context as grafastContext, lambda, object } from 'grafast';
22
22
  import 'graphile-build';
23
23
  import { Logger } from '@pgpmjs/logger';
24
- import { loadAllStorageModules, resolveStorageConfigFromCodec, getBucketConfig, isS3BucketProvisioned, markS3BucketProvisioned } from './storage-module-cache';
24
+ import { loadAllStorageModules, resolveStorageConfigFromCodec, isS3BucketProvisioned, markS3BucketProvisioned } from './storage-module-cache';
25
25
  import { generatePresignedPutUrl, deleteS3Object } from './s3-signer';
26
26
  const log = new Logger('graphile-presigned-url:plugin');
27
27
  // --- Protocol-level constants (not configurable) ---
@@ -131,7 +131,17 @@ export function createPresignedUrlPlugin(options) {
131
131
  }
132
132
  const fieldName = typeName.charAt(0).toLowerCase() + typeName.slice(1);
133
133
  const hasOwnerId = !!codec.attributes.owner_id;
134
- const capturedCodec = codec;
134
+ // Find the PgResource for this codec so we can return a proper PgSelectSingleStep
135
+ const bucketResource = Object.values(build.input.pgRegistry.pgResources).find((r) => r.codec === codec && !r.isUnique && !r.isVirtual && !r.parameters);
136
+ if (!bucketResource) {
137
+ log.debug(`Skipping mutation entry point for ${codec.name}: no PgResource found`);
138
+ continue;
139
+ }
140
+ // Resolve the GraphQL type for ownerId from the codec's attribute codec
141
+ // (e.g. UUID scalar instead of String) so Grafast's type matching works.
142
+ const ownerIdType = hasOwnerId
143
+ ? build.getGraphQLTypeByPgCodec(codec.attributes.owner_id.codec, 'input')
144
+ : null;
135
145
  log.debug(`Adding mutation entry point "${fieldName}" for bucket type ${typeName} (entity-scoped=${hasOwnerId})`);
136
146
  newFields[fieldName] = context.fieldWithHooks({ fieldName }, {
137
147
  description: `Look up a ${typeName} by key for mutation operations (upload, etc.).`,
@@ -139,40 +149,17 @@ export function createPresignedUrlPlugin(options) {
139
149
  args: {
140
150
  key: { type: new GraphQLNonNull(GraphQLString), description: 'Bucket key (e.g., "public", "private")' },
141
151
  ...(hasOwnerId
142
- ? { ownerId: { type: new GraphQLNonNull(GraphQLString), description: 'Owner entity ID (required for entity-scoped buckets)' } }
152
+ ? { ownerId: { type: new GraphQLNonNull(ownerIdType || GraphQLString), description: 'Owner entity ID (required for entity-scoped buckets)' } }
143
153
  : {}),
144
154
  },
145
155
  plan(_$mutation, fieldArgs) {
146
- const $key = fieldArgs.getRaw('key');
147
- const $ownerId = hasOwnerId ? fieldArgs.getRaw('ownerId') : lambda(null, () => null);
148
- const $withPgClient = grafastContext().get('withPgClient');
149
- const $pgSettings = grafastContext().get('pgSettings');
150
- const $combined = object({
151
- key: $key,
152
- ownerId: $ownerId,
153
- withPgClient: $withPgClient,
154
- pgSettings: $pgSettings,
155
- });
156
- const $row = lambda($combined, async (vals) => {
157
- return vals.withPgClient(vals.pgSettings, async (pgClient) => {
158
- const databaseId = await resolveDatabaseId(pgClient);
159
- if (!databaseId)
160
- throw new Error('DATABASE_NOT_FOUND');
161
- const allConfigs = await loadAllStorageModules(pgClient, databaseId);
162
- const storageConfig = resolveStorageConfigFromCodec(capturedCodec, allConfigs);
163
- if (!storageConfig)
164
- throw new Error('STORAGE_MODULE_NOT_FOUND');
165
- const bucket = await getBucketConfig(pgClient, storageConfig, databaseId, vals.key, vals.ownerId ?? undefined);
166
- if (!bucket)
167
- throw new Error('BUCKET_NOT_FOUND');
168
- return bucket;
169
- });
170
- });
171
- const columnEntries = {};
172
- for (const col of Object.keys(capturedCodec.attributes)) {
173
- columnEntries[col] = access($row, col);
156
+ const spec = {
157
+ key: fieldArgs.getRaw('key'),
158
+ };
159
+ if (hasOwnerId) {
160
+ spec.owner_id = fieldArgs.getRaw('ownerId');
174
161
  }
175
- return object(columnEntries);
162
+ return bucketResource.find(spec).single();
176
163
  },
177
164
  });
178
165
  }
@@ -95,6 +95,7 @@ export declare function resolveStorageConfigFromCodec(pgCodec: {
95
95
  extensions?: {
96
96
  pg?: {
97
97
  schemaName?: string;
98
+ name?: string;
98
99
  };
99
100
  };
100
101
  sqlType?: string;
@@ -296,7 +296,7 @@ export async function loadAllStorageModules(pgClient, databaseId) {
296
296
  */
297
297
  export function resolveStorageConfigFromCodec(pgCodec, allConfigs) {
298
298
  const schemaName = pgCodec.extensions?.pg?.schemaName;
299
- const tableName = pgCodec.name;
299
+ const tableName = pgCodec.extensions?.pg?.name ?? pgCodec.name;
300
300
  if (!schemaName || !tableName)
301
301
  return null;
302
302
  return allConfigs.find((c) => (c.filesTableName === tableName && c.schemaName === schemaName) ||
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "graphile-presigned-url-plugin",
3
- "version": "0.12.0",
3
+ "version": "0.12.2",
4
4
  "description": "Presigned URL upload plugin for PostGraphile v5 — requestUploadUrl mutation and downloadUrl computed field",
5
5
  "author": "Constructive <developers@constructive.io>",
6
6
  "homepage": "https://github.com/constructive-io/constructive",
@@ -60,5 +60,5 @@
60
60
  "@types/node": "^22.19.11",
61
61
  "makage": "^0.1.10"
62
62
  },
63
- "gitHead": "31a008c2064e5fea05ae178a7b4e2ecbc1beb6cb"
63
+ "gitHead": "e1421fda3dabc22a37e42fd224d181bffb613779"
64
64
  }
package/plugin.js CHANGED
@@ -135,7 +135,17 @@ function createPresignedUrlPlugin(options) {
135
135
  }
136
136
  const fieldName = typeName.charAt(0).toLowerCase() + typeName.slice(1);
137
137
  const hasOwnerId = !!codec.attributes.owner_id;
138
- const capturedCodec = codec;
138
+ // Find the PgResource for this codec so we can return a proper PgSelectSingleStep
139
+ const bucketResource = Object.values(build.input.pgRegistry.pgResources).find((r) => r.codec === codec && !r.isUnique && !r.isVirtual && !r.parameters);
140
+ if (!bucketResource) {
141
+ log.debug(`Skipping mutation entry point for ${codec.name}: no PgResource found`);
142
+ continue;
143
+ }
144
+ // Resolve the GraphQL type for ownerId from the codec's attribute codec
145
+ // (e.g. UUID scalar instead of String) so Grafast's type matching works.
146
+ const ownerIdType = hasOwnerId
147
+ ? build.getGraphQLTypeByPgCodec(codec.attributes.owner_id.codec, 'input')
148
+ : null;
139
149
  log.debug(`Adding mutation entry point "${fieldName}" for bucket type ${typeName} (entity-scoped=${hasOwnerId})`);
140
150
  newFields[fieldName] = context.fieldWithHooks({ fieldName }, {
141
151
  description: `Look up a ${typeName} by key for mutation operations (upload, etc.).`,
@@ -143,40 +153,17 @@ function createPresignedUrlPlugin(options) {
143
153
  args: {
144
154
  key: { type: new GraphQLNonNull(GraphQLString), description: 'Bucket key (e.g., "public", "private")' },
145
155
  ...(hasOwnerId
146
- ? { ownerId: { type: new GraphQLNonNull(GraphQLString), description: 'Owner entity ID (required for entity-scoped buckets)' } }
156
+ ? { ownerId: { type: new GraphQLNonNull(ownerIdType || GraphQLString), description: 'Owner entity ID (required for entity-scoped buckets)' } }
147
157
  : {}),
148
158
  },
149
159
  plan(_$mutation, fieldArgs) {
150
- const $key = fieldArgs.getRaw('key');
151
- const $ownerId = hasOwnerId ? fieldArgs.getRaw('ownerId') : (0, grafast_1.lambda)(null, () => null);
152
- const $withPgClient = (0, grafast_1.context)().get('withPgClient');
153
- const $pgSettings = (0, grafast_1.context)().get('pgSettings');
154
- const $combined = (0, grafast_1.object)({
155
- key: $key,
156
- ownerId: $ownerId,
157
- withPgClient: $withPgClient,
158
- pgSettings: $pgSettings,
159
- });
160
- const $row = (0, grafast_1.lambda)($combined, async (vals) => {
161
- return vals.withPgClient(vals.pgSettings, async (pgClient) => {
162
- const databaseId = await resolveDatabaseId(pgClient);
163
- if (!databaseId)
164
- throw new Error('DATABASE_NOT_FOUND');
165
- const allConfigs = await (0, storage_module_cache_1.loadAllStorageModules)(pgClient, databaseId);
166
- const storageConfig = (0, storage_module_cache_1.resolveStorageConfigFromCodec)(capturedCodec, allConfigs);
167
- if (!storageConfig)
168
- throw new Error('STORAGE_MODULE_NOT_FOUND');
169
- const bucket = await (0, storage_module_cache_1.getBucketConfig)(pgClient, storageConfig, databaseId, vals.key, vals.ownerId ?? undefined);
170
- if (!bucket)
171
- throw new Error('BUCKET_NOT_FOUND');
172
- return bucket;
173
- });
174
- });
175
- const columnEntries = {};
176
- for (const col of Object.keys(capturedCodec.attributes)) {
177
- columnEntries[col] = (0, grafast_1.access)($row, col);
160
+ const spec = {
161
+ key: fieldArgs.getRaw('key'),
162
+ };
163
+ if (hasOwnerId) {
164
+ spec.owner_id = fieldArgs.getRaw('ownerId');
178
165
  }
179
- return (0, grafast_1.object)(columnEntries);
166
+ return bucketResource.find(spec).single();
180
167
  },
181
168
  });
182
169
  }
@@ -95,6 +95,7 @@ export declare function resolveStorageConfigFromCodec(pgCodec: {
95
95
  extensions?: {
96
96
  pg?: {
97
97
  schemaName?: string;
98
+ name?: string;
98
99
  };
99
100
  };
100
101
  sqlType?: string;
@@ -308,7 +308,7 @@ async function loadAllStorageModules(pgClient, databaseId) {
308
308
  */
309
309
  function resolveStorageConfigFromCodec(pgCodec, allConfigs) {
310
310
  const schemaName = pgCodec.extensions?.pg?.schemaName;
311
- const tableName = pgCodec.name;
311
+ const tableName = pgCodec.extensions?.pg?.name ?? pgCodec.name;
312
312
  if (!schemaName || !tableName)
313
313
  return null;
314
314
  return allConfigs.find((c) => (c.filesTableName === tableName && c.schemaName === schemaName) ||