graphile-upload-plugin 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +23 -0
- package/README.md +142 -0
- package/esm/index.js +4 -0
- package/esm/plugin.js +158 -0
- package/esm/resolvers/upload.js +54 -0
- package/index.d.ts +4 -0
- package/index.js +11 -0
- package/package.json +58 -0
- package/plugin.d.ts +31 -0
- package/plugin.js +160 -0
- package/resolvers/upload.d.ts +13 -0
- package/resolvers/upload.js +61 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
The MIT License (MIT)
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 Dan Lynch <pyramation@gmail.com>
|
|
4
|
+
Copyright (c) 2025 Hyperweb <developers@hyperweb.io>
|
|
5
|
+
Copyright (c) 2020-present, Interweb, Inc.
|
|
6
|
+
|
|
7
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
8
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
9
|
+
in the Software without restriction, including without limitation the rights
|
|
10
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
11
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
12
|
+
furnished to do so, subject to the following conditions:
|
|
13
|
+
|
|
14
|
+
The above copyright notice and this permission notice shall be included in all
|
|
15
|
+
copies or substantial portions of the Software.
|
|
16
|
+
|
|
17
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
18
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
19
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
20
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
21
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
22
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
23
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
# graphile-upload-plugin
|
|
2
|
+
|
|
3
|
+
<p align="center" width="100%">
|
|
4
|
+
<img height="250" src="https://raw.githubusercontent.com/launchql/launchql/refs/heads/main/assets/outline-logo.svg" />
|
|
5
|
+
</p>
|
|
6
|
+
|
|
7
|
+
<p align="center" width="100%">
|
|
8
|
+
<a href="https://github.com/launchql/launchql/actions/workflows/run-tests.yaml">
|
|
9
|
+
<img height="20" src="https://github.com/launchql/launchql/actions/workflows/run-tests.yaml/badge.svg" />
|
|
10
|
+
</a>
|
|
11
|
+
<a href="https://github.com/launchql/launchql/blob/main/LICENSE"><img height="20" src="https://img.shields.io/badge/license-MIT-blue.svg"/></a>
|
|
12
|
+
<a href="https://www.npmjs.com/package/graphile-upload-plugin"><img height="20" src="https://img.shields.io/github/package-json/v/launchql/launchql?filename=graphile%2Fgraphile-upload-plugin%2Fpackage.json"/></a>
|
|
13
|
+
</p>
|
|
14
|
+
|
|
15
|
+
PostGraphile plugin for handling file uploads via GraphQL. Adds an `Upload` scalar type and upload field support for PostgreSQL columns.
|
|
16
|
+
|
|
17
|
+
## Install
|
|
18
|
+
|
|
19
|
+
```bash
|
|
20
|
+
pnpm add graphile-upload-plugin
|
|
21
|
+
```
|
|
22
|
+
|
|
23
|
+
## Usage
|
|
24
|
+
|
|
25
|
+
```ts
|
|
26
|
+
import express from 'express';
|
|
27
|
+
import { postgraphile } from 'postgraphile';
|
|
28
|
+
import UploadPostGraphilePlugin from 'graphile-upload-plugin';
|
|
29
|
+
|
|
30
|
+
const app = express();
|
|
31
|
+
app.use(
|
|
32
|
+
postgraphile(process.env.DATABASE_URL, ['app_public'], {
|
|
33
|
+
appendPlugins: [UploadPostGraphilePlugin],
|
|
34
|
+
graphileBuildOptions: {
|
|
35
|
+
uploadFieldDefinitions: [
|
|
36
|
+
{
|
|
37
|
+
name: 'upload',
|
|
38
|
+
namespaceName: 'public',
|
|
39
|
+
type: 'JSON',
|
|
40
|
+
resolve: async (upload, args, context, info) => {
|
|
41
|
+
// Handle upload
|
|
42
|
+
return { url: '...', size: upload.size };
|
|
43
|
+
},
|
|
44
|
+
},
|
|
45
|
+
{
|
|
46
|
+
tag: 'upload',
|
|
47
|
+
resolve: async (upload, args, context, info) => {
|
|
48
|
+
// Handle upload by tag
|
|
49
|
+
return { url: '...' };
|
|
50
|
+
},
|
|
51
|
+
},
|
|
52
|
+
],
|
|
53
|
+
},
|
|
54
|
+
})
|
|
55
|
+
);
|
|
56
|
+
```
|
|
57
|
+
|
|
58
|
+
## Configuration
|
|
59
|
+
|
|
60
|
+
The plugin accepts `uploadFieldDefinitions` in `graphileBuildOptions`:
|
|
61
|
+
|
|
62
|
+
- **By type**: Match PostgreSQL types by `name` and `namespaceName`
|
|
63
|
+
- **By tag**: Match columns via smart comments (e.g., `@upload`)
|
|
64
|
+
|
|
65
|
+
Each definition requires a `resolve` function that processes the upload and returns the value to store in the database.
|
|
66
|
+
|
|
67
|
+
---
|
|
68
|
+
|
|
69
|
+
## Education and Tutorials
|
|
70
|
+
|
|
71
|
+
1. 🚀 [Quickstart: Getting Up and Running](https://launchql.com/learn/quickstart)
|
|
72
|
+
Get started with modular databases in minutes. Install prerequisites and deploy your first module.
|
|
73
|
+
|
|
74
|
+
2. 📦 [Modular PostgreSQL Development with Database Packages](https://launchql.com/learn/modular-postgres)
|
|
75
|
+
Learn to organize PostgreSQL projects with pgpm workspaces and reusable database modules.
|
|
76
|
+
|
|
77
|
+
3. ✏️ [Authoring Database Changes](https://launchql.com/learn/authoring-database-changes)
|
|
78
|
+
Master the workflow for adding, organizing, and managing database changes with pgpm.
|
|
79
|
+
|
|
80
|
+
4. 🧪 [End-to-End PostgreSQL Testing with TypeScript](https://launchql.com/learn/e2e-postgres-testing)
|
|
81
|
+
Master end-to-end PostgreSQL testing with ephemeral databases, RLS testing, and CI/CD automation.
|
|
82
|
+
|
|
83
|
+
5. ⚡ [Supabase Testing](https://launchql.com/learn/supabase)
|
|
84
|
+
Use TypeScript-first tools to test Supabase projects with realistic RLS, policies, and auth contexts.
|
|
85
|
+
|
|
86
|
+
6. 💧 [Drizzle ORM Testing](https://launchql.com/learn/drizzle-testing)
|
|
87
|
+
Run full-stack tests with Drizzle ORM, including database setup, teardown, and RLS enforcement.
|
|
88
|
+
|
|
89
|
+
7. 🔧 [Troubleshooting](https://launchql.com/learn/troubleshooting)
|
|
90
|
+
Common issues and solutions for pgpm, PostgreSQL, and testing.
|
|
91
|
+
|
|
92
|
+
## Related LaunchQL Tooling
|
|
93
|
+
|
|
94
|
+
### 🧪 Testing
|
|
95
|
+
|
|
96
|
+
* [launchql/pgsql-test](https://github.com/launchql/launchql/tree/main/packages/pgsql-test): **📊 Isolated testing environments** with per-test transaction rollbacks—ideal for integration tests, complex migrations, and RLS simulation.
|
|
97
|
+
* [launchql/supabase-test](https://github.com/launchql/launchql/tree/main/packages/supabase-test): **🧪 Supabase-native test harness** preconfigured for the local Supabase stack—per-test rollbacks, JWT/role context helpers, and CI/GitHub Actions ready.
|
|
98
|
+
* [launchql/graphile-test](https://github.com/launchql/launchql/tree/main/packages/graphile-test): **🔐 Authentication mocking** for Graphile-focused test helpers and emulating row-level security contexts.
|
|
99
|
+
* [launchql/pg-query-context](https://github.com/launchql/launchql/tree/main/packages/pg-query-context): **🔒 Session context injection** to add session-local context (e.g., `SET LOCAL`) into queries—ideal for setting `role`, `jwt.claims`, and other session settings.
|
|
100
|
+
|
|
101
|
+
### 🧠 Parsing & AST
|
|
102
|
+
|
|
103
|
+
* [launchql/pgsql-parser](https://github.com/launchql/pgsql-parser): **🔄 SQL conversion engine** that interprets and converts PostgreSQL syntax.
|
|
104
|
+
* [launchql/libpg-query-node](https://github.com/launchql/libpg-query-node): **🌉 Node.js bindings** for `libpg_query`, converting SQL into parse trees.
|
|
105
|
+
* [launchql/pg-proto-parser](https://github.com/launchql/pg-proto-parser): **📦 Protobuf parser** for parsing PostgreSQL Protocol Buffers definitions to generate TypeScript interfaces, utility functions, and JSON mappings for enums.
|
|
106
|
+
* [@pgsql/enums](https://github.com/launchql/pgsql-parser/tree/main/packages/enums): **🏷️ TypeScript enums** for PostgreSQL AST for safe and ergonomic parsing logic.
|
|
107
|
+
* [@pgsql/types](https://github.com/launchql/pgsql-parser/tree/main/packages/types): **📝 Type definitions** for PostgreSQL AST nodes in TypeScript.
|
|
108
|
+
* [@pgsql/utils](https://github.com/launchql/pgsql-parser/tree/main/packages/utils): **🛠️ AST utilities** for constructing and transforming PostgreSQL syntax trees.
|
|
109
|
+
* [launchql/pg-ast](https://github.com/launchql/launchql/tree/main/packages/pg-ast): **🔍 Low-level AST tools** and transformations for Postgres query structures.
|
|
110
|
+
|
|
111
|
+
### 🚀 API & Dev Tools
|
|
112
|
+
|
|
113
|
+
* [launchql/server](https://github.com/launchql/launchql/tree/main/packages/server): **⚡ Express-based API server** powered by PostGraphile to expose a secure, scalable GraphQL API over your Postgres database.
|
|
114
|
+
* [launchql/explorer](https://github.com/launchql/launchql/tree/main/packages/explorer): **🔎 Visual API explorer** with GraphiQL for browsing across all databases and schemas—useful for debugging, documentation, and API prototyping.
|
|
115
|
+
|
|
116
|
+
### 🔁 Streaming & Uploads
|
|
117
|
+
|
|
118
|
+
* [launchql/s3-streamer](https://github.com/launchql/launchql/tree/main/packages/s3-streamer): **📤 Direct S3 streaming** for large files with support for metadata injection and content validation.
|
|
119
|
+
* [launchql/etag-hash](https://github.com/launchql/launchql/tree/main/packages/etag-hash): **🏷️ S3-compatible ETags** created by streaming and hashing file uploads in chunks.
|
|
120
|
+
* [launchql/etag-stream](https://github.com/launchql/launchql/tree/main/packages/etag-stream): **🔄 ETag computation** via Node stream transformer during upload or transfer.
|
|
121
|
+
* [launchql/uuid-hash](https://github.com/launchql/launchql/tree/main/packages/uuid-hash): **🆔 Deterministic UUIDs** generated from hashed content, great for deduplication and asset referencing.
|
|
122
|
+
* [launchql/uuid-stream](https://github.com/launchql/launchql/tree/main/packages/uuid-stream): **🌊 Streaming UUID generation** based on piped file content—ideal for upload pipelines.
|
|
123
|
+
* [launchql/upload-names](https://github.com/launchql/launchql/tree/main/packages/upload-names): **📂 Collision-resistant filenames** utility for structured and unique file names for uploads.
|
|
124
|
+
|
|
125
|
+
### 🧰 CLI & Codegen
|
|
126
|
+
|
|
127
|
+
* [pgpm](https://github.com/launchql/launchql/tree/main/packages/pgpm): **🖥️ PostgreSQL Package Manager** for modular Postgres development. Works with database workspaces, scaffolding, migrations, seeding, and installing database packages.
|
|
128
|
+
* [@launchql/cli](https://github.com/launchql/launchql/tree/main/packages/cli): **🖥️ Command-line toolkit** for managing LaunchQL projects—supports database scaffolding, migrations, seeding, code generation, and automation.
|
|
129
|
+
* [launchql/launchql-gen](https://github.com/launchql/launchql/tree/main/packages/launchql-gen): **✨ Auto-generated GraphQL** mutations and queries dynamically built from introspected schema data.
|
|
130
|
+
* [@launchql/query-builder](https://github.com/launchql/launchql/tree/main/packages/query-builder): **🏗️ SQL constructor** providing a robust TypeScript-based query builder for dynamic generation of `SELECT`, `INSERT`, `UPDATE`, `DELETE`, and stored procedure calls—supports advanced SQL features like `JOIN`, `GROUP BY`, and schema-qualified queries.
|
|
131
|
+
* [@launchql/query](https://github.com/launchql/launchql/tree/main/packages/query): **🧩 Fluent GraphQL builder** for PostGraphile schemas. ⚡ Schema-aware via introspection, 🧩 composable and ergonomic for building deeply nested queries.
|
|
132
|
+
|
|
133
|
+
## Credits
|
|
134
|
+
|
|
135
|
+
🛠 Built by LaunchQL — if you like our tools, please check out and contribute to [our GitHub ⚛️](https://github.com/launchql)
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
## Disclaimer
|
|
139
|
+
|
|
140
|
+
AS DESCRIBED IN THE LICENSES, THE SOFTWARE IS PROVIDED "AS IS", AT YOUR OWN RISK, AND WITHOUT WARRANTIES OF ANY KIND.
|
|
141
|
+
|
|
142
|
+
No developer or entity involved in creating this software will be liable for any claims or damages whatsoever associated with your use, inability to use, or your interaction with other users of the code, including any direct, indirect, incidental, special, exemplary, punitive or consequential damages, or loss of profits, cryptocurrencies, tokens, or anything else of value.
|
package/esm/index.js
ADDED
package/esm/plugin.js
ADDED
|
@@ -0,0 +1,158 @@
|
|
|
1
|
+
// PostGraphile plugin
// Adds an `Upload` scalar to the schema and, for every table column matching
// one of the configured `uploadFieldDefinitions` (matched by Postgres type
// name/namespace or by smart-comment tag), exposes an extra `<column>Upload`
// input field on the create/update/base input types. At mutation time each
// uploaded file is handed to the matching definition's `resolve` function and
// the returned value is written into the original column's input slot.
const UploadPostGraphilePlugin = (builder, opts = {}) => {
    const { uploadFieldDefinitions = [] } = opts;
    // Determine whether a table attribute should be treated as an Upload according to configuration.
    // Returns the single matching definition, `undefined` when none matches,
    // and throws when more than one definition claims the same column.
    const relevantUploadType = (attr) => {
        const types = uploadFieldDefinitions.filter(({ name, namespaceName, tag }) => (name &&
            namespaceName &&
            attr.type?.name === name &&
            attr.type?.namespaceName === namespaceName) ||
            (tag && attr.tags?.[tag]));
        if (types.length === 1) {
            return types[0];
        }
        else if (types.length > 1) {
            throw new Error('Upload field definitions are ambiguous');
        }
        return undefined;
    };
    // Register the `Upload` scalar and remap configured Postgres types to the
    // GraphQL type named in their definition (e.g. 'JSON').
    builder.hook('build', (input, build) => {
        const { addType, graphql: { GraphQLScalarType, GraphQLError }, } = build;
        const GraphQLUpload = new GraphQLScalarType({
            name: 'Upload',
            description: 'The `Upload` scalar type represents a file upload.',
            parseValue(value) {
                // The value should be an object with a `.promise` that resolves to the file upload
                // (the shape produced by graphql-upload-style middleware — presumably; confirm).
                const maybe = value;
                if (maybe &&
                    maybe.promise &&
                    typeof maybe.promise.then === 'function') {
                    return maybe.promise;
                }
                throw new GraphQLError('Upload value invalid.');
            },
            parseLiteral(ast) {
                // Uploads can only arrive as variables, never inline in the query text.
                throw new GraphQLError('Upload literal unsupported.', ast);
            },
            serialize() {
                // Uploads are input-only; they are never returned to clients.
                throw new GraphQLError('Upload serialization unsupported.');
            },
        });
        addType(GraphQLUpload);
        // Override the internal types for configured upload-backed columns
        uploadFieldDefinitions.forEach(({ name, namespaceName, type }) => {
            if (!name || !type || !namespaceName)
                return; // tag-based or incomplete definitions
            const theType = build.pgIntrospectionResultsByKind.type.find((typ) => typ.name === name && typ.namespaceName === namespaceName);
            if (theType) {
                build.pgRegisterGqlTypeByTypeId(theType.id, () => build.getTypeByName(type));
            }
        });
        return input;
    });
    // Inflector: `<column>` -> `<column>Upload` for the companion input field.
    builder.hook('inflection', (inflection, build) => {
        return build.extend(inflection, {
            // NO ARROW FUNCTIONS HERE (this)
            uploadColumn(attr) {
                return this.column(attr) + 'Upload';
            },
        });
    });
    // Add Upload input fields alongside matching columns
    builder.hook('GraphQLInputObjectType:fields', (fields, build, context) => {
        const { scope: { isPgRowType, pgIntrospection: table }, } = context;
        if (!isPgRowType || !table || table.kind !== 'class') {
            return fields;
        }
        return build.extend(fields, table.attributes.reduce((memo, attr) => {
            if (!build.pgColumnFilter(attr, build, context))
                return memo;
            // Respect @omit smart comments for the action this input type serves.
            const action = context.scope.isPgBaseInput
                ? 'base'
                : context.scope.isPgPatch
                    ? 'update'
                    : 'create';
            if (build.pgOmit(attr, action))
                return memo;
            // Skip identity columns ('a' presumably = GENERATED ALWAYS — confirm
            // against graphile's pg introspection docs).
            if (attr.identity === 'a')
                return memo;
            if (!relevantUploadType(attr)) {
                return memo;
            }
            const fieldName = build.inflection.uploadColumn(attr);
            if (memo[fieldName]) {
                throw new Error(`Two columns produce the same GraphQL field name '${fieldName}' on class '${table.namespaceName}.${table.name}'; one of them is '${attr.name}'`);
            }
            memo = build.extend(memo, {
                [fieldName]: context.fieldWithHooks(fieldName, {
                    description: attr.description,
                    type: build.getTypeByName('Upload'),
                }, { pgFieldIntrospection: attr, isPgUploadField: true }),
            }, `Adding field for ${build.describePgEntity(attr)}. You can rename this field with a 'Smart Comment':\n\n ${build.sqlCommentByAddingTags(attr, {
                name: 'newNameHere',
            })}`);
            return memo;
        }, {}), `Adding columns to '${build.describePgEntity(table)}'`);
    });
    // Wrap every root-mutation resolver so upload promises in the arguments
    // are resolved (and persisted via the configured resolver) before the
    // original mutation resolver runs.
    builder.hook('GraphQLObjectType:fields:field', (field, build, context) => {
        const { pgIntrospectionResultsByKind: introspectionResultsByKind, inflection, } = build;
        const { scope: { isRootMutation, fieldName, pgFieldIntrospection: table }, } = context;
        if (!isRootMutation || !table) {
            return field;
        }
        // It's possible that `resolve` isn't specified on a field, so in that case
        // we fall back to a default resolver.
        const defaultResolver = (obj) => obj[fieldName];
        // Extract the old resolver from `field`
        const { resolve: oldResolve = defaultResolver, ...rest } = field; // GraphQLFieldConfig
        // Per-upload-field metadata, keyed by the `<column>Upload` field name:
        // the column's smart-comment tags, its Postgres type name, and the
        // original column field name the resolved value is written back to.
        const tags = {};
        const types = {};
        const originals = {};
        const uploadResolversByFieldName = introspectionResultsByKind.attribute
            .filter((attr) => attr.classId === table.id)
            .reduce((memo, attr) => {
            // first, try to directly match the types here
            const typeMatched = relevantUploadType(attr);
            if (typeMatched) {
                const fieldName = inflection.column(attr);
                const uploadFieldName = inflection.uploadColumn(attr);
                memo[uploadFieldName] = typeMatched.resolve;
                tags[uploadFieldName] = attr.tags;
                types[uploadFieldName] = attr.type.name;
                originals[uploadFieldName] = fieldName;
            }
            return memo;
        }, {});
        return {
            // Copy over everything except 'resolve'
            ...rest,
            // Add our new resolver which wraps the old resolver
            async resolve(source, args, context, info) {
                // Recursively check for Upload promises to resolve.
                // The Upload scalar's parseValue returns the file promise, so any
                // Promise found under a known upload field name is a file upload.
                async function resolvePromises(obj) {
                    for (const key of Object.keys(obj)) {
                        if (obj[key] instanceof Promise) {
                            if (uploadResolversByFieldName[key]) {
                                const upload = await obj[key];
                                // eslint-disable-next-line require-atomic-updates
                                // Store the result under the ORIGINAL column's field name so the
                                // row insert/update picks it up (the upload key is left in place).
                                obj[originals[key]] = await uploadResolversByFieldName[key](upload, args, context, {
                                    ...info,
                                    uploadPlugin: { tags: tags[key], type: types[key] },
                                });
                            }
                        }
                        else if (obj[key] !== null && typeof obj[key] === 'object') {
                            await resolvePromises(obj[key]);
                        }
                    }
                }
                await resolvePromises(args);
                // Call the old resolver
                const oldResolveResult = await oldResolve(source, args, context, info);
                // Finally return the result.
                return oldResolveResult;
            },
        };
    });
};
export default UploadPostGraphilePlugin;
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import streamer from '@launchql/s3-streamer';
|
|
2
|
+
import uploadNames from '@launchql/upload-names';
|
|
3
|
+
/**
 * Uploader streams an incoming GraphQL file upload to S3 (or a MinIO
 * endpoint) via @launchql/s3-streamer and returns the value to persist
 * in the database column.
 *
 * `resolveUpload` is shaped as an UploadResolver so an instance method can be
 * passed directly as the `resolve` of an upload field definition.
 */
export class Uploader {
    opts;
    streamerInstance;
    /**
     * @param opts - bucketName, awsRegion, awsSecretKey, awsAccessKey and an
     *   optional minioEndpoint (see UploaderOptions).
     */
    constructor(opts) {
        this.opts = opts;
        const { bucketName, awsRegion, awsSecretKey, awsAccessKey, minioEndpoint } = this.opts;
        this.streamerInstance = new streamer({
            defaultBucket: bucketName,
            awsRegion,
            awsSecretKey,
            awsAccessKey,
            minioEndpoint,
        });
    }
    /**
     * Streams the upload to the bucket and returns either a structured
     * `{ filename, mime, url }` object (for 'image'/'upload' columns) or the
     * bare URL string (for 'attachment' and anything else).
     *
     * @throws Error `UPLOAD_MIMETYPE <list>` when the detected content type is
     *   not in the allowed set (from the `@mime` tag, or image defaults).
     */
    async resolveUpload(upload, _args, _context, info) {
        const { uploadPlugin: { tags, type } } = info;
        const readStream = upload.createReadStream();
        const { filename } = upload;
        // Collision-avoidance key prefix; Math.random is fine here because the
        // prefix only needs uniqueness, not cryptographic strength.
        const rand = Math.random().toString(36).substring(2, 7) +
            Math.random().toString(36).substring(2, 7);
        const key = `${rand}-${uploadNames(filename)}`;
        const result = await this.streamerInstance.upload({
            readStream,
            filename,
            key,
            bucket: this.opts.bucketName
        });
        const url = result.upload.Location;
        // Content type as detected by the streamer. (Previously `magic.charset`
        // was also destructured but never used — dropped; it also made this
        // method throw whenever `result.magic` was absent.)
        const { contentType } = result;
        // Column's Postgres type name takes precedence over an explicit @type tag.
        const typ = type || tags.type;
        // Allowed MIME types: explicit @mime tag (comma-separated) wins;
        // otherwise image columns get a sensible default list; otherwise no check.
        const allowedMimes = tags.mime
            ? tags.mime.trim().split(',').map((a) => a.trim())
            : typ === 'image'
                ? ['image/jpg', 'image/jpeg', 'image/png', 'image/svg+xml']
                : [];
        // NOTE(review): validation happens after the object is already in the
        // bucket, so a rejected file is not cleaned up — confirm intended.
        if (allowedMimes.length && !allowedMimes.includes(contentType)) {
            throw new Error(`UPLOAD_MIMETYPE ${allowedMimes.join(',')}`);
        }
        switch (typ) {
            case 'image':
            case 'upload':
                return {
                    filename,
                    mime: contentType,
                    url
                };
            case 'attachment':
            default:
                return url;
        }
    }
}
|
package/index.d.ts
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
// Public entry point (type declarations): re-exports the PostGraphile upload
// plugin (both default and named export), its supporting types, and the
// S3-backed Uploader resolver helper.
import UploadPostGraphilePlugin, { type FileUpload, type UploadPluginInfo, type UploadResolver, type UploadFieldDefinition } from './plugin';
export { UploadPostGraphilePlugin, type FileUpload, type UploadPluginInfo, type UploadResolver, type UploadFieldDefinition, };
export { Uploader, type UploaderOptions } from './resolvers/upload';
export default UploadPostGraphilePlugin;
|
package/index.js
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
"use strict";
// CommonJS entry point (compiled TypeScript output — keep in sync with the
// compiler's emit). Exposes the plugin as both the default export and the
// named `UploadPostGraphilePlugin`, and re-exports `Uploader`.
// TypeScript default-import interop helper: wraps non-ESM modules so `.default` always exists.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Uploader = exports.UploadPostGraphilePlugin = void 0;
const plugin_1 = __importDefault(require("./plugin"));
exports.UploadPostGraphilePlugin = plugin_1.default;
var upload_1 = require("./resolvers/upload");
// Getter preserves the live-binding semantics of `export { Uploader } from ...`.
Object.defineProperty(exports, "Uploader", { enumerable: true, get: function () { return upload_1.Uploader; } });
exports.default = plugin_1.default;
|
package/package.json
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "graphile-upload-plugin",
|
|
3
|
+
"version": "0.2.0",
|
|
4
|
+
"description": "Graphile upload plugin for PostGraphile",
|
|
5
|
+
"author": "Dan Lynch <pyramation@gmail.com>",
|
|
6
|
+
"homepage": "https://github.com/launchql/launchql",
|
|
7
|
+
"license": "MIT",
|
|
8
|
+
"main": "index.js",
|
|
9
|
+
"module": "esm/index.js",
|
|
10
|
+
"types": "index.d.ts",
|
|
11
|
+
"scripts": {
|
|
12
|
+
"clean": "makage clean",
|
|
13
|
+
"prepack": "pnpm run build",
|
|
14
|
+
"build": "makage build",
|
|
15
|
+
"build:dev": "makage build --dev",
|
|
16
|
+
"lint": "eslint . --fix",
|
|
17
|
+
"test": "jest",
|
|
18
|
+
"test:watch": "jest --watch"
|
|
19
|
+
},
|
|
20
|
+
"publishConfig": {
|
|
21
|
+
"access": "public",
|
|
22
|
+
"directory": "dist"
|
|
23
|
+
},
|
|
24
|
+
"repository": {
|
|
25
|
+
"type": "git",
|
|
26
|
+
"url": "https://github.com/launchql/launchql"
|
|
27
|
+
},
|
|
28
|
+
"keywords": [
|
|
29
|
+
"postgraphile",
|
|
30
|
+
"graphile",
|
|
31
|
+
"launchql",
|
|
32
|
+
"plugin",
|
|
33
|
+
"postgres",
|
|
34
|
+
"graphql",
|
|
35
|
+
"upload"
|
|
36
|
+
],
|
|
37
|
+
"bugs": {
|
|
38
|
+
"url": "https://github.com/launchql/launchql/issues"
|
|
39
|
+
},
|
|
40
|
+
"devDependencies": {
|
|
41
|
+
"@aws-sdk/client-s3": "^3.0.0",
|
|
42
|
+
"@launchql/env": "^2.5.6",
|
|
43
|
+
"@launchql/s3-utils": "^2.2.10",
|
|
44
|
+
"@types/pg": "^8.15.2",
|
|
45
|
+
"graphile-test": "^2.8.9",
|
|
46
|
+
"graphql-tag": "^2.12.6",
|
|
47
|
+
"makage": "^0.1.6",
|
|
48
|
+
"pgsql-test": "^2.14.12",
|
|
49
|
+
"ts-jest": "^29.1.0"
|
|
50
|
+
},
|
|
51
|
+
"dependencies": {
|
|
52
|
+
"@launchql/s3-streamer": "^2.6.8",
|
|
53
|
+
"@launchql/upload-names": "^2.2.10",
|
|
54
|
+
"graphile-build": "^4.14.1",
|
|
55
|
+
"graphql": "15.10.1"
|
|
56
|
+
},
|
|
57
|
+
"gitHead": "3812f24a480b2035b3413ec7fecfe492f294e590"
|
|
58
|
+
}
|
package/plugin.d.ts
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import type { ReadStream } from 'fs';
import type { Plugin } from 'graphile-build';
import type { GraphQLResolveInfo } from 'graphql';
/**
 * A resolved file upload handed to an UploadResolver
 * (shape produced by graphql-upload-style middleware — presumably; confirm).
 */
export interface FileUpload {
    filename: string;
    mimetype?: string;
    encoding?: string;
    /** Opens a fresh read stream over the uploaded file's contents. */
    createReadStream: () => ReadStream;
}
/** Extra context merged into `info.uploadPlugin` when an upload resolver runs. */
export interface UploadPluginInfo {
    /** Smart-comment tags of the column being populated. */
    tags: Record<string, any>;
    /** Postgres type name of the column (e.g. 'image', 'upload'). */
    type?: string;
}
/**
 * Processes one uploaded file and resolves to the value that will be stored
 * in the underlying column.
 */
export type UploadResolver = (upload: FileUpload, args: any, context: any, info: GraphQLResolveInfo & {
    uploadPlugin: UploadPluginInfo;
}) => Promise<any>;
/**
 * One column matcher + resolver. Match either by Postgres type
 * (`name` + `namespaceName`, with `type` the GraphQL type to expose the
 * column as) or by smart-comment `tag` — the `never` fields make the two
 * variants mutually exclusive.
 */
export type UploadFieldDefinition = {
    name: string;
    namespaceName: string;
    type: string;
    resolve: UploadResolver;
    tag?: never;
} | {
    tag: string;
    resolve: UploadResolver;
    name?: never;
    namespaceName?: never;
    type?: string;
};
declare const UploadPostGraphilePlugin: Plugin;
export default UploadPostGraphilePlugin;
|
package/plugin.js
ADDED
|
@@ -0,0 +1,160 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// PostGraphile plugin (CommonJS build — compiled counterpart of esm/plugin.js).
// Adds an `Upload` scalar to the schema and, for every table column matching
// one of the configured `uploadFieldDefinitions` (matched by Postgres type
// name/namespace or by smart-comment tag), exposes an extra `<column>Upload`
// input field on the create/update/base input types. At mutation time each
// uploaded file is handed to the matching definition's `resolve` function and
// the returned value is written into the original column's input slot.
const UploadPostGraphilePlugin = (builder, opts = {}) => {
    const { uploadFieldDefinitions = [] } = opts;
    // Determine whether a table attribute should be treated as an Upload according to configuration.
    // Returns the single matching definition, `undefined` when none matches,
    // and throws when more than one definition claims the same column.
    const relevantUploadType = (attr) => {
        const types = uploadFieldDefinitions.filter(({ name, namespaceName, tag }) => (name &&
            namespaceName &&
            attr.type?.name === name &&
            attr.type?.namespaceName === namespaceName) ||
            (tag && attr.tags?.[tag]));
        if (types.length === 1) {
            return types[0];
        }
        else if (types.length > 1) {
            throw new Error('Upload field definitions are ambiguous');
        }
        return undefined;
    };
    // Register the `Upload` scalar and remap configured Postgres types to the
    // GraphQL type named in their definition (e.g. 'JSON').
    builder.hook('build', (input, build) => {
        const { addType, graphql: { GraphQLScalarType, GraphQLError }, } = build;
        const GraphQLUpload = new GraphQLScalarType({
            name: 'Upload',
            description: 'The `Upload` scalar type represents a file upload.',
            parseValue(value) {
                // The value should be an object with a `.promise` that resolves to the file upload
                // (the shape produced by graphql-upload-style middleware — presumably; confirm).
                const maybe = value;
                if (maybe &&
                    maybe.promise &&
                    typeof maybe.promise.then === 'function') {
                    return maybe.promise;
                }
                throw new GraphQLError('Upload value invalid.');
            },
            parseLiteral(ast) {
                // Uploads can only arrive as variables, never inline in the query text.
                throw new GraphQLError('Upload literal unsupported.', ast);
            },
            serialize() {
                // Uploads are input-only; they are never returned to clients.
                throw new GraphQLError('Upload serialization unsupported.');
            },
        });
        addType(GraphQLUpload);
        // Override the internal types for configured upload-backed columns
        uploadFieldDefinitions.forEach(({ name, namespaceName, type }) => {
            if (!name || !type || !namespaceName)
                return; // tag-based or incomplete definitions
            const theType = build.pgIntrospectionResultsByKind.type.find((typ) => typ.name === name && typ.namespaceName === namespaceName);
            if (theType) {
                build.pgRegisterGqlTypeByTypeId(theType.id, () => build.getTypeByName(type));
            }
        });
        return input;
    });
    // Inflector: `<column>` -> `<column>Upload` for the companion input field.
    builder.hook('inflection', (inflection, build) => {
        return build.extend(inflection, {
            // NO ARROW FUNCTIONS HERE (this)
            uploadColumn(attr) {
                return this.column(attr) + 'Upload';
            },
        });
    });
    // Add Upload input fields alongside matching columns
    builder.hook('GraphQLInputObjectType:fields', (fields, build, context) => {
        const { scope: { isPgRowType, pgIntrospection: table }, } = context;
        if (!isPgRowType || !table || table.kind !== 'class') {
            return fields;
        }
        return build.extend(fields, table.attributes.reduce((memo, attr) => {
            if (!build.pgColumnFilter(attr, build, context))
                return memo;
            // Respect @omit smart comments for the action this input type serves.
            const action = context.scope.isPgBaseInput
                ? 'base'
                : context.scope.isPgPatch
                    ? 'update'
                    : 'create';
            if (build.pgOmit(attr, action))
                return memo;
            // Skip identity columns ('a' presumably = GENERATED ALWAYS — confirm
            // against graphile's pg introspection docs).
            if (attr.identity === 'a')
                return memo;
            if (!relevantUploadType(attr)) {
                return memo;
            }
            const fieldName = build.inflection.uploadColumn(attr);
            if (memo[fieldName]) {
                throw new Error(`Two columns produce the same GraphQL field name '${fieldName}' on class '${table.namespaceName}.${table.name}'; one of them is '${attr.name}'`);
            }
            memo = build.extend(memo, {
                [fieldName]: context.fieldWithHooks(fieldName, {
                    description: attr.description,
                    type: build.getTypeByName('Upload'),
                }, { pgFieldIntrospection: attr, isPgUploadField: true }),
            }, `Adding field for ${build.describePgEntity(attr)}. You can rename this field with a 'Smart Comment':\n\n ${build.sqlCommentByAddingTags(attr, {
                name: 'newNameHere',
            })}`);
            return memo;
        }, {}), `Adding columns to '${build.describePgEntity(table)}'`);
    });
    // Wrap every root-mutation resolver so upload promises in the arguments
    // are resolved (and persisted via the configured resolver) before the
    // original mutation resolver runs.
    builder.hook('GraphQLObjectType:fields:field', (field, build, context) => {
        const { pgIntrospectionResultsByKind: introspectionResultsByKind, inflection, } = build;
        const { scope: { isRootMutation, fieldName, pgFieldIntrospection: table }, } = context;
        if (!isRootMutation || !table) {
            return field;
        }
        // It's possible that `resolve` isn't specified on a field, so in that case
        // we fall back to a default resolver.
        const defaultResolver = (obj) => obj[fieldName];
        // Extract the old resolver from `field`
        const { resolve: oldResolve = defaultResolver, ...rest } = field; // GraphQLFieldConfig
        // Per-upload-field metadata, keyed by the `<column>Upload` field name:
        // the column's smart-comment tags, its Postgres type name, and the
        // original column field name the resolved value is written back to.
        const tags = {};
        const types = {};
        const originals = {};
        const uploadResolversByFieldName = introspectionResultsByKind.attribute
            .filter((attr) => attr.classId === table.id)
            .reduce((memo, attr) => {
            // first, try to directly match the types here
            const typeMatched = relevantUploadType(attr);
            if (typeMatched) {
                const fieldName = inflection.column(attr);
                const uploadFieldName = inflection.uploadColumn(attr);
                memo[uploadFieldName] = typeMatched.resolve;
                tags[uploadFieldName] = attr.tags;
                types[uploadFieldName] = attr.type.name;
                originals[uploadFieldName] = fieldName;
            }
            return memo;
        }, {});
        return {
            // Copy over everything except 'resolve'
            ...rest,
            // Add our new resolver which wraps the old resolver
            async resolve(source, args, context, info) {
                // Recursively check for Upload promises to resolve.
                // The Upload scalar's parseValue returns the file promise, so any
                // Promise found under a known upload field name is a file upload.
                async function resolvePromises(obj) {
                    for (const key of Object.keys(obj)) {
                        if (obj[key] instanceof Promise) {
                            if (uploadResolversByFieldName[key]) {
                                const upload = await obj[key];
                                // eslint-disable-next-line require-atomic-updates
                                // Store the result under the ORIGINAL column's field name so the
                                // row insert/update picks it up (the upload key is left in place).
                                obj[originals[key]] = await uploadResolversByFieldName[key](upload, args, context, {
                                    ...info,
                                    uploadPlugin: { tags: tags[key], type: types[key] },
                                });
                            }
                        }
                        else if (obj[key] !== null && typeof obj[key] === 'object') {
                            await resolvePromises(obj[key]);
                        }
                    }
                }
                await resolvePromises(args);
                // Call the old resolver
                const oldResolveResult = await oldResolve(source, args, context, info);
                // Finally return the result.
                return oldResolveResult;
            },
        };
    });
};
exports.default = UploadPostGraphilePlugin;
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
/** Configuration for {@link Uploader}: target bucket plus AWS credentials. */
export interface UploaderOptions {
    /** S3 bucket objects are written to. */
    bucketName: string;
    awsRegion: string;
    awsSecretKey: string;
    awsAccessKey: string;
    /** Optional endpoint for MinIO / S3-compatible self-hosted storage. */
    minioEndpoint?: string;
}
/**
 * Streams GraphQL `Upload` values to S3/MinIO under a randomized key and
 * returns either stored-file metadata or the resulting URL (see implementation).
 */
export declare class Uploader {
    private opts;
    private streamerInstance;
    constructor(opts: UploaderOptions);
    /**
     * Resolver plugged into the upload plugin; `info.uploadPlugin` carries the
     * column's smart-comment tags and type name used for mime validation.
     */
    resolveUpload(upload: any, _args: any, _context: any, info: any): Promise<any>;
}
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
"use strict";
// tsc-emitted CommonJS interop helper: wraps a non-ES-module export in
// `{ default: mod }` so `.default` access matches `import x from '...'`.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Uploader = void 0;
// Streaming S3/MinIO upload client (constructed in Uploader's constructor).
const s3_streamer_1 = __importDefault(require("@launchql/s3-streamer"));
// presumably sanitizes/normalizes filenames for object keys — verify against
// @launchql/upload-names.
const upload_names_1 = __importDefault(require("@launchql/upload-names"));
|
|
9
|
+
/**
 * Streams GraphQL `Upload` files into S3/MinIO and validates their mime type
 * against the column's smart-comment tags before the mutation proceeds.
 */
class Uploader {
    opts;
    streamerInstance;
    /**
     * @param {object} opts - bucketName, awsRegion, awsSecretKey, awsAccessKey,
     *   and an optional minioEndpoint for S3-compatible storage.
     */
    constructor(opts) {
        this.opts = opts;
        this.streamerInstance = new s3_streamer_1.default({
            defaultBucket: opts.bucketName,
            awsRegion: opts.awsRegion,
            awsSecretKey: opts.awsSecretKey,
            awsAccessKey: opts.awsAccessKey,
            minioEndpoint: opts.minioEndpoint,
        });
    }
    /**
     * Stores one upload and returns either `{ filename, mime, url }` (for
     * `image`/`upload` columns) or just the URL (for `attachment` and anything
     * else). Throws `UPLOAD_MIMETYPE ...` when the detected content type is not
     * in the allowed list.
     *
     * @param upload - graphql-upload file (exposes createReadStream/filename).
     * @param info - GraphQL info augmented with `uploadPlugin: { tags, type }`.
     */
    async resolveUpload(upload, _args, _context, info) {
        const { uploadPlugin: { tags, type } } = info;
        const readStream = upload.createReadStream();
        const { filename } = upload;
        // Two short base36 slugs prefix the key so identical filenames never collide.
        const slug = () => Math.random().toString(36).substring(2, 7);
        const key = `${slug() + slug()}-${(0, upload_names_1.default)(filename)}`;
        const result = await this.streamerInstance.upload({
            readStream,
            filename,
            key,
            bucket: this.opts.bucketName
        });
        const url = result.upload.Location;
        // `charset` is extracted but unused; destructuring also asserts that the
        // streamer returned magic-byte detection results.
        const { contentType, magic: { charset } } = result;
        const effectiveType = type || tags.type;
        // Allowed mimes: explicit `@mime` tag wins; images get a default list;
        // everything else is unrestricted.
        let allowedMimes;
        if (tags.mime) {
            allowedMimes = tags.mime.trim().split(',').map((a) => a.trim());
        }
        else if (effectiveType === 'image') {
            allowedMimes = ['image/jpg', 'image/jpeg', 'image/png', 'image/svg+xml'];
        }
        else {
            allowedMimes = [];
        }
        if (allowedMimes.length && !allowedMimes.includes(contentType)) {
            throw new Error(`UPLOAD_MIMETYPE ${allowedMimes.join(',')}`);
        }
        // image/upload columns store structured metadata; others store the URL.
        if (effectiveType === 'image' || effectiveType === 'upload') {
            return {
                filename,
                mime: contentType,
                url
            };
        }
        return url;
    }
}
exports.Uploader = Uploader;
|