graphile-settings 2.6.11 → 2.6.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -4
- package/esm/index.js +5 -6
- package/index.js +42 -10
- package/package.json +15 -15
- package/esm/plugins/upload-postgraphile-plugin.js +0 -158
- package/esm/resolvers/upload.js +0 -54
- package/plugins/upload-postgraphile-plugin.d.ts +0 -31
- package/plugins/upload-postgraphile-plugin.js +0 -160
- package/resolvers/upload.d.ts +0 -14
- package/resolvers/upload.js +0 -61

package/README.md
CHANGED

@@ -9,7 +9,7 @@
     <img height="20" src="https://github.com/launchql/launchql/actions/workflows/run-tests.yaml/badge.svg" />
   </a>
   <a href="https://github.com/launchql/launchql/blob/main/LICENSE"><img height="20" src="https://img.shields.io/badge/license-MIT-blue.svg"/></a>
-  <a href="https://www.npmjs.com/package/graphile-settings"><img height="20" src="https://img.shields.io/github/package-json/v/launchql/launchql?filename=
+  <a href="https://www.npmjs.com/package/graphile-settings"><img height="20" src="https://img.shields.io/github/package-json/v/launchql/launchql?filename=graphile%2Fgraphile-settings%2Fpackage.json"/></a>
 </p>
 
 **`graphile-settings`** is a batteries-included configuration builder for [PostGraphile](https://www.graphile.org/postgraphile/), purpose-built for the [LaunchQL](https://github.com/launchql/launchql) ecosystem. It centralizes plugin setup, schema wiring, and feature flags into a single, composable interface — enabling consistent, high-performance GraphQL APIs across projects.
@@ -114,9 +114,9 @@ app.listen(settings.port);
 
 ## 🔌 Included Plugins
 
-* `
-*
-*
+* `graphile-plugin-connection-filter`
+* `graphile-plugin-fulltext-filter`
+* `graphile-postgis`
 * `postgraphile-plugin-connection-filter-postgis`
 * `postgraphile-derived-upload-field`
 * `graphile-simple-inflector`

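For context on how the settings built by this package are typically consumed (the README hunk above shows `app.listen(settings.port);` as surrounding context), here is a minimal sketch. The Express/PostGraphile wiring, the schema name, and the rawOpts shape passed to `getGraphileSettings` are illustrative assumptions, not taken verbatim from the README; only the `getGraphileSettings` export and `settings.port` are confirmed by this diff.

    import express from 'express';
    import { postgraphile } from 'postgraphile';
    import { getGraphileSettings } from 'graphile-settings';

    // Feature flags mirror the checks visible in index.js below
    // (features.postgis, features.simpleInflection); the exact shape is otherwise assumed.
    const settings = getGraphileSettings({
        features: { postgis: true, simpleInflection: true }
    });

    const app = express();
    app.use(postgraphile(process.env.DATABASE_URL, 'public', settings));
    app.listen(settings.port);
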
package/esm/index.js
CHANGED

@@ -1,20 +1,19 @@
 import PgManyToMany from '@graphile-contrib/pg-many-to-many';
 import { getEnvOptions } from '@launchql/env';
-import PgPostgis from '
-
-import FulltextFilterPlugin from '@pyramation/postgraphile-plugin-fulltext-filter';
+import PgPostgis from 'graphile-postgis';
+import FulltextFilterPlugin from 'graphile-plugin-fulltext-filter';
 import { NodePlugin } from 'graphile-build';
 import { additionalGraphQLContextFromRequest as langAdditional, LangPlugin } from 'graphile-i18n';
 import PgMetaschema from 'graphile-meta-schema';
-// @ts-ignore
 import PgSearch from 'graphile-search-plugin';
 import PgSimpleInflector from 'graphile-simple-inflector';
+// import ConnectionFilterPlugin from 'graphile-plugin-connection-filter';
+// @ts-ignore
 import ConnectionFilterPlugin from 'postgraphile-plugin-connection-filter';
 // @ts-ignore
 import PgPostgisFilter from 'postgraphile-plugin-connection-filter-postgis';
 import LqlTypesPlugin from './plugins/types';
-import UploadPostGraphilePlugin from '
-import { Uploader } from './resolvers/upload';
+import UploadPostGraphilePlugin, { Uploader } from 'graphile-upload-plugin';
 export const getGraphileSettings = (rawOpts) => {
     const opts = getEnvOptions(rawOpts);
     const { server, graphile, features, cdn } = opts;

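The vendored upload plugin and `Uploader` resolver that this entry point previously imported from `./plugins/upload-postgraphile-plugin` and `./resolvers/upload` now come from the extracted `graphile-upload-plugin` package. A minimal sketch of that consolidated import surface, mirroring the wiring visible in index.js below, assuming the extracted package preserves the `UploaderOptions` shape from the deleted `resolvers/upload.d.ts` (credential values are placeholders):

    import UploadPostGraphilePlugin, { Uploader } from 'graphile-upload-plugin';

    // Same constructor fields the Uploader declared before extraction.
    const uploader = new Uploader({
        bucketName: process.env.BUCKET_NAME,
        awsRegion: process.env.AWS_REGION,
        awsAccessKey: process.env.AWS_ACCESS_KEY,
        awsSecretKey: process.env.AWS_SECRET_KEY,
    });
    // Keep `this` bound to the instance, as getGraphileSettings does.
    const resolveUpload = uploader.resolveUpload.bind(uploader);

    export const uploadPlugins = [UploadPostGraphilePlugin];
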
package/index.js
CHANGED

@@ -1,4 +1,37 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+        desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
 var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
@@ -6,26 +39,25 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.getGraphileSettings = void 0;
 const pg_many_to_many_1 = __importDefault(require("@graphile-contrib/pg-many-to-many"));
 const env_1 = require("@launchql/env");
-const
-
-const postgraphile_plugin_fulltext_filter_1 = __importDefault(require("@pyramation/postgraphile-plugin-fulltext-filter"));
+const graphile_postgis_1 = __importDefault(require("graphile-postgis"));
+const graphile_plugin_fulltext_filter_1 = __importDefault(require("graphile-plugin-fulltext-filter"));
 const graphile_build_1 = require("graphile-build");
 const graphile_i18n_1 = require("graphile-i18n");
 const graphile_meta_schema_1 = __importDefault(require("graphile-meta-schema"));
-// @ts-ignore
 const graphile_search_plugin_1 = __importDefault(require("graphile-search-plugin"));
 const graphile_simple_inflector_1 = __importDefault(require("graphile-simple-inflector"));
+// import ConnectionFilterPlugin from 'graphile-plugin-connection-filter';
+// @ts-ignore
 const postgraphile_plugin_connection_filter_1 = __importDefault(require("postgraphile-plugin-connection-filter"));
 // @ts-ignore
 const postgraphile_plugin_connection_filter_postgis_1 = __importDefault(require("postgraphile-plugin-connection-filter-postgis"));
 const types_1 = __importDefault(require("./plugins/types"));
-const
-const upload_1 = require("./resolvers/upload");
+const graphile_upload_plugin_1 = __importStar(require("graphile-upload-plugin"));
 const getGraphileSettings = (rawOpts) => {
     const opts = (0, env_1.getEnvOptions)(rawOpts);
     const { server, graphile, features, cdn } = opts;
     // Instantiate uploader with merged cdn opts
-    const uploader = new
+    const uploader = new graphile_upload_plugin_1.Uploader({
         bucketName: cdn.bucketName,
         awsRegion: cdn.awsRegion,
         awsAccessKey: cdn.awsAccessKey,
@@ -35,15 +67,15 @@ const getGraphileSettings = (rawOpts) => {
     const resolveUpload = uploader.resolveUpload.bind(uploader);
     const plugins = [
         postgraphile_plugin_connection_filter_1.default,
-
+        graphile_plugin_fulltext_filter_1.default,
         types_1.default,
-
+        graphile_upload_plugin_1.default,
         graphile_meta_schema_1.default,
         pg_many_to_many_1.default,
         graphile_search_plugin_1.default,
     ];
     if (features?.postgis) {
-        plugins.push(
+        plugins.push(graphile_postgis_1.default, postgraphile_plugin_connection_filter_postgis_1.default);
     }
     if (features?.simpleInflection) {
         plugins.push(graphile_simple_inflector_1.default);

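The new `__importStar` helper appears here because the source now imports both a default and a named export from one module (`import UploadPostGraphilePlugin, { Uploader } from 'graphile-upload-plugin'` in the ESM build above), so the CommonJS output needs the namespace-style interop rather than `__importDefault` alone. A stand-alone illustration of the difference in resulting shapes (not code from this package):

    // A plain CommonJS module without an __esModule marker.
    const fakeCjsModule = { Uploader: class Uploader {}, otherExport: 42 };

    // __importDefault only wraps the module, so everything sits behind .default:
    const viaImportDefault = { default: fakeCjsModule };
    console.log(typeof viaImportDefault.default.Uploader); // 'function'

    // __importStar copies the named exports onto the namespace and also sets .default,
    // which is why the compiled code can use both graphile_upload_plugin_1.default
    // and graphile_upload_plugin_1.Uploader.
    const viaImportStar = { ...fakeCjsModule, default: fakeCjsModule };
    console.log(typeof viaImportStar.Uploader);            // 'function'
    console.log(viaImportStar.default === fakeCjsModule);  // true
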
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "graphile-settings",
-  "version": "2.6.
+  "version": "2.6.13",
   "author": "Dan Lynch <pyramation@gmail.com>",
   "description": "graphile settings",
   "main": "index.js",
@@ -30,20 +30,20 @@
   },
   "dependencies": {
     "@graphile-contrib/pg-many-to-many": "^1.0.2",
-    "@launchql/env": "^2.5.
-    "@launchql/
-    "@launchql/types": "^2.8.5",
-    "@launchql/upload-names": "^2.2.9",
-    "@pyramation/postgis": "^0.1.1",
-    "@pyramation/postgraphile-plugin-fulltext-filter": "^2.0.0",
+    "@launchql/env": "^2.5.6",
+    "@launchql/types": "^2.8.6",
     "cors": "^2.8.5",
     "express": "^5.1.0",
     "graphile-build": "^4.14.1",
-    "graphile-i18n": "^0.1.
-    "graphile-meta-schema": "^0.2.
-    "graphile-
-    "graphile-
-    "graphile-
+    "graphile-i18n": "^0.1.5",
+    "graphile-meta-schema": "^0.2.10",
+    "graphile-plugin-connection-filter": "^2.3.1",
+    "graphile-plugin-fulltext-filter": "^2.0.2",
+    "graphile-postgis": "^0.1.1",
+    "graphile-query": "^2.3.11",
+    "graphile-search-plugin": "^0.1.13",
+    "graphile-simple-inflector": "^0.1.8",
+    "graphile-upload-plugin": "^0.2.0",
     "graphql": "15.10.1",
     "graphql-tag": "2.12.6",
     "graphql-upload": "^13.0.0",
@@ -51,7 +51,7 @@
     "pg": "^8.16.0",
     "postgraphile": "^4.14.1",
     "postgraphile-derived-upload-field": "^0.0.6",
-    "postgraphile-plugin-connection-filter": "^2.
+    "postgraphile-plugin-connection-filter": "^2.0.0",
     "postgraphile-plugin-connection-filter-postgis": "^1.0.0-alpha.6",
     "request-ip": "^3.3.0"
   },
@@ -60,7 +60,7 @@
     "@types/express": "^5.0.1",
     "@types/pg": "^8.15.2",
     "@types/request-ip": "^0.0.41",
-    "makage": "^0.1.
+    "makage": "^0.1.8",
     "nodemon": "^3.1.10",
     "ts-node": "^10.9.2"
   },
@@ -71,5 +71,5 @@
     "launchql",
     "graphql"
   ],
-  "gitHead": "
+  "gitHead": "3812f24a480b2035b3413ec7fecfe492f294e590"
 }

package/esm/plugins/upload-postgraphile-plugin.js
DELETED

@@ -1,158 +0,0 @@
-// PostGraphile plugin
-const UploadPostGraphilePlugin = (builder, opts = {}) => {
-    const { uploadFieldDefinitions = [] } = opts;
-    // Determine whether a table attribute should be treated as an Upload according to configuration
-    const relevantUploadType = (attr) => {
-        const types = uploadFieldDefinitions.filter(({ name, namespaceName, tag }) => (name &&
-            namespaceName &&
-            attr.type?.name === name &&
-            attr.type?.namespaceName === namespaceName) ||
-            (tag && attr.tags?.[tag]));
-        if (types.length === 1) {
-            return types[0];
-        }
-        else if (types.length > 1) {
-            throw new Error('Upload field definitions are ambiguous');
-        }
-        return undefined;
-    };
-    builder.hook('build', (input, build) => {
-        const { addType, graphql: { GraphQLScalarType, GraphQLError }, } = build;
-        const GraphQLUpload = new GraphQLScalarType({
-            name: 'Upload',
-            description: 'The `Upload` scalar type represents a file upload.',
-            parseValue(value) {
-                // The value should be an object with a `.promise` that resolves to the file upload
-                const maybe = value;
-                if (maybe &&
-                    maybe.promise &&
-                    typeof maybe.promise.then === 'function') {
-                    return maybe.promise;
-                }
-                throw new GraphQLError('Upload value invalid.');
-            },
-            parseLiteral(ast) {
-                throw new GraphQLError('Upload literal unsupported.', ast);
-            },
-            serialize() {
-                throw new GraphQLError('Upload serialization unsupported.');
-            },
-        });
-        addType(GraphQLUpload);
-        // Override the internal types for configured upload-backed columns
-        uploadFieldDefinitions.forEach(({ name, namespaceName, type }) => {
-            if (!name || !type || !namespaceName)
-                return; // tag-based or incomplete definitions
-            const theType = build.pgIntrospectionResultsByKind.type.find((typ) => typ.name === name && typ.namespaceName === namespaceName);
-            if (theType) {
-                build.pgRegisterGqlTypeByTypeId(theType.id, () => build.getTypeByName(type));
-            }
-        });
-        return input;
-    });
-    builder.hook('inflection', (inflection, build) => {
-        return build.extend(inflection, {
-            // NO ARROW FUNCTIONS HERE (this)
-            uploadColumn(attr) {
-                return this.column(attr) + 'Upload';
-            },
-        });
-    });
-    // Add Upload input fields alongside matching columns
-    builder.hook('GraphQLInputObjectType:fields', (fields, build, context) => {
-        const { scope: { isPgRowType, pgIntrospection: table }, } = context;
-        if (!isPgRowType || !table || table.kind !== 'class') {
-            return fields;
-        }
-        return build.extend(fields, table.attributes.reduce((memo, attr) => {
-            if (!build.pgColumnFilter(attr, build, context))
-                return memo;
-            const action = context.scope.isPgBaseInput
-                ? 'base'
-                : context.scope.isPgPatch
-                    ? 'update'
-                    : 'create';
-            if (build.pgOmit(attr, action))
-                return memo;
-            if (attr.identity === 'a')
-                return memo;
-            if (!relevantUploadType(attr)) {
-                return memo;
-            }
-            const fieldName = build.inflection.uploadColumn(attr);
-            if (memo[fieldName]) {
-                throw new Error(`Two columns produce the same GraphQL field name '${fieldName}' on class '${table.namespaceName}.${table.name}'; one of them is '${attr.name}'`);
-            }
-            memo = build.extend(memo, {
-                [fieldName]: context.fieldWithHooks(fieldName, {
-                    description: attr.description,
-                    type: build.getTypeByName('Upload'),
-                }, { pgFieldIntrospection: attr, isPgUploadField: true }),
-            }, `Adding field for ${build.describePgEntity(attr)}. You can rename this field with a 'Smart Comment':\n\n  ${build.sqlCommentByAddingTags(attr, {
-                name: 'newNameHere',
-            })}`);
-            return memo;
-        }, {}), `Adding columns to '${build.describePgEntity(table)}'`);
-    });
-    builder.hook('GraphQLObjectType:fields:field', (field, build, context) => {
-        const { pgIntrospectionResultsByKind: introspectionResultsByKind, inflection, } = build;
-        const { scope: { isRootMutation, fieldName, pgFieldIntrospection: table }, } = context;
-        if (!isRootMutation || !table) {
-            return field;
-        }
-        // It's possible that `resolve` isn't specified on a field, so in that case
-        // we fall back to a default resolver.
-        const defaultResolver = (obj) => obj[fieldName];
-        // Extract the old resolver from `field`
-        const { resolve: oldResolve = defaultResolver, ...rest } = field; // GraphQLFieldConfig
-        const tags = {};
-        const types = {};
-        const originals = {};
-        const uploadResolversByFieldName = introspectionResultsByKind.attribute
-            .filter((attr) => attr.classId === table.id)
-            .reduce((memo, attr) => {
-            // first, try to directly match the types here
-            const typeMatched = relevantUploadType(attr);
-            if (typeMatched) {
-                const fieldName = inflection.column(attr);
-                const uploadFieldName = inflection.uploadColumn(attr);
-                memo[uploadFieldName] = typeMatched.resolve;
-                tags[uploadFieldName] = attr.tags;
-                types[uploadFieldName] = attr.type.name;
-                originals[uploadFieldName] = fieldName;
-            }
-            return memo;
-        }, {});
-        return {
-            // Copy over everything except 'resolve'
-            ...rest,
-            // Add our new resolver which wraps the old resolver
-            async resolve(source, args, context, info) {
-                // Recursively check for Upload promises to resolve
-                async function resolvePromises(obj) {
-                    for (const key of Object.keys(obj)) {
-                        if (obj[key] instanceof Promise) {
-                            if (uploadResolversByFieldName[key]) {
-                                const upload = await obj[key];
-                                // eslint-disable-next-line require-atomic-updates
-                                obj[originals[key]] = await uploadResolversByFieldName[key](upload, args, context, {
-                                    ...info,
-                                    uploadPlugin: { tags: tags[key], type: types[key] },
-                                });
-                            }
-                        }
-                        else if (obj[key] !== null && typeof obj[key] === 'object') {
-                            await resolvePromises(obj[key]);
-                        }
-                    }
-                }
-                await resolvePromises(args);
-                // Call the old resolver
-                const oldResolveResult = await oldResolve(source, args, context, info);
-                // Finally return the result.
-                return oldResolveResult;
-            },
-        };
-    });
-};
-export default UploadPostGraphilePlugin;

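The deleted plugin matched table columns against `opts.uploadFieldDefinitions` either by Postgres type (`name` plus `namespaceName`) or by a smart-comment `tag`, then exposed an `Upload`-typed input field for each match and called the definition's `resolve` while processing mutation arguments. A sketch of the definition shape it accepted, inferred from `relevantUploadType` above and the deleted `.d.ts` below; the type names, tag, and resolver bodies are illustrative only:

    const uploadFieldDefinitions = [
        {
            // Match columns whose Postgres type is public.attachment (illustrative).
            name: 'attachment',
            namespaceName: 'public',
            type: 'String', // GraphQL type the matched column is registered as
            resolve: async (upload, args, context, info) => {
                // `upload` is the awaited graphql-upload file; info.uploadPlugin
                // carries the column's smart-comment tags and resolved type.
                return `https://cdn.example.com/${upload.filename}`;
            },
        },
        {
            // Or match any column carrying an @upload smart comment.
            tag: 'upload',
            resolve: async (upload) => upload.filename,
        },
    ];
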
package/esm/resolvers/upload.js
DELETED

@@ -1,54 +0,0 @@
-import streamer from '@launchql/s3-streamer';
-import uploadNames from '@launchql/upload-names';
-export class Uploader {
-    opts;
-    streamerInstance;
-    constructor(opts) {
-        this.opts = opts;
-        const { bucketName, awsRegion, awsSecretKey, awsAccessKey, minioEndpoint } = this.opts;
-        this.streamerInstance = new streamer({
-            defaultBucket: bucketName,
-            awsRegion,
-            awsSecretKey,
-            awsAccessKey,
-            minioEndpoint,
-        });
-    }
-    async resolveUpload(upload, _args, _context, info) {
-        const { uploadPlugin: { tags, type } } = info;
-        const readStream = upload.createReadStream();
-        const { filename } = upload;
-        const rand = Math.random().toString(36).substring(2, 7) +
-            Math.random().toString(36).substring(2, 7);
-        const key = `${rand}-${uploadNames(filename)}`;
-        const result = await this.streamerInstance.upload({
-            readStream,
-            filename,
-            key,
-            bucket: this.opts.bucketName
-        });
-        const url = result.upload.Location;
-        const { contentType, magic: { charset } } = result;
-        const typ = type || tags.type;
-        const allowedMimes = tags.mime
-            ? tags.mime.trim().split(',').map((a) => a.trim())
-            : typ === 'image'
-                ? ['image/jpg', 'image/jpeg', 'image/png', 'image/svg+xml']
-                : [];
-        if (allowedMimes.length && !allowedMimes.includes(contentType)) {
-            throw new Error(`UPLOAD_MIMETYPE ${allowedMimes.join(',')}`);
-        }
-        switch (typ) {
-            case 'image':
-            case 'upload':
-                return {
-                    filename,
-                    mime: contentType,
-                    url
-                };
-            case 'attachment':
-            default:
-                return url;
-        }
-    }
-}

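One detail worth calling out in `resolveUpload` above: the allowed MIME list comes from the column's `mime` tag when present, and otherwise falls back to a fixed image allowlist only when the resolved type is `image`. Restated as a stand-alone function for clarity (not exported by the package):

    function allowedMimesFor(tags, typ) {
        return tags.mime
            ? tags.mime.trim().split(',').map((a) => a.trim())
            : typ === 'image'
                ? ['image/jpg', 'image/jpeg', 'image/png', 'image/svg+xml']
                : [];
    }

    console.log(allowedMimesFor({}, 'image'));
    // [ 'image/jpg', 'image/jpeg', 'image/png', 'image/svg+xml' ]
    console.log(allowedMimesFor({ mime: 'application/pdf, image/png' }, 'attachment'));
    // [ 'application/pdf', 'image/png' ]
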
package/plugins/upload-postgraphile-plugin.d.ts
DELETED

@@ -1,31 +0,0 @@
-import type { ReadStream } from 'fs';
-import type { Plugin } from 'graphile-build';
-import type { GraphQLResolveInfo } from 'graphql';
-export interface FileUpload {
-    filename: string;
-    mimetype?: string;
-    encoding?: string;
-    createReadStream: () => ReadStream;
-}
-export interface UploadPluginInfo {
-    tags: Record<string, any>;
-    type?: string;
-}
-export type UploadResolver = (upload: FileUpload, args: any, context: any, info: GraphQLResolveInfo & {
-    uploadPlugin: UploadPluginInfo;
-}) => Promise<any>;
-export type UploadFieldDefinition = {
-    name: string;
-    namespaceName: string;
-    type: string;
-    resolve: UploadResolver;
-    tag?: never;
-} | {
-    tag: string;
-    resolve: UploadResolver;
-    name?: never;
-    namespaceName?: never;
-    type?: string;
-};
-declare const UploadPostGraphilePlugin: Plugin;
-export default UploadPostGraphilePlugin;

package/plugins/upload-postgraphile-plugin.js
DELETED

@@ -1,160 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-// PostGraphile plugin
-const UploadPostGraphilePlugin = (builder, opts = {}) => {
-    const { uploadFieldDefinitions = [] } = opts;
-    // Determine whether a table attribute should be treated as an Upload according to configuration
-    const relevantUploadType = (attr) => {
-        const types = uploadFieldDefinitions.filter(({ name, namespaceName, tag }) => (name &&
-            namespaceName &&
-            attr.type?.name === name &&
-            attr.type?.namespaceName === namespaceName) ||
-            (tag && attr.tags?.[tag]));
-        if (types.length === 1) {
-            return types[0];
-        }
-        else if (types.length > 1) {
-            throw new Error('Upload field definitions are ambiguous');
-        }
-        return undefined;
-    };
-    builder.hook('build', (input, build) => {
-        const { addType, graphql: { GraphQLScalarType, GraphQLError }, } = build;
-        const GraphQLUpload = new GraphQLScalarType({
-            name: 'Upload',
-            description: 'The `Upload` scalar type represents a file upload.',
-            parseValue(value) {
-                // The value should be an object with a `.promise` that resolves to the file upload
-                const maybe = value;
-                if (maybe &&
-                    maybe.promise &&
-                    typeof maybe.promise.then === 'function') {
-                    return maybe.promise;
-                }
-                throw new GraphQLError('Upload value invalid.');
-            },
-            parseLiteral(ast) {
-                throw new GraphQLError('Upload literal unsupported.', ast);
-            },
-            serialize() {
-                throw new GraphQLError('Upload serialization unsupported.');
-            },
-        });
-        addType(GraphQLUpload);
-        // Override the internal types for configured upload-backed columns
-        uploadFieldDefinitions.forEach(({ name, namespaceName, type }) => {
-            if (!name || !type || !namespaceName)
-                return; // tag-based or incomplete definitions
-            const theType = build.pgIntrospectionResultsByKind.type.find((typ) => typ.name === name && typ.namespaceName === namespaceName);
-            if (theType) {
-                build.pgRegisterGqlTypeByTypeId(theType.id, () => build.getTypeByName(type));
-            }
-        });
-        return input;
-    });
-    builder.hook('inflection', (inflection, build) => {
-        return build.extend(inflection, {
-            // NO ARROW FUNCTIONS HERE (this)
-            uploadColumn(attr) {
-                return this.column(attr) + 'Upload';
-            },
-        });
-    });
-    // Add Upload input fields alongside matching columns
-    builder.hook('GraphQLInputObjectType:fields', (fields, build, context) => {
-        const { scope: { isPgRowType, pgIntrospection: table }, } = context;
-        if (!isPgRowType || !table || table.kind !== 'class') {
-            return fields;
-        }
-        return build.extend(fields, table.attributes.reduce((memo, attr) => {
-            if (!build.pgColumnFilter(attr, build, context))
-                return memo;
-            const action = context.scope.isPgBaseInput
-                ? 'base'
-                : context.scope.isPgPatch
-                    ? 'update'
-                    : 'create';
-            if (build.pgOmit(attr, action))
-                return memo;
-            if (attr.identity === 'a')
-                return memo;
-            if (!relevantUploadType(attr)) {
-                return memo;
-            }
-            const fieldName = build.inflection.uploadColumn(attr);
-            if (memo[fieldName]) {
-                throw new Error(`Two columns produce the same GraphQL field name '${fieldName}' on class '${table.namespaceName}.${table.name}'; one of them is '${attr.name}'`);
-            }
-            memo = build.extend(memo, {
-                [fieldName]: context.fieldWithHooks(fieldName, {
-                    description: attr.description,
-                    type: build.getTypeByName('Upload'),
-                }, { pgFieldIntrospection: attr, isPgUploadField: true }),
-            }, `Adding field for ${build.describePgEntity(attr)}. You can rename this field with a 'Smart Comment':\n\n  ${build.sqlCommentByAddingTags(attr, {
-                name: 'newNameHere',
-            })}`);
-            return memo;
-        }, {}), `Adding columns to '${build.describePgEntity(table)}'`);
-    });
-    builder.hook('GraphQLObjectType:fields:field', (field, build, context) => {
-        const { pgIntrospectionResultsByKind: introspectionResultsByKind, inflection, } = build;
-        const { scope: { isRootMutation, fieldName, pgFieldIntrospection: table }, } = context;
-        if (!isRootMutation || !table) {
-            return field;
-        }
-        // It's possible that `resolve` isn't specified on a field, so in that case
-        // we fall back to a default resolver.
-        const defaultResolver = (obj) => obj[fieldName];
-        // Extract the old resolver from `field`
-        const { resolve: oldResolve = defaultResolver, ...rest } = field; // GraphQLFieldConfig
-        const tags = {};
-        const types = {};
-        const originals = {};
-        const uploadResolversByFieldName = introspectionResultsByKind.attribute
-            .filter((attr) => attr.classId === table.id)
-            .reduce((memo, attr) => {
-            // first, try to directly match the types here
-            const typeMatched = relevantUploadType(attr);
-            if (typeMatched) {
-                const fieldName = inflection.column(attr);
-                const uploadFieldName = inflection.uploadColumn(attr);
-                memo[uploadFieldName] = typeMatched.resolve;
-                tags[uploadFieldName] = attr.tags;
-                types[uploadFieldName] = attr.type.name;
-                originals[uploadFieldName] = fieldName;
-            }
-            return memo;
-        }, {});
-        return {
-            // Copy over everything except 'resolve'
-            ...rest,
-            // Add our new resolver which wraps the old resolver
-            async resolve(source, args, context, info) {
-                // Recursively check for Upload promises to resolve
-                async function resolvePromises(obj) {
-                    for (const key of Object.keys(obj)) {
-                        if (obj[key] instanceof Promise) {
-                            if (uploadResolversByFieldName[key]) {
-                                const upload = await obj[key];
-                                // eslint-disable-next-line require-atomic-updates
-                                obj[originals[key]] = await uploadResolversByFieldName[key](upload, args, context, {
-                                    ...info,
-                                    uploadPlugin: { tags: tags[key], type: types[key] },
-                                });
-                            }
-                        }
-                        else if (obj[key] !== null && typeof obj[key] === 'object') {
-                            await resolvePromises(obj[key]);
-                        }
-                    }
-                }
-                await resolvePromises(args);
-                // Call the old resolver
-                const oldResolveResult = await oldResolve(source, args, context, info);
-                // Finally return the result.
-                return oldResolveResult;
-            },
-        };
-    });
-};
-exports.default = UploadPostGraphilePlugin;

package/resolvers/upload.d.ts
DELETED

@@ -1,14 +0,0 @@
-interface UploaderOptions {
-    bucketName: string;
-    awsRegion: string;
-    awsSecretKey: string;
-    awsAccessKey: string;
-    minioEndpoint?: string;
-}
-export declare class Uploader {
-    private opts;
-    private streamerInstance;
-    constructor(opts: UploaderOptions);
-    resolveUpload(upload: any, _args: any, _context: any, info: any): Promise<any>;
-}
-export {};

package/resolvers/upload.js
DELETED

@@ -1,61 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Uploader = void 0;
-const s3_streamer_1 = __importDefault(require("@launchql/s3-streamer"));
-const upload_names_1 = __importDefault(require("@launchql/upload-names"));
-class Uploader {
-    opts;
-    streamerInstance;
-    constructor(opts) {
-        this.opts = opts;
-        const { bucketName, awsRegion, awsSecretKey, awsAccessKey, minioEndpoint } = this.opts;
-        this.streamerInstance = new s3_streamer_1.default({
-            defaultBucket: bucketName,
-            awsRegion,
-            awsSecretKey,
-            awsAccessKey,
-            minioEndpoint,
-        });
-    }
-    async resolveUpload(upload, _args, _context, info) {
-        const { uploadPlugin: { tags, type } } = info;
-        const readStream = upload.createReadStream();
-        const { filename } = upload;
-        const rand = Math.random().toString(36).substring(2, 7) +
-            Math.random().toString(36).substring(2, 7);
-        const key = `${rand}-${(0, upload_names_1.default)(filename)}`;
-        const result = await this.streamerInstance.upload({
-            readStream,
-            filename,
-            key,
-            bucket: this.opts.bucketName
-        });
-        const url = result.upload.Location;
-        const { contentType, magic: { charset } } = result;
-        const typ = type || tags.type;
-        const allowedMimes = tags.mime
-            ? tags.mime.trim().split(',').map((a) => a.trim())
-            : typ === 'image'
-                ? ['image/jpg', 'image/jpeg', 'image/png', 'image/svg+xml']
-                : [];
-        if (allowedMimes.length && !allowedMimes.includes(contentType)) {
-            throw new Error(`UPLOAD_MIMETYPE ${allowedMimes.join(',')}`);
-        }
-        switch (typ) {
-            case 'image':
-            case 'upload':
-                return {
-                    filename,
-                    mime: contentType,
-                    url
-                };
-            case 'attachment':
-            default:
-                return url;
-        }
-    }
-}
-exports.Uploader = Uploader;

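For code that had been deep-importing these files from `graphile-settings` directly, the equivalents now come from `graphile-upload-plugin`. The old paths below assume the 2.6.11 file layout shown in this diff and that deep imports were not blocked by an exports map; the new form matches how the compiled index.js consumes the extracted package:

    // 2.6.11 (files removed in this release):
    //   const { Uploader } = require('graphile-settings/resolvers/upload');
    //   const UploadPlugin = require('graphile-settings/plugins/upload-postgraphile-plugin').default;
    // 2.6.13 equivalent:
    const { Uploader, default: UploadPlugin } = require('graphile-upload-plugin');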