@kiyasov/platform-hono 1.0.8 → 1.0.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. package/.yarn/install-state.gz +0 -0
  2. package/Readme.md +0 -7
  3. package/dist/cjs/src/drivers/graphQLUpload/GraphQLUpload.d.ts +3 -0
  4. package/dist/cjs/src/drivers/graphQLUpload/GraphQLUpload.js +21 -0
  5. package/dist/cjs/src/drivers/graphQLUpload/GraphQLUpload.js.map +1 -0
  6. package/dist/cjs/src/drivers/graphQLUpload/Upload.d.ts +16 -0
  7. package/dist/cjs/src/drivers/graphQLUpload/Upload.js +17 -0
  8. package/dist/cjs/src/drivers/graphQLUpload/Upload.js.map +1 -0
  9. package/dist/cjs/src/drivers/graphQLUpload/fs-capacitor.d.ts +44 -0
  10. package/dist/cjs/src/drivers/graphQLUpload/fs-capacitor.js +190 -0
  11. package/dist/cjs/src/drivers/graphQLUpload/fs-capacitor.js.map +1 -0
  12. package/dist/cjs/src/drivers/graphQLUpload/index.d.ts +4 -0
  13. package/dist/cjs/src/drivers/graphQLUpload/index.js +21 -0
  14. package/dist/cjs/src/drivers/graphQLUpload/index.js.map +1 -0
  15. package/dist/cjs/src/drivers/graphQLUpload/processRequest.d.ts +2 -0
  16. package/dist/cjs/src/drivers/graphQLUpload/processRequest.js +45 -0
  17. package/dist/cjs/src/drivers/graphQLUpload/processRequest.js.map +1 -0
  18. package/dist/cjs/src/drivers/graphql.driver.d.ts +7 -4
  19. package/dist/cjs/src/drivers/graphql.driver.js +23 -11
  20. package/dist/cjs/src/drivers/graphql.driver.js.map +1 -1
  21. package/dist/cjs/src/drivers/index.d.ts +1 -0
  22. package/dist/cjs/src/drivers/index.js +1 -0
  23. package/dist/cjs/src/drivers/index.js.map +1 -1
  24. package/dist/cjs/src/drivers/services/processRequest.d.ts +2 -0
  25. package/dist/cjs/src/drivers/services/processRequest.js +45 -0
  26. package/dist/cjs/src/drivers/services/processRequest.js.map +1 -0
  27. package/dist/cjs/tsconfig.cjs.tsbuildinfo +1 -1
  28. package/dist/esm/src/drivers/graphQLUpload/GraphQLUpload.d.ts +3 -0
  29. package/dist/esm/src/drivers/graphQLUpload/GraphQLUpload.js +18 -0
  30. package/dist/esm/src/drivers/graphQLUpload/GraphQLUpload.js.map +1 -0
  31. package/dist/esm/src/drivers/graphQLUpload/Upload.d.ts +16 -0
  32. package/dist/esm/src/drivers/graphQLUpload/Upload.js +13 -0
  33. package/dist/esm/src/drivers/graphQLUpload/Upload.js.map +1 -0
  34. package/dist/esm/src/drivers/graphQLUpload/fs-capacitor.d.ts +44 -0
  35. package/dist/esm/src/drivers/graphQLUpload/fs-capacitor.js +183 -0
  36. package/dist/esm/src/drivers/graphQLUpload/fs-capacitor.js.map +1 -0
  37. package/dist/esm/src/drivers/graphQLUpload/index.d.ts +4 -0
  38. package/dist/esm/src/drivers/graphQLUpload/index.js +5 -0
  39. package/dist/esm/src/drivers/graphQLUpload/index.js.map +1 -0
  40. package/dist/esm/src/drivers/graphQLUpload/processRequest.d.ts +2 -0
  41. package/dist/esm/src/drivers/graphQLUpload/processRequest.js +41 -0
  42. package/dist/esm/src/drivers/graphQLUpload/processRequest.js.map +1 -0
  43. package/dist/esm/src/drivers/graphql.driver.d.ts +7 -4
  44. package/dist/esm/src/drivers/graphql.driver.js +28 -16
  45. package/dist/esm/src/drivers/graphql.driver.js.map +1 -1
  46. package/dist/esm/src/drivers/index.d.ts +1 -0
  47. package/dist/esm/src/drivers/index.js +1 -0
  48. package/dist/esm/src/drivers/index.js.map +1 -1
  49. package/dist/esm/src/drivers/services/processRequest.d.ts +2 -0
  50. package/dist/esm/src/drivers/services/processRequest.js +41 -0
  51. package/dist/esm/src/drivers/services/processRequest.js.map +1 -0
  52. package/dist/esm/tsconfig.esm.tsbuildinfo +1 -1
  53. package/package.json +1 -1
  54. package/src/drivers/graphQLUpload/GraphQLUpload.ts +86 -0
  55. package/src/drivers/graphQLUpload/Upload.ts +35 -0
  56. package/src/drivers/graphQLUpload/fs-capacitor.ts +285 -0
  57. package/src/drivers/graphQLUpload/index.ts +4 -0
  58. package/src/drivers/graphQLUpload/processRequest.ts +50 -0
  59. package/src/drivers/graphql.driver.ts +38 -23
  60. package/src/drivers/index.ts +1 -0
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@kiyasov/platform-hono",
3
- "version": "1.0.8",
3
+ "version": "1.0.9",
4
4
  "description": "Nest adapter for Hono",
5
5
  "author": "Islam Kiiasov",
6
6
  "license": "MIT",
@@ -0,0 +1,86 @@
1
+ import { Upload } from "./Upload";
2
+ import { ASTNode, GraphQLError, GraphQLScalarType } from "graphql";
3
+
4
+ /**
5
+ * A GraphQL `Upload` scalar that can be used in a
6
+ * [`GraphQLSchema`](https://graphql.org/graphql-js/type/#graphqlschema).
7
+ * It's value in resolvers is a promise that resolves
8
+ * [file upload details]{@link FileUpload} for processing and storage.
9
+ * @example <caption>Ways to `import`.</caption>
10
+ * ```js
11
+ * import { GraphQLUpload } from 'graphql-upload-ts';
12
+ * ```
13
+ *
14
+ * ```js
15
+ * import GraphQLUpload from 'graphql-upload-ts/dist/GraphQLUpload.js';
16
+ * ```
17
+ * @example <caption>Ways to `require`.</caption>
18
+ * ```js
19
+ * const { GraphQLUpload } = require('graphql-upload-ts');
20
+ * ```
21
+ *
22
+ * ```js
23
+ * const GraphQLUpload = require('graphql-upload-ts/dist/GraphQLUpload');
24
+ * ```
25
+ * @example <caption>Setup for a schema built with [`makeExecutableSchema`](https://apollographql.com/docs/graphql-tools/generate-schema#makeExecutableSchema).</caption>
26
+ * ```js
27
+ * const { makeExecutableSchema } = require('graphql-tools');
28
+ * const { GraphQLUpload } = require('graphql-upload-ts');
29
+ *
30
+ * const schema = makeExecutableSchema({
31
+ * typeDefs: /* GraphQL *\/ `
32
+ * scalar Upload
33
+ * `,
34
+ * resolvers: {
35
+ * Upload: GraphQLUpload,
36
+ * },
37
+ * });
38
+ * ```
39
+ * @example <caption>A manually constructed schema with an image upload mutation.</caption>
40
+ * ```js
41
+ * const {
42
+ * GraphQLSchema,
43
+ * GraphQLObjectType,
44
+ * GraphQLBoolean,
45
+ * } = require('graphql');
46
+ * const { GraphQLUpload } = require('graphql-upload-ts');
47
+ *
48
+ * const schema = new GraphQLSchema({
49
+ * mutation: new GraphQLObjectType({
50
+ * name: 'Mutation',
51
+ * fields: {
52
+ * uploadImage: {
53
+ * description: 'Uploads an image.',
54
+ * type: GraphQLBoolean,
55
+ * args: {
56
+ * image: {
57
+ * description: 'Image file.',
58
+ * type: GraphQLUpload,
59
+ * },
60
+ * },
61
+ * async resolve(parent, { image }) {
62
+ * const { filename, fieldName, mimetype, createReadStream } = await image;
63
+ * const stream = createReadStream();
64
+ * // Promisify the stream and store the file, then…
65
+ * return true;
66
+ * },
67
+ * },
68
+ * },
69
+ * }),
70
+ * });
71
+ * ```
72
+ */
73
+ export const GraphQLUpload = new GraphQLScalarType({
74
+ name: "Upload",
75
+ description: "The `Upload` scalar type represents a file upload.",
76
+ parseValue(value: { promise: Promise<Upload> }) {
77
+ if (value instanceof Upload) return value.promise;
78
+ throw new GraphQLError("Upload value invalid.");
79
+ },
80
+ parseLiteral(node: ASTNode | ASTNode[]) {
81
+ throw new GraphQLError("Upload literal unsupported.", { nodes: node });
82
+ },
83
+ serialize() {
84
+ throw new GraphQLError("Upload serialization unsupported.");
85
+ },
86
+ });
@@ -0,0 +1,35 @@
1
+ import { ReadStream, ReadStreamOptions, WriteStream } from "./fs-capacitor";
2
+
3
+ export interface FileUpload {
4
+ filename: string;
5
+ fieldName: string;
6
+ mimetype: string;
7
+ encoding: string;
8
+
9
+ createReadStream(options?: ReadStreamOptions): ReadStream;
10
+
11
+ capacitor: WriteStream;
12
+ }
13
+
14
+ export class Upload {
15
+ promise: Promise<FileUpload>;
16
+ resolve: (file?: FileUpload) => void;
17
+ reject: (error?: Error | string) => void;
18
+ file?: FileUpload;
19
+
20
+ constructor() {
21
+ this.promise = new Promise((resolve, reject) => {
22
+ this.resolve = (file) => {
23
+ this.file = file;
24
+
25
+ resolve(file);
26
+ };
27
+
28
+ this.reject = reject;
29
+ });
30
+
31
+ // Prevent errors crashing Node.js, see:
32
+ // https://github.com/nodejs/node/issues/20392
33
+ this.promise.catch(() => {});
34
+ }
35
+ }
@@ -0,0 +1,285 @@
import { randomBytes } from "crypto";
import { read, open, closeSync, unlinkSync, write, close, unlink } from "fs";
import { tmpdir } from "os";
import { join } from "path";
import { Readable, ReadableOptions, Writable, WritableOptions } from "stream";
import { EventEmitter } from "events";

/** Thrown when `createReadStream()` is called on a destroyed WriteStream. */
export class ReadAfterDestroyedError extends Error {}
/** Thrown when `createReadStream()` is called on a released WriteStream. */
export class ReadAfterReleasedError extends Error {}

/** Options forwarded to the underlying `Readable` constructor. */
export interface ReadStreamOptions {
  highWaterMark?: ReadableOptions["highWaterMark"];
  encoding?: ReadableOptions["encoding"];
}

// Use a “proxy” event emitter configured to have an infinite maximum number of
// listeners to prevent Node.js max listeners exceeded warnings if many
// `fs-capacitor` `ReadStream` instances are created at the same time. See:
// https://github.com/mike-marcacci/fs-capacitor/issues/30
const processExitProxy = new EventEmitter();
processExitProxy.setMaxListeners(Infinity);
process.once("exit", () => processExitProxy.emit("exit"));
/**
 * A read stream over the temp file that a {@link WriteStream} ("capacitor")
 * is buffering to. Multiple ReadStreams may read the same capacitor
 * concurrently, each tracking its own byte offset; reads that outrun the
 * writer wait for `write`/`finish` events and retry.
 */
export class ReadStream extends Readable {
  // Byte offset of the next read within the capacitor's temp file.
  private _pos: number = 0;
  private _writeStream: WriteStream;

  constructor(writeStream: WriteStream, options?: ReadStreamOptions) {
    super({
      highWaterMark: options?.highWaterMark,
      encoding: options?.encoding,
      autoDestroy: true,
    });
    this._writeStream = writeStream;
  }

  /**
   * Readable._read implementation: reads up to `n` bytes from the
   * capacitor's file descriptor at this stream's own offset.
   */
  _read(n: number): void {
    if (this.destroyed) return;

    // The capacitor opens its temp file asynchronously; wait for "ready"
    // if the fd is not available yet.
    if (typeof this._writeStream["_fd"] !== "number") {
      this._writeStream.once("ready", () => this._read(n));
      return;
    }

    // Using `allocUnsafe` here is OK because we return a slice the length of
    // `bytesRead`, and discard the rest. This prevents node from having to zero
    // out the entire allocation first.
    const buf = Buffer.allocUnsafe(n);
    read(this._writeStream["_fd"], buf, 0, n, this._pos, (error, bytesRead) => {
      // NOTE(review): no early return after destroy(error); this relies on
      // `bytesRead` being falsy when `error` is set — confirm against the
      // upstream fs-capacitor implementation.
      if (error) this.destroy(error);

      // Push any read bytes into the local stream buffer.
      if (bytesRead) {
        this._pos += bytesRead;
        this.push(buf.slice(0, bytesRead));
        return;
      }

      // If there were no more bytes to read and the write stream is finished,
      // then this stream has reached the end.
      if (
        (
          this._writeStream as any as {
            _writableState: { finished: boolean };
          }
        )._writableState.finished
      ) {
        // Check if we have consumed the whole file up to where
        // the write stream has written before ending the stream
        if (this._pos < (this._writeStream as any as { _pos: number })._pos)
          this._read(n);
        else this.push(null);
        return;
      }

      // Otherwise, wait for the write stream to add more data or finish.
      const retry = (): void => {
        this._writeStream.off("finish", retry);
        this._writeStream.off("write", retry);
        this._read(n);
      };

      this._writeStream.on("finish", retry);
      this._writeStream.on("write", retry);
    });
  }
}
88
+
/** Options forwarded to the underlying `Writable` constructor, plus an
 * overridable temp-directory provider (defaults to `os.tmpdir`). */
export interface WriteStreamOptions {
  highWaterMark?: WritableOptions["highWaterMark"];
  defaultEncoding?: WritableOptions["defaultEncoding"];
  tmpdir?: () => string;
}
94
+
/**
 * A "capacitor" write stream: buffers everything written to it into a
 * randomly named temp file, from which any number of {@link ReadStream}s can
 * be created (even while writing is still in progress). The temp file is
 * unlinked when the stream is destroyed, or on process exit as a fallback.
 */
export class WriteStream extends Writable {
  // File descriptor of the temp file; null until the async open completes
  // (consumers wait for the "ready" event).
  private _fd: null | number = null;
  private _path: null | string = null;
  // Number of bytes flushed to the temp file so far.
  private _pos: number = 0;
  private _readStreams: Set<ReadStream> = new Set();
  // Once released, no new ReadStreams may be created; the capacitor destroys
  // itself when the last attached ReadStream closes.
  private _released: boolean = false;

  constructor(options?: WriteStreamOptions) {
    super({
      highWaterMark: options?.highWaterMark,
      defaultEncoding: options?.defaultEncoding,
      autoDestroy: false,
    });

    // Generate a random filename.
    randomBytes(16, (error, buffer) => {
      if (error) {
        this.destroy(error);
        return;
      }

      this._path = join(
        (options?.tmpdir ?? tmpdir)(),
        `capacitor-${buffer.toString("hex")}.tmp`
      );

      // Create a file in the OS's temporary files directory.
      // "wx+" fails if the path already exists; 0o600 restricts it to the
      // current user.
      open(this._path, "wx+", 0o600, (error, fd) => {
        if (error) {
          this.destroy(error);
          return;
        }

        // Cleanup when the process exits or is killed.
        processExitProxy.once("exit", this._cleanupSync);

        this._fd = fd;
        this.emit("ready");
      });
    });
  }

  // Asynchronously close the fd and unlink the temp file. Arrow property so
  // it can be passed around without rebinding `this`.
  _cleanup = (callback: (error: null | Error) => void): void => {
    const fd = this._fd;
    const path = this._path;

    if (typeof fd !== "number" || typeof path !== "string") {
      callback(null);
      return;
    }

    // Close the file descriptor.
    close(fd, (closeError) => {
      // An error here probably means the fd was already closed, but we can
      // still try to unlink the file.
      unlink(path, (unlinkError) => {
        // If we are unable to unlink the file, the operating system will
        // clean up on next restart, since we store these in `os.tmpdir()`
        this._fd = null;

        // We avoid removing this until now in case an exit occurs while
        // asynchronously cleaning up.
        processExitProxy.off("exit", this._cleanupSync);
        callback(unlinkError ?? closeError);
      });
    });
  };

  // Synchronous variant of _cleanup, safe to run from the process "exit"
  // handler where async work is impossible. Errors are deliberately ignored.
  _cleanupSync = (): void => {
    processExitProxy.off("exit", this._cleanupSync);

    if (typeof this._fd === "number")
      try {
        closeSync(this._fd);
      } catch (error) {
        // An error here probably means the fd was already closed, but we can
        // still try to unlink the file.
      }

    try {
      if (this._path !== null) {
        unlinkSync(this._path);
      }
    } catch (error) {
      // If we are unable to unlink the file, the operating system will clean
      // up on next restart, since we store these in `os.tmpdir()`
    }
  };

  // Writable._final: defer "finish" until the temp file is actually open.
  _final(callback: (error?: null | Error) => any): void {
    if (typeof this._fd !== "number") {
      this.once("ready", () => this._final(callback));
      return;
    }
    callback();
  }

  // Writable._write: append the chunk to the temp file at the current write
  // position, waiting for the fd if the open has not completed yet.
  _write(
    chunk: Buffer,
    encoding: string,
    callback: (error?: null | Error) => any
  ): void {
    if (typeof this._fd !== "number") {
      this.once("ready", () => this._write(chunk, encoding, callback));
      return;
    }

    write(this._fd, chunk, 0, chunk.length, this._pos, (error) => {
      if (error) {
        callback(error);
        return;
      }

      // It's safe to increment `this._pos` after flushing to the filesystem
      // because node streams ensure that only one `_write()` is active at a
      // time. If this assumption is broken, the behavior of this library is
      // undefined, regardless of where this is incremented. Relocating this
      // to increment synchronously would result in correct file contents, but
      // the out-of-order writes would still open the potential for read streams
      // to scan positions that have not yet been written.
      this._pos += chunk.length;
      // "write" lets waiting ReadStreams retry their pending reads.
      this.emit("write");
      callback();
    });
  }

  /** Marks the capacitor as released: no new readers may attach, and it is
   * destroyed as soon as the last existing reader closes. */
  release(): void {
    this._released = true;
    if (this._readStreams.size === 0) this.destroy();
  }

  _destroy(
    error: undefined | null | Error,
    callback: (error?: null | Error) => any
  ): void {
    // Destroy all attached read streams.
    for (const readStream of this._readStreams) {
      readStream.destroy(error || undefined);
    }

    // This capacitor is fully initialized.
    if (typeof this._fd === "number" && typeof this._path === "string") {
      this._cleanup((cleanupError) => callback(cleanupError ?? error));
      return;
    }

    // This capacitor has not yet finished initialization; if initialization
    // does complete, immediately clean up after.
    this.once("ready", () => {
      this._cleanup((cleanupError) => {
        if (cleanupError) {
          this.emit("error", cleanupError);
        }
      });
    });

    callback(error);
  }

  /**
   * Creates a new independent {@link ReadStream} over the buffered data.
   * @throws ReadAfterDestroyedError if the capacitor was destroyed.
   * @throws ReadAfterReleasedError if the capacitor was released.
   */
  createReadStream(options?: ReadStreamOptions): ReadStream {
    if (this.destroyed)
      throw new ReadAfterDestroyedError(
        "A ReadStream cannot be created from a destroyed WriteStream."
      );

    if (this._released)
      throw new ReadAfterReleasedError(
        "A ReadStream cannot be created from a released WriteStream."
      );

    const readStream = new ReadStream(this, options);
    this._readStreams.add(readStream);

    readStream.once("close", (): void => {
      this._readStreams.delete(readStream);

      // Last reader of a released capacitor closed: tear everything down.
      if (this._released && this._readStreams.size === 0) {
        this.destroy();
      }
    });

    return readStream;
  }
}
279
+
// Default export mirrors the named exports for consumers that expect the
// upstream fs-capacitor module shape.
export default {
  WriteStream,
  ReadStream,
  ReadAfterDestroyedError,
  ReadAfterReleasedError,
};
@@ -0,0 +1,4 @@
// Barrel file re-exporting the GraphQL upload driver internals.
export * from "./fs-capacitor";
export * from "./Upload";
export * from "./GraphQLUpload";
export * from "./processRequest";
@@ -0,0 +1,50 @@
1
+ import { Context } from "hono";
2
+ import { WriteStream, Upload } from ".";
3
+ import { Readable } from "stream";
4
+
5
+ export async function processRequest(
6
+ ctx: Context
7
+ ): Promise<Record<string, any>> {
8
+ const body = await ctx.req.parseBody();
9
+ const operations = JSON.parse(body.operations as string);
10
+ const map = new Map(Object.entries(JSON.parse(body.map as string)));
11
+
12
+ for (const [fieldName, file] of Object.entries(body)) {
13
+ if (
14
+ fieldName === "operations" ||
15
+ fieldName === "map" ||
16
+ !(file instanceof File)
17
+ )
18
+ continue;
19
+
20
+ const fileKeys = map.get(fieldName);
21
+ if (!Array.isArray(fileKeys) || !fileKeys.length) continue;
22
+
23
+ const buffer = Buffer.from(await file.arrayBuffer());
24
+ const capacitor = new WriteStream();
25
+ Readable.from(buffer).pipe(capacitor);
26
+
27
+ const upload = new Upload();
28
+ upload.file = {
29
+ filename: file.name,
30
+ mimetype: file.type,
31
+ fieldName,
32
+ encoding: "7bit",
33
+ createReadStream: (options) => capacitor.createReadStream(options),
34
+ capacitor,
35
+ };
36
+ upload.resolve(upload.file);
37
+
38
+ for (const fileKey of fileKeys) {
39
+ const pathSegments = fileKey.split(".");
40
+ let current = operations;
41
+ for (let i = 0; i < pathSegments.length - 1; i++) {
42
+ if (!current[pathSegments[i]]) current[pathSegments[i]] = {};
43
+ current = current[pathSegments[i]];
44
+ }
45
+ current[pathSegments[pathSegments.length - 1]] = upload;
46
+ }
47
+ }
48
+
49
+ return operations;
50
+ }
@@ -1,20 +1,22 @@
1
- import { ApolloServer, BaseContext, HeaderMap } from '@apollo/server';
1
+ import { ApolloServer, BaseContext, HeaderMap } from "@apollo/server";
2
+ import { ModulesContainer } from "@nestjs/core";
2
3
  import {
3
4
  AbstractGraphQLDriver,
4
5
  GqlSubscriptionService,
5
6
  SubscriptionConfig,
6
- } from '@nestjs/graphql';
7
- import { ApolloServerPluginDrainHttpServer } from '@apollo/server/plugin/drainHttpServer';
8
- import { ApolloDriverConfig } from '@nestjs/apollo';
9
- import { Context, HonoRequest } from 'hono';
10
- import { StatusCode } from 'hono/utils/http-status';
11
- import { Logger } from '@nestjs/common';
7
+ } from "@nestjs/graphql";
8
+ import { ApolloServerPluginDrainHttpServer } from "@apollo/server/plugin/drainHttpServer";
9
+ import { ApolloDriverConfig } from "@nestjs/apollo";
10
+ import { Context, HonoRequest } from "hono";
11
+ import { StatusCode } from "hono/utils/http-status";
12
+ import { Logger } from "@nestjs/common";
13
+ import http from "http";
12
14
 
13
- import { PluginsExplorerService } from './services/plugins-explorer.service';
14
- import { ModulesContainer } from '@nestjs/core';
15
+ import { PluginsExplorerService } from "./services/plugins-explorer.service";
16
+ import { processRequest } from "./graphQLUpload";
15
17
 
16
18
  export class HonoGraphQLDriver<
17
- T extends Record<string, any> = ApolloDriverConfig,
19
+ T extends Record<string, any> = ApolloDriverConfig
18
20
  > extends AbstractGraphQLDriver {
19
21
  protected apolloServer: ApolloServer<BaseContext>;
20
22
  private _subscriptionService?: GqlSubscriptionService;
@@ -33,15 +35,15 @@ export class HonoGraphQLDriver<
33
35
  const { httpAdapter } = this.httpAdapterHost;
34
36
  const platformName = httpAdapter.getType();
35
37
 
36
- if (platformName !== 'hono') {
37
- throw new Error('This driver is only compatible with the Hono platform');
38
+ if (platformName !== "hono") {
39
+ throw new Error("This driver is only compatible with the Hono platform");
38
40
  }
39
41
 
40
42
  await this.registerHono(options);
41
43
 
42
44
  if (options.installSubscriptionHandlers || options.subscriptions) {
43
45
  const subscriptionsOptions: SubscriptionConfig =
44
- options.subscriptions || { 'subscriptions-transport-ws': {} };
46
+ options.subscriptions || { "subscriptions-transport-ws": {} };
45
47
  this._subscriptionService = new GqlSubscriptionService(
46
48
  {
47
49
  schema: options.schema,
@@ -49,14 +51,14 @@ export class HonoGraphQLDriver<
49
51
  context: options.context,
50
52
  ...subscriptionsOptions,
51
53
  },
52
- this.httpAdapterHost.httpAdapter?.getHttpServer(),
54
+ this.httpAdapterHost.httpAdapter?.getHttpServer()
53
55
  );
54
56
  }
55
57
  }
56
58
 
57
59
  protected async registerHono(
58
60
  options: T,
59
- { preStartHook }: { preStartHook?: () => void } = {},
61
+ { preStartHook }: { preStartHook?: () => void } = {}
60
62
  ) {
61
63
  const { path, typeDefs, resolvers, schema } = options;
62
64
  const { httpAdapter } = this.httpAdapterHost;
@@ -78,7 +80,7 @@ export class HonoGraphQLDriver<
78
80
  await server.start();
79
81
 
80
82
  app.use(path, async (ctx: Context) => {
81
- const bodyData = await this.parseBody(ctx.req);
83
+ const bodyData = await this.parseBody(ctx);
82
84
 
83
85
  const defaultContext = () => Promise.resolve({} as BaseContext);
84
86
 
@@ -108,7 +110,7 @@ export class HonoGraphQLDriver<
108
110
 
109
111
  ctx.status(status === undefined ? 200 : (status as StatusCode));
110
112
 
111
- if (body.kind === 'complete') {
113
+ if (body.kind === "complete") {
112
114
  return ctx.body(body.string);
113
115
  }
114
116
 
@@ -122,7 +124,7 @@ export class HonoGraphQLDriver<
122
124
  });
123
125
 
124
126
  return new Response(readableStream, {
125
- headers: { 'Content-Type': 'application/octet-stream' },
127
+ headers: { "Content-Type": "application/octet-stream" },
126
128
  });
127
129
  });
128
130
 
@@ -140,17 +142,30 @@ export class HonoGraphQLDriver<
140
142
  return map;
141
143
  }
142
144
 
143
- private async parseBody(req: HonoRequest): Promise<Record<string, unknown>> {
144
- const contentType = req.header('content-type');
145
- if (contentType === 'application/graphql')
145
+ private async parseBody(ctx: Context): Promise<Record<string, unknown>> {
146
+ const req = ctx.req;
147
+ const contentType = req.header("content-type");
148
+
149
+ if (contentType === "application/graphql")
146
150
  return { query: await req.text() };
147
- if (contentType === 'application/json')
151
+ if (contentType === "application/json")
148
152
  return req.json().catch(this.logError);
149
- if (contentType === 'application/x-www-form-urlencoded')
153
+ if (contentType === "application/x-www-form-urlencoded")
150
154
  return this.parseFormURL(req);
155
+ if (contentType?.startsWith("multipart/form-data")) {
156
+ return processRequest(ctx);
157
+ }
151
158
  return {};
152
159
  }
153
160
 
161
+ headersToRecord(headers: Headers): http.IncomingHttpHeaders {
162
+ const obj: http.IncomingHttpHeaders = {};
163
+ headers.forEach((value, key) => {
164
+ obj[key] = value;
165
+ });
166
+ return obj;
167
+ }
168
+
154
169
  private logError(e: unknown): void {
155
170
  if (e instanceof Error) {
156
171
  Logger.error(e.stack || e.message);
@@ -1 +1,2 @@
1
1
  export * from "./graphql.driver";
2
+ export * from "./graphQLUpload";