@powerhousedao/reactor-api 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,142 @@
1
# SDL for the system-level subgraph.
# NOTE(review): DocumentDriveState, IDocument, Listener and DateTime are not
# defined in this file — presumably injected by createSchema from the
# registered document models / shared wrapper schema. Verify at composition.

type Query {
  system: System
  drive: DocumentDriveState
  # Look up a single document by id.
  document(id: ID!): IDocument
  documents: [String!]!
}

type Mutation {
  # Register a pull-responder listener selecting documents via the filter.
  registerPullResponderListener(filter: InputListenerFilter!): Listener
  # Push operation strands; one ListenerRevision is returned per strand.
  pushUpdates(strands: [InputStrandUpdate!]): [ListenerRevision!]!
  # Acknowledge revisions that a listener has finished processing.
  acknowledge(listenerId: String!, revisions: [ListenerRevisionInput]): Boolean
}

# User identity attached to an operation signature (input form).
input InputOperationSignerUser {
  address: String!
  networkId: String!
  chainId: Int!
}

# User identity attached to an operation signature (output form).
type OperationSignerUser {
  address: String!
  networkId: String!
  chainId: Int!
}

# App identity attached to an operation signature (input form).
input InputOperationSignerApp {
  name: String!
  key: String!
}

# App identity attached to an operation signature (output form).
type OperationSignerApp {
  name: String!
  key: String!
}

# Listener selection filter; null fields presumably mean "match all"
# for that dimension — TODO confirm against the drive server.
input InputListenerFilter {
  documentType: [String!]
  documentId: [String!]
  scope: [String!]
  branch: [String!]
}

# Who signed an operation: the originating app, the user, and signatures.
type OperationSigner {
  app: OperationSignerApp
  user: OperationSignerUser
  signatures: [String!]!
}

input InputOperationSigner {
  app: InputOperationSignerApp
  user: InputOperationSignerUser
  signatures: [String!]!
}

type OperationContext {
  signer: OperationSigner
}

input InputOperationContext {
  signer: InputOperationSigner
}

# A single document operation as pushed by a client.
input InputOperationUpdate {
  index: Int!
  skip: Int
  type: String!
  id: String!
  # Serialized operation payload — presumably JSON; verify with the server.
  input: String!
  hash: String!
  timestamp: String!
  error: String
  context: InputOperationContext
}

# A single document operation as delivered to a listener.
type OperationUpdate {
  index: Int!
  skip: Int
  type: String!
  id: String!
  input: String!
  hash: String!
  timestamp: String!
  error: String
  context: OperationContext
}

# A batch of operations for one (drive, document, scope, branch) tuple.
type StrandUpdate {
  driveId: String!
  documentId: String!
  scope: String!
  branch: String!
  operations: [OperationUpdate!]!
}

input InputStrandUpdate {
  driveId: String!
  documentId: String!
  scope: String!
  branch: String!
  operations: [InputOperationUpdate!]!
}

# NOTE(review): duplicates InputListenerFilter field-for-field; consider
# consolidating on a single input type.
input ListenerFilterInput {
  documentType: [String!]
  documentId: [String!]
  scope: [String!]
  branch: [String!]
}

# Outcome of applying a pushed update.
enum UpdateStatus {
  SUCCESS
  MISSING
  CONFLICT
  ERROR
}

input ListenerRevisionInput {
  driveId: String!
  documentId: String!
  scope: String!
  branch: String!
  status: UpdateStatus!
  revision: Int!
}

type ListenerRevision {
  driveId: String!
  documentId: String!
  scope: String!
  branch: String!
  status: UpdateStatus!
  revision: Int!
  error: String
}

type System {
  sync: Sync
}

type Sync {
  # Strand updates for a listener; `since` presumably limits results to
  # updates after the given cursor/timestamp — TODO confirm semantics.
  strands(listenerId: ID!, since: String): [StrandUpdate!]!
}
@@ -0,0 +1,16 @@
1
+ import { GraphQLResolverMap } from "@apollo/subgraph/dist/schema-helper";
2
+ import { BaseDocumentDriveServer } from "document-drive";
3
+ import { readFileSync } from "node:fs";
4
+ import { dirname, resolve } from "node:path";
5
+ import { fileURLToPath } from "node:url";
6
+ import { createSchema } from "src/utils/create-schema";
7
+ import { resolvers } from "./resolvers";
8
+ import schemaPath from "./schema.graphql";
9
+
10
+ const __dirname =
11
+ import.meta.dirname ?? dirname(fileURLToPath(import.meta.url));
12
+
13
+ const typeDefs = readFileSync(resolve(__dirname, schemaPath), "utf8");
14
+
15
+ export const getSchema = (driveServer: BaseDocumentDriveServer) =>
16
+ createSchema(driveServer, resolvers as GraphQLResolverMap, typeDefs);
@@ -0,0 +1,13 @@
1
+ import { getSchema as getSystemSchema } from "./system/subgraph";
2
+ import { getSchema as getDriveSchema } from "./drive/subgraph";
3
+
4
+ export const SUBGRAPH_REGISTRY = [
5
+ {
6
+ name: "system",
7
+ getSchema: getSystemSchema,
8
+ },
9
+ {
10
+ name: "d/:drive",
11
+ getSchema: getDriveSchema,
12
+ },
13
+ ];
@@ -0,0 +1,22 @@
1
+ import { DriveInput } from "document-drive";
2
+ import { Context } from "../../../../../../apps/switchboard/types";
3
+
4
+ export const resolvers = {
5
+ Query: {
6
+ drives: async (parent: unknown, args: unknown, ctx: Context) => {
7
+ const drives = await ctx.driveServer.getDrives();
8
+ return drives;
9
+ },
10
+ },
11
+ Mutation: {
12
+ addDrive: async (parent: unknown, args: DriveInput, ctx: Context) => {
13
+ try {
14
+ const drive = await ctx.driveServer.addDrive(args);
15
+ return drive.state.global;
16
+ } catch (e) {
17
+ console.error(e);
18
+ throw new Error(e as string);
19
+ }
20
+ },
21
+ },
22
+ };
@@ -0,0 +1,18 @@
1
# SDL for the drive subgraph.
# NOTE(review): DocumentDriveState is not defined in this file — presumably
# injected by createSchema from the document-drive model. Verify at composition.

type Query {
  # Ids of all drives (resolver backs this with driveServer.getDrives()).
  drives: [String!]!
  # Resolve a drive id from its slug; null when the slug is unknown.
  driveIdBySlug(slug: String!): String
}

type Mutation {
  # Create a drive and return its global state.
  addDrive(global: DocumentDriveStateInput!): DocumentDriveState
  deleteDrive(id: ID!): Boolean
  setDriveIcon(id: String!, icon: String!): Boolean
  setDriveName(id: String!, name: String!): Boolean
}

# Creation parameters for a drive; all fields optional — unset fields
# presumably receive server-side defaults (TODO confirm).
input DocumentDriveStateInput {
  name: String
  id: String
  slug: String
  icon: String
}
@@ -0,0 +1,16 @@
1
+ import { GraphQLResolverMap } from "@apollo/subgraph/dist/schema-helper";
2
+ import { BaseDocumentDriveServer } from "document-drive";
3
+ import { readFileSync } from "fs";
4
+ import { dirname, resolve } from "path";
5
+ import { fileURLToPath } from "url";
6
+ import { createSchema } from "src/utils/create-schema";
7
+ import { resolvers } from "./resolvers";
8
+ import schemaPath from "./schema.graphql";
9
+
10
+ const __dirname =
11
+ import.meta.dirname ?? dirname(fileURLToPath(import.meta.url));
12
+
13
+ const typeDefs = readFileSync(resolve(__dirname, schemaPath), "utf8");
14
+
15
+ export const getSchema = (driveServer: BaseDocumentDriveServer) =>
16
+ createSchema(driveServer, resolvers as GraphQLResolverMap, typeDefs);
package/src/types.ts ADDED
@@ -0,0 +1,8 @@
1
import { BaseDocumentDriveServer } from "document-drive";
import { IncomingHttpHeaders } from "http";

/**
 * Per-request GraphQL context shared by the subgraph resolvers.
 */
export interface Context {
  /** Raw HTTP headers of the incoming request. */
  headers: IncomingHttpHeaders;
  /**
   * Id of the drive addressed by the request — presumably populated from the
   * "d/:drive" route parameter and undefined for system-level requests;
   * TODO confirm against the router.
   */
  driveId: string | undefined;
  /** Drive server used by resolvers to read and mutate drives/documents. */
  driveServer: BaseDocumentDriveServer;
}
@@ -0,0 +1,81 @@
1
+ import { buildSubgraphSchema } from "@apollo/subgraph";
2
+ import { BaseDocumentDriveServer } from "document-drive";
3
+ import { readFileSync } from "node:fs";
4
+ import { fileURLToPath } from "node:url";
5
+ import { GraphQLResolverMap } from "@apollo/subgraph/dist/schema-helper";
6
+ import { parse } from "graphql";
7
+
8
+ export const createSchema = (
9
+ documentDriveServer: BaseDocumentDriveServer,
10
+ resolvers: GraphQLResolverMap,
11
+ typeDefs: string
12
+ ) =>
13
+ buildSubgraphSchema([
14
+ {
15
+ typeDefs: getDocumentModelTypeDefs(documentDriveServer, typeDefs),
16
+ resolvers,
17
+ },
18
+ ]);
19
+
20
+ export const getDocumentModelTypeDefs = (
21
+ documentDriveServer: BaseDocumentDriveServer,
22
+ typeDefs: string
23
+ ) => {
24
+ const documentModels = documentDriveServer.getDocumentModels();
25
+ let dmSchema = "";
26
+ documentModels.forEach(({ documentModel }) => {
27
+ dmSchema += `
28
+ ${documentModel.specifications
29
+ .map((specification) =>
30
+ specification.state.global.schema
31
+ .replaceAll(" Account ", ` ${documentModel.name}Account `)
32
+ .replaceAll(`: Account`, `: ${documentModel.name}Account`)
33
+ .replaceAll(`[Account!]!`, `[${documentModel.name}Account!]!`)
34
+ .replaceAll("scalar DateTime", "")
35
+ .replaceAll(/input (.*?) {[\s\S]*?}/g, "")
36
+ )
37
+ .join("\n")};
38
+
39
+ ${documentModel.specifications
40
+ .map((specification) =>
41
+ specification.state.local.schema
42
+ .replaceAll(" Account ", ` ${documentModel.name}Account `)
43
+ .replaceAll(`: Account`, `: ${documentModel.name}Account`)
44
+ .replaceAll(`[Account!]!`, `[${documentModel.name}Account!]!`)
45
+ .replaceAll("scalar DateTime", "")
46
+ .replaceAll(/input (.*?) {[\s\S]*?}/g, "")
47
+ .replaceAll("type AccountSnapshotLocalState", "")
48
+ .replaceAll("type BudgetStatementLocalState", "")
49
+ .replaceAll("type ScopeFrameworkLocalState", "")
50
+ )
51
+ .join("\n")};
52
+
53
+ type ${documentModel.name} implements IDocument {
54
+ id: ID!
55
+ name: String!
56
+ documentType: String!
57
+ revision: Int!
58
+ created: DateTime!
59
+ lastModified: DateTime!
60
+ ${documentModel.name !== "DocumentModel" ? `state: ${documentModel.name}State!` : ""}
61
+ }\n`;
62
+ });
63
+
64
+ // add the mutation and query types
65
+ const schema = `
66
+ scalar DateTime
67
+ interface IDocument {
68
+ name: String!
69
+ documentType: String!
70
+ revision: Int!
71
+ created: DateTime!
72
+ lastModified: DateTime!
73
+
74
+ }
75
+ ${dmSchema}
76
+
77
+ ${typeDefs}
78
+ `;
79
+
80
+ return parse(schema.replaceAll(";", ""));
81
+ };
@@ -0,0 +1,55 @@
1
import { buildSubgraphSchema } from "@apollo/subgraph";
import * as DocumentModelsLibs from "document-model-libs/document-models";
import { DocumentModel } from "document-model/document";
import { module as DocumentModelLib } from "document-model/document-model";
import { describe, expect, it } from "vitest";
import {
  BaseDocumentDriveServer,
  DocumentDriveServer,
} from "../../document-drive/src/server";
import { addSubgraph } from "../src/index";
import { initReactorRouter, reactorRouter, updateRouter } from "../src/router";
import { getDocumentModelTypeDefs } from "../src/utils/gen-doc-model-type-defs";
import express from "express";

// All document models available to the test drive server: the base
// document-model plus everything exported by document-model-libs.
const documentModels = [
  DocumentModelLib,
  ...Object.values(DocumentModelsLibs),
] as DocumentModel[];

describe("Reactor Router", () => {
  // Initializing the router should register the two built-in subgraph routes.
  it("should be initialized", async () => {
    const app = express();
    await initReactorRouter("/", app, new DocumentDriveServer(documentModels));
    // NOTE(review): inspects Express internals (router.stack / layer.regexp),
    // which are undocumented and brittle across Express versions — confirm
    // before upgrading Express.
    const [system, drive] = reactorRouter.stack;
    expect(system).toBeDefined();
    expect(drive).toBeDefined();
    expect("/system").toMatch(system.regexp);
    expect("/drive").toMatch(drive.regexp);
  });

  // Registering a custom subgraph should add another layer to the router.
  it("should be able to add a new subgraph", async () => {
    const driveServer = new DocumentDriveServer(documentModels);
    await driveServer.initialize();
    // Minimal subgraph: document-model types plus a trivial Query.hello field.
    const newSubgraph = {
      name: "newSubgraph",
      getSchema: (documentDriveServer: BaseDocumentDriveServer) =>
        buildSubgraphSchema([
          {
            typeDefs: getDocumentModelTypeDefs(
              documentDriveServer,
              `
              type Query {
                hello: String
              }
            `
            ),
            resolvers: { Query: { hello: () => "world" } },
          },
        ]),
    };

    await addSubgraph(newSubgraph);
    // Two built-in subgraphs plus the one just added.
    expect(reactorRouter.stack.length).gte(3);
  });
});
package/tsconfig.json ADDED
@@ -0,0 +1,18 @@
1
+ {
2
+ "compilerOptions": {
3
+ "module": "ESNext",
4
+ "moduleResolution": "Bundler",
5
+ "target": "esnext",
6
+ "types": ["node", "./types.d.ts"],
7
+ "declaration": true,
8
+ "outDir": "./dist",
9
+ "esModuleInterop": true,
10
+ "forceConsistentCasingInFileNames": true,
11
+ "strict": true,
12
+ "noImplicitAny": true,
13
+ "skipLibCheck": true,
14
+ "emitDeclarationOnly": true,
15
+ "baseUrl": "."
16
+ },
17
+ "include": ["src/**/*", "*.config.ts"]
18
+ }
package/tsdoc.json ADDED
@@ -0,0 +1,3 @@
1
+ {
2
+ "$schema": "https://developer.microsoft.com/json-schemas/tsdoc/v0/tsdoc.schema.json"
3
+ }
package/tsup.config.ts ADDED
@@ -0,0 +1,16 @@
1
import { defineConfig } from "tsup";

// tsup bundling configuration for the package build.
export default defineConfig({
  entry: ["src/index.ts"],
  splitting: true,
  sourcemap: true,
  clean: true, // wipe the output directory before each build
  format: "esm",
  treeshake: true,
  // Bundle "document-drive" into the output instead of leaving it external.
  noExternal: ["document-drive"],
  target: "node20",
  loader: {
    // Copy .graphql files into the output and turn their imports into the
    // emitted file's path (consumed via readFileSync at runtime).
    ".graphql": "file",
  },
  dts: true, // emit .d.ts declarations
});
package/types.d.ts ADDED
@@ -0,0 +1,5 @@
1
// Ambient module declaration so TypeScript accepts `.graphql` imports.
// The bundler's "file" loader (see tsup.config.ts) makes such an import
// evaluate to the emitted file's path — hence the string default export.
declare module "*.graphql" {
  const value: string;

  export default value;
}
@@ -0,0 +1,28 @@
1
+ import { dirname, resolve } from "node:path";
2
+ import { defineConfig } from "vitest/config";
3
+
4
+ export default defineConfig({
5
+ plugins: [
6
+ {
7
+ name: "graphql-path-resolver",
8
+ resolveId(source, importer) {
9
+ if (source.endsWith(".graphql")) {
10
+ // Resolve the path relative to the the file that imports the .graphql file
11
+ return resolve(
12
+ dirname(importer || ""),
13
+ `${source.startsWith("/") ? `.${source}` : source}.graphql`
14
+ );
15
+ }
16
+ return null; // Let other resolvers handle other imports
17
+ },
18
+ load(id) {
19
+ if (id.endsWith(".graphql")) {
20
+ // Return the file path as a string, which Vite will use as the resolved module
21
+ return `export default ${JSON.stringify(id)}`;
22
+ }
23
+ return null; // Let other loaders handle other files
24
+ },
25
+ },
26
+ ],
27
+ test: {},
28
+ });