@sylphx/lens-server 1.5.2 → 1.11.0
This diff shows the published contents of the two package versions as they appear in their public registry and is provided for informational purposes only.
- package/dist/index.d.ts +8 -8
- package/dist/index.js +87 -10
- package/package.json +3 -3
- package/src/e2e/server.test.ts +8 -8
- package/src/index.ts +3 -3
- package/src/server/create.test.ts +592 -15
- package/src/server/create.ts +158 -21
- package/src/sse/handler.ts +1 -1
- package/src/state/graph-state-manager.test.ts +1 -1
- package/src/state/index.ts +1 -1
package/dist/index.d.ts
CHANGED
@@ -238,21 +238,21 @@ interface LensServerConfig<
   TRouter extends RouterDef = RouterDef
 > {
   /** Entity definitions */
-  entities?: EntitiesMap;
+  entities?: EntitiesMap | undefined;
   /** Router definition (namespaced operations) - context type is inferred */
-  router?: TRouter;
+  router?: TRouter | undefined;
   /** Query definitions (flat, legacy) */
-  queries?: QueriesMap;
+  queries?: QueriesMap | undefined;
   /** Mutation definitions (flat, legacy) */
-  mutations?: MutationsMap;
+  mutations?: MutationsMap | undefined;
   /** Field resolvers array (use lens() factory to create) */
-  resolvers?: Resolvers;
+  resolvers?: Resolvers | undefined;
   /** Logger for server messages (default: silent) */
-  logger?: LensLogger;
+  logger?: LensLogger | undefined;
   /** Context factory - must return the context type expected by the router */
-  context?: (req?: unknown) => TContext | Promise<TContext>;
+  context?: ((req?: unknown) => TContext | Promise<TContext>) | undefined;
   /** Server version */
-  version?: string;
+  version?: string | undefined;
 }
 /** Server metadata for transport handshake */
 interface ServerMetadata {
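The `| undefined` additions make each optional config property explicitly accept `undefined`, which matters for consumers compiling with TypeScript's exactOptionalPropertyTypes. A minimal sketch, using an abbreviated stand-in for the real LensServerConfig interface:

// Sketch only: abbreviated stand-in for the real LensServerConfig interface.
interface ConfigSketch {
  version?: string | undefined;
}

// With "exactOptionalPropertyTypes": true in tsconfig, assigning an explicit
// `undefined` to an optional property is only allowed when its declared type
// includes `| undefined` - which is what the 1.11.0 typings add.
function makeConfig(version: string | undefined): ConfigSketch {
  return { version }; // would be a type error against the old `version?: string`
}

makeConfig(undefined);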
package/dist/index.js
CHANGED
@@ -11,7 +11,9 @@ import {
   createEmit,
   createUpdate as createUpdate2,
   flattenRouter,
+  isEntityDef,
   isMutationDef,
+  isPipeline,
   isQueryDef,
   runWithContext,
   toResolverMap
@@ -457,6 +459,70 @@ function createGraphStateManager(config) {
 }
 
 // src/server/create.ts
+function getEntityTypeName(returnSpec) {
+  if (!returnSpec)
+    return;
+  if (isEntityDef(returnSpec)) {
+    return returnSpec._name;
+  }
+  if (Array.isArray(returnSpec) && returnSpec.length === 1 && isEntityDef(returnSpec[0])) {
+    return returnSpec[0]._name;
+  }
+  return;
+}
+function getInputFields(inputSchema) {
+  if (!inputSchema?.shape)
+    return [];
+  return Object.keys(inputSchema.shape);
+}
+function sugarToPipeline(optimistic, entityType, inputFields) {
+  if (isPipeline(optimistic)) {
+    return optimistic;
+  }
+  if (!entityType) {
+    return optimistic;
+  }
+  if (optimistic === "merge") {
+    const args = { type: entityType };
+    for (const field of inputFields) {
+      args[field] = { $input: field };
+    }
+    return {
+      $pipe: [{ $do: "entity.update", $with: args }]
+    };
+  }
+  if (optimistic === "create") {
+    const args = { type: entityType, id: { $temp: true } };
+    for (const field of inputFields) {
+      if (field !== "id") {
+        args[field] = { $input: field };
+      }
+    }
+    return {
+      $pipe: [{ $do: "entity.create", $with: args }]
+    };
+  }
+  if (optimistic === "delete") {
+    return {
+      $pipe: [{ $do: "entity.delete", $with: { type: entityType, id: { $input: "id" } } }]
+    };
+  }
+  if (typeof optimistic === "object" && optimistic !== null && "merge" in optimistic && typeof optimistic.merge === "object") {
+    const extra = optimistic.merge;
+    const args = { type: entityType };
+    for (const field of inputFields) {
+      args[field] = { $input: field };
+    }
+    for (const [key, value] of Object.entries(extra)) {
+      args[key] = value;
+    }
+    return {
+      $pipe: [{ $do: "entity.update", $with: args }]
+    };
+  }
+  return optimistic;
+}
+
 class DataLoader {
   batchFn;
   batch = new Map;
@@ -627,7 +693,9 @@ class LensServerImpl {
     for (const [name, def] of Object.entries(this.mutations)) {
       const meta = { type: "mutation" };
       if (def._optimistic) {
-
+        const entityType = getEntityTypeName(def._output);
+        const inputFields = getInputFields(def._input);
+        meta.optimistic = sugarToPipeline(def._optimistic, entityType, inputFields);
       }
       setNested(name, meta);
     }
@@ -783,11 +851,14 @@ class LensServerImpl {
         sub.cleanups.splice(idx, 1);
       };
     };
-    const result = resolver({
-      input: sub.input,
-      ctx: context,
+    const lensContext = {
+      ...context,
       emit,
       onCleanup
+    };
+    const result = resolver({
+      input: sub.input,
+      ctx: lensContext
     });
     if (isAsyncIterable(result)) {
       for await (const value of result) {
@@ -938,11 +1009,14 @@ class LensServerImpl {
     }
     const emit = createEmit(() => {});
     const onCleanup = () => () => {};
-    const result = resolver({
-      input: cleanInput,
-      ctx: context,
+    const lensContext = {
+      ...context,
       emit,
       onCleanup
+    };
+    const result = resolver({
+      input: cleanInput,
+      ctx: lensContext
    });
     let data;
     if (isAsyncIterable(result)) {
@@ -982,11 +1056,14 @@ class LensServerImpl {
     }
     const emit = createEmit(() => {});
     const onCleanup = () => () => {};
-    const result = await resolver({
-      input,
-      ctx: context,
+    const lensContext = {
+      ...context,
       emit,
       onCleanup
+    };
+    const result = await resolver({
+      input,
+      ctx: lensContext
     });
     const entityName = this.getEntityNameFromMutation(name);
     const entities = this.extractEntities(entityName, result);
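The resolver-invocation change above is mechanical but worth spelling out: instead of passing emit and onCleanup alongside a separate ctx argument, the server now spreads the user context and the Lens extensions into a single ctx object. A minimal sketch of the merged shape; UserContext and the extension signatures here are hypothetical simplifications, not the library's published types:

// Hypothetical user context; the real type comes from the server's context factory.
interface UserContext {
  requestId: string;
}

// Simplified signatures for illustration only.
interface LensExtensions {
  emit: (value: unknown) => void;
  onCleanup: (fn: () => void) => () => void;
}

// What resolvers now receive as `ctx`: user context merged with Lens extensions,
// mirroring `{ ...context, emit, onCleanup }` in the diff above.
type LensContext = UserContext & LensExtensions;

function buildLensContext(context: UserContext, ext: LensExtensions): LensContext {
  return { ...context, ...ext };
}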
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@sylphx/lens-server",
-  "version": "1.5.2",
+  "version": "1.11.0",
   "description": "Server runtime for Lens API framework",
   "type": "module",
   "main": "./dist/index.js",
@@ -15,7 +15,7 @@
     "build": "bunup",
     "typecheck": "tsc --noEmit",
     "test": "bun test",
-    "prepack": "bun run build"
+    "prepack": "[ -d dist ] || bun run build"
   },
   "files": [
     "dist",
@@ -30,7 +30,7 @@
   "author": "SylphxAI",
   "license": "MIT",
   "dependencies": {
-    "@sylphx/lens-core": "^1.
+    "@sylphx/lens-core": "^1.22.0"
   },
   "devDependencies": {
     "typescript": "^5.9.3",
package/src/e2e/server.test.ts
CHANGED
@@ -317,8 +317,8 @@ describe("E2E - Subscriptions", () => {
     const watchUser = query()
       .input(z.object({ id: z.string() }))
       .returns(User)
-      .resolve(({ input,
-        emitFn = emit;
+      .resolve(({ input, ctx }) => {
+        emitFn = ctx.emit;
         const user = mockUsers.find((u) => u.id === input.id);
         if (!user) throw new Error("Not found");
         return user;
@@ -362,8 +362,8 @@ describe("E2E - Subscriptions", () => {
     const watchUser = query()
       .input(z.object({ id: z.string() }))
       .returns(User)
-      .resolve(({ input,
-        emitFn = emit;
+      .resolve(({ input, ctx }) => {
+        emitFn = ctx.emit;
         const user = mockUsers.find((u) => u.id === input.id);
         if (!user) throw new Error("Not found");
         return user;
@@ -464,8 +464,8 @@ describe("E2E - Cleanup", () => {
     const watchUser = query()
       .input(z.object({ id: z.string() }))
       .returns(User)
-      .resolve(({ input,
-        onCleanup(() => {
+      .resolve(({ input, ctx }) => {
+        ctx.onCleanup(() => {
           cleanedUp = true;
         });
         const user = mockUsers.find((u) => u.id === input.id);
@@ -506,8 +506,8 @@ describe("E2E - GraphStateManager", () => {
     const getUser = query()
       .input(z.object({ id: z.string() }))
       .returns(User)
-      .resolve(({ input,
-        emitFn = emit;
+      .resolve(({ input, ctx }) => {
+        emitFn = ctx.emit;
         const user = mockUsers.find((u) => u.id === input.id);
         if (!user) throw new Error("Not found");
         return user;
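For test and application code, the practical effect of this release is the resolver signature: emit and onCleanup are no longer destructured directly from the resolver arguments but are read off ctx. A sketch of the new form, reusing query, z, User and mockUsers exactly as they are defined in this test file (it is a fragment, not a standalone module):

// Sketch only, using the builders and fixtures already present in this test file.
const watchUser = query()
  .input(z.object({ id: z.string() }))
  .returns(User)
  .resolve(({ input, ctx }) => {
    // emit and onCleanup now hang off ctx instead of being top-level resolver args
    ctx.onCleanup(() => {
      // release any per-subscription resources here
    });
    const user = mockUsers.find((u) => u.id === input.id);
    if (!user) throw new Error("Not found");
    return user;
  });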
package/src/index.ts
CHANGED
@@ -50,7 +50,7 @@ export {
   // Metadata types (for transport handshake)
   type ServerMetadata,
   type WebSocketLike,
-} from "./server/create";
+} from "./server/create.js";
 
 // =============================================================================
 // State Management
@@ -68,7 +68,7 @@ export {
   type StateFullMessage,
   type StateUpdateMessage,
   type Subscription,
-} from "./state";
+} from "./state/index.js";
 
 // =============================================================================
 // SSE Transport Adapter
@@ -82,4 +82,4 @@ export {
   SSEHandler,
   // Types
   type SSEHandlerConfig,
-} from "./sse/handler";
+} from "./sse/handler.js";
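The import-path changes in this file (and in sse/handler.ts and the state tests below) add explicit .js extensions to relative imports. That is the form Node's ESM resolution and TypeScript's Node16/NodeNext module modes expect even in .ts sources, where the .js specifier is mapped back to the .ts file at compile time. A small illustration, assuming a NodeNext-style tsconfig:

// Under "moduleResolution": "nodenext", relative imports must carry the
// runtime extension even in TypeScript source files:
import { GraphStateManager } from "../state/graph-state-manager.js"; // resolves to graph-state-manager.ts when compiling

// An extensionless specifier ("../state/graph-state-manager") is rejected
// under those settings, which is the situation this release cleans up.
export { GraphStateManager };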
package/src/server/create.test.ts
CHANGED
@@ -724,8 +724,8 @@ describe("onCleanup", () => {
 
     const liveQuery = query()
       .returns(User)
-      .resolve(({
-        onCleanup(() => {
+      .resolve(({ ctx }) => {
+        ctx.onCleanup(() => {
          cleanedUp = true;
        });
        return mockUsers[0];
@@ -764,8 +764,8 @@ describe("onCleanup", () => {
 
     const liveQuery = query()
       .returns(User)
-      .resolve(({
-        onCleanup(() => {
+      .resolve(({ ctx }) => {
+        ctx.onCleanup(() => {
          cleanedUp = true;
        });
        return mockUsers[0];
@@ -802,8 +802,8 @@ describe("onCleanup", () => {
 
     const liveQuery = query()
       .returns(User)
-      .resolve(({
-        const remove = onCleanup(() => {
+      .resolve(({ ctx }) => {
+        const remove = ctx.onCleanup(() => {
          cleanedUp = true;
        });
        // Remove the cleanup before unsubscribe
@@ -950,8 +950,10 @@ describe("getMetadata", () => {
     expect(metadata.version).toBe("1.2.3");
     expect(metadata.operations).toBeDefined();
     expect(metadata.operations.getUser).toEqual({ type: "query" });
-    // createUser auto-derives optimistic "create" from naming convention
-    expect(metadata.operations.createUser).
+    // createUser auto-derives optimistic "create" from naming convention (converted to Pipeline)
+    expect((metadata.operations.createUser as any).type).toBe("mutation");
+    expect((metadata.operations.createUser as any).optimistic.$pipe).toBeDefined();
+    expect((metadata.operations.createUser as any).optimistic.$pipe[0].$do).toBe("entity.create");
   });
 
   it("builds nested operations map from namespaced routes", () => {
@@ -973,8 +975,10 @@ describe("getMetadata", () => {
     const metadata = server.getMetadata();
     expect(metadata.operations.user).toBeDefined();
     expect((metadata.operations.user as any).get).toEqual({ type: "query" });
-    // Auto-derives optimistic "create" from naming convention
-    expect((metadata.operations.user as any).create).
+    // Auto-derives optimistic "create" from naming convention (converted to Pipeline)
+    expect((metadata.operations.user as any).create.type).toBe("mutation");
+    expect((metadata.operations.user as any).create.optimistic.$pipe).toBeDefined();
+    expect((metadata.operations.user as any).create.optimistic.$pipe[0].$do).toBe("entity.create");
   });
 
   it("includes optimistic config in mutation metadata", () => {
@@ -990,9 +994,21 @@ describe("getMetadata", () => {
     });
 
     const metadata = server.getMetadata();
+    // Sugar "merge" is converted to Reify Pipeline
     expect(metadata.operations.updateUser).toEqual({
       type: "mutation",
-      optimistic:
+      optimistic: {
+        $pipe: [
+          {
+            $do: "entity.update",
+            $with: {
+              type: "User",
+              id: { $input: "id" },
+              name: { $input: "name" },
+            },
+          },
+        ],
+      },
     });
   });
 
@@ -1499,8 +1515,8 @@ describe("Logger integration", () => {
     const errorLogs: string[] = [];
     const liveQuery = query()
       .returns(User)
-      .resolve(({
-        onCleanup(() => {
+      .resolve(({ ctx }) => {
+        ctx.onCleanup(() => {
          throw new Error("Cleanup failed");
        });
        return mockUsers[0];
@@ -1544,8 +1560,8 @@ describe("Logger integration", () => {
     const errorLogs: string[] = [];
     const liveQuery = query()
       .returns(User)
-      .resolve(({
-        onCleanup(() => {
+      .resolve(({ ctx }) => {
+        ctx.onCleanup(() => {
          throw new Error("Disconnect cleanup failed");
        });
        return mockUsers[0];
@@ -1584,6 +1600,567 @@ describe("Logger integration", () => {
   });
 });
 
+// =============================================================================
+// Test: DataLoader Batching
+// =============================================================================
+
+describe("DataLoader Batching", () => {
+  it("batches multiple load calls into single batch function call", async () => {
+    let batchCallCount = 0;
+    let receivedKeys: string[] = [];
+
+    class TestDataLoader {
+      private batch: Map<string, { resolve: (v: any) => void; reject: (e: Error) => void }[]> = new Map();
+      private scheduled = false;
+
+      constructor(private batchFn: (keys: string[]) => Promise<(string | null)[]>) {}
+
+      async load(key: string): Promise<string | null> {
+        return new Promise((resolve, reject) => {
+          const existing = this.batch.get(key);
+          if (existing) {
+            existing.push({ resolve, reject });
+          } else {
+            this.batch.set(key, [{ resolve, reject }]);
+          }
+          this.scheduleDispatch();
+        });
+      }
+
+      private scheduleDispatch(): void {
+        if (this.scheduled) return;
+        this.scheduled = true;
+        queueMicrotask(() => this.dispatch());
+      }
+
+      private async dispatch(): Promise<void> {
+        this.scheduled = false;
+        const batch = this.batch;
+        this.batch = new Map();
+
+        const keys = Array.from(batch.keys());
+        if (keys.length === 0) return;
+
+        try {
+          const results = await this.batchFn(keys);
+          keys.forEach((key, index) => {
+            const callbacks = batch.get(key)!;
+            const result = results[index] ?? null;
+            for (const { resolve } of callbacks) resolve(result);
+          });
+        } catch (error) {
+          for (const callbacks of batch.values()) {
+            for (const { reject } of callbacks) reject(error as Error);
+          }
+        }
+      }
+
+      clear(): void {
+        this.batch.clear();
+      }
+    }
+
+    const loader = new TestDataLoader(async (keys) => {
+      batchCallCount++;
+      receivedKeys = keys;
+      return keys.map((k) => `value-${k}`);
+    });
+
+    // Load multiple keys in same tick
+    const promises = [loader.load("key1"), loader.load("key2"), loader.load("key3")];
+
+    const results = await Promise.all(promises);
+
+    // Should batch all calls into single batch function call
+    expect(batchCallCount).toBe(1);
+    expect(receivedKeys).toEqual(["key1", "key2", "key3"]);
+    expect(results).toEqual(["value-key1", "value-key2", "value-key3"]);
+  });
+
+  it("handles duplicate keys in same batch", async () => {
+    class TestDataLoader {
+      private batch: Map<string, { resolve: (v: any) => void; reject: (e: Error) => void }[]> = new Map();
+      private scheduled = false;
+
+      constructor(private batchFn: (keys: string[]) => Promise<(string | null)[]>) {}
+
+      async load(key: string): Promise<string | null> {
+        return new Promise((resolve, reject) => {
+          const existing = this.batch.get(key);
+          if (existing) {
+            existing.push({ resolve, reject });
+          } else {
+            this.batch.set(key, [{ resolve, reject }]);
+          }
+          this.scheduleDispatch();
+        });
+      }
+
+      private scheduleDispatch(): void {
+        if (this.scheduled) return;
+        this.scheduled = true;
+        queueMicrotask(() => this.dispatch());
+      }
+
+      private async dispatch(): Promise<void> {
+        this.scheduled = false;
+        const batch = this.batch;
+        this.batch = new Map();
+
+        const keys = Array.from(batch.keys());
+        if (keys.length === 0) return;
+
+        try {
+          const results = await this.batchFn(keys);
+          keys.forEach((key, index) => {
+            const callbacks = batch.get(key)!;
+            const result = results[index] ?? null;
+            for (const { resolve } of callbacks) resolve(result);
+          });
+        } catch (error) {
+          for (const callbacks of batch.values()) {
+            for (const { reject } of callbacks) reject(error as Error);
+          }
+        }
+      }
+
+      clear(): void {
+        this.batch.clear();
+      }
+    }
+
+    const loader = new TestDataLoader(async (keys) => {
+      return keys.map((k) => `value-${k}`);
+    });
+
+    // Load same key multiple times
+    const promises = [loader.load("key1"), loader.load("key1"), loader.load("key1")];
+
+    const results = await Promise.all(promises);
+
+    // All should resolve with same value
+    expect(results).toEqual(["value-key1", "value-key1", "value-key1"]);
+  });
+
+  it("handles batch function errors", async () => {
+    class TestDataLoader {
+      private batch: Map<string, { resolve: (v: any) => void; reject: (e: Error) => void }[]> = new Map();
+      private scheduled = false;
+
+      constructor(private batchFn: (keys: string[]) => Promise<(string | null)[]>) {}
+
+      async load(key: string): Promise<string | null> {
+        return new Promise((resolve, reject) => {
+          const existing = this.batch.get(key);
+          if (existing) {
+            existing.push({ resolve, reject });
+          } else {
+            this.batch.set(key, [{ resolve, reject }]);
+          }
+          this.scheduleDispatch();
+        });
+      }
+
+      private scheduleDispatch(): void {
+        if (this.scheduled) return;
+        this.scheduled = true;
+        queueMicrotask(() => this.dispatch());
+      }
+
+      private async dispatch(): Promise<void> {
+        this.scheduled = false;
+        const batch = this.batch;
+        this.batch = new Map();
+
+        const keys = Array.from(batch.keys());
+        if (keys.length === 0) return;
+
+        try {
+          const results = await this.batchFn(keys);
+          keys.forEach((key, index) => {
+            const callbacks = batch.get(key)!;
+            const result = results[index] ?? null;
+            for (const { resolve } of callbacks) resolve(result);
+          });
+        } catch (error) {
+          for (const callbacks of batch.values()) {
+            for (const { reject } of callbacks) reject(error as Error);
+          }
+        }
+      }
+
+      clear(): void {
+        this.batch.clear();
+      }
+    }
+
+    const loader = new TestDataLoader(async () => {
+      throw new Error("Batch function error");
+    });
+
+    const promises = [loader.load("key1"), loader.load("key2")];
+
+    // All loads should reject with same error
+    await expect(Promise.all(promises)).rejects.toThrow("Batch function error");
+  });
+
+  it("does not schedule dispatch twice if already scheduled", async () => {
+    let dispatchCount = 0;
+
+    class TestDataLoader {
+      private batch: Map<string, { resolve: (v: any) => void; reject: (e: Error) => void }[]> = new Map();
+      private scheduled = false;
+
+      constructor(private batchFn: (keys: string[]) => Promise<(string | null)[]>) {}
+
+      async load(key: string): Promise<string | null> {
+        return new Promise((resolve, reject) => {
+          const existing = this.batch.get(key);
+          if (existing) {
+            existing.push({ resolve, reject });
+          } else {
+            this.batch.set(key, [{ resolve, reject }]);
+          }
+          this.scheduleDispatch();
+        });
+      }
+
+      private scheduleDispatch(): void {
+        if (this.scheduled) return;
+        this.scheduled = true;
+        queueMicrotask(() => this.dispatch());
+      }
+
+      private async dispatch(): Promise<void> {
+        dispatchCount++;
+        this.scheduled = false;
+        const batch = this.batch;
+        this.batch = new Map();
+
+        const keys = Array.from(batch.keys());
+        if (keys.length === 0) return;
+
+        try {
+          const results = await this.batchFn(keys);
+          keys.forEach((key, index) => {
+            const callbacks = batch.get(key)!;
+            const result = results[index] ?? null;
+            for (const { resolve } of callbacks) resolve(result);
+          });
+        } catch (error) {
+          for (const callbacks of batch.values()) {
+            for (const { reject } of callbacks) reject(error as Error);
+          }
+        }
+      }
+
+      clear(): void {
+        this.batch.clear();
+      }
+    }
+
+    const loader = new TestDataLoader(async (keys) => {
+      return keys.map((k) => `value-${k}`);
+    });
+
+    // Load multiple keys
+    await Promise.all([loader.load("key1"), loader.load("key2"), loader.load("key3")]);
+
+    // Should only dispatch once despite multiple load calls
+    expect(dispatchCount).toBe(1);
+  });
+
+  it("clears pending batches when clear is called", () => {
+    class TestDataLoader {
+      private batch: Map<string, { resolve: (v: any) => void; reject: (e: Error) => void }[]> = new Map();
+      private scheduled = false;
+
+      constructor(private batchFn: (keys: string[]) => Promise<(string | null)[]>) {}
+
+      async load(key: string): Promise<string | null> {
+        return new Promise((resolve, reject) => {
+          const existing = this.batch.get(key);
+          if (existing) {
+            existing.push({ resolve, reject });
+          } else {
+            this.batch.set(key, [{ resolve, reject }]);
+          }
+          this.scheduleDispatch();
+        });
+      }
+
+      private scheduleDispatch(): void {
+        if (this.scheduled) return;
+        this.scheduled = true;
+        queueMicrotask(() => this.dispatch());
+      }
+
+      private async dispatch(): Promise<void> {
+        this.scheduled = false;
+        const batch = this.batch;
+        this.batch = new Map();
+
+        const keys = Array.from(batch.keys());
+        if (keys.length === 0) return;
+
+        try {
+          const results = await this.batchFn(keys);
+          keys.forEach((key, index) => {
+            const callbacks = batch.get(key)!;
+            const result = results[index] ?? null;
+            for (const { resolve } of callbacks) resolve(result);
+          });
+        } catch (error) {
+          for (const callbacks of batch.values()) {
+            for (const { reject } of callbacks) reject(error as Error);
+          }
+        }
+      }
+
+      clear(): void {
+        this.batch.clear();
+      }
+
+      getBatchSize(): number {
+        return this.batch.size;
+      }
+    }
+
+    const loader = new TestDataLoader(async (keys) => {
+      return keys.map((k) => `value-${k}`);
+    });
+
+    // Add some items to batch (but don't await - they won't dispatch yet)
+    loader.load("key1");
+    loader.load("key2");
+
+    // Clear should remove pending items
+    loader.clear();
+
+    // Batch should be empty
+    expect(loader.getBatchSize()).toBe(0);
+  });
+
+  it("handles null results from batch function", async () => {
+    class TestDataLoader {
+      private batch: Map<string, { resolve: (v: any) => void; reject: (e: Error) => void }[]> = new Map();
+      private scheduled = false;
+
+      constructor(private batchFn: (keys: string[]) => Promise<(string | null)[]>) {}
+
+      async load(key: string): Promise<string | null> {
+        return new Promise((resolve, reject) => {
+          const existing = this.batch.get(key);
+          if (existing) {
+            existing.push({ resolve, reject });
+          } else {
+            this.batch.set(key, [{ resolve, reject }]);
+          }
+          this.scheduleDispatch();
+        });
+      }
+
+      private scheduleDispatch(): void {
+        if (this.scheduled) return;
+        this.scheduled = true;
+        queueMicrotask(() => this.dispatch());
+      }
+
+      private async dispatch(): Promise<void> {
+        this.scheduled = false;
+        const batch = this.batch;
+        this.batch = new Map();
+
+        const keys = Array.from(batch.keys());
+        if (keys.length === 0) return;
+
+        try {
+          const results = await this.batchFn(keys);
+          keys.forEach((key, index) => {
+            const callbacks = batch.get(key)!;
+            const result = results[index] ?? null;
+            for (const { resolve } of callbacks) resolve(result);
+          });
+        } catch (error) {
+          for (const callbacks of batch.values()) {
+            for (const { reject } of callbacks) reject(error as Error);
+          }
+        }
+      }
+
+      clear(): void {
+        this.batch.clear();
+      }
+    }
+
+    const loader = new TestDataLoader(async (keys) => {
+      // Return null for some keys
+      return keys.map((k) => (k === "key2" ? null : `value-${k}`));
+    });
+
+    const results = await Promise.all([loader.load("key1"), loader.load("key2"), loader.load("key3")]);
+
+    expect(results).toEqual(["value-key1", null, "value-key3"]);
+  });
+});
+
+// =============================================================================
+// Test: HTTP Server Lifecycle (listen, close, findConnectionByWs)
+// =============================================================================
+
+describe("HTTP Server Lifecycle", () => {
+  it("handles GET requests that are not metadata endpoint", async () => {
+    const server = createServer({
+      entities: { User },
+    });
+
+    const request = new Request("http://localhost/some-other-path", { method: "GET" });
+    const response = await server.handleRequest(request);
+
+    expect(response.status).toBe(405);
+    const text = await response.text();
+    expect(text).toBe("Method not allowed");
+  });
+
+  it("handles PUT requests", async () => {
+    const server = createServer({
+      entities: { User },
+    });
+
+    const request = new Request("http://localhost/api", { method: "PUT" });
+    const response = await server.handleRequest(request);
+
+    expect(response.status).toBe(405);
+    const text = await response.text();
+    expect(text).toBe("Method not allowed");
+  });
+
+  it("handles DELETE requests", async () => {
+    const server = createServer({
+      entities: { User },
+    });
+
+    const request = new Request("http://localhost/api", { method: "DELETE" });
+    const response = await server.handleRequest(request);
+
+    expect(response.status).toBe(405);
+    const text = await response.text();
+    expect(text).toBe("Method not allowed");
+  });
+
+  it("handles PATCH requests", async () => {
+    const server = createServer({
+      entities: { User },
+    });
+
+    const request = new Request("http://localhost/api", { method: "PATCH" });
+    const response = await server.handleRequest(request);
+
+    expect(response.status).toBe(405);
+    const text = await response.text();
+    expect(text).toBe("Method not allowed");
+  });
+
+  it("handles OPTIONS requests", async () => {
+    const server = createServer({
+      entities: { User },
+    });
+
+    const request = new Request("http://localhost/api", { method: "OPTIONS" });
+    const response = await server.handleRequest(request);
+
+    expect(response.status).toBe(405);
+    const text = await response.text();
+    expect(text).toBe("Method not allowed");
+  });
+
+  it("handles HEAD requests", async () => {
+    const server = createServer({
+      entities: { User },
+    });
+
+    const request = new Request("http://localhost/api", { method: "HEAD" });
+    const response = await server.handleRequest(request);
+
+    expect(response.status).toBe(405);
+  });
+
+  it("can start and stop server with listen/close", async () => {
+    const server = createServer({
+      entities: { User },
+      logger: {
+        info: () => {}, // Silent logger for test
+      },
+    });
+
+    // Start server on a random high port to avoid conflicts
+    const port = 30000 + Math.floor(Math.random() * 10000);
+
+    try {
+      await server.listen(port);
+
+      // Verify server is running by making a request
+      const response = await fetch(`http://localhost:${port}/__lens/metadata`);
+      expect(response.status).toBe(200);
+      const data = await response.json();
+      expect(data.version).toBeDefined();
+    } finally {
+      // Always close the server
+      await server.close();
+    }
+  });
+
+  it("handles method not allowed via real HTTP server", async () => {
+    const server = createServer({
+      entities: { User },
+      logger: {
+        info: () => {}, // Silent logger for test
+      },
+    });
+
+    const port = 30000 + Math.floor(Math.random() * 10000);
+
+    try {
+      await server.listen(port);
+
+      // Make a PUT request which should return 405
+      const response = await fetch(`http://localhost:${port}/api`, { method: "PUT" });
+      expect(response.status).toBe(405);
+      const text = await response.text();
+      expect(text).toBe("Method not allowed");
+    } finally {
+      await server.close();
+    }
+  });
+
+  // Note: WebSocket integration via Bun.serve's native WebSocket upgrade (lines 1184-1193)
+  // is tested through unit tests using mock WebSockets. Full integration tests with real
+  // WebSocket clients would require additional setup and are better suited for E2E tests.
+});
+
+// =============================================================================
+// Test: SSE Handler Edge Cases
+// =============================================================================
+
+describe("SSE Handler Edge Cases", () => {
+  it("handles WebSocket error callback", async () => {
+    const server = createServer({
+      entities: { User },
+    });
+
+    const ws = createMockWs();
+    server.handleWebSocket(ws);
+
+    // Trigger error callback (if set)
+    if (ws.onerror) {
+      ws.onerror(new Error("WebSocket error"));
+    }
+
+    // Should not crash
+    expect(true).toBe(true);
+  });
+});
+
 // =============================================================================
 // Test: Entity Resolvers
 // =============================================================================
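Taken together, the getMetadata tests above pin down the wire shape clients now receive for optimistic mutations: the sugar values no longer appear in metadata, only Reify pipelines. A sketch of the expected object for the updateUser case, copied from the test expectation above; clients that previously matched on the sugar form should interpret $pipe steps instead:

// Shape asserted by the "includes optimistic config in mutation metadata" test.
const expectedUpdateUserMeta = {
  type: "mutation",
  optimistic: {
    $pipe: [
      {
        $do: "entity.update",
        $with: {
          type: "User",
          id: { $input: "id" },
          name: { $input: "name" },
        },
      },
    ],
  },
} as const;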
package/src/server/create.ts
CHANGED
@@ -18,12 +18,16 @@ import {
   type FieldType,
   flattenRouter,
   type InferRouterContext,
+  isEntityDef,
   isMutationDef,
+  isPipeline,
   isQueryDef,
   type MutationDef,
+  type Pipeline,
   type QueryDef,
   type ResolverDef,
   type Resolvers,
+  type ReturnSpec,
   type RouterDef,
   runWithContext,
   toResolverMap,
@@ -35,7 +39,7 @@ export interface SelectionObject {
   [key: string]: boolean | SelectionObject | { select: SelectionObject };
 }
 
-import { GraphStateManager } from "../state/graph-state-manager";
+import { GraphStateManager } from "../state/graph-state-manager.js";
 
 // =============================================================================
 // Types
@@ -77,21 +81,21 @@ export interface LensServerConfig<
   TRouter extends RouterDef = RouterDef,
 > {
   /** Entity definitions */
-  entities?: EntitiesMap;
+  entities?: EntitiesMap | undefined;
   /** Router definition (namespaced operations) - context type is inferred */
-  router?: TRouter;
+  router?: TRouter | undefined;
   /** Query definitions (flat, legacy) */
-  queries?: QueriesMap;
+  queries?: QueriesMap | undefined;
   /** Mutation definitions (flat, legacy) */
-  mutations?: MutationsMap;
+  mutations?: MutationsMap | undefined;
   /** Field resolvers array (use lens() factory to create) */
-  resolvers?: Resolvers;
+  resolvers?: Resolvers | undefined;
   /** Logger for server messages (default: silent) */
-  logger?: LensLogger;
+  logger?: LensLogger | undefined;
   /** Context factory - must return the context type expected by the router */
-  context?: (req?: unknown) => TContext | Promise<TContext>;
+  context?: ((req?: unknown) => TContext | Promise<TContext>) | undefined;
   /** Server version */
-  version?: string;
+  version?: string | undefined;
 }
 
 /** Server metadata for transport handshake */
@@ -149,6 +153,121 @@ export interface WebSocketLike {
   onerror?: ((error: unknown) => void) | null;
 }
 
+// =============================================================================
+// Sugar to Reify Pipeline Conversion
+// =============================================================================
+
+/**
+ * Extract entity type name from return spec.
+ * Returns undefined if not an entity.
+ */
+function getEntityTypeName(returnSpec: ReturnSpec | undefined): string | undefined {
+  if (!returnSpec) return undefined;
+
+  // Single entity: EntityDef
+  if (isEntityDef(returnSpec)) {
+    return returnSpec._name;
+  }
+
+  // Array of entities: [EntityDef]
+  if (Array.isArray(returnSpec) && returnSpec.length === 1 && isEntityDef(returnSpec[0])) {
+    return returnSpec[0]._name;
+  }
+
+  return undefined;
+}
+
+/**
+ * Get input field keys from a Zod-like schema.
+ * Falls back to empty array if schema doesn't have shape.
+ */
+function getInputFields(inputSchema: { shape?: Record<string, unknown> } | undefined): string[] {
+  if (!inputSchema?.shape) return [];
+  return Object.keys(inputSchema.shape);
+}
+
+/**
+ * Convert sugar syntax to Reify Pipeline.
+ *
+ * Sugar syntax:
+ * - "merge" → entity.update with input fields merged
+ * - "create" → entity.create with temp ID
+ * - "delete" → entity.delete by input.id
+ * - { merge: {...} } → entity.update with input + extra fields
+ *
+ * Returns the original value if already a Pipeline or not sugar.
+ */
+function sugarToPipeline(
+  optimistic: unknown,
+  entityType: string | undefined,
+  inputFields: string[],
+): Pipeline | unknown {
+  // Already a Pipeline - pass through
+  if (isPipeline(optimistic)) {
+    return optimistic;
+  }
+
+  // No entity type - can't convert sugar
+  if (!entityType) {
+    return optimistic;
+  }
+
+  // "merge" sugar - update entity with input fields
+  if (optimistic === "merge") {
+    const args: Record<string, unknown> = { type: entityType };
+    for (const field of inputFields) {
+      args[field] = { $input: field };
+    }
+    return {
+      $pipe: [{ $do: "entity.update", $with: args }],
+    };
+  }
+
+  // "create" sugar - create entity with temp ID
+  if (optimistic === "create") {
+    const args: Record<string, unknown> = { type: entityType, id: { $temp: true } };
+    for (const field of inputFields) {
+      if (field !== "id") {
+        args[field] = { $input: field };
+      }
+    }
+    return {
+      $pipe: [{ $do: "entity.create", $with: args }],
+    };
+  }
+
+  // "delete" sugar - delete entity by input.id
+  if (optimistic === "delete") {
+    return {
+      $pipe: [{ $do: "entity.delete", $with: { type: entityType, id: { $input: "id" } } }],
+    };
+  }
+
+  // { merge: {...} } sugar - update with input + extra fields
+  if (
+    typeof optimistic === "object" &&
+    optimistic !== null &&
+    "merge" in optimistic &&
+    typeof (optimistic as Record<string, unknown>).merge === "object"
+  ) {
+    const extra = (optimistic as { merge: Record<string, unknown> }).merge;
+    const args: Record<string, unknown> = { type: entityType };
+    for (const field of inputFields) {
+      args[field] = { $input: field };
+    }
+    // Extra fields override input refs
+    for (const [key, value] of Object.entries(extra)) {
+      args[key] = value;
+    }
+    return {
+      $pipe: [{ $do: "entity.update", $with: args }],
+    };
+  }
+
+  // Unknown format - pass through
+  return optimistic;
+}
+
 // =============================================================================
 // Protocol Messages
 // =============================================================================
@@ -309,7 +428,7 @@ class LensServerImpl<
   private queries: Q;
   private mutations: M;
   private entities: EntitiesMap;
-  private resolverMap?: ResolverMap;
+  private resolverMap?: ResolverMap | undefined;
   private contextFactory: (req?: unknown) => TContext | Promise<TContext>;
   private version: string;
   private logger: LensLogger;
@@ -479,11 +598,14 @@ class LensServerImpl<
       setNested(name, { type: "query" });
     }
 
-    // Add mutations with optimistic config
+    // Add mutations with optimistic config (convert sugar to Reify Pipeline)
     for (const [name, def] of Object.entries(this.mutations)) {
      const meta: OperationMeta = { type: "mutation" };
      if (def._optimistic) {
-
+        // Convert sugar syntax to Reify Pipeline
+        const entityType = getEntityTypeName(def._output);
+        const inputFields = getInputFields(def._input as { shape?: Record<string, unknown> });
+        meta.optimistic = sugarToPipeline(def._optimistic, entityType, inputFields);
      }
      setNested(name, meta);
     }
@@ -701,11 +823,16 @@ class LensServerImpl<
       };
     };
 
-    const result = resolver({
-      input: sub.input,
-      ctx: context,
+    // Merge Lens extensions (emit, onCleanup) into user context
+    const lensContext = {
+      ...context,
       emit,
       onCleanup,
+    };
+
+    const result = resolver({
+      input: sub.input,
+      ctx: lensContext,
     });
 
     if (isAsyncIterable(result)) {
@@ -920,11 +1047,16 @@ class LensServerImpl<
     const emit = createEmit(() => {});
     const onCleanup = () => () => {};
 
-    const result = resolver({
-      input: cleanInput as TInput,
-      ctx: context,
+    // Merge Lens extensions (emit, onCleanup) into user context
+    const lensContext = {
+      ...context,
      emit,
      onCleanup,
+    };
+
+    const result = resolver({
+      input: cleanInput as TInput,
+      ctx: lensContext,
     });
 
     let data: TOutput;
@@ -974,11 +1106,16 @@ class LensServerImpl<
     const emit = createEmit(() => {});
     const onCleanup = () => () => {};
 
-    const result = await resolver({
-      input: input as TInput,
-      ctx: context,
+    // Merge Lens extensions (emit, onCleanup) into user context
+    const lensContext = {
+      ...context,
      emit,
      onCleanup,
+    };
+
+    const result = await resolver({
+      input: input as TInput,
+      ctx: lensContext,
     });
 
     // Emit to GraphStateManager
package/src/sse/handler.ts
CHANGED
@@ -5,7 +5,7 @@
  * Connects SSE streams to GraphStateManager.
  */
 
-import type { GraphStateManager, StateClient } from "../state/graph-state-manager";
+import type { GraphStateManager, StateClient } from "../state/graph-state-manager.js";
 
 // =============================================================================
 // Types
package/src/state/graph-state-manager.test.ts
CHANGED
@@ -3,7 +3,7 @@
  */
 
 import { beforeEach, describe, expect, it, mock } from "bun:test";
-import { GraphStateManager, type StateClient, type StateUpdateMessage } from "./graph-state-manager";
+import { GraphStateManager, type StateClient, type StateUpdateMessage } from "./graph-state-manager.js";
 
 describe("GraphStateManager", () => {
   let manager: GraphStateManager;
package/src/state/index.ts
CHANGED