@sylphx/lens-server 1.5.6 → 1.11.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +21 -12
- package/package.json +2 -2
- package/src/e2e/server.test.ts +8 -8
- package/src/server/create.test.ts +571 -10
- package/src/server/create.ts +27 -12
- package/src/state/graph-state-manager.test.ts +23 -23
package/dist/index.js
CHANGED

@@ -507,8 +507,8 @@ function sugarToPipeline(optimistic, entityType, inputFields) {
       $pipe: [{ $do: "entity.delete", $with: { type: entityType, id: { $input: "id" } } }]
     };
   }
-  if (typeof optimistic === "object" && optimistic !== null && "merge" in optimistic && typeof optimistic
-    const extra = optimistic
+  if (typeof optimistic === "object" && optimistic !== null && "merge" in optimistic && typeof optimistic.merge === "object") {
+    const extra = optimistic.merge;
     const args = { type: entityType };
     for (const field of inputFields) {
       args[field] = { $input: field };
@@ -851,11 +851,14 @@ class LensServerImpl {
        sub.cleanups.splice(idx, 1);
      };
    };
-    const
-
-      ctx: context,
+    const lensContext = {
+      ...context,
      emit,
      onCleanup
+    };
+    const result = resolver({
+      input: sub.input,
+      ctx: lensContext
    });
    if (isAsyncIterable(result)) {
      for await (const value of result) {
@@ -1006,11 +1009,14 @@ class LensServerImpl {
    }
    const emit = createEmit(() => {});
    const onCleanup = () => () => {};
-    const
-
-      ctx: context,
+    const lensContext = {
+      ...context,
      emit,
      onCleanup
+    };
+    const result = resolver({
+      input: cleanInput,
+      ctx: lensContext
    });
    let data;
    if (isAsyncIterable(result)) {
@@ -1050,11 +1056,14 @@ class LensServerImpl {
    }
    const emit = createEmit(() => {});
    const onCleanup = () => () => {};
-    const
-
-      ctx: context,
+    const lensContext = {
+      ...context,
      emit,
      onCleanup
+    };
+    const result = await resolver({
+      input,
+      ctx: lensContext
    });
    const entityName = this.getEntityNameFromMutation(name);
    const entities = this.extractEntities(entityName, result);
@@ -1209,7 +1218,7 @@ class LensServerImpl {
    const result = {};
    const obj = data;
    if ("id" in obj) {
-      result
+      result.id = obj.id;
    }
    if (Array.isArray(fields)) {
      for (const field of fields) {
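Taken together, these hunks change how resolvers receive the Lens runtime helpers: `emit` and `onCleanup` are no longer passed as siblings of `ctx` but are spread into a merged `lensContext`, so resolver code now reads them as `ctx.emit` and `ctx.onCleanup` (the test diffs below show the same shift). A minimal type-level sketch of that change; the names and exact signatures below are assumptions inferred from this diff, not the package's exported types:

```ts
// Hypothetical shapes for illustration only; inferred from the diff, not from
// @sylphx/lens-server's public type definitions.
type RemoveCleanup = () => void;

// Before (per the removed lines): emit/onCleanup arrived next to ctx.
type OldResolverArgs<TInput, TCtx> = {
  input: TInput;
  ctx: TCtx;
  emit: (value: unknown) => void;               // exact signature not shown in the diff
  onCleanup: (fn: () => void) => RemoveCleanup; // returns a remover, per the "const remove = ctx.onCleanup(...)" test
};

// After: the server spreads the user context and the Lens extensions into one object.
type NewResolverArgs<TInput, TCtx> = {
  input: TInput;
  ctx: TCtx & {
    emit: (value: unknown) => void;
    onCleanup: (fn: () => void) => RemoveCleanup;
  };
};
```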
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@sylphx/lens-server",
-  "version": "1.5.6",
+  "version": "1.11.2",
   "description": "Server runtime for Lens API framework",
   "type": "module",
   "main": "./dist/index.js",
@@ -30,7 +30,7 @@
   "author": "SylphxAI",
   "license": "MIT",
   "dependencies": {
-    "@sylphx/lens-core": "^1.
+    "@sylphx/lens-core": "^1.22.2"
   },
   "devDependencies": {
     "typescript": "^5.9.3",
package/src/e2e/server.test.ts
CHANGED

@@ -317,8 +317,8 @@ describe("E2E - Subscriptions", () => {
     const watchUser = query()
       .input(z.object({ id: z.string() }))
       .returns(User)
-      .resolve(({ input,
-        emitFn = emit;
+      .resolve(({ input, ctx }) => {
+        emitFn = ctx.emit;
         const user = mockUsers.find((u) => u.id === input.id);
         if (!user) throw new Error("Not found");
         return user;
@@ -362,8 +362,8 @@ describe("E2E - Subscriptions", () => {
     const watchUser = query()
       .input(z.object({ id: z.string() }))
       .returns(User)
-      .resolve(({ input,
-        emitFn = emit;
+      .resolve(({ input, ctx }) => {
+        emitFn = ctx.emit;
         const user = mockUsers.find((u) => u.id === input.id);
         if (!user) throw new Error("Not found");
         return user;
@@ -464,8 +464,8 @@ describe("E2E - Cleanup", () => {
     const watchUser = query()
       .input(z.object({ id: z.string() }))
       .returns(User)
-      .resolve(({ input,
-        onCleanup(() => {
+      .resolve(({ input, ctx }) => {
+        ctx.onCleanup(() => {
           cleanedUp = true;
         });
         const user = mockUsers.find((u) => u.id === input.id);
@@ -506,8 +506,8 @@ describe("E2E - GraphStateManager", () => {
     const getUser = query()
       .input(z.object({ id: z.string() }))
       .returns(User)
-      .resolve(({ input,
-        emitFn = emit;
+      .resolve(({ input, ctx }) => {
+        emitFn = ctx.emit;
         const user = mockUsers.find((u) => u.id === input.id);
         if (!user) throw new Error("Not found");
         return user;
@@ -724,8 +724,8 @@ describe("onCleanup", () => {
|
|
|
724
724
|
|
|
725
725
|
const liveQuery = query()
|
|
726
726
|
.returns(User)
|
|
727
|
-
.resolve(({
|
|
728
|
-
onCleanup(() => {
|
|
727
|
+
.resolve(({ ctx }) => {
|
|
728
|
+
ctx.onCleanup(() => {
|
|
729
729
|
cleanedUp = true;
|
|
730
730
|
});
|
|
731
731
|
return mockUsers[0];
|
|
@@ -764,8 +764,8 @@ describe("onCleanup", () => {
|
|
|
764
764
|
|
|
765
765
|
const liveQuery = query()
|
|
766
766
|
.returns(User)
|
|
767
|
-
.resolve(({
|
|
768
|
-
onCleanup(() => {
|
|
767
|
+
.resolve(({ ctx }) => {
|
|
768
|
+
ctx.onCleanup(() => {
|
|
769
769
|
cleanedUp = true;
|
|
770
770
|
});
|
|
771
771
|
return mockUsers[0];
|
|
@@ -802,8 +802,8 @@ describe("onCleanup", () => {
|
|
|
802
802
|
|
|
803
803
|
const liveQuery = query()
|
|
804
804
|
.returns(User)
|
|
805
|
-
.resolve(({
|
|
806
|
-
const remove = onCleanup(() => {
|
|
805
|
+
.resolve(({ ctx }) => {
|
|
806
|
+
const remove = ctx.onCleanup(() => {
|
|
807
807
|
cleanedUp = true;
|
|
808
808
|
});
|
|
809
809
|
// Remove the cleanup before unsubscribe
|
|
@@ -1515,8 +1515,8 @@ describe("Logger integration", () => {
|
|
|
1515
1515
|
const errorLogs: string[] = [];
|
|
1516
1516
|
const liveQuery = query()
|
|
1517
1517
|
.returns(User)
|
|
1518
|
-
.resolve(({
|
|
1519
|
-
onCleanup(() => {
|
|
1518
|
+
.resolve(({ ctx }) => {
|
|
1519
|
+
ctx.onCleanup(() => {
|
|
1520
1520
|
throw new Error("Cleanup failed");
|
|
1521
1521
|
});
|
|
1522
1522
|
return mockUsers[0];
|
|
@@ -1560,8 +1560,8 @@ describe("Logger integration", () => {
|
|
|
1560
1560
|
const errorLogs: string[] = [];
|
|
1561
1561
|
const liveQuery = query()
|
|
1562
1562
|
.returns(User)
|
|
1563
|
-
.resolve(({
|
|
1564
|
-
onCleanup(() => {
|
|
1563
|
+
.resolve(({ ctx }) => {
|
|
1564
|
+
ctx.onCleanup(() => {
|
|
1565
1565
|
throw new Error("Disconnect cleanup failed");
|
|
1566
1566
|
});
|
|
1567
1567
|
return mockUsers[0];
|
|
@@ -1600,6 +1600,567 @@ describe("Logger integration", () => {
|
|
|
1600
1600
|
});
|
|
1601
1601
|
});
|
|
1602
1602
|
|
|
1603
|
+
// =============================================================================
|
|
1604
|
+
// Test: DataLoader Batching
|
|
1605
|
+
// =============================================================================
|
|
1606
|
+
|
|
1607
|
+
describe("DataLoader Batching", () => {
|
|
1608
|
+
it("batches multiple load calls into single batch function call", async () => {
|
|
1609
|
+
let batchCallCount = 0;
|
|
1610
|
+
let receivedKeys: string[] = [];
|
|
1611
|
+
|
|
1612
|
+
class TestDataLoader {
|
|
1613
|
+
private batch: Map<string, { resolve: (v: any) => void; reject: (e: Error) => void }[]> = new Map();
|
|
1614
|
+
private scheduled = false;
|
|
1615
|
+
|
|
1616
|
+
constructor(private batchFn: (keys: string[]) => Promise<(string | null)[]>) {}
|
|
1617
|
+
|
|
1618
|
+
async load(key: string): Promise<string | null> {
|
|
1619
|
+
return new Promise((resolve, reject) => {
|
|
1620
|
+
const existing = this.batch.get(key);
|
|
1621
|
+
if (existing) {
|
|
1622
|
+
existing.push({ resolve, reject });
|
|
1623
|
+
} else {
|
|
1624
|
+
this.batch.set(key, [{ resolve, reject }]);
|
|
1625
|
+
}
|
|
1626
|
+
this.scheduleDispatch();
|
|
1627
|
+
});
|
|
1628
|
+
}
|
|
1629
|
+
|
|
1630
|
+
private scheduleDispatch(): void {
|
|
1631
|
+
if (this.scheduled) return;
|
|
1632
|
+
this.scheduled = true;
|
|
1633
|
+
queueMicrotask(() => this.dispatch());
|
|
1634
|
+
}
|
|
1635
|
+
|
|
1636
|
+
private async dispatch(): Promise<void> {
|
|
1637
|
+
this.scheduled = false;
|
|
1638
|
+
const batch = this.batch;
|
|
1639
|
+
this.batch = new Map();
|
|
1640
|
+
|
|
1641
|
+
const keys = Array.from(batch.keys());
|
|
1642
|
+
if (keys.length === 0) return;
|
|
1643
|
+
|
|
1644
|
+
try {
|
|
1645
|
+
const results = await this.batchFn(keys);
|
|
1646
|
+
keys.forEach((key, index) => {
|
|
1647
|
+
const callbacks = batch.get(key)!;
|
|
1648
|
+
const result = results[index] ?? null;
|
|
1649
|
+
for (const { resolve } of callbacks) resolve(result);
|
|
1650
|
+
});
|
|
1651
|
+
} catch (error) {
|
|
1652
|
+
for (const callbacks of batch.values()) {
|
|
1653
|
+
for (const { reject } of callbacks) reject(error as Error);
|
|
1654
|
+
}
|
|
1655
|
+
}
|
|
1656
|
+
}
|
|
1657
|
+
|
|
1658
|
+
clear(): void {
|
|
1659
|
+
this.batch.clear();
|
|
1660
|
+
}
|
|
1661
|
+
}
|
|
1662
|
+
|
|
1663
|
+
const loader = new TestDataLoader(async (keys) => {
|
|
1664
|
+
batchCallCount++;
|
|
1665
|
+
receivedKeys = keys;
|
|
1666
|
+
return keys.map((k) => `value-${k}`);
|
|
1667
|
+
});
|
|
1668
|
+
|
|
1669
|
+
// Load multiple keys in same tick
|
|
1670
|
+
const promises = [loader.load("key1"), loader.load("key2"), loader.load("key3")];
|
|
1671
|
+
|
|
1672
|
+
const results = await Promise.all(promises);
|
|
1673
|
+
|
|
1674
|
+
// Should batch all calls into single batch function call
|
|
1675
|
+
expect(batchCallCount).toBe(1);
|
|
1676
|
+
expect(receivedKeys).toEqual(["key1", "key2", "key3"]);
|
|
1677
|
+
expect(results).toEqual(["value-key1", "value-key2", "value-key3"]);
|
|
1678
|
+
});
|
|
1679
|
+
|
|
1680
|
+
it("handles duplicate keys in same batch", async () => {
|
|
1681
|
+
class TestDataLoader {
|
|
1682
|
+
private batch: Map<string, { resolve: (v: any) => void; reject: (e: Error) => void }[]> = new Map();
|
|
1683
|
+
private scheduled = false;
|
|
1684
|
+
|
|
1685
|
+
constructor(private batchFn: (keys: string[]) => Promise<(string | null)[]>) {}
|
|
1686
|
+
|
|
1687
|
+
async load(key: string): Promise<string | null> {
|
|
1688
|
+
return new Promise((resolve, reject) => {
|
|
1689
|
+
const existing = this.batch.get(key);
|
|
1690
|
+
if (existing) {
|
|
1691
|
+
existing.push({ resolve, reject });
|
|
1692
|
+
} else {
|
|
1693
|
+
this.batch.set(key, [{ resolve, reject }]);
|
|
1694
|
+
}
|
|
1695
|
+
this.scheduleDispatch();
|
|
1696
|
+
});
|
|
1697
|
+
}
|
|
1698
|
+
|
|
1699
|
+
private scheduleDispatch(): void {
|
|
1700
|
+
if (this.scheduled) return;
|
|
1701
|
+
this.scheduled = true;
|
|
1702
|
+
queueMicrotask(() => this.dispatch());
|
|
1703
|
+
}
|
|
1704
|
+
|
|
1705
|
+
private async dispatch(): Promise<void> {
|
|
1706
|
+
this.scheduled = false;
|
|
1707
|
+
const batch = this.batch;
|
|
1708
|
+
this.batch = new Map();
|
|
1709
|
+
|
|
1710
|
+
const keys = Array.from(batch.keys());
|
|
1711
|
+
if (keys.length === 0) return;
|
|
1712
|
+
|
|
1713
|
+
try {
|
|
1714
|
+
const results = await this.batchFn(keys);
|
|
1715
|
+
keys.forEach((key, index) => {
|
|
1716
|
+
const callbacks = batch.get(key)!;
|
|
1717
|
+
const result = results[index] ?? null;
|
|
1718
|
+
for (const { resolve } of callbacks) resolve(result);
|
|
1719
|
+
});
|
|
1720
|
+
} catch (error) {
|
|
1721
|
+
for (const callbacks of batch.values()) {
|
|
1722
|
+
for (const { reject } of callbacks) reject(error as Error);
|
|
1723
|
+
}
|
|
1724
|
+
}
|
|
1725
|
+
}
|
|
1726
|
+
|
|
1727
|
+
clear(): void {
|
|
1728
|
+
this.batch.clear();
|
|
1729
|
+
}
|
|
1730
|
+
}
|
|
1731
|
+
|
|
1732
|
+
const loader = new TestDataLoader(async (keys) => {
|
|
1733
|
+
return keys.map((k) => `value-${k}`);
|
|
1734
|
+
});
|
|
1735
|
+
|
|
1736
|
+
// Load same key multiple times
|
|
1737
|
+
const promises = [loader.load("key1"), loader.load("key1"), loader.load("key1")];
|
|
1738
|
+
|
|
1739
|
+
const results = await Promise.all(promises);
|
|
1740
|
+
|
|
1741
|
+
// All should resolve with same value
|
|
1742
|
+
expect(results).toEqual(["value-key1", "value-key1", "value-key1"]);
|
|
1743
|
+
});
|
|
1744
|
+
|
|
1745
|
+
it("handles batch function errors", async () => {
|
|
1746
|
+
class TestDataLoader {
|
|
1747
|
+
private batch: Map<string, { resolve: (v: any) => void; reject: (e: Error) => void }[]> = new Map();
|
|
1748
|
+
private scheduled = false;
|
|
1749
|
+
|
|
1750
|
+
constructor(private batchFn: (keys: string[]) => Promise<(string | null)[]>) {}
|
|
1751
|
+
|
|
1752
|
+
async load(key: string): Promise<string | null> {
|
|
1753
|
+
return new Promise((resolve, reject) => {
|
|
1754
|
+
const existing = this.batch.get(key);
|
|
1755
|
+
if (existing) {
|
|
1756
|
+
existing.push({ resolve, reject });
|
|
1757
|
+
} else {
|
|
1758
|
+
this.batch.set(key, [{ resolve, reject }]);
|
|
1759
|
+
}
|
|
1760
|
+
this.scheduleDispatch();
|
|
1761
|
+
});
|
|
1762
|
+
}
|
|
1763
|
+
|
|
1764
|
+
private scheduleDispatch(): void {
|
|
1765
|
+
if (this.scheduled) return;
|
|
1766
|
+
this.scheduled = true;
|
|
1767
|
+
queueMicrotask(() => this.dispatch());
|
|
1768
|
+
}
|
|
1769
|
+
|
|
1770
|
+
private async dispatch(): Promise<void> {
|
|
1771
|
+
this.scheduled = false;
|
|
1772
|
+
const batch = this.batch;
|
|
1773
|
+
this.batch = new Map();
|
|
1774
|
+
|
|
1775
|
+
const keys = Array.from(batch.keys());
|
|
1776
|
+
if (keys.length === 0) return;
|
|
1777
|
+
|
|
1778
|
+
try {
|
|
1779
|
+
const results = await this.batchFn(keys);
|
|
1780
|
+
keys.forEach((key, index) => {
|
|
1781
|
+
const callbacks = batch.get(key)!;
|
|
1782
|
+
const result = results[index] ?? null;
|
|
1783
|
+
for (const { resolve } of callbacks) resolve(result);
|
|
1784
|
+
});
|
|
1785
|
+
} catch (error) {
|
|
1786
|
+
for (const callbacks of batch.values()) {
|
|
1787
|
+
for (const { reject } of callbacks) reject(error as Error);
|
|
1788
|
+
}
|
|
1789
|
+
}
|
|
1790
|
+
}
|
|
1791
|
+
|
|
1792
|
+
clear(): void {
|
|
1793
|
+
this.batch.clear();
|
|
1794
|
+
}
|
|
1795
|
+
}
|
|
1796
|
+
|
|
1797
|
+
const loader = new TestDataLoader(async () => {
|
|
1798
|
+
throw new Error("Batch function error");
|
|
1799
|
+
});
|
|
1800
|
+
|
|
1801
|
+
const promises = [loader.load("key1"), loader.load("key2")];
|
|
1802
|
+
|
|
1803
|
+
// All loads should reject with same error
|
|
1804
|
+
await expect(Promise.all(promises)).rejects.toThrow("Batch function error");
|
|
1805
|
+
});
|
|
1806
|
+
|
|
1807
|
+
it("does not schedule dispatch twice if already scheduled", async () => {
|
|
1808
|
+
let dispatchCount = 0;
|
|
1809
|
+
|
|
1810
|
+
class TestDataLoader {
|
|
1811
|
+
private batch: Map<string, { resolve: (v: any) => void; reject: (e: Error) => void }[]> = new Map();
|
|
1812
|
+
private scheduled = false;
|
|
1813
|
+
|
|
1814
|
+
constructor(private batchFn: (keys: string[]) => Promise<(string | null)[]>) {}
|
|
1815
|
+
|
|
1816
|
+
async load(key: string): Promise<string | null> {
|
|
1817
|
+
return new Promise((resolve, reject) => {
|
|
1818
|
+
const existing = this.batch.get(key);
|
|
1819
|
+
if (existing) {
|
|
1820
|
+
existing.push({ resolve, reject });
|
|
1821
|
+
} else {
|
|
1822
|
+
this.batch.set(key, [{ resolve, reject }]);
|
|
1823
|
+
}
|
|
1824
|
+
this.scheduleDispatch();
|
|
1825
|
+
});
|
|
1826
|
+
}
|
|
1827
|
+
|
|
1828
|
+
private scheduleDispatch(): void {
|
|
1829
|
+
if (this.scheduled) return;
|
|
1830
|
+
this.scheduled = true;
|
|
1831
|
+
queueMicrotask(() => this.dispatch());
|
|
1832
|
+
}
|
|
1833
|
+
|
|
1834
|
+
private async dispatch(): Promise<void> {
|
|
1835
|
+
dispatchCount++;
|
|
1836
|
+
this.scheduled = false;
|
|
1837
|
+
const batch = this.batch;
|
|
1838
|
+
this.batch = new Map();
|
|
1839
|
+
|
|
1840
|
+
const keys = Array.from(batch.keys());
|
|
1841
|
+
if (keys.length === 0) return;
|
|
1842
|
+
|
|
1843
|
+
try {
|
|
1844
|
+
const results = await this.batchFn(keys);
|
|
1845
|
+
keys.forEach((key, index) => {
|
|
1846
|
+
const callbacks = batch.get(key)!;
|
|
1847
|
+
const result = results[index] ?? null;
|
|
1848
|
+
for (const { resolve } of callbacks) resolve(result);
|
|
1849
|
+
});
|
|
1850
|
+
} catch (error) {
|
|
1851
|
+
for (const callbacks of batch.values()) {
|
|
1852
|
+
for (const { reject } of callbacks) reject(error as Error);
|
|
1853
|
+
}
|
|
1854
|
+
}
|
|
1855
|
+
}
|
|
1856
|
+
|
|
1857
|
+
clear(): void {
|
|
1858
|
+
this.batch.clear();
|
|
1859
|
+
}
|
|
1860
|
+
}
|
|
1861
|
+
|
|
1862
|
+
const loader = new TestDataLoader(async (keys) => {
|
|
1863
|
+
return keys.map((k) => `value-${k}`);
|
|
1864
|
+
});
|
|
1865
|
+
|
|
1866
|
+
// Load multiple keys
|
|
1867
|
+
await Promise.all([loader.load("key1"), loader.load("key2"), loader.load("key3")]);
|
|
1868
|
+
|
|
1869
|
+
// Should only dispatch once despite multiple load calls
|
|
1870
|
+
expect(dispatchCount).toBe(1);
|
|
1871
|
+
});
|
|
1872
|
+
|
|
1873
|
+
it("clears pending batches when clear is called", () => {
|
|
1874
|
+
class TestDataLoader {
|
|
1875
|
+
private batch: Map<string, { resolve: (v: any) => void; reject: (e: Error) => void }[]> = new Map();
|
|
1876
|
+
private scheduled = false;
|
|
1877
|
+
|
|
1878
|
+
constructor(private batchFn: (keys: string[]) => Promise<(string | null)[]>) {}
|
|
1879
|
+
|
|
1880
|
+
async load(key: string): Promise<string | null> {
|
|
1881
|
+
return new Promise((resolve, reject) => {
|
|
1882
|
+
const existing = this.batch.get(key);
|
|
1883
|
+
if (existing) {
|
|
1884
|
+
existing.push({ resolve, reject });
|
|
1885
|
+
} else {
|
|
1886
|
+
this.batch.set(key, [{ resolve, reject }]);
|
|
1887
|
+
}
|
|
1888
|
+
this.scheduleDispatch();
|
|
1889
|
+
});
|
|
1890
|
+
}
|
|
1891
|
+
|
|
1892
|
+
private scheduleDispatch(): void {
|
|
1893
|
+
if (this.scheduled) return;
|
|
1894
|
+
this.scheduled = true;
|
|
1895
|
+
queueMicrotask(() => this.dispatch());
|
|
1896
|
+
}
|
|
1897
|
+
|
|
1898
|
+
private async dispatch(): Promise<void> {
|
|
1899
|
+
this.scheduled = false;
|
|
1900
|
+
const batch = this.batch;
|
|
1901
|
+
this.batch = new Map();
|
|
1902
|
+
|
|
1903
|
+
const keys = Array.from(batch.keys());
|
|
1904
|
+
if (keys.length === 0) return;
|
|
1905
|
+
|
|
1906
|
+
try {
|
|
1907
|
+
const results = await this.batchFn(keys);
|
|
1908
|
+
keys.forEach((key, index) => {
|
|
1909
|
+
const callbacks = batch.get(key)!;
|
|
1910
|
+
const result = results[index] ?? null;
|
|
1911
|
+
for (const { resolve } of callbacks) resolve(result);
|
|
1912
|
+
});
|
|
1913
|
+
} catch (error) {
|
|
1914
|
+
for (const callbacks of batch.values()) {
|
|
1915
|
+
for (const { reject } of callbacks) reject(error as Error);
|
|
1916
|
+
}
|
|
1917
|
+
}
|
|
1918
|
+
}
|
|
1919
|
+
|
|
1920
|
+
clear(): void {
|
|
1921
|
+
this.batch.clear();
|
|
1922
|
+
}
|
|
1923
|
+
|
|
1924
|
+
getBatchSize(): number {
|
|
1925
|
+
return this.batch.size;
|
|
1926
|
+
}
|
|
1927
|
+
}
|
|
1928
|
+
|
|
1929
|
+
const loader = new TestDataLoader(async (keys) => {
|
|
1930
|
+
return keys.map((k) => `value-${k}`);
|
|
1931
|
+
});
|
|
1932
|
+
|
|
1933
|
+
// Add some items to batch (but don't await - they won't dispatch yet)
|
|
1934
|
+
loader.load("key1");
|
|
1935
|
+
loader.load("key2");
|
|
1936
|
+
|
|
1937
|
+
// Clear should remove pending items
|
|
1938
|
+
loader.clear();
|
|
1939
|
+
|
|
1940
|
+
// Batch should be empty
|
|
1941
|
+
expect(loader.getBatchSize()).toBe(0);
|
|
1942
|
+
});
|
|
1943
|
+
|
|
1944
|
+
it("handles null results from batch function", async () => {
|
|
1945
|
+
class TestDataLoader {
|
|
1946
|
+
private batch: Map<string, { resolve: (v: any) => void; reject: (e: Error) => void }[]> = new Map();
|
|
1947
|
+
private scheduled = false;
|
|
1948
|
+
|
|
1949
|
+
constructor(private batchFn: (keys: string[]) => Promise<(string | null)[]>) {}
|
|
1950
|
+
|
|
1951
|
+
async load(key: string): Promise<string | null> {
|
|
1952
|
+
return new Promise((resolve, reject) => {
|
|
1953
|
+
const existing = this.batch.get(key);
|
|
1954
|
+
if (existing) {
|
|
1955
|
+
existing.push({ resolve, reject });
|
|
1956
|
+
} else {
|
|
1957
|
+
this.batch.set(key, [{ resolve, reject }]);
|
|
1958
|
+
}
|
|
1959
|
+
this.scheduleDispatch();
|
|
1960
|
+
});
|
|
1961
|
+
}
|
|
1962
|
+
|
|
1963
|
+
private scheduleDispatch(): void {
|
|
1964
|
+
if (this.scheduled) return;
|
|
1965
|
+
this.scheduled = true;
|
|
1966
|
+
queueMicrotask(() => this.dispatch());
|
|
1967
|
+
}
|
|
1968
|
+
|
|
1969
|
+
private async dispatch(): Promise<void> {
|
|
1970
|
+
this.scheduled = false;
|
|
1971
|
+
const batch = this.batch;
|
|
1972
|
+
this.batch = new Map();
|
|
1973
|
+
|
|
1974
|
+
const keys = Array.from(batch.keys());
|
|
1975
|
+
if (keys.length === 0) return;
|
|
1976
|
+
|
|
1977
|
+
try {
|
|
1978
|
+
const results = await this.batchFn(keys);
|
|
1979
|
+
keys.forEach((key, index) => {
|
|
1980
|
+
const callbacks = batch.get(key)!;
|
|
1981
|
+
const result = results[index] ?? null;
|
|
1982
|
+
for (const { resolve } of callbacks) resolve(result);
|
|
1983
|
+
});
|
|
1984
|
+
} catch (error) {
|
|
1985
|
+
for (const callbacks of batch.values()) {
|
|
1986
|
+
for (const { reject } of callbacks) reject(error as Error);
|
|
1987
|
+
}
|
|
1988
|
+
}
|
|
1989
|
+
}
|
|
1990
|
+
|
|
1991
|
+
clear(): void {
|
|
1992
|
+
this.batch.clear();
|
|
1993
|
+
}
|
|
1994
|
+
}
|
|
1995
|
+
|
|
1996
|
+
const loader = new TestDataLoader(async (keys) => {
|
|
1997
|
+
// Return null for some keys
|
|
1998
|
+
return keys.map((k) => (k === "key2" ? null : `value-${k}`));
|
|
1999
|
+
});
|
|
2000
|
+
|
|
2001
|
+
const results = await Promise.all([loader.load("key1"), loader.load("key2"), loader.load("key3")]);
|
|
2002
|
+
|
|
2003
|
+
expect(results).toEqual(["value-key1", null, "value-key3"]);
|
|
2004
|
+
});
|
|
2005
|
+
});
|
|
2006
|
+
|
|
2007
|
+
// =============================================================================
|
|
2008
|
+
// Test: HTTP Server Lifecycle (listen, close, findConnectionByWs)
|
|
2009
|
+
// =============================================================================
|
|
2010
|
+
|
|
2011
|
+
describe("HTTP Server Lifecycle", () => {
|
|
2012
|
+
it("handles GET requests that are not metadata endpoint", async () => {
|
|
2013
|
+
const server = createServer({
|
|
2014
|
+
entities: { User },
|
|
2015
|
+
});
|
|
2016
|
+
|
|
2017
|
+
const request = new Request("http://localhost/some-other-path", { method: "GET" });
|
|
2018
|
+
const response = await server.handleRequest(request);
|
|
2019
|
+
|
|
2020
|
+
expect(response.status).toBe(405);
|
|
2021
|
+
const text = await response.text();
|
|
2022
|
+
expect(text).toBe("Method not allowed");
|
|
2023
|
+
});
|
|
2024
|
+
|
|
2025
|
+
it("handles PUT requests", async () => {
|
|
2026
|
+
const server = createServer({
|
|
2027
|
+
entities: { User },
|
|
2028
|
+
});
|
|
2029
|
+
|
|
2030
|
+
const request = new Request("http://localhost/api", { method: "PUT" });
|
|
2031
|
+
const response = await server.handleRequest(request);
|
|
2032
|
+
|
|
2033
|
+
expect(response.status).toBe(405);
|
|
2034
|
+
const text = await response.text();
|
|
2035
|
+
expect(text).toBe("Method not allowed");
|
|
2036
|
+
});
|
|
2037
|
+
|
|
2038
|
+
it("handles DELETE requests", async () => {
|
|
2039
|
+
const server = createServer({
|
|
2040
|
+
entities: { User },
|
|
2041
|
+
});
|
|
2042
|
+
|
|
2043
|
+
const request = new Request("http://localhost/api", { method: "DELETE" });
|
|
2044
|
+
const response = await server.handleRequest(request);
|
|
2045
|
+
|
|
2046
|
+
expect(response.status).toBe(405);
|
|
2047
|
+
const text = await response.text();
|
|
2048
|
+
expect(text).toBe("Method not allowed");
|
|
2049
|
+
});
|
|
2050
|
+
|
|
2051
|
+
it("handles PATCH requests", async () => {
|
|
2052
|
+
const server = createServer({
|
|
2053
|
+
entities: { User },
|
|
2054
|
+
});
|
|
2055
|
+
|
|
2056
|
+
const request = new Request("http://localhost/api", { method: "PATCH" });
|
|
2057
|
+
const response = await server.handleRequest(request);
|
|
2058
|
+
|
|
2059
|
+
expect(response.status).toBe(405);
|
|
2060
|
+
const text = await response.text();
|
|
2061
|
+
expect(text).toBe("Method not allowed");
|
|
2062
|
+
});
|
|
2063
|
+
|
|
2064
|
+
it("handles OPTIONS requests", async () => {
|
|
2065
|
+
const server = createServer({
|
|
2066
|
+
entities: { User },
|
|
2067
|
+
});
|
|
2068
|
+
|
|
2069
|
+
const request = new Request("http://localhost/api", { method: "OPTIONS" });
|
|
2070
|
+
const response = await server.handleRequest(request);
|
|
2071
|
+
|
|
2072
|
+
expect(response.status).toBe(405);
|
|
2073
|
+
const text = await response.text();
|
|
2074
|
+
expect(text).toBe("Method not allowed");
|
|
2075
|
+
});
|
|
2076
|
+
|
|
2077
|
+
it("handles HEAD requests", async () => {
|
|
2078
|
+
const server = createServer({
|
|
2079
|
+
entities: { User },
|
|
2080
|
+
});
|
|
2081
|
+
|
|
2082
|
+
const request = new Request("http://localhost/api", { method: "HEAD" });
|
|
2083
|
+
const response = await server.handleRequest(request);
|
|
2084
|
+
|
|
2085
|
+
expect(response.status).toBe(405);
|
|
2086
|
+
});
|
|
2087
|
+
|
|
2088
|
+
it("can start and stop server with listen/close", async () => {
|
|
2089
|
+
const server = createServer({
|
|
2090
|
+
entities: { User },
|
|
2091
|
+
logger: {
|
|
2092
|
+
info: () => {}, // Silent logger for test
|
|
2093
|
+
},
|
|
2094
|
+
});
|
|
2095
|
+
|
|
2096
|
+
// Start server on a random high port to avoid conflicts
|
|
2097
|
+
const port = 30000 + Math.floor(Math.random() * 10000);
|
|
2098
|
+
|
|
2099
|
+
try {
|
|
2100
|
+
await server.listen(port);
|
|
2101
|
+
|
|
2102
|
+
// Verify server is running by making a request
|
|
2103
|
+
const response = await fetch(`http://localhost:${port}/__lens/metadata`);
|
|
2104
|
+
expect(response.status).toBe(200);
|
|
2105
|
+
const data = await response.json();
|
|
2106
|
+
expect(data.version).toBeDefined();
|
|
2107
|
+
} finally {
|
|
2108
|
+
// Always close the server
|
|
2109
|
+
await server.close();
|
|
2110
|
+
}
|
|
2111
|
+
});
|
|
2112
|
+
|
|
2113
|
+
it("handles method not allowed via real HTTP server", async () => {
|
|
2114
|
+
const server = createServer({
|
|
2115
|
+
entities: { User },
|
|
2116
|
+
logger: {
|
|
2117
|
+
info: () => {}, // Silent logger for test
|
|
2118
|
+
},
|
|
2119
|
+
});
|
|
2120
|
+
|
|
2121
|
+
const port = 30000 + Math.floor(Math.random() * 10000);
|
|
2122
|
+
|
|
2123
|
+
try {
|
|
2124
|
+
await server.listen(port);
|
|
2125
|
+
|
|
2126
|
+
// Make a PUT request which should return 405
|
|
2127
|
+
const response = await fetch(`http://localhost:${port}/api`, { method: "PUT" });
|
|
2128
|
+
expect(response.status).toBe(405);
|
|
2129
|
+
const text = await response.text();
|
|
2130
|
+
expect(text).toBe("Method not allowed");
|
|
2131
|
+
} finally {
|
|
2132
|
+
await server.close();
|
|
2133
|
+
}
|
|
2134
|
+
});
|
|
2135
|
+
|
|
2136
|
+
// Note: WebSocket integration via Bun.serve's native WebSocket upgrade (lines 1184-1193)
|
|
2137
|
+
// is tested through unit tests using mock WebSockets. Full integration tests with real
|
|
2138
|
+
// WebSocket clients would require additional setup and are better suited for E2E tests.
|
|
2139
|
+
});
|
|
2140
|
+
|
|
2141
|
+
// =============================================================================
|
|
2142
|
+
// Test: SSE Handler Edge Cases
|
|
2143
|
+
// =============================================================================
|
|
2144
|
+
|
|
2145
|
+
describe("SSE Handler Edge Cases", () => {
|
|
2146
|
+
it("handles WebSocket error callback", async () => {
|
|
2147
|
+
const server = createServer({
|
|
2148
|
+
entities: { User },
|
|
2149
|
+
});
|
|
2150
|
+
|
|
2151
|
+
const ws = createMockWs();
|
|
2152
|
+
server.handleWebSocket(ws);
|
|
2153
|
+
|
|
2154
|
+
// Trigger error callback (if set)
|
|
2155
|
+
if (ws.onerror) {
|
|
2156
|
+
ws.onerror(new Error("WebSocket error"));
|
|
2157
|
+
}
|
|
2158
|
+
|
|
2159
|
+
// Should not crash
|
|
2160
|
+
expect(true).toBe(true);
|
|
2161
|
+
});
|
|
2162
|
+
});
|
|
2163
|
+
|
|
1603
2164
|
// =============================================================================
|
|
1604
2165
|
// Test: Entity Resolvers
|
|
1605
2166
|
// =============================================================================
|
package/src/server/create.ts
CHANGED

@@ -248,9 +248,9 @@ function sugarToPipeline(
     typeof optimistic === "object" &&
     optimistic !== null &&
     "merge" in optimistic &&
-    typeof (optimistic as Record<string, unknown>)
+    typeof (optimistic as Record<string, unknown>).merge === "object"
   ) {
-    const extra = (optimistic as { merge: Record<string, unknown> })
+    const extra = (optimistic as { merge: Record<string, unknown> }).merge;
     const args: Record<string, unknown> = { type: entityType };
     for (const field of inputFields) {
       args[field] = { $input: field };
@@ -823,11 +823,16 @@ class LensServerImpl<
      };
    };
 
-
-
-
+    // Merge Lens extensions (emit, onCleanup) into user context
+    const lensContext = {
+      ...context,
      emit,
      onCleanup,
+    };
+
+    const result = resolver({
+      input: sub.input,
+      ctx: lensContext,
    });
 
    if (isAsyncIterable(result)) {
@@ -1042,11 +1047,16 @@ class LensServerImpl<
    const emit = createEmit(() => {});
    const onCleanup = () => () => {};
 
-
-
-
+    // Merge Lens extensions (emit, onCleanup) into user context
+    const lensContext = {
+      ...context,
      emit,
      onCleanup,
+    };
+
+    const result = resolver({
+      input: cleanInput as TInput,
+      ctx: lensContext,
    });
 
    let data: TOutput;
@@ -1096,11 +1106,16 @@ class LensServerImpl<
    const emit = createEmit(() => {});
    const onCleanup = () => () => {};
 
-
-
-
+    // Merge Lens extensions (emit, onCleanup) into user context
+    const lensContext = {
+      ...context,
      emit,
      onCleanup,
+    };
+
+    const result = await resolver({
+      input: input as TInput,
+      ctx: lensContext,
    });
 
    // Emit to GraphStateManager
@@ -1300,7 +1315,7 @@ class LensServerImpl<
 
    // Always include id
    if ("id" in obj) {
-      result
+      result.id = obj.id;
    }
 
    // Handle string array (simple field list)
@@ -62,7 +62,7 @@ describe("GraphStateManager", () => {
|
|
|
62
62
|
entity: "Post",
|
|
63
63
|
id: "123",
|
|
64
64
|
});
|
|
65
|
-
expect(mockClient.messages[0].updates
|
|
65
|
+
expect(mockClient.messages[0].updates.title).toMatchObject({
|
|
66
66
|
strategy: "value",
|
|
67
67
|
data: "Hello",
|
|
68
68
|
});
|
|
@@ -179,7 +179,7 @@ describe("GraphStateManager", () => {
|
|
|
179
179
|
|
|
180
180
|
manager.emit("Post", "123", { title: "World" });
|
|
181
181
|
|
|
182
|
-
expect(mockClient.messages[0].updates
|
|
182
|
+
expect(mockClient.messages[0].updates.title.strategy).toBe("value");
|
|
183
183
|
});
|
|
184
184
|
|
|
185
185
|
it("uses delta strategy for long strings with small changes", () => {
|
|
@@ -191,7 +191,7 @@ describe("GraphStateManager", () => {
|
|
|
191
191
|
manager.emit("Post", "123", { content: `${longText} appended` });
|
|
192
192
|
|
|
193
193
|
// Should use delta for efficient transfer
|
|
194
|
-
const update = mockClient.messages[0].updates
|
|
194
|
+
const update = mockClient.messages[0].updates.content;
|
|
195
195
|
expect(["delta", "value"]).toContain(update.strategy);
|
|
196
196
|
});
|
|
197
197
|
|
|
@@ -206,7 +206,7 @@ describe("GraphStateManager", () => {
|
|
|
206
206
|
metadata: { views: 101, likes: 10, tags: ["a", "b"] },
|
|
207
207
|
});
|
|
208
208
|
|
|
209
|
-
const update = mockClient.messages[0].updates
|
|
209
|
+
const update = mockClient.messages[0].updates.metadata;
|
|
210
210
|
expect(["patch", "value"]).toContain(update.strategy);
|
|
211
211
|
});
|
|
212
212
|
});
|
|
@@ -258,12 +258,12 @@ describe("GraphStateManager", () => {
|
|
|
258
258
|
|
|
259
259
|
// client-1 got incremental update
|
|
260
260
|
expect(mockClient.messages.length).toBe(1);
|
|
261
|
-
expect(mockClient.messages[0].updates
|
|
261
|
+
expect(mockClient.messages[0].updates.title.data).toBe("Updated");
|
|
262
262
|
|
|
263
263
|
// client-2 got full current state
|
|
264
264
|
expect(client2.messages.length).toBe(1);
|
|
265
|
-
expect(client2.messages[0].updates
|
|
266
|
-
expect(client2.messages[0].updates
|
|
265
|
+
expect(client2.messages[0].updates.title.data).toBe("Updated");
|
|
266
|
+
expect(client2.messages[0].updates.content.data).toBe("World");
|
|
267
267
|
});
|
|
268
268
|
});
|
|
269
269
|
|
|
@@ -390,7 +390,7 @@ describe("GraphStateManager", () => {
|
|
|
390
390
|
manager.emitField("Post", "123", "title", { strategy: "value", data: "Hello World" });
|
|
391
391
|
|
|
392
392
|
expect(mockClient.messages.length).toBe(1);
|
|
393
|
-
expect(mockClient.messages[0].updates
|
|
393
|
+
expect(mockClient.messages[0].updates.title).toEqual({
|
|
394
394
|
strategy: "value",
|
|
395
395
|
data: "Hello World",
|
|
396
396
|
});
|
|
@@ -424,7 +424,7 @@ describe("GraphStateManager", () => {
|
|
|
424
424
|
});
|
|
425
425
|
|
|
426
426
|
const state = manager.getState("Post", "123");
|
|
427
|
-
expect(state?.
|
|
427
|
+
expect(state?.metadata).toEqual({ views: 101, likes: 10 });
|
|
428
428
|
});
|
|
429
429
|
|
|
430
430
|
it("sends field update to subscribed clients only for subscribed fields", () => {
|
|
@@ -480,9 +480,9 @@ describe("GraphStateManager", () => {
|
|
|
480
480
|
]);
|
|
481
481
|
|
|
482
482
|
expect(mockClient.messages.length).toBe(1);
|
|
483
|
-
expect(mockClient.messages[0].updates
|
|
484
|
-
expect(mockClient.messages[0].updates
|
|
485
|
-
expect(mockClient.messages[0].updates
|
|
483
|
+
expect(mockClient.messages[0].updates.title.data).toBe("Hello");
|
|
484
|
+
expect(mockClient.messages[0].updates.content.data).toBe("World");
|
|
485
|
+
expect(mockClient.messages[0].updates.author.data).toBe("Alice");
|
|
486
486
|
});
|
|
487
487
|
|
|
488
488
|
it("applies batch updates to canonical state", () => {
|
|
@@ -712,7 +712,7 @@ describe("GraphStateManager", () => {
|
|
|
712
712
|
});
|
|
713
713
|
|
|
714
714
|
expect(mockClient.messages.length).toBe(1);
|
|
715
|
-
expect(mockClient.messages[0].updates
|
|
715
|
+
expect(mockClient.messages[0].updates.author.data).toEqual({
|
|
716
716
|
id: "1",
|
|
717
717
|
name: "Alice",
|
|
718
718
|
profile: {
|
|
@@ -792,7 +792,7 @@ describe("GraphStateManager", () => {
|
|
|
792
792
|
|
|
793
793
|
// Should have 10 updates
|
|
794
794
|
expect(mockClient.messages.length).toBe(10);
|
|
795
|
-
expect(mockClient.messages[9].updates
|
|
795
|
+
expect(mockClient.messages[9].updates.counter.data).toBe(9);
|
|
796
796
|
});
|
|
797
797
|
|
|
798
798
|
it("handles large number of subscribers to same entity", () => {
|
|
@@ -815,7 +815,7 @@ describe("GraphStateManager", () => {
|
|
|
815
815
|
// All clients should receive the update
|
|
816
816
|
for (const client of clients) {
|
|
817
817
|
expect(client.messages.length).toBe(1);
|
|
818
|
-
expect(client.messages[0].updates
|
|
818
|
+
expect(client.messages[0].updates.title.data).toBe("Broadcast");
|
|
819
819
|
}
|
|
820
820
|
});
|
|
821
821
|
|
|
@@ -842,14 +842,14 @@ describe("GraphStateManager", () => {
|
|
|
842
842
|
manager.emit("Post", "123", { tags: ["javascript", "typescript"] });
|
|
843
843
|
|
|
844
844
|
expect(mockClient.messages.length).toBe(1);
|
|
845
|
-
expect(mockClient.messages[0].updates
|
|
845
|
+
expect(mockClient.messages[0].updates.tags.data).toEqual(["javascript", "typescript"]);
|
|
846
846
|
|
|
847
847
|
// Update array
|
|
848
848
|
mockClient.messages = [];
|
|
849
849
|
manager.emit("Post", "123", { tags: ["javascript", "typescript", "react"] });
|
|
850
850
|
|
|
851
851
|
expect(mockClient.messages.length).toBe(1);
|
|
852
|
-
expect(mockClient.messages[0].updates
|
|
852
|
+
expect(mockClient.messages[0].updates.tags.data).toEqual(["javascript", "typescript", "react"]);
|
|
853
853
|
});
|
|
854
854
|
|
|
855
855
|
it("handles boolean field values", () => {
|
|
@@ -859,14 +859,14 @@ describe("GraphStateManager", () => {
|
|
|
859
859
|
manager.emit("Post", "123", { published: true });
|
|
860
860
|
|
|
861
861
|
expect(mockClient.messages.length).toBe(1);
|
|
862
|
-
expect(mockClient.messages[0].updates
|
|
862
|
+
expect(mockClient.messages[0].updates.published.data).toBe(true);
|
|
863
863
|
|
|
864
864
|
// Toggle boolean
|
|
865
865
|
mockClient.messages = [];
|
|
866
866
|
manager.emit("Post", "123", { published: false });
|
|
867
867
|
|
|
868
868
|
expect(mockClient.messages.length).toBe(1);
|
|
869
|
-
expect(mockClient.messages[0].updates
|
|
869
|
+
expect(mockClient.messages[0].updates.published.data).toBe(false);
|
|
870
870
|
});
|
|
871
871
|
|
|
872
872
|
it("handles number field values including 0", () => {
|
|
@@ -876,14 +876,14 @@ describe("GraphStateManager", () => {
|
|
|
876
876
|
manager.emit("Post", "123", { likes: 0 });
|
|
877
877
|
|
|
878
878
|
expect(mockClient.messages.length).toBe(1);
|
|
879
|
-
expect(mockClient.messages[0].updates
|
|
879
|
+
expect(mockClient.messages[0].updates.likes.data).toBe(0);
|
|
880
880
|
|
|
881
881
|
// Update to positive number
|
|
882
882
|
mockClient.messages = [];
|
|
883
883
|
manager.emit("Post", "123", { likes: 5 });
|
|
884
884
|
|
|
885
885
|
expect(mockClient.messages.length).toBe(1);
|
|
886
|
-
expect(mockClient.messages[0].updates
|
|
886
|
+
expect(mockClient.messages[0].updates.likes.data).toBe(5);
|
|
887
887
|
});
|
|
888
888
|
});
|
|
889
889
|
|
|
@@ -909,7 +909,7 @@ describe("GraphStateManager", () => {
|
|
|
909
909
|
entity: "Users",
|
|
910
910
|
id: "list",
|
|
911
911
|
});
|
|
912
|
-
expect(mockClient.messages[0].updates
|
|
912
|
+
expect(mockClient.messages[0].updates._items.data).toEqual([
|
|
913
913
|
{ id: "1", name: "Alice" },
|
|
914
914
|
{ id: "2", name: "Bob" },
|
|
915
915
|
]);
|
|
@@ -1086,7 +1086,7 @@ describe("GraphStateManager", () => {
|
|
|
1086
1086
|
|
|
1087
1087
|
expect(mockClient.messages.length).toBe(2);
|
|
1088
1088
|
// Second message should be incremental diff (push operation)
|
|
1089
|
-
const update = mockClient.messages[1].updates
|
|
1089
|
+
const update = mockClient.messages[1].updates._items;
|
|
1090
1090
|
expect(update.strategy).toBe("array");
|
|
1091
1091
|
expect(update.data).toEqual([{ op: "push", item: { id: "2", name: "Bob" } }]);
|
|
1092
1092
|
});
|
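The rewritten assertions above index `updates` by field name, and each entry carries a `strategy` ("value", "delta", "patch", or "array") plus a `data` payload. A minimal sketch of the message shape those tests imply; the type names below are assumptions for illustration, not exports of the package:

```ts
// Hypothetical types mirroring what the GraphStateManager tests assert on.
type UpdateStrategy = "value" | "delta" | "patch" | "array";

interface FieldUpdate {
  strategy: UpdateStrategy;
  // "value" carries the raw value; "array" carries ops such as { op: "push", item: ... }.
  data: unknown;
}

interface ClientUpdateMessage {
  // Field-keyed map, e.g. updates.title, updates.content, or updates._items for list results.
  updates: Record<string, FieldUpdate>;
}

// Matches assertions like:
// expect(mockClient.messages[0].updates.title).toMatchObject({ strategy: "value", data: "Hello" });
```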