@gp2f/client-sdk 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/MergeModal.d.ts +21 -0
- package/dist/MergeModal.js +136 -0
- package/dist/ReconciliationBanner.d.ts +18 -0
- package/dist/ReconciliationBanner.js +32 -0
- package/dist/UndoButton.d.ts +14 -0
- package/dist/UndoButton.js +17 -0
- package/dist/client.d.ts +134 -0
- package/dist/client.js +244 -0
- package/dist/index.d.ts +40 -0
- package/dist/index.js +58 -0
- package/dist/wire.d.ts +71 -0
- package/dist/wire.js +5 -0
- package/package.json +38 -0
- package/src/MergeModal.tsx +321 -0
- package/src/ReconciliationBanner.tsx +87 -0
- package/src/UndoButton.tsx +45 -0
- package/src/client.ts +352 -0
- package/src/index.ts +98 -0
- package/src/wire.ts +75 -0
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import React from "react";
|
|
2
|
+
import type { FieldConflict } from "./wire";
|
|
3
|
+
/**
|
|
4
|
+
* Side-by-side modal shown when a REJECT contains non-CRDT field conflicts.
|
|
5
|
+
*
|
|
6
|
+
* For each conflicting field the user can choose:
|
|
7
|
+
* - **Keep mine** – use the local optimistic value
|
|
8
|
+
* - **Use server** – accept the server's authoritative value (from `resolvedValue`)
|
|
9
|
+
*/
|
|
10
|
+
export interface MergeModalProps {
    /** Field-level conflicts from the server REJECT, one per conflicting path. */
    conflicts: FieldConflict[];
    /** JSON snapshot of the state *before* the rejected op was applied. */
    baseSnapshot: unknown;
    /** JSON diff representing the local (client) changes. */
    localDiff: unknown;
    /** Called with the user's resolution choices (path → chosen value). */
    onResolve: (resolutions: Record<string, unknown>) => void;
    /** Called when the user cancels without resolving. */
    onCancel: () => void;
}
|
|
21
|
+
export declare function MergeModal({ conflicts, localDiff, onResolve, onCancel, }: MergeModalProps): React.ReactElement;
|
|
@@ -0,0 +1,136 @@
|
|
|
1
|
+
import { jsxs as _jsxs, jsx as _jsx } from "react/jsx-runtime";
|
|
2
|
+
import { useState } from "react";
|
|
3
|
+
/**
 * Side-by-side conflict-resolution dialog shown when a server REJECT contains
 * non-CRDT field conflicts.
 *
 * Every conflict defaults to the server's value ("server"); the user may flip
 * individual fields to "mine" (the local optimistic value). `onResolve`
 * receives a map of JSON-pointer path → chosen value; `onCancel` is called
 * when the dialog is dismissed.
 */
export function MergeModal({ conflicts, localDiff, onResolve, onCancel, }) {
    // Per-path choice map: "server" (authoritative) or "mine" (local).
    // Lazy initializer so the map is built only on first render.
    const [choices, setChoices] = useState(() => Object.fromEntries(conflicts.map((c) => [c.path, "server"])));
    function handleChoice(path, choice) {
        setChoices((prev) => ({ ...prev, [path]: choice }));
    }
    // Materialise the final value for every conflict and hand the map to the caller.
    function handleResolve() {
        const resolutions = {};
        for (const conflict of conflicts) {
            if (choices[conflict.path] === "server") {
                resolutions[conflict.path] = conflict.resolvedValue;
            }
            else {
                // "mine": extract value from localDiff
                resolutions[conflict.path] = getFieldValue(localDiff, conflict.path);
            }
        }
        onResolve(resolutions);
    }
    // Full-screen dimmed overlay with a centered dialog. All styling is inline
    // so the component carries no CSS dependency.
    return (_jsx("div", { role: "dialog", "aria-modal": "true", "aria-labelledby": "merge-modal-title", style: {
            position: "fixed",
            inset: 0,
            display: "flex",
            alignItems: "center",
            justifyContent: "center",
            backgroundColor: "rgba(0,0,0,0.45)",
            zIndex: 1000,
        }, children: _jsxs("div", { style: {
                backgroundColor: "#ffffff",
                borderRadius: "0.5rem",
                boxShadow: "0 20px 60px rgba(0,0,0,0.3)",
                width: "min(90vw, 640px)",
                maxHeight: "80vh",
                display: "flex",
                flexDirection: "column",
                overflow: "hidden",
            }, children: [_jsxs("div", { style: {
                        padding: "1rem 1.25rem",
                        borderBottom: "1px solid #e5e7eb",
                        display: "flex",
                        alignItems: "center",
                        justifyContent: "space-between",
                    }, children: [_jsxs("h2", { id: "merge-modal-title", style: { margin: 0, fontSize: "1rem", fontWeight: 600 }, children: ["Resolve Conflicts (", conflicts.length, ")"] }), _jsx("button", { type: "button", "aria-label": "Close", onClick: onCancel, style: {
                                background: "none",
                                border: "none",
                                cursor: "pointer",
                                fontSize: "1.25rem",
                            }, children: "\u2715" })] }), _jsx("div", { style: { overflowY: "auto", flex: 1, padding: "0.75rem 1.25rem" }, children: conflicts.map((conflict) => (_jsx(ConflictRow, { conflict: conflict, localValue: getFieldValue(localDiff, conflict.path), choice: choices[conflict.path] ?? "server", onChoose: (c) => handleChoice(conflict.path, c) }, conflict.path))) }), _jsxs("div", { style: {
                        padding: "0.75rem 1.25rem",
                        borderTop: "1px solid #e5e7eb",
                        display: "flex",
                        justifyContent: "flex-end",
                        gap: "0.5rem",
                    }, children: [_jsx("button", { type: "button", onClick: onCancel, style: {
                                padding: "0.5rem 1rem",
                                borderRadius: "0.375rem",
                                border: "1px solid #d1d5db",
                                background: "#ffffff",
                                cursor: "pointer",
                            }, children: "Cancel" }), _jsx("button", { type: "button", onClick: handleResolve, style: {
                                padding: "0.5rem 1rem",
                                borderRadius: "0.375rem",
                                border: "none",
                                backgroundColor: "#2563eb",
                                color: "#ffffff",
                                cursor: "pointer",
                                fontWeight: 500,
                            }, children: "Apply Resolutions" })] })] }) }));
}
|
|
71
|
+
/**
 * One conflicting field: a header row (JSON-pointer path plus a merge-strategy
 * badge) above a two-column chooser for "My change" vs the server's value.
 *
 * NOTE(review): only the TRANSACTIONAL strategy is styled red; all other
 * strategy values share the blue badge — confirm the strategy enum in wire.ts.
 */
function ConflictRow({ conflict, localValue, choice, onChoose, }) {
    return (_jsxs("div", { style: {
            marginBottom: "1rem",
            borderRadius: "0.375rem",
            border: "1px solid #e5e7eb",
            overflow: "hidden",
        }, children: [_jsxs("div", { style: {
                    padding: "0.5rem 0.75rem",
                    backgroundColor: "#f9fafb",
                    borderBottom: "1px solid #e5e7eb",
                    display: "flex",
                    alignItems: "center",
                    gap: "0.5rem",
                }, children: [_jsx("code", { style: { fontSize: "0.8125rem", flex: 1 }, children: conflict.path }), _jsx("span", { style: {
                            fontSize: "0.75rem",
                            padding: "0.125rem 0.375rem",
                            borderRadius: "0.25rem",
                            backgroundColor: conflict.strategy === "TRANSACTIONAL" ? "#fde8e8" : "#e0f2fe",
                            color: conflict.strategy === "TRANSACTIONAL" ? "#991b1b" : "#0369a1",
                        }, children: conflict.strategy })] }), _jsxs("div", { style: {
                    display: "grid",
                    gridTemplateColumns: "1fr 1fr",
                    gap: 0,
                }, children: [_jsx(OptionPanel, { label: "My change", value: localValue, selected: choice === "mine", onClick: () => onChoose("mine"), accentColor: "#2563eb" }), _jsx(OptionPanel, { label: "Server (authoritative)", value: conflict.resolvedValue, selected: choice === "server", onClick: () => onChoose("server"), accentColor: "#16a34a", bordered: true })] })] }));
}
|
|
96
|
+
/**
 * Clickable half of a ConflictRow: a label plus a pretty-printed JSON preview
 * of the candidate value. The selected panel is tinted and outlined in the
 * accent colour.
 */
function OptionPanel({ label, value, selected, onClick, accentColor, bordered, }) {
    return (_jsxs("button", { type: "button", onClick: onClick, style: {
            padding: "0.75rem",
            textAlign: "left",
            cursor: "pointer",
            border: "none",
            borderLeft: bordered ? "1px solid #e5e7eb" : undefined,
            // `${accentColor}10` appends a hex alpha nibble pair — assumes the
            // accent colour is a 6-digit hex value (#rrggbb); TODO confirm.
            backgroundColor: selected ? `${accentColor}10` : "#ffffff",
            outline: selected ? `2px solid ${accentColor}` : "none",
            outlineOffset: "-2px",
            transition: "background-color 100ms",
            width: "100%",
        }, children: [_jsxs("div", { style: {
                    fontSize: "0.75rem",
                    fontWeight: 600,
                    color: selected ? accentColor : "#6b7280",
                    marginBottom: "0.25rem",
                }, children: [selected && "✔ ", label] }), _jsx("pre", { style: {
                    margin: 0,
                    fontSize: "0.75rem",
                    fontFamily: "monospace",
                    whiteSpace: "pre-wrap",
                    wordBreak: "break-all",
                    color: "#1f2937",
                }, children: JSON.stringify(value, null, 2) })] }));
}
|
|
122
|
+
// ── helpers ───────────────────────────────────────────────────────────────────
|
|
123
|
+
/**
 * Extract the value at a JSON-pointer path (RFC 6901, e.g. `/amount`) from an
 * object.
 *
 * Handles the two RFC 6901 escape sequences (`~1` → `/`, `~0` → `~`) and the
 * empty pointer `""`, which refers to the whole document. Returns `undefined`
 * when the pointer cannot be resolved.
 */
function getFieldValue(obj, pointer) {
    if (typeof obj !== "object" || obj === null)
        return undefined;
    // RFC 6901: the empty pointer references the root document itself.
    if (pointer === "")
        return obj;
    // Strip the leading `/`, split into reference tokens, and unescape each
    // token. Decode `~1` before `~0` so `~01` decodes to `~1`, not `/`.
    const segments = pointer
        .replace(/^\//, "")
        .split("/")
        .map((seg) => seg.replace(/~1/g, "/").replace(/~0/g, "~"));
    let current = obj;
    for (const seg of segments) {
        if (typeof current !== "object" || current === null)
            return undefined;
        current = current[seg];
    }
    return current;
}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import React from "react";
|
|
2
|
+
import type { RejectResponse } from "./wire";
|
|
3
|
+
/**
|
|
4
|
+
* Displayed as a dismissible banner at the top of the form whenever the server
|
|
5
|
+
* REJECTs an operation. Shows the rejection reason and provides "Undo" and
|
|
6
|
+
* "Resolve conflicts" actions.
|
|
7
|
+
*/
|
|
8
|
+
export interface ReconciliationBannerProps {
|
|
9
|
+
/** The full server REJECT response. */
|
|
10
|
+
rejection: RejectResponse;
|
|
11
|
+
/** Called when the user clicks "Undo". */
|
|
12
|
+
onUndo: () => void;
|
|
13
|
+
/** Called when the user clicks "Resolve conflicts". */
|
|
14
|
+
onResolve: () => void;
|
|
15
|
+
/** Called when the user dismisses the banner. */
|
|
16
|
+
onDismiss: () => void;
|
|
17
|
+
}
|
|
18
|
+
export declare function ReconciliationBanner({ rejection, onUndo, onResolve, onDismiss, }: ReconciliationBannerProps): React.ReactElement;
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import { jsx as _jsx, jsxs as _jsxs } from "react/jsx-runtime";
|
|
2
|
+
import { UndoButton } from "./UndoButton";
|
|
3
|
+
/**
 * Dismissible warning banner shown when the server REJECTs an operation.
 * Displays the rejection reason, an Undo button, and — when the rejection
 * carries field conflicts — a "Resolve conflicts" button with a count.
 *
 * NOTE(review): reads `rejection.patch.conflicts` unconditionally — assumes
 * every REJECT carries a patch with a conflicts array; verify against wire.ts.
 */
export function ReconciliationBanner({ rejection, onUndo, onResolve, onDismiss, }) {
    const hasConflicts = rejection.patch.conflicts.length > 0;
    // role="alert" + aria-live="assertive" so assistive tech announces the
    // rejection as soon as the banner mounts.
    return (_jsxs("div", { role: "alert", "aria-live": "assertive", style: {
            display: "flex",
            alignItems: "center",
            gap: "0.75rem",
            padding: "0.75rem 1rem",
            borderRadius: "0.375rem",
            backgroundColor: "#fef3c7",
            borderLeft: "4px solid #f59e0b",
            color: "#92400e",
            fontSize: "0.875rem",
        }, children: [_jsxs("span", { style: { flex: 1 }, children: [_jsx("strong", { children: "Sync conflict:" }), " ", rejection.reason] }), _jsx(UndoButton, { onUndo: onUndo }), hasConflicts && (_jsxs("button", { type: "button", onClick: onResolve, style: {
                    padding: "0.25rem 0.75rem",
                    borderRadius: "0.25rem",
                    border: "1px solid #b45309",
                    backgroundColor: "transparent",
                    color: "#92400e",
                    cursor: "pointer",
                    fontSize: "0.875rem",
                }, children: ["Resolve conflicts (", rejection.patch.conflicts.length, ")"] })), _jsx("button", { type: "button", "aria-label": "Dismiss", onClick: onDismiss, style: {
                    background: "none",
                    border: "none",
                    cursor: "pointer",
                    color: "#92400e",
                    fontSize: "1rem",
                    lineHeight: 1,
                    padding: "0.125rem",
                }, children: "\u2715" })] }));
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import React from "react";
|
|
2
|
+
/**
|
|
3
|
+
* A small undo button shown inline (e.g. in the {@link ReconciliationBanner}).
|
|
4
|
+
* Reverts the optimistic local change that was rejected by the server.
|
|
5
|
+
*/
|
|
6
|
+
export interface UndoButtonProps {
|
|
7
|
+
/** Callback invoked when the user clicks the undo button. */
|
|
8
|
+
onUndo: () => void;
|
|
9
|
+
/** Optional label (defaults to "Undo"). */
|
|
10
|
+
label?: string;
|
|
11
|
+
/** Whether the undo action is currently available. */
|
|
12
|
+
disabled?: boolean;
|
|
13
|
+
}
|
|
14
|
+
export declare function UndoButton({ onUndo, label, disabled, }: UndoButtonProps): React.ReactElement;
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import { jsxs as _jsxs } from "react/jsx-runtime";
|
|
2
|
+
/**
 * Inline undo button (e.g. rendered inside the ReconciliationBanner) that
 * reverts the optimistic local change rejected by the server.
 *
 * @param onUndo   invoked when the user clicks the button
 * @param label    visible button text (defaults to "Undo")
 * @param disabled disables the button and renders it at reduced opacity
 */
export function UndoButton({ onUndo, label = "Undo", disabled = false, }) {
    return (_jsxs("button", { type: "button", onClick: onUndo, disabled: disabled, 
        // The accessible name must match the visible label (WCAG 2.5.3
        // "Label in Name"); a hard-coded aria-label would be wrong whenever
        // the caller supplies a custom label.
        "aria-label": label, style: {
            display: "inline-flex",
            alignItems: "center",
            gap: "0.25rem",
            padding: "0.25rem 0.75rem",
            borderRadius: "0.25rem",
            border: "1px solid #b45309",
            backgroundColor: "#ffffff",
            color: "#92400e",
            cursor: disabled ? "not-allowed" : "pointer",
            fontSize: "0.875rem",
            opacity: disabled ? 0.5 : 1,
            transition: "background-color 150ms",
        }, children: ["\u21A9 ", label] }));
}
|
package/dist/client.d.ts
ADDED
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
import type { ClientMessage, ServerMessage } from "./wire";
|
|
2
|
+
export type MessageHandler = (msg: ServerMessage) => void;
|
|
3
|
+
export type ErrorHandler = (err: Event) => void;
|
|
4
|
+
/**
|
|
5
|
+
* Called for each incremental text token received from the server during a
|
|
6
|
+
* streaming AI response. The `done` flag is `true` on the final token.
|
|
7
|
+
*/
|
|
8
|
+
export type TokenHandler = (token: string, done: boolean) => void;
|
|
9
|
+
/**
|
|
10
|
+
* Called when the server sends a RELOAD_REQUIRED message indicating the
|
|
11
|
+
* client's AST schema version is incompatible. The application should
|
|
12
|
+
* reload its policy bundle and reconnect.
|
|
13
|
+
*/
|
|
14
|
+
export type ReloadRequiredHandler = (minRequiredVersion: string, reason: string) => void;
|
|
15
|
+
export interface Gp2fClientOptions {
|
|
16
|
+
url: string;
|
|
17
|
+
/** Called with every inbound {@link ServerMessage}. */
|
|
18
|
+
onMessage: MessageHandler;
|
|
19
|
+
/** Called on WebSocket error. */
|
|
20
|
+
onError?: ErrorHandler;
|
|
21
|
+
/** Called when the connection is established. */
|
|
22
|
+
onOpen?: () => void;
|
|
23
|
+
/** Called when the connection is closed. */
|
|
24
|
+
onClose?: () => void;
|
|
25
|
+
/**
|
|
26
|
+
* Called with each incremental text token during a streaming AI response.
|
|
27
|
+
* Enables token-by-token UI updates ("Time to First Token" UX pattern).
|
|
28
|
+
*/
|
|
29
|
+
onToken?: TokenHandler;
|
|
30
|
+
/**
|
|
31
|
+
* Called when the server signals that the client's AST schema version is
|
|
32
|
+
* incompatible. The client MUST reload its policy bundle before reconnecting.
|
|
33
|
+
* Defaults to a no-op if not provided.
|
|
34
|
+
*/
|
|
35
|
+
onReloadRequired?: ReloadRequiredHandler;
|
|
36
|
+
/**
|
|
37
|
+
* Token-bucket capacity: maximum number of ops that may be sent in a burst.
|
|
38
|
+
* Defaults to 10.
|
|
39
|
+
*/
|
|
40
|
+
tokenBucketCapacity?: number;
|
|
41
|
+
/**
|
|
42
|
+
* Token-bucket refill rate in tokens per second.
|
|
43
|
+
* Defaults to 5 (one token every 200 ms).
|
|
44
|
+
*/
|
|
45
|
+
tokenBucketRefillRate?: number;
|
|
46
|
+
/**
|
|
47
|
+
* How long (ms) to pause optimistic updates after a conflict is detected.
|
|
48
|
+
* Defaults to 500 ms ("Settle Duration").
|
|
49
|
+
*/
|
|
50
|
+
conflictSettleMs?: number;
|
|
51
|
+
}
|
|
52
|
+
/**
|
|
53
|
+
* Options for {@link applyOptimisticUpdate}.
|
|
54
|
+
*/
|
|
55
|
+
export interface OptimisticUpdateOptions {
|
|
56
|
+
/** The DOM element in which to render the loading indicator. */
|
|
57
|
+
container: HTMLElement;
|
|
58
|
+
/**
|
|
59
|
+
* Vibe engine confidence in [0, 1]. When ≥ 0.7 a full skeleton loader is
|
|
60
|
+
* shown; below that threshold a lighter "Thinking…" text badge is used.
|
|
61
|
+
* Defaults to 0 (text badge).
|
|
62
|
+
*/
|
|
63
|
+
confidence?: number;
|
|
64
|
+
/**
|
|
65
|
+
* Override the default "Thinking…" label shown in low-confidence mode.
|
|
66
|
+
*/
|
|
67
|
+
thinkingText?: string;
|
|
68
|
+
}
|
|
69
|
+
/**
|
|
70
|
+
* Show an optimistic UI loading indicator while waiting for an LLM response.
|
|
71
|
+
*
|
|
72
|
+
* Renders a skeleton loader (high-confidence path) or a "Thinking…" badge
|
|
73
|
+
* (low-confidence path) inside `container`, then returns a cleanup function
|
|
74
|
+
* that removes the indicator when the response arrives.
|
|
75
|
+
*
|
|
76
|
+
* @example
|
|
77
|
+
* ```ts
|
|
78
|
+
* const cleanup = applyOptimisticUpdate({ container: myDiv, confidence: 0.9 });
|
|
79
|
+
* const response = await fetchAiSuggestion();
|
|
80
|
+
* cleanup();
|
|
81
|
+
* renderResponse(response);
|
|
82
|
+
* ```
|
|
83
|
+
*/
|
|
84
|
+
export declare function applyOptimisticUpdate(options: OptimisticUpdateOptions): () => void;
|
|
85
|
+
/**
|
|
86
|
+
* GP2F WebSocket client with:
|
|
87
|
+
* - Token-bucket rate limiting to prevent thundering-herd on reconnect.
|
|
88
|
+
* - Settle-Duration: optimistic updates are paused for
|
|
89
|
+
* {@link Gp2fClientOptions.conflictSettleMs} after a conflict is detected.
|
|
90
|
+
* - Retry-After: the client respects the server's backpressure hint.
|
|
91
|
+
* - Time-offset tracking: the client records the delta between its clock and
|
|
92
|
+
* the server's clock reported in the `HELLO` message.
|
|
93
|
+
*/
|
|
94
|
+
export declare class Gp2fClient {
|
|
95
|
+
private ws;
|
|
96
|
+
private readonly options;
|
|
97
|
+
private readonly bucket;
|
|
98
|
+
/** Timestamp (Date.now()) until which sends are paused. */
|
|
99
|
+
private pauseUntil;
|
|
100
|
+
/** Pending messages queued while the rate limiter or pause is active. */
|
|
101
|
+
private readonly pendingQueue;
|
|
102
|
+
/** Drain timer handle (if set). */
|
|
103
|
+
private drainTimer;
|
|
104
|
+
/**
|
|
105
|
+
* Difference `serverTimeMs - Date.now()` captured on the last HELLO.
|
|
106
|
+
* Add this to `Date.now()` to get an estimate of the server's current time.
|
|
107
|
+
*/
|
|
108
|
+
serverTimeOffsetMs: number;
|
|
109
|
+
constructor(options: Gp2fClientOptions);
|
|
110
|
+
/** Open the WebSocket connection. */
|
|
111
|
+
connect(): void;
|
|
112
|
+
/** Close the WebSocket connection. */
|
|
113
|
+
disconnect(): void;
|
|
114
|
+
/**
|
|
115
|
+
* Send a {@link ClientMessage} to the server.
|
|
116
|
+
*
|
|
117
|
+
* If the rate limiter or a settle/retry-after pause is active the message
|
|
118
|
+
* is queued and drained automatically once the pause expires.
|
|
119
|
+
*/
|
|
120
|
+
send(msg: ClientMessage): void;
|
|
121
|
+
/** Whether the connection is currently open. */
|
|
122
|
+
get connected(): boolean;
|
|
123
|
+
/** Handle an inbound server message, updating internal rate-limit state. */
|
|
124
|
+
private handleInbound;
|
|
125
|
+
/**
|
|
126
|
+
* Returns the number of milliseconds to wait before the next send.
|
|
127
|
+
* 0 means "send immediately".
|
|
128
|
+
*/
|
|
129
|
+
private nextSendDelay;
|
|
130
|
+
/** Schedule a drain of the pending queue after `delayMs` ms. */
|
|
131
|
+
private scheduleDrain;
|
|
132
|
+
/** Attempt to flush as many pending messages as the rate limiter allows. */
|
|
133
|
+
private drainQueue;
|
|
134
|
+
}
|
package/dist/client.js
ADDED
|
@@ -0,0 +1,244 @@
|
|
|
1
|
+
/**
 * Show an optimistic UI loading indicator while waiting for an LLM response.
 *
 * Renders a skeleton loader (high-confidence path) or a "Thinking…" badge
 * (low-confidence path) inside `container`, then returns a cleanup function
 * that removes the indicator when the response arrives.
 *
 * Requires a DOM (`document`); the indicator is appended to `container`
 * synchronously.
 *
 * @example
 * ```ts
 * const cleanup = applyOptimisticUpdate({ container: myDiv, confidence: 0.9 });
 * const response = await fetchAiSuggestion();
 * cleanup();
 * renderResponse(response);
 * ```
 */
export function applyOptimisticUpdate(options) {
    const { container, confidence = 0, thinkingText = "Thinking\u2026" } = options;
    const indicator = document.createElement("div");
    indicator.setAttribute("aria-live", "polite");
    indicator.setAttribute("aria-label", thinkingText);
    if (confidence >= 0.7) {
        // High-confidence: render a skeleton loader so the layout shift is minimal.
        indicator.setAttribute("data-gp2f-skeleton", "true");
        indicator.style.cssText = [
            "display:block",
            "background:linear-gradient(90deg,#e0e0e0 25%,#f5f5f5 50%,#e0e0e0 75%)",
            "background-size:200% 100%",
            "animation:gp2f-shimmer 1.4s infinite",
            "border-radius:4px",
            "height:1.2em",
            "width:80%",
            "margin:4px 0",
        ].join(";");
        // Inject the shimmer keyframes once per document. Only the skeleton
        // path uses the animation, so the low-confidence path never pays for
        // this DOM mutation. (A `typeof document` guard here would be dead
        // code: `document` was already required above.)
        if (!document.getElementById("gp2f-shimmer-style")) {
            const style = document.createElement("style");
            style.id = "gp2f-shimmer-style";
            style.textContent =
                "@keyframes gp2f-shimmer{0%{background-position:200% 0}100%{background-position:-200% 0}}";
            document.head.appendChild(style);
        }
    }
    else {
        // Low-confidence: show a simple "Thinking…" text badge.
        indicator.setAttribute("data-gp2f-thinking", "true");
        indicator.textContent = thinkingText;
        indicator.style.cssText = "opacity:0.6;font-style:italic;font-size:0.9em";
    }
    container.appendChild(indicator);
    return () => {
        // Idempotent and safe even if the caller already removed the node.
        if (indicator.parentNode === container) {
            container.removeChild(indicator);
        }
    };
}
|
|
57
|
+
// ── Token Bucket ──────────────────────────────────────────────────────────────
/**
 * Continuous-refill Token Bucket rate limiter.
 *
 * The bucket holds at most `capacity` tokens and gains `refillRate` tokens per
 * second. Each successful `consume()` removes one token; when no whole token
 * is available, `consume()` instead reports how many milliseconds remain until
 * one has accumulated.
 */
class TokenBucket {
    capacity;
    refillRate;
    tokens;
    lastRefill;
    constructor(capacity, refillRate) {
        this.capacity = capacity;
        this.refillRate = refillRate;
        // Start full so an initial burst of up to `capacity` sends is allowed.
        this.tokens = capacity;
        this.lastRefill = Date.now();
    }
    /** Attempt to consume one token. Returns 0 on success, else wait-ms > 0. */
    consume() {
        this.refill();
        if (this.tokens < 1) {
            // Fractional deficit until the next whole token, converted to ms.
            const deficit = 1 - this.tokens;
            return Math.ceil((deficit / this.refillRate) * 1000);
        }
        this.tokens -= 1;
        return 0;
    }
    /** Credit tokens for the wall-clock time elapsed since the last refill. */
    refill() {
        const now = Date.now();
        const elapsedSeconds = (now - this.lastRefill) / 1000;
        this.lastRefill = now;
        this.tokens = Math.min(this.capacity, this.tokens + elapsedSeconds * this.refillRate);
    }
}
|
|
90
|
+
// ── Gp2fClient ────────────────────────────────────────────────────────────────
/**
 * GP2F WebSocket client with:
 * - Token-bucket rate limiting to prevent thundering-herd on reconnect.
 * - Settle-Duration: optimistic updates are paused for
 *   {@link Gp2fClientOptions.conflictSettleMs} after a conflict is detected.
 * - Retry-After: the client respects the server's backpressure hint.
 * - Time-offset tracking: the client records the delta between its clock and
 *   the server's clock reported in the `HELLO` message.
 */
export class Gp2fClient {
    constructor(options) {
        this.ws = null;
        /** Timestamp (Date.now()) until which sends are paused. */
        this.pauseUntil = 0;
        /** Pending messages queued while the rate limiter or pause is active. */
        this.pendingQueue = [];
        /** Drain timer handle (if set). */
        this.drainTimer = null;
        /**
         * Difference `serverTimeMs - Date.now()` captured on the last HELLO.
         * Add this to `Date.now()` to get an estimate of the server's current time.
         */
        this.serverTimeOffsetMs = 0;
        this.options = options;
        // Defaults: burst capacity 10, refill 5 tokens/second (matches the
        // documented defaults on Gp2fClientOptions).
        this.bucket = new TokenBucket(options.tokenBucketCapacity ?? 10, options.tokenBucketRefillRate ?? 5);
    }
    /** Open the WebSocket connection. No-op when a socket already exists. */
    connect() {
        if (this.ws)
            return;
        const ws = new WebSocket(this.options.url);
        this.ws = ws;
        ws.addEventListener("open", () => this.options.onOpen?.());
        ws.addEventListener("close", () => {
            // Drop the handle so `connect()` can create a fresh socket later.
            this.ws = null;
            this.options.onClose?.();
        });
        ws.addEventListener("error", (e) => this.options.onError?.(e));
        ws.addEventListener("message", (e) => {
            // ── Streaming token path ───────────────────────────────────────────
            // The server may send incremental token frames before the final JSON
            // message to enable token-by-token UI updates (Time to First Token).
            // Streaming frames are plain-text lines of the form:
            //   data: <token>\n (SSE-style, done=false)
            //   data: [DONE]\n (final frame, done=true)
            // NOTE(review): assumes e.data is a string — confirm the server
            // never sends binary frames on this socket.
            if (this.options.onToken && e.data.startsWith("data: ")) {
                const payload = e.data.slice(6).trim();
                if (payload === "[DONE]") {
                    this.options.onToken("", true);
                }
                else {
                    this.options.onToken(payload, false);
                }
                return;
            }
            try {
                const msg = JSON.parse(e.data);
                this.handleInbound(msg);
            }
            catch {
                // Ignore unparseable messages
            }
        });
    }
    /** Close the WebSocket connection. */
    disconnect() {
        this.ws?.close();
        this.ws = null;
    }
    /**
     * Send a {@link ClientMessage} to the server.
     *
     * If the rate limiter or a settle/retry-after pause is active the message
     * is queued and drained automatically once the pause expires.
     *
     * @throws Error when the WebSocket is not open.
     */
    send(msg) {
        if (!this.ws || this.ws.readyState !== WebSocket.OPEN) {
            throw new Error("GP2F WebSocket is not connected");
        }
        const delay = this.nextSendDelay();
        if (delay > 0) {
            // Rate-limited or paused: queue and let the drain timer flush later.
            // NOTE(review): queued messages are silently dropped if the socket
            // closes before the drain fires — confirm this is intended.
            this.pendingQueue.push(msg);
            this.scheduleDrain(delay);
            return;
        }
        this.ws.send(JSON.stringify(msg));
    }
    /** Whether the connection is currently open. */
    get connected() {
        return this.ws?.readyState === WebSocket.OPEN;
    }
    // ── private ─────────────────────────────────────────────────────────────────
    /** Handle an inbound server message, updating internal rate-limit state. */
    handleInbound(msg) {
        if (msg.type === "HELLO") {
            // Record the server-client time offset for HLC-aware scheduling.
            this.serverTimeOffsetMs = msg.serverTimeMs - Date.now();
        }
        else if (msg.type === "REJECT") {
            const settleMs = this.options.conflictSettleMs ?? 500;
            if (msg.retryAfterMs !== undefined) {
                // Server-side backpressure: respect the Retry-After hint.
                this.pauseUntil = Math.max(this.pauseUntil, Date.now() + msg.retryAfterMs);
            }
            else {
                // Conflict detected: apply the Settle Duration.
                this.pauseUntil = Math.max(this.pauseUntil, Date.now() + settleMs);
            }
            this.scheduleDrain(this.pauseUntil - Date.now());
        }
        else if (msg.type === "RELOAD_REQUIRED") {
            // Server signals that our AST schema version is incompatible.
            // Notify the application so it can reload the policy bundle.
            this.options.onReloadRequired?.(msg.minRequiredVersion, msg.reason);
            // Close the connection — we cannot continue with an incompatible schema.
            this.disconnect();
        }
        // Every message — including the control messages handled above — is
        // still forwarded to the application's onMessage handler.
        this.options.onMessage(msg);
    }
    /**
     * Returns the number of milliseconds to wait before the next send.
     * 0 means "send immediately".
     */
    nextSendDelay() {
        // An explicit pause (settle / retry-after) takes precedence over the
        // token bucket; the bucket is only consulted (and consumed) otherwise.
        const pauseRemaining = Math.max(0, this.pauseUntil - Date.now());
        if (pauseRemaining > 0)
            return pauseRemaining;
        const bucketWait = this.bucket.consume();
        return bucketWait;
    }
    /** Schedule a drain of the pending queue after `delayMs` ms. */
    scheduleDrain(delayMs) {
        // NOTE(review): an existing timer is kept even when `delayMs` is
        // shorter than its remaining time, so a drain may fire later than the
        // most recent request — confirm this is acceptable.
        if (this.drainTimer !== null)
            return; // already scheduled
        this.drainTimer = setTimeout(() => {
            this.drainTimer = null;
            this.drainQueue();
        }, Math.max(0, delayMs));
    }
    /** Attempt to flush as many pending messages as the rate limiter allows. */
    drainQueue() {
        if (!this.ws || this.ws.readyState !== WebSocket.OPEN)
            return;
        while (this.pendingQueue.length > 0) {
            const delay = this.nextSendDelay();
            if (delay > 0) {
                // Still throttled: re-arm the timer and stop for now.
                this.scheduleDrain(delay);
                return;
            }
            const msg = this.pendingQueue.shift();
            this.ws.send(JSON.stringify(msg));
        }
    }
}
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
export type { ClientMessage, ServerMessage, AcceptResponse, RejectResponse, ThreeWayPatch, FieldConflict, HelloMessage, ReloadRequiredMessage, } from "./wire";
|
|
2
|
+
export { Gp2fClient, applyOptimisticUpdate } from "./client";
|
|
3
|
+
export type { Gp2fClientOptions, MessageHandler, ErrorHandler, TokenHandler, OptimisticUpdateOptions, ReloadRequiredHandler } from "./client";
|
|
4
|
+
export { ReconciliationBanner } from "./ReconciliationBanner";
|
|
5
|
+
export type { ReconciliationBannerProps } from "./ReconciliationBanner";
|
|
6
|
+
export { UndoButton } from "./UndoButton";
|
|
7
|
+
export type { UndoButtonProps } from "./UndoButton";
|
|
8
|
+
export { MergeModal } from "./MergeModal";
|
|
9
|
+
export type { MergeModalProps } from "./MergeModal";
|
|
10
|
+
/**
|
|
11
|
+
* The shape of the lazily-loaded policy engine module.
|
|
12
|
+
*
|
|
13
|
+
* When the WASM build of `policy-core` is published as an npm package
|
|
14
|
+
* (e.g. `@gp2f/policy-core-wasm`), this interface describes its public API.
|
|
15
|
+
* The lazy loader below imports it on-demand so that the WASM binary is NOT
|
|
16
|
+
* included in the initial JS bundle, reducing Time-To-Interactive.
|
|
17
|
+
*/
|
|
18
|
+
export interface PolicyEngineModule {
|
|
19
|
+
/** Evaluate a policy AST against a JSON state document. */
|
|
20
|
+
evaluate(stateJson: string, astJson: string): {
|
|
21
|
+
result: boolean;
|
|
22
|
+
trace: string[];
|
|
23
|
+
};
|
|
24
|
+
}
|
|
25
|
+
/**
|
|
26
|
+
* Lazily load the GP2F WASM policy engine.
|
|
27
|
+
*
|
|
28
|
+
* The module is fetched and instantiated on the **first call** only; subsequent
|
|
29
|
+
* calls return the cached instance with no additional network cost.
|
|
30
|
+
*
|
|
31
|
+
* This pattern ("lazy loading") keeps the initial JS bundle small and defers
|
|
32
|
+
* the WASM download until the moment the policy engine is actually needed.
|
|
33
|
+
*
|
|
34
|
+
* @example
|
|
35
|
+
* ```ts
|
|
36
|
+
* const engine = await loadPolicyEngine();
|
|
37
|
+
* const { result } = engine.evaluate(JSON.stringify(state), JSON.stringify(ast));
|
|
38
|
+
* ```
|
|
39
|
+
*/
|
|
40
|
+
export declare function loadPolicyEngine(): Promise<PolicyEngineModule>;
|