@distilled.cloud/core 0.0.0 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,261 @@
1
+ /**
2
+ * JSON Patch (RFC 6902) Implementation
3
+ *
4
+ * Provides a unified spec patching system for all SDKs.
5
+ * Patches are applied to OpenAPI/Discovery/Smithy specs before code generation
6
+ * to add error types, fix nullable fields, mark sensitive data, etc.
7
+ *
8
+ * @example
9
+ * ```ts
10
+ * import { applyAllPatches } from "@distilled.cloud/sdk-core/json-patch";
11
+ *
12
+ * const spec = JSON.parse(fs.readFileSync("openapi.json", "utf-8"));
13
+ * const { applied, errors } = applyAllPatches(spec, "./patches");
14
+ * ```
15
+ */
16
+ import * as fs from "fs";
17
+ import * as path from "path";
18
+
19
+ // ============================================================================
20
+ // Types
21
+ // ============================================================================
22
+
23
/**
 * A single RFC 6902 operation. The spec requires `value` for
 * add/replace/test and `from` for move/copy; both are modeled as
 * optional here and validated when the operation is applied.
 */
export interface JsonPatchOperation {
  /** One of the six RFC 6902 operation verbs. */
  op: "add" | "remove" | "replace" | "move" | "copy" | "test";
  /** RFC 6901 JSON Pointer to the target location. */
  path: string;
  /** Operand for add/replace/test operations. */
  value?: unknown;
  /** RFC 6901 JSON Pointer to the source location for move/copy. */
  from?: string;
}

/** An ordered list of operations, applied sequentially. */
export type JsonPatch = JsonPatchOperation[];

/**
 * On-disk shape of a `*.patch.json` file: a human-readable description
 * plus the operations to apply.
 */
export interface PatchFile {
  /** Human-readable summary, echoed in applyAllPatches' `applied` report. */
  description: string;
  /** The JSON Patch operations to apply, in order. */
  patches: JsonPatch;
}
36
+
37
+ // ============================================================================
38
+ // JSON Pointer (RFC 6901)
39
+ // ============================================================================
40
+
41
+ /**
42
+ * Parse a JSON Pointer (RFC 6901) path into segments.
43
+ */
44
+ export function parseJsonPointer(pointer: string): string[] {
45
+ if (pointer === "") return [];
46
+ if (!pointer.startsWith("/")) {
47
+ throw new Error(`Invalid JSON Pointer: ${pointer}`);
48
+ }
49
+ return pointer
50
+ .slice(1)
51
+ .split("/")
52
+ .map((segment) => segment.replace(/~1/g, "/").replace(/~0/g, "~"));
53
+ }
54
+
55
+ /**
56
+ * Get a value at a JSON Pointer path.
57
+ */
58
+ export function getValueAtPath(obj: unknown, pointer: string): unknown {
59
+ const segments = parseJsonPointer(pointer);
60
+ let current: unknown = obj;
61
+
62
+ for (const segment of segments) {
63
+ if (current === null || typeof current !== "object") {
64
+ throw new Error(`Cannot traverse path ${pointer}: not an object`);
65
+ }
66
+ if (Array.isArray(current)) {
67
+ const index = segment === "-" ? current.length : parseInt(segment, 10);
68
+ current = current[index];
69
+ } else {
70
+ current = (current as Record<string, unknown>)[segment];
71
+ }
72
+ }
73
+
74
+ return current;
75
+ }
76
+
77
+ /**
78
+ * Set a value at a JSON Pointer path.
79
+ */
80
+ export function setValueAtPath(
81
+ obj: unknown,
82
+ pointer: string,
83
+ value: unknown,
84
+ ): void {
85
+ const segments = parseJsonPointer(pointer);
86
+ if (segments.length === 0) {
87
+ throw new Error("Cannot set value at root path");
88
+ }
89
+
90
+ let current: unknown = obj;
91
+
92
+ for (let i = 0; i < segments.length - 1; i++) {
93
+ const segment = segments[i]!;
94
+ if (current === null || typeof current !== "object") {
95
+ throw new Error(`Cannot traverse path ${pointer}: not an object`);
96
+ }
97
+ if (Array.isArray(current)) {
98
+ const index = parseInt(segment, 10);
99
+ current = current[index];
100
+ } else {
101
+ current = (current as Record<string, unknown>)[segment];
102
+ }
103
+ }
104
+
105
+ const lastSegment = segments[segments.length - 1]!;
106
+ if (current === null || typeof current !== "object") {
107
+ throw new Error(
108
+ `Cannot set value at path ${pointer}: parent is not an object`,
109
+ );
110
+ }
111
+
112
+ if (Array.isArray(current)) {
113
+ const index =
114
+ lastSegment === "-" ? current.length : parseInt(lastSegment, 10);
115
+ if (lastSegment === "-") {
116
+ current.push(value);
117
+ } else {
118
+ current[index] = value;
119
+ }
120
+ } else {
121
+ (current as Record<string, unknown>)[lastSegment] = value;
122
+ }
123
+ }
124
+
125
+ /**
126
+ * Remove a value at a JSON Pointer path.
127
+ */
128
+ export function removeValueAtPath(obj: unknown, pointer: string): void {
129
+ const segments = parseJsonPointer(pointer);
130
+ if (segments.length === 0) {
131
+ throw new Error("Cannot remove root");
132
+ }
133
+
134
+ let current: unknown = obj;
135
+
136
+ for (let i = 0; i < segments.length - 1; i++) {
137
+ const segment = segments[i]!;
138
+ if (current === null || typeof current !== "object") {
139
+ throw new Error(`Cannot traverse path ${pointer}: not an object`);
140
+ }
141
+ if (Array.isArray(current)) {
142
+ current = current[parseInt(segment, 10)];
143
+ } else {
144
+ current = (current as Record<string, unknown>)[segment];
145
+ }
146
+ }
147
+
148
+ const lastSegment = segments[segments.length - 1]!;
149
+ if (current === null || typeof current !== "object") {
150
+ throw new Error(
151
+ `Cannot remove at path ${pointer}: parent is not an object`,
152
+ );
153
+ }
154
+
155
+ if (Array.isArray(current)) {
156
+ current.splice(parseInt(lastSegment, 10), 1);
157
+ } else {
158
+ delete (current as Record<string, unknown>)[lastSegment];
159
+ }
160
+ }
161
+
162
+ // ============================================================================
163
+ // Patch Operations
164
+ // ============================================================================
165
+
166
+ /**
167
+ * Apply a single JSON Patch operation.
168
+ */
169
+ export function applyOperation(
170
+ obj: unknown,
171
+ operation: JsonPatchOperation,
172
+ ): void {
173
+ switch (operation.op) {
174
+ case "add":
175
+ setValueAtPath(obj, operation.path, operation.value);
176
+ break;
177
+ case "remove":
178
+ removeValueAtPath(obj, operation.path);
179
+ break;
180
+ case "replace":
181
+ // For replace, the path must exist
182
+ getValueAtPath(obj, operation.path); // throws if doesn't exist
183
+ setValueAtPath(obj, operation.path, operation.value);
184
+ break;
185
+ case "move": {
186
+ if (!operation.from) throw new Error("move operation requires 'from'");
187
+ const moveValue = getValueAtPath(obj, operation.from);
188
+ removeValueAtPath(obj, operation.from);
189
+ setValueAtPath(obj, operation.path, moveValue);
190
+ break;
191
+ }
192
+ case "copy": {
193
+ if (!operation.from) throw new Error("copy operation requires 'from'");
194
+ const copyValue = getValueAtPath(obj, operation.from);
195
+ setValueAtPath(
196
+ obj,
197
+ operation.path,
198
+ JSON.parse(JSON.stringify(copyValue)),
199
+ );
200
+ break;
201
+ }
202
+ case "test": {
203
+ const testValue = getValueAtPath(obj, operation.path);
204
+ if (JSON.stringify(testValue) !== JSON.stringify(operation.value)) {
205
+ throw new Error(
206
+ `Test operation failed at ${operation.path}: expected ${JSON.stringify(operation.value)}, got ${JSON.stringify(testValue)}`,
207
+ );
208
+ }
209
+ break;
210
+ }
211
+ default:
212
+ throw new Error(`Unknown operation: ${(operation as { op: string }).op}`);
213
+ }
214
+ }
215
+
216
+ /**
217
+ * Apply a JSON Patch to an object (mutates in place).
218
+ */
219
+ export function applyPatch(obj: unknown, patch: JsonPatch): void {
220
+ for (const operation of patch) {
221
+ applyOperation(obj, operation);
222
+ }
223
+ }
224
+
225
+ /**
226
+ * Load and apply all patches from a directory.
227
+ * Finds all *.patch.json files and applies them.
228
+ */
229
+ export function applyAllPatches(
230
+ spec: unknown,
231
+ patchDir: string,
232
+ ): { applied: string[]; errors: string[] } {
233
+ const applied: string[] = [];
234
+ const errors: string[] = [];
235
+
236
+ if (!fs.existsSync(patchDir)) {
237
+ return { applied, errors };
238
+ }
239
+
240
+ // Find all .patch.json files
241
+ const files = fs
242
+ .readdirSync(patchDir)
243
+ .filter((f) => f.endsWith(".patch.json"))
244
+ .sort(); // Sort for deterministic application order
245
+
246
+ for (const file of files) {
247
+ const filePath = path.join(patchDir, file);
248
+ try {
249
+ const content = fs.readFileSync(filePath, "utf-8");
250
+ const patchFile: PatchFile = JSON.parse(content);
251
+ applyPatch(spec, patchFile.patches);
252
+ applied.push(`${file}: ${patchFile.description}`);
253
+ } catch (error) {
254
+ errors.push(
255
+ `${file}: ${error instanceof Error ? error.message : String(error)}`,
256
+ );
257
+ }
258
+ }
259
+
260
+ return { applied, errors };
261
+ }
@@ -0,0 +1,222 @@
1
+ /**
2
+ * Pagination utilities for streaming through paginated API responses.
3
+ *
4
+ * Supports multiple pagination styles:
5
+ * - Page-based (e.g., PlanetScale): page/per_page with next_page number
6
+ * - Cursor-based (e.g., Neon): cursor/limit with next cursor string
7
+ * - Token-based (e.g., AWS): NextToken/MaxResults with continuation tokens
8
+ *
9
+ * Each SDK defines its own pagination trait configuration, and these
10
+ * shared utilities handle the streaming logic.
11
+ *
12
+ * @example
13
+ * ```ts
14
+ * import * as Pagination from "@distilled.cloud/sdk-core/pagination";
15
+ *
16
+ * // Page-based pagination
17
+ * const allPages = Pagination.paginatePages(listDatabases, { organization: "my-org" }, {
18
+ * inputToken: "page",
19
+ * outputToken: "next_page",
20
+ * items: "data",
21
+ * });
22
+ * ```
23
+ */
24
+ import * as Effect from "effect/Effect";
25
+ import * as Stream from "effect/Stream";
26
+ import { getPath } from "./traits.ts";
27
+
28
+ // ============================================================================
29
+ // Pagination Trait
30
+ // ============================================================================
31
+
32
/**
 * Pagination trait describing how to navigate between pages.
 *
 * Token/path semantics mirror Smithy's `paginated` trait: input fields
 * are named members, output fields are dot-separated paths resolved via
 * `getPath`.
 */
export interface PaginatedTrait {
  /** The name of the input member containing the page/cursor token */
  inputToken: string;
  /** The path to the output member containing the next page/cursor token */
  outputToken: string;
  /** The path to the output member containing the paginated items */
  items?: string;
  /** The name of the input member that limits page size */
  pageSize?: string;
}
45
+
46
+ // ============================================================================
47
+ // Page-based Pagination (PlanetScale style)
48
+ // ============================================================================
49
+
50
/**
 * Creates a stream of pages using page-number pagination.
 *
 * Starts at page 1 and follows the numeric "next page" token read from
 * each response at `pagination.outputToken`; the stream ends when that
 * token is null or undefined. The operation is invoked lazily as the
 * stream is consumed.
 *
 * @param operation - The paginated operation to call
 * @param input - The initial input (without page parameter)
 * @param pagination - The pagination trait configuration
 * @returns A Stream of full page responses
 */
export const paginatePageNumber = <
  Input extends Record<string, unknown>,
  Output,
  E,
  R,
>(
  operation: (input: Input) => Effect.Effect<Output, E, R>,
  // NOTE(review): Omit<Input, string> removes every string key, so this
  // is effectively {} — callers can pass anything. Confirm whether a
  // stricter type (e.g. omitting only the token member) was intended.
  input: Omit<Input, string>,
  pagination: PaginatedTrait,
): Stream.Stream<Output, E, R> => {
  // done=true means the previous page reported no next page; unfold
  // returns undefined on the following step to terminate the stream.
  type State = { page: number; done: boolean };

  const unfoldFn = (state: State) =>
    Effect.gen(function* () {
      if (state.done) {
        return undefined;
      }

      // Inject the current page number under the configured input token.
      const requestPayload = {
        ...input,
        [pagination.inputToken]: state.page,
      } as Input;

      const response = yield* operation(requestPayload);

      const nextPage = getPath(response, pagination.outputToken) as
        | number
        | null
        | undefined;

      const nextState: State = {
        // When the server omits next_page we still advance numerically,
        // but done is set so the value is never actually requested.
        page: nextPage ?? state.page + 1,
        done: nextPage === null || nextPage === undefined,
      };

      return [response, nextState] as const;
    });

  // Page numbering is 1-based (PlanetScale style).
  return Stream.unfold({ page: 1, done: false } as State, unfoldFn);
};
98
+
99
+ // ============================================================================
100
+ // Cursor-based Pagination (Neon style)
101
+ // ============================================================================
102
+
103
/**
 * Creates a stream of pages using cursor-based pagination.
 *
 * The first request is sent without a cursor; each subsequent request
 * carries the cursor string read from the previous response at
 * `pagination.outputToken`. The stream ends when that token is null or
 * undefined.
 *
 * @param operation - The paginated operation to call
 * @param input - The initial input (without cursor parameter)
 * @param pagination - The pagination trait configuration
 * @returns A Stream of full page responses
 */
export const paginateCursor = <
  Input extends Record<string, unknown>,
  Output,
  E,
  R,
>(
  operation: (input: Input) => Effect.Effect<Output, E, R>,
  // NOTE(review): Omit<Input, string> removes every string key, so this
  // is effectively {} — see paginatePageNumber for the same concern.
  input: Omit<Input, string>,
  pagination: PaginatedTrait,
): Stream.Stream<Output, E, R> => {
  type State = { cursor: string | undefined; done: boolean };

  const unfoldFn = (state: State) =>
    Effect.gen(function* () {
      if (state.done) {
        return undefined;
      }

      // Only attach the cursor once we have one; the truthiness check
      // also drops "" — NOTE(review): if a provider ever returns an
      // empty-string cursor, done stays false but the cursor is never
      // sent, which would re-fetch the first page forever. Confirm
      // providers never emit "".
      const requestPayload = {
        ...input,
        ...(state.cursor ? { [pagination.inputToken]: state.cursor } : {}),
      } as Input;

      const response = yield* operation(requestPayload);

      const nextCursor = getPath(response, pagination.outputToken) as
        | string
        | null
        | undefined;

      const nextState: State = {
        // Normalize null to undefined so State stays a two-value type.
        cursor: nextCursor ?? undefined,
        done: nextCursor === null || nextCursor === undefined,
      };

      return [response, nextState] as const;
    });

  return Stream.unfold({ cursor: undefined, done: false } as State, unfoldFn);
};
151
+
152
+ // ============================================================================
153
+ // Token-based Pagination (AWS style)
154
+ // ============================================================================
155
+
156
/**
 * Creates a stream of pages using token-based pagination (AWS style).
 *
 * The first request is sent as-is; subsequent requests carry the
 * opaque continuation token read from the previous response at
 * `pagination.outputToken`. The stream ends when that token is null or
 * undefined.
 *
 * @param operation - The paginated operation to call
 * @param input - The initial input
 * @param pagination - The pagination trait configuration
 * @returns A Stream of full page responses
 */
export const paginateToken = <
  Input extends Record<string, unknown>,
  Output,
  E,
  R,
>(
  operation: (input: Input) => Effect.Effect<Output, E, R>,
  input: Input,
  pagination: PaginatedTrait,
): Stream.Stream<Output, E, R> => {
  // Token is opaque (unknown): AWS tokens may be strings or structs.
  type State = { token: unknown; done: boolean };

  const unfoldFn = (state: State) =>
    Effect.gen(function* () {
      if (state.done) {
        return undefined;
      }

      // First page: send input untouched. Later pages: overlay the
      // continuation token under the configured input member.
      const requestPayload =
        state.token !== undefined
          ? { ...input, [pagination.inputToken]: state.token }
          : input;

      const response = yield* operation(requestPayload as Input);

      const nextToken = getPath(response, pagination.outputToken);

      const nextState: State = {
        token: nextToken,
        done: nextToken === undefined || nextToken === null,
      };

      return [response, nextState] as const;
    });

  return Stream.unfold({ token: undefined, done: false } as State, unfoldFn);
};
201
+
202
+ // ============================================================================
203
+ // Item extraction
204
+ // ============================================================================
205
+
206
+ /**
207
+ * Extracts individual items from a page stream.
208
+ *
209
+ * @param pages - A stream of page responses
210
+ * @param itemsPath - Dot-separated path to the items array in the page response
211
+ * @returns A Stream of individual items
212
+ */
213
+ export const extractItems = <Output, Item, E, R>(
214
+ pages: Stream.Stream<Output, E, R>,
215
+ itemsPath: string,
216
+ ): Stream.Stream<Item, E, R> =>
217
+ pages.pipe(
218
+ Stream.flatMap((page) => {
219
+ const items = getPath(page, itemsPath) as readonly Item[] | undefined;
220
+ return Stream.fromIterable(items ?? []);
221
+ }),
222
+ );
package/src/retry.ts ADDED
@@ -0,0 +1,177 @@
1
+ /**
2
+ * Retry Policy System
3
+ *
4
+ * Provides configurable retry policies for API operations.
5
+ * Each SDK creates its own Retry service tag but uses these shared utilities
6
+ * for building retry schedules and policies.
7
+ *
8
+ * @example
9
+ * ```ts
10
+ * import * as Retry from "@distilled.cloud/sdk-core/retry";
11
+ *
12
+ * // Use the default retry policy
13
+ * myEffect.pipe(Retry.policy(myRetryService, Retry.makeDefault()))
14
+ *
15
+ * // Disable retries
16
+ * myEffect.pipe(Retry.none(myRetryService))
17
+ * ```
18
+ */
19
+ import * as Duration from "effect/Duration";
20
+ import * as Effect from "effect/Effect";
21
+ import { pipe } from "effect/Function";
22
+ import * as Layer from "effect/Layer";
23
+ import * as Ref from "effect/Ref";
24
+ import * as Schedule from "effect/Schedule";
25
+ import * as ServiceMap from "effect/ServiceMap";
26
+ import { isThrottling, isTransientError } from "./category.ts";
27
+
28
+ // ============================================================================
29
+ // Retry Policy Types
30
+ // ============================================================================
31
+
32
/**
 * Retry policy options that match the Effect.retry contract.
 */
export interface Options {
  /**
   * Predicate to determine if an error should trigger a retry.
   * When omitted, every error is considered retryable.
   */
  readonly while?: (error: unknown) => boolean;
  /**
   * The schedule to use for retrying.
   * When omitted, the consumer's default schedule applies.
   */
  readonly schedule?: Schedule.Schedule<unknown>;
}

/**
 * A factory function that creates retry policy options with access to the last error ref.
 * This allows dynamic policies that can inspect the last error for retry-after headers, etc.
 */
export type Factory = (lastError: Ref.Ref<unknown>) => Options;

/**
 * A retry policy can be either static options or a factory that receives the last error ref.
 */
export type Policy = Options | Factory;
56
+
57
+ // ============================================================================
58
+ // Retry Service Factory
59
+ // ============================================================================
60
+
61
/**
 * Create a typed Retry service class for an SDK.
 * Each SDK should create its own Retry service using this factory.
 *
 * The string `name` becomes the service's unique identifier in the
 * ServiceMap; the service carries a {@link Policy} value.
 *
 * NOTE(review): the first type argument is `any`, which erases the
 * service identifier type — confirm whether ServiceMap allows a
 * per-name identifier type here instead.
 *
 * @example
 * ```ts
 * // In planetscale-sdk/src/retry.ts
 * export class Retry extends makeRetryService("PlanetScaleRetry") {}
 * ```
 */
export const makeRetryService = (name: string) =>
  ServiceMap.Service<any, Policy>()(name);
73
+
74
+ /**
75
+ * Provides a custom retry policy for API calls.
76
+ */
77
+ export const policy =
78
+ (Service: any, optionsOrFactory: Policy) =>
79
+ <A, E, R>(effect: Effect.Effect<A, E, R>) =>
80
+ Effect.provide(effect, Layer.succeed(Service, optionsOrFactory) as any);
81
+
82
+ /**
83
+ * Disables all automatic retries.
84
+ */
85
+ export const none =
86
+ (Service: any) =>
87
+ <A, E, R>(effect: Effect.Effect<A, E, R>) =>
88
+ Effect.provide(
89
+ effect,
90
+ Layer.succeed(Service, { while: () => false }) as any,
91
+ );
92
+
93
+ // ============================================================================
94
+ // Retry Schedule Utilities
95
+ // ============================================================================
96
+
97
/**
 * Custom jittered schedule helper.
 * Adds random jitter between 0-50ms to avoid thundering herd.
 *
 * Implemented as an addDelay combinator, so it composes after any base
 * schedule via `pipe`. The jitter is additive, not multiplicative.
 */
export const jittered = Schedule.addDelay(() =>
  Effect.succeed(Duration.millis(Math.random() * 50)),
);
104
+
105
+ /**
106
+ * Cap delay at a maximum duration.
107
+ */
108
+ export const capped = (max: Duration.Duration) =>
109
+ Schedule.modifyDelay((duration: Duration.Duration) =>
110
+ Effect.succeed(
111
+ Duration.isGreaterThan(duration, max) ? Duration.millis(5000) : duration,
112
+ ),
113
+ );
114
+
115
+ // ============================================================================
116
+ // Default Retry Policies
117
+ // ============================================================================
118
+
119
/**
 * Creates the default retry policy.
 *
 * This policy:
 * - Retries transient errors (throttling, server, network, locked errors)
 * - Uses exponential backoff starting at 100ms with a factor of 2
 * - Ensures at least 500ms delay for throttling errors
 * - Limits to 5 retry attempts
 * - Applies jitter to avoid thundering herd
 *
 * Implemented as a Factory so the delay logic can read the most recent
 * error from `lastError` (written by the caller) and bump the backoff
 * floor when the error was a throttle.
 */
export const makeDefault: Factory = (lastError) => ({
  while: (error) => isTransientError(error),
  schedule: pipe(
    Schedule.exponential(100, 2),
    Schedule.modifyDelay(
      // Returns plain millisecond numbers; NOTE(review): assumes the
      // Schedule API accepts a number as a millisecond duration here —
      // confirm against the effect version in use.
      Effect.fnUntraced(function* (duration) {
        const error = yield* Ref.get(lastError);
        if (isThrottling(error)) {
          // Enforce a 500ms floor when the last failure was throttling.
          if (Duration.toMillis(duration) < 500) {
            return Duration.toMillis(Duration.millis(500));
          }
        }
        return Duration.toMillis(duration);
      }),
    ),
    // Stop after 5 recurrences, whichever bound is hit first.
    Schedule.both(Schedule.recurs(5)),
    jittered,
  ),
});
148
+
149
/**
 * Retry options that retries all throttling errors indefinitely.
 *
 * Backoff: exponential starting at 1s with factor 2, capped at 5s,
 * plus 0-50ms jitter. There is no attempt limit, so a permanently
 * throttled call will retry forever — callers should add their own
 * timeout if unbounded waiting is unacceptable.
 */
export const throttlingOptions: Options = {
  while: (error) => isThrottling(error),
  schedule: pipe(
    Schedule.exponential(1000, 2),
    capped(Duration.seconds(5)),
    jittered,
  ),
};
160
+
161
/**
 * Retry options that retries all transient errors indefinitely.
 *
 * This includes:
 * 1. Throttling errors
 * 2. Server errors
 * 3. Network errors
 * 4. Locked errors (423)
 *
 * Same backoff as {@link throttlingOptions}: exponential from 1s,
 * capped at 5s, with 0-50ms jitter and no attempt limit.
 */
export const transientOptions: Options = {
  while: isTransientError,
  schedule: pipe(
    Schedule.exponential(1000, 2),
    capped(Duration.seconds(5)),
    jittered,
  ),
};