@oleanderhq/sdk 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +89 -0
- package/dist/index.cjs +314 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +343 -0
- package/dist/index.d.ts +343 -0
- package/dist/index.js +275 -0
- package/dist/index.js.map +1 -0
- package/package.json +47 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,275 @@
|
|
|
1
|
+
// src/schemas.ts
|
|
2
|
+
import { z } from "zod";
|
|
3
|
+
/** Shape of an API error response body ({ error?, details? }). */
const apiErrorBodySchema = z.object({
  error: z.string().optional(),
  details: z.string().optional()
});

const DEFAULT_BASE_URL = "https://oleander.dev";

/** Constructor options; a blank/whitespace baseUrl falls back to the default. */
const optionsSchema = z.object({
  apiKey: z.string().optional(),
  baseUrl: z
    .string()
    .default(DEFAULT_BASE_URL)
    .transform((v) => {
      const trimmed = v.trim();
      return trimmed ? trimmed : DEFAULT_BASE_URL;
    })
});

/** Lake query options. */
const queryOptionsSchema = z.object({
  save: z.boolean().default(false)
});

/** Lake query result (API response). */
const lakeQueryResultSchema = z.object({
  success: z.boolean(),
  results: z
    .object({
      columns: z.array(z.string()),
      column_types: z.array(z.string()),
      rows: z.array(z.array(z.unknown()))
    })
    .optional(),
  row_count: z.number().optional(),
  execution_time: z.string().optional(),
  saved_table_name: z.string().optional(),
  error: z.string().optional(),
  details: z.string().optional(),
  query: z.string().optional()
});

/** One page of spark scripts from the API (S3-style continuation token). */
const sparkJobListPageSchema = z.object({
  scripts: z.array(z.string()),
  continuationToken: z.string().optional()
});

/** Options for listSparkJobs (limit/offset pagination). */
const listSparkJobsOptionsSchema = z.object({
  limit: z.number().int().positive().default(20),
  offset: z.number().int().nonnegative().default(0)
});

/** Result of listSparkJobs. */
const listSparkJobsResultSchema = z.object({
  scripts: z.array(z.string()),
  hasMore: z.boolean()
});

/** Spark machine type enum: sizes 1/2/4/8/16 across the c, b and m families. */
const sparkMachineTypeSchema = z.enum([
  "spark.1.c",
  "spark.2.c",
  "spark.4.c",
  "spark.8.c",
  "spark.16.c",
  "spark.1.b",
  "spark.2.b",
  "spark.4.b",
  "spark.8.b",
  "spark.16.b",
  "spark.1.m",
  "spark.2.m",
  "spark.4.m",
  "spark.8.m",
  "spark.16.m"
]);

/** Spark job submit options; optional fields carry defaults. */
const submitOptionsSchema = z.object({
  namespace: z.string().min(1, "namespace is required"),
  name: z.string().min(1, "name is required"),
  scriptName: z.string().min(1, "scriptName is required"),
  args: z.array(z.string()).default([]),
  driverMachineType: sparkMachineTypeSchema.default("spark.1.b"),
  executorMachineType: sparkMachineTypeSchema.default("spark.1.b"),
  executorNumbers: z.number().int().min(1).max(20).default(2),
  jobTags: z.array(z.string()).default([]),
  runTags: z.array(z.string()).default([])
});

/** Submit options plus polling knobs (defaults: poll every 10s, 10min timeout). */
const submitSparkJobAndWaitOptionsSchema = submitOptionsSchema.extend({
  pollIntervalMs: z.number().int().positive().default(1e4),
  timeoutMs: z.number().int().positive().default(6e5)
});

/** Spark job submit response. */
const sparkJobRunSchema = z.object({
  runId: z.string()
});

/** Run status payload used for polling after submit. */
const runResponseSchema = z.object({
  id: z.string(),
  state: z.string().nullable(),
  started_at: z.string().nullable().optional(),
  queued_at: z.string().nullable().optional(),
  scheduled_at: z.string().nullable().optional(),
  ended_at: z.string().nullable().optional(),
  duration: z.number().nullable().optional(),
  error: z.unknown().nullable().optional(),
  tags: z.array(
    z.object({
      key: z.string(),
      value: z.string(),
      source: z.string().nullable().optional()
    })
  ),
  job: z.object({
    id: z.string(),
    name: z.string(),
    namespace: z.string()
  }),
  pipeline: z.object({
    id: z.string(),
    name: z.string(),
    namespace: z.string()
  })
});
|
|
103
|
+
|
|
104
|
+
// src/client.ts
|
|
105
|
+
/**
 * Build the standard JSON request headers with bearer authentication.
 * @param {string} apiKey - API key placed into the Authorization header.
 * @returns {Record<string, string>} headers for fetch().
 */
function getHeaders(apiKey) {
  const headers = {
    "Content-Type": "application/json"
  };
  headers.Authorization = `Bearer ${apiKey}`;
  return headers;
}
|
|
111
|
+
/**
 * Turn an API error response body into a human-readable message.
 * Prefers the server-provided `error`, then `details`; otherwise
 * falls back to the HTTP status code.
 * @param {unknown} body - parsed (or empty) response body.
 * @param {number} status - HTTP status code of the failed response.
 * @returns {string} error message for throwing.
 */
function parseErrorBody(body, status) {
  const fallback = `HTTP ${status}`;
  const parsed = apiErrorBodySchema.safeParse(body);
  if (!parsed.success) {
    return fallback;
  }
  return parsed.data.error || parsed.data.details || fallback;
}
|
|
119
|
+
/**
 * Client for the oleander API. Mirrors the CLI for lake queries,
 * listing spark job scripts, and submitting/polling spark jobs.
 */
var Oleander = class {
  /**
   * @param {object} [options]
   * @param {string} [options.apiKey] - API key; falls back to the
   *   OLEANDER_API_KEY environment variable when absent or blank.
   * @param {string} [options.baseUrl] - API origin; defaults to
   *   https://oleander.dev (blank values fall back to the default).
   * @throws {Error} when option validation fails or no API key resolves.
   */
  constructor(options = {}) {
    const parsed = optionsSchema.safeParse(options);
    if (!parsed.success) {
      const first = parsed.error.flatten().formErrors[0] ?? parsed.error.message;
      throw new Error(first);
    }
    const fromOptions = parsed.data.apiKey?.trim();
    const fromEnv = typeof process !== "undefined" && process.env?.OLEANDER_API_KEY;
    // BUGFIX: use || rather than ?? so a present-but-blank apiKey option
    // (which trims to "") still falls back to the environment variable
    // instead of short-circuiting and failing below. This also coerces a
    // boolean-false fromEnv (when `process` is undefined) to "".
    const apiKey = fromOptions || fromEnv || "";
    if (!apiKey) {
      throw new Error(
        "Oleander requires a non-empty apiKey (or set OLEANDER_API_KEY)"
      );
    }
    this.apiKey = apiKey;
    // Strip one trailing slash so URL concatenation below stays clean.
    this.baseUrl = parsed.data.baseUrl.trim().replace(/\/$/, "");
  }
  /**
   * Execute a lake query (mirrors `oleander query`).
   * @param {string} sql - SQL text; must be non-blank.
   * @param {{save?: boolean}} [options] - save=true persists the result table.
   * @returns {Promise<object>} parsed lake query result.
   * @throws {Error} on blank query, HTTP error, or an unsuccessful result
   *   that carries an error message.
   */
  async query(sql, options = {}) {
    const query = typeof sql === "string" ? sql.trim() : "";
    if (!query) {
      throw new Error("Query is required");
    }
    const opts = queryOptionsSchema.parse(options);
    const res = await fetch(`${this.baseUrl}/api/v1/warehouse/query`, {
      method: "POST",
      headers: getHeaders(this.apiKey),
      body: JSON.stringify({
        query,
        autoSaveByHash: opts.save
      })
    });
    // Tolerate non-JSON bodies (e.g. proxies) so error reporting still works.
    const raw = await res.json().catch(() => ({}));
    if (!res.ok) {
      throw new Error(parseErrorBody(raw, res.status));
    }
    const result = lakeQueryResultSchema.parse(raw);
    if (!result.success && result.error) {
      throw new Error(result.details || result.error);
    }
    return result;
  }
  /**
   * List spark jobs (mirrors `oleander spark jobs list`).
   * The API paginates with a continuation token; this method pages through
   * internally and exposes simple limit/offset semantics.
   * @param {{limit?: number, offset?: number}} [options]
   * @returns {Promise<{scripts: string[], hasMore: boolean}>}
   * @throws {Error} on HTTP error.
   */
  async listSparkJobs(options = {}) {
    const opts = listSparkJobsOptionsSchema.parse(options);
    const { limit, offset } = opts;
    const allScripts = [];
    let continuationToken;
    while (true) {
      const params = continuationToken ? `?${new URLSearchParams({ continuationToken }).toString()}` : "";
      const res = await fetch(`${this.baseUrl}/api/v1/spark/scripts${params}`, {
        method: "GET",
        headers: { Authorization: `Bearer ${this.apiKey}` }
      });
      const raw = await res.json().catch(() => ({}));
      if (!res.ok) {
        throw new Error(parseErrorBody(raw, res.status));
      }
      const page = sparkJobListPageSchema.parse(raw);
      allScripts.push(...page.scripts);
      continuationToken = page.continuationToken;
      // Stop once the server is exhausted or we have enough to satisfy
      // the requested window.
      if (!continuationToken || allScripts.length >= offset + limit) {
        break;
      }
    }
    const scripts = allScripts.slice(offset, offset + limit);
    const hasMore = !!continuationToken || allScripts.length > offset + limit;
    return { scripts, hasMore };
  }
  /**
   * Submit a spark job (mirrors `oleander spark jobs submit`).
   * Returns the run ID; use getRun() or submitSparkJobAndWait() to poll.
   * @param {object} options - validated against submitOptionsSchema
   *   (namespace, name, scriptName required; the rest have defaults).
   * @returns {Promise<{runId: string}>}
   * @throws {Error} on validation failure or HTTP error.
   */
  async submitSparkJob(options) {
    const opts = submitOptionsSchema.parse(options);
    const res = await fetch(`${this.baseUrl}/api/v1/spark/jobs`, {
      method: "POST",
      headers: getHeaders(this.apiKey),
      body: JSON.stringify({
        namespace: opts.namespace.trim(),
        name: opts.name.trim(),
        scriptName: opts.scriptName.trim(),
        // The API field is `arguments`, while the SDK option is `args`.
        arguments: opts.args,
        driverMachineType: opts.driverMachineType,
        executorMachineType: opts.executorMachineType,
        executorNumbers: opts.executorNumbers,
        jobTags: opts.jobTags,
        runTags: opts.runTags
      })
    });
    const raw = await res.json().catch(() => ({}));
    if (!res.ok) {
      throw new Error(parseErrorBody(raw, res.status));
    }
    return sparkJobRunSchema.parse(raw);
  }
  /**
   * Get run status (used for polling after submit).
   * @param {string} runId - non-blank run identifier.
   * @returns {Promise<object>} parsed run response.
   * @throws {Error} on blank runId or HTTP error.
   */
  async getRun(runId) {
    if (!runId || typeof runId !== "string" || !runId.trim()) {
      throw new Error("runId is required");
    }
    const res = await fetch(`${this.baseUrl}/api/v2/runs/${runId}`, {
      method: "GET",
      headers: { Authorization: `Bearer ${this.apiKey}` }
    });
    const raw = await res.json().catch(() => ({}));
    if (!res.ok) {
      throw new Error(parseErrorBody(raw, res.status));
    }
    return runResponseSchema.parse(raw);
  }
  /**
   * Submit a spark job and poll until the run reaches a terminal state
   * (COMPLETE, FAIL, ABORT) or timeoutMs elapses.
   * @param {object} options - submit options plus pollIntervalMs/timeoutMs.
   * @returns {Promise<{runId: string, state: string, run: object}>}
   * @throws {Error} on submit/poll failure, or on timeout (the message
   *   includes the last observed state).
   */
  async submitSparkJobAndWait(options) {
    const opts = submitSparkJobAndWaitOptionsSchema.parse(options);
    const { pollIntervalMs, timeoutMs, ...submitOpts } = opts;
    const { runId } = await this.submitSparkJob(submitOpts);
    const started = Date.now();
    while (Date.now() - started < timeoutMs) {
      const polled = await this.getRun(runId);
      const state = polled.state ?? "";
      if (state === "COMPLETE" || state === "FAIL" || state === "ABORT") {
        return { runId, state, run: polled };
      }
      await new Promise((r) => setTimeout(r, pollIntervalMs));
    }
    // One final fetch so the timeout error can report the latest state.
    const run = await this.getRun(runId);
    throw new Error(
      `Timeout waiting for run ${runId} (state: ${run.state ?? "unknown"})`
    );
  }
};
|
|
260
|
+
// Public API surface: the client class plus every schema, exported so
// consumers can validate payloads or infer types themselves.
export {
  Oleander,
  apiErrorBodySchema,
  lakeQueryResultSchema,
  listSparkJobsOptionsSchema,
  listSparkJobsResultSchema,
  optionsSchema,
  queryOptionsSchema,
  runResponseSchema,
  sparkJobListPageSchema,
  sparkJobRunSchema,
  sparkMachineTypeSchema,
  submitOptionsSchema,
  submitSparkJobAndWaitOptionsSchema
};
|
|
275
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/schemas.ts","../src/client.ts"],"sourcesContent":["import { z } from \"zod\";\n\n/** API error response body */\nexport const apiErrorBodySchema = z.object({\n error: z.string().optional(),\n details: z.string().optional(),\n});\n\nexport type ApiErrorBody = z.infer<typeof apiErrorBodySchema>;\n\nconst DEFAULT_BASE_URL = \"https://oleander.dev\";\n\n/** Constructor options. apiKey can be omitted if OLEANDER_API_KEY env is set. */\nexport const optionsSchema = z.object({\n apiKey: z.string().optional(),\n baseUrl: z\n .string()\n .default(DEFAULT_BASE_URL)\n .transform((v) => (v.trim() ? v.trim() : DEFAULT_BASE_URL)),\n});\n\nexport type OleanderOptions = z.input<typeof optionsSchema>;\n\n/** Lake query options */\nexport const queryOptionsSchema = z.object({\n save: z.boolean().default(false),\n});\n\nexport type QueryOptions = z.input<typeof queryOptionsSchema>;\n\n/** Lake query result (API response) */\nexport const lakeQueryResultSchema = z.object({\n success: z.boolean(),\n results: z\n .object({\n columns: z.array(z.string()),\n column_types: z.array(z.string()),\n rows: z.array(z.array(z.unknown())),\n })\n .optional(),\n row_count: z.number().optional(),\n execution_time: z.string().optional(),\n saved_table_name: z.string().optional(),\n error: z.string().optional(),\n details: z.string().optional(),\n query: z.string().optional(),\n});\n\nexport type LakeQueryResult = z.infer<typeof lakeQueryResultSchema>;\n\n/** API response for one page of spark scripts (S3-style) */\nexport const sparkJobListPageSchema = z.object({\n scripts: z.array(z.string()),\n continuationToken: z.string().optional(),\n});\n\nexport type SparkJobListPage = z.infer<typeof sparkJobListPageSchema>;\n\n/** Options for listSparkJobs (limit/offset pagination) */\nexport const listSparkJobsOptionsSchema = z.object({\n limit: z.number().int().positive().default(20),\n offset: z.number().int().nonnegative().default(0),\n});\n\nexport type 
ListSparkJobsOptions = z.input<typeof listSparkJobsOptionsSchema>;\n\n/** Result of listSparkJobs with limit/offset */\nexport const listSparkJobsResultSchema = z.object({\n scripts: z.array(z.string()),\n hasMore: z.boolean(),\n});\n\nexport type ListSparkJobsResult = z.infer<typeof listSparkJobsResultSchema>;\n\n/** Spark machine type enum */\nexport const sparkMachineTypeSchema = z.enum([\n \"spark.1.c\",\n \"spark.2.c\",\n \"spark.4.c\",\n \"spark.8.c\",\n \"spark.16.c\",\n \"spark.1.b\",\n \"spark.2.b\",\n \"spark.4.b\",\n \"spark.8.b\",\n \"spark.16.b\",\n \"spark.1.m\",\n \"spark.2.m\",\n \"spark.4.m\",\n \"spark.8.m\",\n \"spark.16.m\",\n]);\n\nexport type SparkMachineType = z.infer<typeof sparkMachineTypeSchema>;\n\n/** Spark job submit options. Optional fields have defaults. */\nexport const submitOptionsSchema = z.object({\n namespace: z.string().min(1, \"namespace is required\"),\n name: z.string().min(1, \"name is required\"),\n scriptName: z.string().min(1, \"scriptName is required\"),\n args: z.array(z.string()).default([]),\n driverMachineType: sparkMachineTypeSchema.default(\"spark.1.b\"),\n executorMachineType: sparkMachineTypeSchema.default(\"spark.1.b\"),\n executorNumbers: z.number().int().min(1).max(20).default(2),\n jobTags: z.array(z.string()).default([]),\n runTags: z.array(z.string()).default([]),\n});\n\nexport type SparkJobSubmitOptions = z.input<typeof submitOptionsSchema>;\n\n/** Options for submitSparkJobAndWait (submit options + optional wait tuning) */\nexport const submitSparkJobAndWaitOptionsSchema = submitOptionsSchema.extend({\n pollIntervalMs: z.number().int().positive().default(10000),\n timeoutMs: z.number().int().positive().default(600000),\n});\n\nexport type SubmitSparkJobAndWaitOptions = z.input<\n typeof submitSparkJobAndWaitOptionsSchema\n>;\n\n/** Spark job submit response */\nexport const sparkJobRunSchema = z.object({\n runId: z.string(),\n});\n\nexport type SparkJobRun = z.infer<typeof sparkJobRunSchema>;\n\n/** Run 
status (for polling) */\nexport const runResponseSchema = z.object({\n id: z.string(),\n state: z.string().nullable(),\n started_at: z.string().nullable().optional(),\n queued_at: z.string().nullable().optional(),\n scheduled_at: z.string().nullable().optional(),\n ended_at: z.string().nullable().optional(),\n duration: z.number().nullable().optional(),\n error: z.unknown().nullable().optional(),\n tags: z.array(\n z.object({\n key: z.string(),\n value: z.string(),\n source: z.string().nullable().optional(),\n }),\n ),\n job: z.object({\n id: z.string(),\n name: z.string(),\n namespace: z.string(),\n }),\n pipeline: z.object({\n id: z.string(),\n name: z.string(),\n namespace: z.string(),\n }),\n});\n\nexport type RunResponse = z.infer<typeof runResponseSchema>;\n\nexport type RunState = \"COMPLETE\" | \"FAIL\" | \"ABORT\" | string;\n","import {\n optionsSchema,\n queryOptionsSchema,\n lakeQueryResultSchema,\n sparkJobListPageSchema,\n listSparkJobsOptionsSchema,\n submitOptionsSchema,\n submitSparkJobAndWaitOptionsSchema,\n sparkJobRunSchema,\n runResponseSchema,\n apiErrorBodySchema,\n} from \"./schemas.js\";\nimport type {\n OleanderOptions,\n QueryOptions,\n LakeQueryResult,\n ListSparkJobsOptions,\n ListSparkJobsResult,\n SparkJobSubmitOptions,\n SubmitSparkJobAndWaitOptions,\n SparkJobRun,\n RunResponse,\n RunState,\n} from \"./schemas.js\";\n\nfunction getHeaders(apiKey: string): Record<string, string> {\n return {\n Authorization: `Bearer ${apiKey}`,\n \"Content-Type\": \"application/json\",\n };\n}\n\nfunction parseErrorBody(body: unknown, status: number): string {\n const parsed = apiErrorBodySchema.safeParse(body);\n if (parsed.success) {\n const { error, details } = parsed.data;\n return error || details || `HTTP ${status}`;\n }\n return `HTTP ${status}`;\n}\n\n/**\n * oleander API. 
Mirrors the CLI for query, list spark jobs, and launch spark jobs.\n */\nexport class Oleander {\n private readonly apiKey: string;\n private readonly baseUrl: string;\n\n constructor(options: OleanderOptions = {}) {\n const parsed = optionsSchema.safeParse(options);\n if (!parsed.success) {\n const first =\n parsed.error.flatten().formErrors[0] ?? parsed.error.message;\n throw new Error(first);\n }\n const fromOptions = parsed.data.apiKey?.trim();\n const fromEnv =\n typeof process !== \"undefined\" && process.env?.OLEANDER_API_KEY;\n const apiKey = fromOptions ?? fromEnv ?? \"\";\n if (!apiKey) {\n throw new Error(\n \"Oleander requires a non-empty apiKey (or set OLEANDER_API_KEY)\",\n );\n }\n this.apiKey = apiKey;\n this.baseUrl = parsed.data.baseUrl.trim().replace(/\\/$/, \"\");\n }\n\n /**\n * Execute a lake query (mirrors `oleander query`).\n */\n async query(\n sql: string,\n options: QueryOptions = {},\n ): Promise<LakeQueryResult> {\n const query = typeof sql === \"string\" ? sql.trim() : \"\";\n if (!query) {\n throw new Error(\"Query is required\");\n }\n const opts = queryOptionsSchema.parse(options);\n\n const res = await fetch(`${this.baseUrl}/api/v1/warehouse/query`, {\n method: \"POST\",\n headers: getHeaders(this.apiKey),\n body: JSON.stringify({\n query,\n autoSaveByHash: opts.save,\n }),\n });\n\n const raw = await res.json().catch(() => ({}));\n if (!res.ok) {\n throw new Error(parseErrorBody(raw, res.status));\n }\n\n const result = lakeQueryResultSchema.parse(raw);\n if (!result.success && result.error) {\n throw new Error(result.details || result.error);\n }\n return result;\n }\n\n /**\n * List spark jobs (mirrors `oleander spark jobs list`).\n * Uses limit/offset pagination; pages through the API internally.\n */\n async listSparkJobs(\n options: ListSparkJobsOptions = {},\n ): Promise<ListSparkJobsResult> {\n const opts = listSparkJobsOptionsSchema.parse(options);\n const { limit, offset } = opts;\n\n const allScripts: string[] = [];\n 
let continuationToken: string | undefined;\n\n while (true) {\n const params = continuationToken\n ? `?${new URLSearchParams({ continuationToken }).toString()}`\n : \"\";\n const res = await fetch(`${this.baseUrl}/api/v1/spark/scripts${params}`, {\n method: \"GET\",\n headers: { Authorization: `Bearer ${this.apiKey}` },\n });\n const raw = await res.json().catch(() => ({}));\n if (!res.ok) {\n throw new Error(parseErrorBody(raw, res.status));\n }\n const page = sparkJobListPageSchema.parse(raw);\n allScripts.push(...page.scripts);\n continuationToken = page.continuationToken;\n if (!continuationToken || allScripts.length >= offset + limit) {\n break;\n }\n }\n\n const scripts = allScripts.slice(offset, offset + limit);\n const hasMore = !!continuationToken || allScripts.length > offset + limit;\n return { scripts, hasMore };\n }\n\n /**\n * Submit a spark job (mirrors `oleander spark jobs submit`).\n * Returns the run ID; use getRun() or submitSparkJobAndWait() to poll status.\n */\n async submitSparkJob(options: SparkJobSubmitOptions): Promise<SparkJobRun> {\n const opts = submitOptionsSchema.parse(options);\n\n const res = await fetch(`${this.baseUrl}/api/v1/spark/jobs`, {\n method: \"POST\",\n headers: getHeaders(this.apiKey),\n body: JSON.stringify({\n namespace: opts.namespace.trim(),\n name: opts.name.trim(),\n scriptName: opts.scriptName.trim(),\n arguments: opts.args,\n driverMachineType: opts.driverMachineType,\n executorMachineType: opts.executorMachineType,\n executorNumbers: opts.executorNumbers,\n jobTags: opts.jobTags,\n runTags: opts.runTags,\n }),\n });\n\n const raw = await res.json().catch(() => ({}));\n if (!res.ok) {\n throw new Error(parseErrorBody(raw, res.status));\n }\n return sparkJobRunSchema.parse(raw);\n }\n\n /**\n * Get run status (used for polling after submit).\n */\n async getRun(runId: string): Promise<RunResponse> {\n if (!runId || typeof runId !== \"string\" || !runId.trim()) {\n throw new Error(\"runId is required\");\n }\n 
const res = await fetch(`${this.baseUrl}/api/v2/runs/${runId}`, {\n method: \"GET\",\n headers: { Authorization: `Bearer ${this.apiKey}` },\n });\n\n const raw = await res.json().catch(() => ({}));\n if (!res.ok) {\n throw new Error(parseErrorBody(raw, res.status));\n }\n return runResponseSchema.parse(raw);\n }\n\n /**\n * Submit a spark job and wait until the run reaches a terminal state (COMPLETE, FAIL, ABORT).\n */\n async submitSparkJobAndWait(\n options: SubmitSparkJobAndWaitOptions,\n ): Promise<{ runId: string; state: RunState; run: RunResponse }> {\n const opts = submitSparkJobAndWaitOptionsSchema.parse(options);\n const { pollIntervalMs, timeoutMs, ...submitOpts } = opts;\n const { runId } = await this.submitSparkJob(submitOpts);\n const started = Date.now();\n\n while (Date.now() - started < timeoutMs) {\n const run = await this.getRun(runId);\n const state = run.state ?? \"\";\n if (state === \"COMPLETE\" || state === \"FAIL\" || state === \"ABORT\") {\n return { runId, state, run };\n }\n await new Promise((r) => setTimeout(r, pollIntervalMs));\n }\n const run = await this.getRun(runId);\n throw new Error(\n `Timeout waiting for run ${runId} (state: ${run.state ?? 
\"unknown\"})`,\n );\n }\n}\n"],"mappings":";AAAA,SAAS,SAAS;AAGX,IAAM,qBAAqB,EAAE,OAAO;AAAA,EACzC,OAAO,EAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,SAAS,EAAE,OAAO,EAAE,SAAS;AAC/B,CAAC;AAID,IAAM,mBAAmB;AAGlB,IAAM,gBAAgB,EAAE,OAAO;AAAA,EACpC,QAAQ,EAAE,OAAO,EAAE,SAAS;AAAA,EAC5B,SAAS,EACN,OAAO,EACP,QAAQ,gBAAgB,EACxB,UAAU,CAAC,MAAO,EAAE,KAAK,IAAI,EAAE,KAAK,IAAI,gBAAiB;AAC9D,CAAC;AAKM,IAAM,qBAAqB,EAAE,OAAO;AAAA,EACzC,MAAM,EAAE,QAAQ,EAAE,QAAQ,KAAK;AACjC,CAAC;AAKM,IAAM,wBAAwB,EAAE,OAAO;AAAA,EAC5C,SAAS,EAAE,QAAQ;AAAA,EACnB,SAAS,EACN,OAAO;AAAA,IACN,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC;AAAA,IAC3B,cAAc,EAAE,MAAM,EAAE,OAAO,CAAC;AAAA,IAChC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;AAAA,EACpC,CAAC,EACA,SAAS;AAAA,EACZ,WAAW,EAAE,OAAO,EAAE,SAAS;AAAA,EAC/B,gBAAgB,EAAE,OAAO,EAAE,SAAS;AAAA,EACpC,kBAAkB,EAAE,OAAO,EAAE,SAAS;AAAA,EACtC,OAAO,EAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,SAAS,EAAE,OAAO,EAAE,SAAS;AAAA,EAC7B,OAAO,EAAE,OAAO,EAAE,SAAS;AAC7B,CAAC;AAKM,IAAM,yBAAyB,EAAE,OAAO;AAAA,EAC7C,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC;AAAA,EAC3B,mBAAmB,EAAE,OAAO,EAAE,SAAS;AACzC,CAAC;AAKM,IAAM,6BAA6B,EAAE,OAAO;AAAA,EACjD,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,QAAQ,EAAE;AAAA,EAC7C,QAAQ,EAAE,OAAO,EAAE,IAAI,EAAE,YAAY,EAAE,QAAQ,CAAC;AAClD,CAAC;AAKM,IAAM,4BAA4B,EAAE,OAAO;AAAA,EAChD,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC;AAAA,EAC3B,SAAS,EAAE,QAAQ;AACrB,CAAC;AAKM,IAAM,yBAAyB,EAAE,KAAK;AAAA,EAC3C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAKM,IAAM,sBAAsB,EAAE,OAAO;AAAA,EAC1C,WAAW,EAAE,OAAO,EAAE,IAAI,GAAG,uBAAuB;AAAA,EACpD,MAAM,EAAE,OAAO,EAAE,IAAI,GAAG,kBAAkB;AAAA,EAC1C,YAAY,EAAE,OAAO,EAAE,IAAI,GAAG,wBAAwB;AAAA,EACtD,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,QAAQ,CAAC,CAAC;AAAA,EACpC,mBAAmB,uBAAuB,QAAQ,WAAW;AAAA,EAC7D,qBAAqB,uBAAuB,QAAQ,WAAW;AAAA,EAC/D,iBAAiB,EAAE,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,IAAI,EAAE,EAAE,QAAQ,CAAC;AAAA,EAC1D,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,QAAQ,CAAC,CAAC;AAAA,EACvC,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,QAAQ,CAAC,CAAC;AACzC,CAA
C;AAKM,IAAM,qCAAqC,oBAAoB,OAAO;AAAA,EAC3E,gBAAgB,EAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,QAAQ,GAAK;AAAA,EACzD,WAAW,EAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,QAAQ,GAAM;AACvD,CAAC;AAOM,IAAM,oBAAoB,EAAE,OAAO;AAAA,EACxC,OAAO,EAAE,OAAO;AAClB,CAAC;AAKM,IAAM,oBAAoB,EAAE,OAAO;AAAA,EACxC,IAAI,EAAE,OAAO;AAAA,EACb,OAAO,EAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,YAAY,EAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,EAC3C,WAAW,EAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,EAC1C,cAAc,EAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,EAC7C,UAAU,EAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,EACzC,UAAU,EAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,EACzC,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,SAAS;AAAA,EACvC,MAAM,EAAE;AAAA,IACN,EAAE,OAAO;AAAA,MACP,KAAK,EAAE,OAAO;AAAA,MACd,OAAO,EAAE,OAAO;AAAA,MAChB,QAAQ,EAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,IACzC,CAAC;AAAA,EACH;AAAA,EACA,KAAK,EAAE,OAAO;AAAA,IACZ,IAAI,EAAE,OAAO;AAAA,IACb,MAAM,EAAE,OAAO;AAAA,IACf,WAAW,EAAE,OAAO;AAAA,EACtB,CAAC;AAAA,EACD,UAAU,EAAE,OAAO;AAAA,IACjB,IAAI,EAAE,OAAO;AAAA,IACb,MAAM,EAAE,OAAO;AAAA,IACf,WAAW,EAAE,OAAO;AAAA,EACtB,CAAC;AACH,CAAC;;;ACjID,SAAS,WAAW,QAAwC;AAC1D,SAAO;AAAA,IACL,eAAe,UAAU,MAAM;AAAA,IAC/B,gBAAgB;AAAA,EAClB;AACF;AAEA,SAAS,eAAe,MAAe,QAAwB;AAC7D,QAAM,SAAS,mBAAmB,UAAU,IAAI;AAChD,MAAI,OAAO,SAAS;AAClB,UAAM,EAAE,OAAO,QAAQ,IAAI,OAAO;AAClC,WAAO,SAAS,WAAW,QAAQ,MAAM;AAAA,EAC3C;AACA,SAAO,QAAQ,MAAM;AACvB;AAKO,IAAM,WAAN,MAAe;AAAA,EAIpB,YAAY,UAA2B,CAAC,GAAG;AACzC,UAAM,SAAS,cAAc,UAAU,OAAO;AAC9C,QAAI,CAAC,OAAO,SAAS;AACnB,YAAM,QACJ,OAAO,MAAM,QAAQ,EAAE,WAAW,CAAC,KAAK,OAAO,MAAM;AACvD,YAAM,IAAI,MAAM,KAAK;AAAA,IACvB;AACA,UAAM,cAAc,OAAO,KAAK,QAAQ,KAAK;AAC7C,UAAM,UACJ,OAAO,YAAY,eAAe,QAAQ,KAAK;AACjD,UAAM,SAAS,eAAe,WAAW;AACzC,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AACA,SAAK,SAAS;AACd,SAAK,UAAU,OAAO,KAAK,QAAQ,KAAK,EAAE,QAAQ,OAAO,EAAE;AAAA,EAC7D;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,MACJ,KACA,UAAwB,CAAC,GACC;AAC1B,UAAM,QAAQ,OAAO,QAAQ,WAAW,IAAI,KAAK,IAAI;AACrD,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,MAAM,mBAAmB;AAAA,IACrC;AACA,UAAM,OAAO,mBAAmB,MAAM,OAAO;AAE7C,UAAM,MAAM,MAAM,MAAM,GAAG,KAAK,OAAO,2BAA2B;AAAA,MAChE,QAAQ;AAAA,MACR
,SAAS,WAAW,KAAK,MAAM;AAAA,MAC/B,MAAM,KAAK,UAAU;AAAA,QACnB;AAAA,QACA,gBAAgB,KAAK;AAAA,MACvB,CAAC;AAAA,IACH,CAAC;AAED,UAAM,MAAM,MAAM,IAAI,KAAK,EAAE,MAAM,OAAO,CAAC,EAAE;AAC7C,QAAI,CAAC,IAAI,IAAI;AACX,YAAM,IAAI,MAAM,eAAe,KAAK,IAAI,MAAM,CAAC;AAAA,IACjD;AAEA,UAAM,SAAS,sBAAsB,MAAM,GAAG;AAC9C,QAAI,CAAC,OAAO,WAAW,OAAO,OAAO;AACnC,YAAM,IAAI,MAAM,OAAO,WAAW,OAAO,KAAK;AAAA,IAChD;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,cACJ,UAAgC,CAAC,GACH;AAC9B,UAAM,OAAO,2BAA2B,MAAM,OAAO;AACrD,UAAM,EAAE,OAAO,OAAO,IAAI;AAE1B,UAAM,aAAuB,CAAC;AAC9B,QAAI;AAEJ,WAAO,MAAM;AACX,YAAM,SAAS,oBACX,IAAI,IAAI,gBAAgB,EAAE,kBAAkB,CAAC,EAAE,SAAS,CAAC,KACzD;AACJ,YAAM,MAAM,MAAM,MAAM,GAAG,KAAK,OAAO,wBAAwB,MAAM,IAAI;AAAA,QACvE,QAAQ;AAAA,QACR,SAAS,EAAE,eAAe,UAAU,KAAK,MAAM,GAAG;AAAA,MACpD,CAAC;AACD,YAAM,MAAM,MAAM,IAAI,KAAK,EAAE,MAAM,OAAO,CAAC,EAAE;AAC7C,UAAI,CAAC,IAAI,IAAI;AACX,cAAM,IAAI,MAAM,eAAe,KAAK,IAAI,MAAM,CAAC;AAAA,MACjD;AACA,YAAM,OAAO,uBAAuB,MAAM,GAAG;AAC7C,iBAAW,KAAK,GAAG,KAAK,OAAO;AAC/B,0BAAoB,KAAK;AACzB,UAAI,CAAC,qBAAqB,WAAW,UAAU,SAAS,OAAO;AAC7D;AAAA,MACF;AAAA,IACF;AAEA,UAAM,UAAU,WAAW,MAAM,QAAQ,SAAS,KAAK;AACvD,UAAM,UAAU,CAAC,CAAC,qBAAqB,WAAW,SAAS,SAAS;AACpE,WAAO,EAAE,SAAS,QAAQ;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,eAAe,SAAsD;AACzE,UAAM,OAAO,oBAAoB,MAAM,OAAO;AAE9C,UAAM,MAAM,MAAM,MAAM,GAAG,KAAK,OAAO,sBAAsB;AAAA,MAC3D,QAAQ;AAAA,MACR,SAAS,WAAW,KAAK,MAAM;AAAA,MAC/B,MAAM,KAAK,UAAU;AAAA,QACnB,WAAW,KAAK,UAAU,KAAK;AAAA,QAC/B,MAAM,KAAK,KAAK,KAAK;AAAA,QACrB,YAAY,KAAK,WAAW,KAAK;AAAA,QACjC,WAAW,KAAK;AAAA,QAChB,mBAAmB,KAAK;AAAA,QACxB,qBAAqB,KAAK;AAAA,QAC1B,iBAAiB,KAAK;AAAA,QACtB,SAAS,KAAK;AAAA,QACd,SAAS,KAAK;AAAA,MAChB,CAAC;AAAA,IACH,CAAC;AAED,UAAM,MAAM,MAAM,IAAI,KAAK,EAAE,MAAM,OAAO,CAAC,EAAE;AAC7C,QAAI,CAAC,IAAI,IAAI;AACX,YAAM,IAAI,MAAM,eAAe,KAAK,IAAI,MAAM,CAAC;AAAA,IACjD;AACA,WAAO,kBAAkB,MAAM,GAAG;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAO,OAAqC;AAChD,QAAI,CAAC,SAAS,OAAO,UAAU,YAAY,CAAC,MAAM,KAAK,GAAG;AACxD,YAAM,IAAI,MAAM,mBAAmB;AAAA,IACrC;AACA,UAAM,MAAM,MAAM,MAAM,GAAG,KAAK,OAAO,gBAAgB,KAAK,IAAI
;AAAA,MAC9D,QAAQ;AAAA,MACR,SAAS,EAAE,eAAe,UAAU,KAAK,MAAM,GAAG;AAAA,IACpD,CAAC;AAED,UAAM,MAAM,MAAM,IAAI,KAAK,EAAE,MAAM,OAAO,CAAC,EAAE;AAC7C,QAAI,CAAC,IAAI,IAAI;AACX,YAAM,IAAI,MAAM,eAAe,KAAK,IAAI,MAAM,CAAC;AAAA,IACjD;AACA,WAAO,kBAAkB,MAAM,GAAG;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,sBACJ,SAC+D;AAC/D,UAAM,OAAO,mCAAmC,MAAM,OAAO;AAC7D,UAAM,EAAE,gBAAgB,WAAW,GAAG,WAAW,IAAI;AACrD,UAAM,EAAE,MAAM,IAAI,MAAM,KAAK,eAAe,UAAU;AACtD,UAAM,UAAU,KAAK,IAAI;AAEzB,WAAO,KAAK,IAAI,IAAI,UAAU,WAAW;AACvC,YAAMA,OAAM,MAAM,KAAK,OAAO,KAAK;AACnC,YAAM,QAAQA,KAAI,SAAS;AAC3B,UAAI,UAAU,cAAc,UAAU,UAAU,UAAU,SAAS;AACjE,eAAO,EAAE,OAAO,OAAO,KAAAA,KAAI;AAAA,MAC7B;AACA,YAAM,IAAI,QAAQ,CAAC,MAAM,WAAW,GAAG,cAAc,CAAC;AAAA,IACxD;AACA,UAAM,MAAM,MAAM,KAAK,OAAO,KAAK;AACnC,UAAM,IAAI;AAAA,MACR,2BAA2B,KAAK,YAAY,IAAI,SAAS,SAAS;AAAA,IACpE;AAAA,EACF;AACF;","names":["run"]}
|
package/package.json
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@oleanderhq/sdk",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "TypeScript SDK for oleander: query lake, list and launch Spark jobs",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "./dist/index.cjs",
|
|
7
|
+
"module": "./dist/index.js",
|
|
8
|
+
"types": "./dist/index.d.ts",
|
|
9
|
+
"exports": {
|
|
10
|
+
".": {
|
|
11
|
+
"types": "./dist/index.d.ts",
|
|
12
|
+
"import": "./dist/index.js",
|
|
13
|
+
"require": "./dist/index.cjs"
|
|
14
|
+
}
|
|
15
|
+
},
|
|
16
|
+
"files": [
|
|
17
|
+
"dist",
|
|
18
|
+
"README.md"
|
|
19
|
+
],
|
|
20
|
+
"scripts": {
|
|
21
|
+
"build": "tsup",
|
|
22
|
+
"test": "npm run build && vitest run",
|
|
23
|
+
"prepublishOnly": "npm run build"
|
|
24
|
+
},
|
|
25
|
+
"keywords": [
|
|
26
|
+
"oleander",
|
|
27
|
+
"sdk",
|
|
28
|
+
"lake",
|
|
29
|
+
"spark",
|
|
30
|
+
"query",
|
|
31
|
+
"data"
|
|
32
|
+
],
|
|
33
|
+
"author": "",
|
|
34
|
+
"license": "MIT",
|
|
35
|
+
"dependencies": {
|
|
36
|
+
"zod": "^3.23.0"
|
|
37
|
+
},
|
|
38
|
+
"devDependencies": {
|
|
39
|
+
"@types/node": "^20.0.0",
|
|
40
|
+
"tsup": "^8.5.1",
|
|
41
|
+
"typescript": "^5",
|
|
42
|
+
"vitest": "^2.0.0"
|
|
43
|
+
},
|
|
44
|
+
"engines": {
|
|
45
|
+
"node": ">=20.0.0"
|
|
46
|
+
}
|
|
47
|
+
}
|