openwrangler 0.0.3 → 0.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,144 @@
+ # openwrangler
+
+ A library that implements wrangler's remote functionality, allowing you to use Cloudflare bindings via direct REST API calls instead of local simulation.
+
+ ## Overview
+
+ `openwrangler` provides the same binding interface as wrangler's `getPlatformProxy`, but instead of using local miniflare/workerd simulation, it directly calls the Cloudflare REST API. This enables you to work with remote Cloudflare resources during development.
+
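+ For orientation, a rough sketch of the difference is below. The wrangler call is wrangler's own documented API; the openwrangler call mirrors the example in the next section, and `MY_KV` is just a placeholder binding name from a hypothetical wrangler config:
+
+ ```typescript
+ import { getPlatformProxy } from 'wrangler'   // wrangler's local approach
+ import { getBindings } from 'openwrangler'    // this package's remote approach
+
+ // wrangler: bindings are served by a local miniflare/workerd simulation
+ const { env, dispose } = await getPlatformProxy()
+ await env.MY_KV.get('key')
+ await dispose()
+
+ // openwrangler: the same kind of call goes straight to the Cloudflare REST API
+ const bindings = getBindings({ accountId: '...', apiToken: '...' })
+ await bindings.kv.get('key')
+ ```
+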
+ ## Installation
+
+ ```bash
+ npm install openwrangler
+ # or
+ pnpm add openwrangler
+ ```
+
+ ## Direct Usage
+
+ You can use openwrangler directly in your Node.js applications:
+
+ ```typescript
+ import { getBindings } from 'openwrangler'
+
+ const bindings = getBindings({
+   accountId: 'your-account-id',
+   apiToken: 'your-api-token',
+ })
+
+ // Use bindings just like in Cloudflare Workers
+ await bindings.r2.put('image.png', buffer)
+ await bindings.kv.get('key')
+ await bindings.d1.exec('SELECT * FROM users')
+ ```
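+
+ The published type declarations (shown later in this diff) also expose per-binding factories: `createKVBinding`, `createD1Binding`, and `createR2Binding`. The sketch below follows those signatures; the IDs are placeholders, and note that the R2 factory takes S3-style credentials (`r2AccessKeyId` / `r2SecretAccessKey`) rather than the account API token, since R2 is accessed over its S3-compatible endpoint:
+
+ ```typescript
+ import { createD1Binding, createKVBinding, createR2Binding } from 'openwrangler'
+
+ // KV and D1 authenticate with an account-scoped API token
+ const kv = createKVBinding({ accountId: 'your-account-id', apiToken: 'your-api-token' }, 'your-kv-namespace-id')
+ const db = createD1Binding({ accountId: 'your-account-id', apiToken: 'your-api-token' }, 'your-d1-database-id')
+
+ // R2 needs R2 access keys for its S3-compatible API
+ const bucket = createR2Binding(
+   { accountId: 'your-account-id', r2AccessKeyId: 'your-r2-access-key', r2SecretAccessKey: 'your-r2-secret-key' },
+   'my-bucket'
+ )
+
+ await kv.put('key', 'value')
+ const row = await db.prepare('SELECT * FROM users WHERE id = ?').bind(1).first()
+ await bucket.put('image.png', new Uint8Array([1, 2, 3]))
+ ```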
+
+ ## Nitro Integration
+
+ `openwrangler` can be integrated with Nitro applications using `@bino0216/nitro-cloudflare-dev`.
+
+ ### Setup
+
+ 1. Install the Nitro integration package:
+
+ ```bash
+ npm install @bino0216/nitro-cloudflare-dev
+ ```
+
+ 2. Configure remote credentials in your Nitro config:
+
+ ```typescript
+ // nitro.config.ts
+ export default defineNitroConfig({
+   cloudflareDev: {
+     remoteCredentials: {
+       accountId: 'your-account-id',
+       apiToken: 'your-api-token',
+     }
+   }
+ })
+ ```
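+
+ Avoid committing real credentials. One option, sketched below, is to read them from environment variables at config time; the variable names `CLOUDFLARE_ACCOUNT_ID` and `CLOUDFLARE_API_TOKEN` are just a convention chosen for this example:
+
+ ```typescript
+ // nitro.config.ts
+ export default defineNitroConfig({
+   cloudflareDev: {
+     remoteCredentials: {
+       accountId: process.env.CLOUDFLARE_ACCOUNT_ID!,
+       apiToken: process.env.CLOUDFLARE_API_TOKEN!,
+     }
+   }
+ })
+ ```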
+
+ 3. Mark bindings as remote in your `wrangler.toml` or `wrangler.json`:
+
+ ```toml
+ # wrangler.toml
+ [[r2_buckets]]
+ binding = "MY_BUCKET"
+ bucket_name = "my-bucket"
+ remote = true
+
+ [[kv_namespaces]]
+ binding = "MY_KV"
+ id = "your-kv-id"
+ remote = true
+
+ [[d1_databases]]
+ binding = "MY_DB"
+ database_name = "my-database"
+ database_id = "your-db-id"
+ remote = true
+ ```
+
+ Or in JSON format:
+
+ ```json
+ {
+   "r2_buckets": [
+     {
+       "binding": "MY_BUCKET",
+       "bucket_name": "my-bucket",
+       "remote": true
+     }
+   ],
+   "kv_namespaces": [
+     {
+       "binding": "MY_KV",
+       "id": "your-kv-id",
+       "remote": true
+     }
+   ],
+   "d1_databases": [
+     {
+       "binding": "MY_DB",
+       "database_name": "my-database",
+       "database_id": "your-db-id",
+       "remote": true
+     }
+   ]
+ }
+ ```
+
+ When bindings have `remote = true`, Nitro will use openwrangler's implementation to connect to your actual Cloudflare resources instead of local simulation.
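+
+ Inside your Nitro handlers, the bindings are then reachable the same way as with the upstream `nitro-cloudflare-dev` module, which exposes them on `event.context.cloudflare.env`. The sketch below assumes this fork keeps that behaviour and uses the `MY_KV` binding from the config above:
+
+ ```typescript
+ // server/api/value.get.ts
+ export default defineEventHandler(async (event) => {
+   // With remote = true, this call hits your real KV namespace via the REST API
+   const value = await event.context.cloudflare.env.MY_KV.get('key')
+   return { value }
+ })
+ ```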
+
+ ## Supported Bindings
+
+ - **R2** (`R2Bucket`) - Object storage
+ - **KV** (`KVNamespace`) - Key-value storage
+ - **D1** (`D1Database`) - SQL database
+
+ All types are imported from `@cloudflare/workers-types` to ensure compatibility with Cloudflare Workers.
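+
+ Because the returned objects carry the Worker interface types, code written against an env-style shape can run unchanged against either real Workers bindings or openwrangler's REST-backed ones. A small sketch (the `Env` interface and `listUsers` helper are illustrative, not part of the package):
+
+ ```typescript
+ import type { D1Database, KVNamespace, R2Bucket } from '@cloudflare/workers-types'
+
+ interface Env {
+   MY_BUCKET: R2Bucket
+   MY_KV: KVNamespace
+   MY_DB: D1Database
+ }
+
+ // Works the same whether env comes from a Worker or from openwrangler factories
+ async function listUsers(env: Env) {
+   const { results } = await env.MY_DB.prepare('SELECT * FROM users').all()
+   return results
+ }
+ ```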
+
+ ## Development
+
+ ```bash
+ pnpm install
+ pnpm dev    # Run playground
+ pnpm build  # Build package
+ ```
+
+ ## Project Structure
+
+ - `src/index.ts` - Package entry point, `getBindings()` function
+ - `playground/` - Nuxt/Nitro v3 based test environment
+ - `build.config.ts` - unbuild configuration
+
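+ The actual `build.config.ts` is not reproduced in this diff; for a package laid out like this, a minimal unbuild configuration typically looks something like the following (entry name assumed):
+
+ ```typescript
+ // build.config.ts
+ import { defineBuildConfig } from 'unbuild'
+
+ export default defineBuildConfig({
+   entries: ['src/index'],   // builds dist/index.mjs
+   declaration: true,        // emit .d.ts / .d.mts files
+   clean: true,
+ })
+ ```
+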
+ ## Use Cases
+
+ - **Development against production data**: Work with real Cloudflare resources during development
+ - **Testing**: Test against actual Cloudflare services without deployment
+ - **CI/CD**: Run integration tests against remote Cloudflare resources (see the sketch below)
+ - **Hybrid environments**: Mix local and remote bindings based on your needs
+
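+ A minimal integration-test sketch for the CI/CD case, assuming credentials are provided as environment variables and Vitest is the test runner (neither is prescribed by the package):
+
+ ```typescript
+ // kv.integration.test.ts
+ import { describe, expect, it } from 'vitest'
+ import { createKVBinding } from 'openwrangler'
+
+ describe('remote KV', () => {
+   it('round-trips a value through the real namespace', async () => {
+     const kv = createKVBinding(
+       { accountId: process.env.CLOUDFLARE_ACCOUNT_ID!, apiToken: process.env.CLOUDFLARE_API_TOKEN! },
+       process.env.KV_NAMESPACE_ID!,
+     )
+     await kv.put('ci-smoke-test', 'ok')
+     expect(await kv.get('ci-smoke-test')).toBe('ok')
+     await kv.delete('ci-smoke-test')
+   })
+ })
+ ```
+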
+ ## License
+
+ MIT
package/dist/index.d.mts CHANGED
@@ -1,12 +1,19 @@
  import { R2Bucket, KVNamespace, D1Database } from '@cloudflare/workers-types/experimental';

+ interface R2Config {
+   accountId: string;
+   r2AccessKeyId: string;
+   r2SecretAccessKey: string;
+ }
+
  interface BindingsConfig {
    accountId: string;
    apiToken: string;
  }
- declare function createR2Binding(config: BindingsConfig, bucketName: string): R2Bucket;
+
+ declare function createR2Binding(config: R2Config, bucketName: string): R2Bucket;
  declare function createKVBinding(config: BindingsConfig, namespaceId: string): KVNamespace;
  declare function createD1Binding(config: BindingsConfig, databaseId: string): D1Database;

  export { createD1Binding, createKVBinding, createR2Binding };
- export type { BindingsConfig };
+ export type { BindingsConfig, R2Config };
package/dist/index.d.ts CHANGED
@@ -1,12 +1,19 @@
  import { R2Bucket, KVNamespace, D1Database } from '@cloudflare/workers-types/experimental';

+ interface R2Config {
+   accountId: string;
+   r2AccessKeyId: string;
+   r2SecretAccessKey: string;
+ }
+
  interface BindingsConfig {
    accountId: string;
    apiToken: string;
  }
- declare function createR2Binding(config: BindingsConfig, bucketName: string): R2Bucket;
+
+ declare function createR2Binding(config: R2Config, bucketName: string): R2Bucket;
  declare function createKVBinding(config: BindingsConfig, namespaceId: string): KVNamespace;
  declare function createD1Binding(config: BindingsConfig, databaseId: string): D1Database;

  export { createD1Binding, createKVBinding, createR2Binding };
- export type { BindingsConfig };
+ export type { BindingsConfig, R2Config };
package/dist/index.mjs CHANGED
@@ -1,45 +1,116 @@
- class CloudflareAPIClient {
- baseUrl = "https://api.cloudflare.com/client/v4";
- accountId;
- apiToken;
- constructor(accountId, apiToken) {
- this.accountId = accountId;
- this.apiToken = apiToken;
- }
- async request(method, endpoint, body, headers) {
- const url = `${this.baseUrl}${endpoint}`;
- const response = await fetch(url, {
- method,
- headers: {
- "Authorization": `Bearer ${this.apiToken}`,
- "Content-Type": "application/json",
- ...headers
- },
- body: body ? JSON.stringify(body) : void 0
- });
- const data = await response.json();
- if (!response.ok || !data.success) {
- const errorMessage = data.errors?.[0]?.message || `HTTP ${response.status}`;
- throw new Error(`Cloudflare API Error: ${errorMessage}`);
- }
- return data.result;
+ class D1PreparedStatementImpl {
+ constructor(query, client, databaseId) {
+ this.query = query;
+ this.client = client;
+ this.databaseId = databaseId;
  }
- async get(endpoint, headers) {
- return this.request("GET", endpoint, void 0, headers);
+ bindings = [];
+ bind(...values) {
+ this.bindings = values;
+ return this;
  }
- async post(endpoint, body, headers) {
- return this.request("POST", endpoint, body, headers);
+ async run() {
+ const endpoint = `/accounts/${this.client.getAccountId()}/d1/database/${this.databaseId}/query`;
+ const data = await this.client.post(endpoint, {
+ sql: this.query,
+ params: this.bindings
+ });
+ const result = data[0];
+ return {
+ success: result.success,
+ results: result.results,
+ meta: {
+ served_by: result.meta.served_by || "",
+ duration: result.meta.duration || 0,
+ changes: result.meta.changes || 0,
+ last_row_id: result.meta.last_row_id || 0,
+ changed_db: result.meta.changed_db || false,
+ size_after: result.meta.size_after || 0,
+ rows_read: result.meta.rows_read || 0,
+ rows_written: result.meta.rows_written || 0
+ }
+ };
  }
- async put(endpoint, body, headers) {
- return this.request("PUT", endpoint, body, headers);
+ async all() {
+ return this.run();
  }
- async delete(endpoint, body, headers) {
- return this.request("DELETE", endpoint, body, headers);
+ async first(colName) {
+ const result = await this.run();
+ if (!result.results || result.results.length === 0) {
+ return null;
+ }
+ const firstRow = result.results[0];
+ if (colName && typeof firstRow === "object" && firstRow !== null) {
+ return firstRow[colName] ?? null;
+ }
+ return firstRow;
  }
- getAccountId() {
- return this.accountId;
+ async raw(options) {
+ const result = await this.run();
+ if (!result.results || result.results.length === 0) {
+ return options?.columnNames ? [[], ...[]] : [];
+ }
+ const raw = result.results.map((row) => {
+ if (typeof row === "object" && row !== null) {
+ return Object.values(row);
+ }
+ return row;
+ });
+ if (options?.columnNames && result.results.length > 0) {
+ const firstRow = result.results[0];
+ if (typeof firstRow === "object" && firstRow !== null) {
+ const columnNames = Object.keys(firstRow);
+ return [columnNames, ...raw];
+ }
+ }
+ return raw;
  }
  }
+ function createD1Binding$1(client, databaseId) {
+ return {
+ prepare(query) {
+ return new D1PreparedStatementImpl(query, client, databaseId);
+ },
+ async batch(statements) {
+ const endpoint = `/accounts/${client.getAccountId()}/d1/database/${databaseId}/query`;
+ const queries = statements.map((stmt) => {
+ const impl = stmt;
+ return {
+ sql: impl.query,
+ params: impl.bindings
+ };
+ });
+ const data = await client.post(endpoint, queries);
+ return data.map((result) => ({
+ success: true,
+ results: result.results,
+ meta: {
+ served_by: result.meta.served_by || "",
+ duration: result.meta.duration || 0,
+ changes: result.meta.changes || 0,
+ last_row_id: result.meta.last_row_id || 0,
+ changed_db: result.meta.changed_db || false,
+ size_after: result.meta.size_after || 0,
+ rows_read: result.meta.rows_read || 0,
+ rows_written: result.meta.rows_written || 0
+ }
+ }));
+ },
+ async exec(query) {
+ const endpoint = `/accounts/${client.getAccountId()}/d1/database/${databaseId}/query`;
+ const data = await client.post(endpoint, {
+ sql: query
+ });
+ return {
+ count: data.length,
+ duration: data.reduce((acc, r) => acc + (r.meta.duration || 0), 0)
+ };
+ },
+ dump() {
+ throw new Error("D1 dump() is deprecated and not supported");
+ }
+ };
+ }

  function createKVBinding$1(client, namespaceId) {
  const baseEndpoint = `/accounts/${client.getAccountId()}/storage/kv/namespaces/${namespaceId}`;
@@ -53,7 +124,7 @@ function createKVBinding$1(client, namespaceId) {
  const response = await fetch(`https://api.cloudflare.com/client/v4${url}`, {
  method: "GET",
  headers: {
- "Authorization": `Bearer ${client.apiToken}`,
+ Authorization: `Bearer ${client.apiToken}`,
  ...headers
  }
  });
@@ -87,7 +158,7 @@ function createKVBinding$1(client, namespaceId) {
  const response = await fetch(`https://api.cloudflare.com/client/v4${url}`, {
  method: "GET",
  headers: {
- "Authorization": `Bearer ${client.apiToken}`
+ Authorization: `Bearer ${client.apiToken}`
  }
  });
  if (response.status === 404) {
@@ -142,7 +213,7 @@ function createKVBinding$1(client, namespaceId) {
  async put(key, value, options) {
  const url = `${baseEndpoint}/values/${encodeURIComponent(key)}`;
  const headers = {
- "Authorization": `Bearer ${client.apiToken}`
+ Authorization: `Bearer ${client.apiToken}`
  };
  let body;
  if (typeof value === "string") {
@@ -156,7 +227,8 @@ function createKVBinding$1(client, namespaceId) {
  const chunks = [];
  while (true) {
  const { done, value: chunk } = await reader.read();
- if (done) break;
+ if (done)
+ break;
  chunks.push(chunk);
  }
  const totalLength = chunks.reduce((acc, chunk) => acc + chunk.length, 0);
@@ -168,7 +240,7 @@ function createKVBinding$1(client, namespaceId) {
  }
  body = result.buffer;
  } else {
- throw new Error("Unsupported value type");
+ throw new TypeError("Unsupported value type");
  }
  const queryParams = new URLSearchParams();
  if (options?.expiration) {
@@ -193,7 +265,7 @@ function createKVBinding$1(client, namespaceId) {
  const response = await fetch(`https://api.cloudflare.com/client/v4${url}`, {
  method: "DELETE",
  headers: {
- "Authorization": `Bearer ${client.apiToken}`
+ Authorization: `Bearer ${client.apiToken}`
  }
  });
  if (!response.ok && response.status !== 404) {
@@ -269,122 +341,485 @@ function createKVBinding$1(client, namespaceId) {
269
341
  return kvNamespace;
270
342
  }
271
343
 
272
- class D1PreparedStatementImpl {
273
- constructor(query, client, databaseId) {
274
- this.query = query;
275
- this.client = client;
276
- this.databaseId = databaseId;
277
- }
278
- bindings = [];
279
- bind(...values) {
280
- this.bindings = values;
281
- return this;
344
+ async function sha256(data) {
345
+ const encoder = new TextEncoder();
346
+ const dataBuffer = typeof data === "string" ? encoder.encode(data) : data;
347
+ return await crypto.subtle.digest("SHA-256", dataBuffer);
348
+ }
349
+ function hex(buffer) {
350
+ return Array.from(new Uint8Array(buffer)).map((b) => b.toString(16).padStart(2, "0")).join("");
351
+ }
352
+ async function hmac(key, data) {
353
+ const encoder = new TextEncoder();
354
+ const cryptoKey = await crypto.subtle.importKey(
355
+ "raw",
356
+ key,
357
+ { name: "HMAC", hash: "SHA-256" },
358
+ false,
359
+ ["sign"]
360
+ );
361
+ return await crypto.subtle.sign("HMAC", cryptoKey, encoder.encode(data));
362
+ }
363
+ async function getSignatureKey(secretAccessKey, dateStamp, region, service) {
364
+ const encoder = new TextEncoder();
365
+ const kDate = await hmac(encoder.encode(`AWS4${secretAccessKey}`), dateStamp);
366
+ const kRegion = await hmac(kDate, region);
367
+ const kService = await hmac(kRegion, service);
368
+ const kSigning = await hmac(kService, "aws4_request");
369
+ return kSigning;
370
+ }
371
+ async function signRequest(params) {
372
+ const {
373
+ method,
374
+ url,
375
+ headers,
376
+ body,
377
+ accessKeyId,
378
+ secretAccessKey,
379
+ region = "auto",
380
+ service = "s3"
381
+ } = params;
382
+ const urlObj = new URL(url);
383
+ const host = urlObj.host;
384
+ const path = urlObj.pathname || "/";
385
+ const queryString = urlObj.search.slice(1);
386
+ const now = /* @__PURE__ */ new Date();
387
+ const amzDate = now.toISOString().replace(/[:-]|\.\d{3}/g, "");
388
+ const dateStamp = amzDate.slice(0, 8);
389
+ const canonicalHeaders = {
390
+ host,
391
+ "x-amz-date": amzDate
392
+ };
393
+ for (const [key, value] of Object.entries(headers)) {
394
+ canonicalHeaders[key.toLowerCase()] = value;
282
395
  }
283
- async run() {
284
- const endpoint = `/accounts/${this.client.getAccountId()}/d1/database/${this.databaseId}/query`;
285
- const data = await this.client.post(endpoint, {
286
- sql: this.query,
287
- params: this.bindings
288
- });
289
- const result = data[0];
290
- return {
291
- success: result.success,
292
- results: result.results,
293
- meta: {
294
- served_by: result.meta.served_by || "",
295
- duration: result.meta.duration || 0,
296
- changes: result.meta.changes || 0,
297
- last_row_id: result.meta.last_row_id || 0,
298
- changed_db: result.meta.changed_db || false,
299
- size_after: result.meta.size_after || 0,
300
- rows_read: result.meta.rows_read || 0,
301
- rows_written: result.meta.rows_written || 0
396
+ const signedHeaders = Object.keys(canonicalHeaders).sort().join(";");
397
+ const canonicalHeadersString = Object.keys(canonicalHeaders).sort().map((key) => `${key}:${canonicalHeaders[key]}`).join("\n");
398
+ const payloadHash = body ? hex(await sha256(body)) : hex(await sha256(""));
399
+ const canonicalRequest = [
400
+ method,
401
+ path,
402
+ queryString,
403
+ `${canonicalHeadersString}
404
+ `,
405
+ signedHeaders,
406
+ payloadHash
407
+ ].join("\n");
408
+ const algorithm = "AWS4-HMAC-SHA256";
409
+ const credentialScope = `${dateStamp}/${region}/${service}/aws4_request`;
410
+ const stringToSign = [
411
+ algorithm,
412
+ amzDate,
413
+ credentialScope,
414
+ hex(await sha256(canonicalRequest))
415
+ ].join("\n");
416
+ const signingKey = await getSignatureKey(secretAccessKey, dateStamp, region, service);
417
+ const signature = hex(await hmac(signingKey, stringToSign));
418
+ const authorization = `${algorithm} Credential=${accessKeyId}/${credentialScope}, SignedHeaders=${signedHeaders}, Signature=${signature}`;
419
+ return {
420
+ "Authorization": authorization,
421
+ "x-amz-date": amzDate,
422
+ "x-amz-content-sha256": payloadHash
423
+ };
424
+ }
425
+
426
+ function createR2Binding$1(config, bucketName) {
427
+ const { accountId, r2AccessKeyId, r2SecretAccessKey } = config;
428
+ const baseUrl = `https://${accountId}.r2.cloudflarestorage.com/${bucketName}`;
429
+ async function signedFetch(method, key, options) {
430
+ const queryString = options?.queryParams ? `?${new URLSearchParams(options.queryParams).toString()}` : "";
431
+ const url = `${baseUrl}/${encodeURIComponent(key)}${queryString}`;
432
+ let body;
433
+ if (options?.body instanceof ReadableStream) {
434
+ const reader = options.body.getReader();
435
+ const chunks = [];
436
+ while (true) {
437
+ const { done, value } = await reader.read();
438
+ if (done)
439
+ break;
440
+ chunks.push(value);
302
441
  }
303
- };
304
- }
305
- async all() {
306
- return this.run();
307
- }
308
- async first(colName) {
309
- const result = await this.run();
310
- if (!result.results || result.results.length === 0) {
311
- return null;
312
- }
313
- const firstRow = result.results[0];
314
- if (colName && typeof firstRow === "object" && firstRow !== null) {
315
- return firstRow[colName] ?? null;
442
+ const totalLength = chunks.reduce((acc, chunk) => acc + chunk.length, 0);
443
+ const result = new Uint8Array(totalLength);
444
+ let offset = 0;
445
+ for (const chunk of chunks) {
446
+ result.set(chunk, offset);
447
+ offset += chunk.length;
448
+ }
449
+ body = result.buffer;
450
+ } else {
451
+ body = options?.body;
316
452
  }
317
- return firstRow;
453
+ const signatureHeaders = await signRequest({
454
+ method,
455
+ url,
456
+ headers: options?.headers || {},
457
+ body,
458
+ accessKeyId: r2AccessKeyId,
459
+ secretAccessKey: r2SecretAccessKey
460
+ });
461
+ return fetch(url, {
462
+ method,
463
+ headers: {
464
+ ...options?.headers,
465
+ ...signatureHeaders
466
+ },
467
+ body
468
+ });
318
469
  }
319
- async raw(options) {
320
- const result = await this.run();
321
- if (!result.results || result.results.length === 0) {
322
- return [];
323
- }
324
- const raw = result.results.map((row) => {
325
- if (typeof row === "object" && row !== null) {
326
- return Object.values(row);
327
- }
328
- return row;
470
+ async function signedFetchBucket(method, options) {
471
+ const queryString = options?.queryParams ? `?${new URLSearchParams(options.queryParams).toString()}` : "";
472
+ const url = `${baseUrl}${queryString}`;
473
+ const signatureHeaders = await signRequest({
474
+ method,
475
+ url,
476
+ headers: options?.headers || {},
477
+ accessKeyId: r2AccessKeyId,
478
+ secretAccessKey: r2SecretAccessKey
329
479
  });
330
- if (options?.columnNames && result.results.length > 0) {
331
- const firstRow = result.results[0];
332
- if (typeof firstRow === "object" && firstRow !== null) {
333
- const columnNames = Object.keys(firstRow);
334
- return [columnNames, ...raw];
480
+ return fetch(url, {
481
+ method,
482
+ headers: {
483
+ ...options?.headers,
484
+ ...signatureHeaders
335
485
  }
336
- }
337
- return raw;
486
+ });
338
487
  }
339
- }
340
- function createD1Binding$1(client, databaseId) {
341
- return {
342
- prepare(query) {
343
- return new D1PreparedStatementImpl(query, client, databaseId);
344
- },
345
- async batch(statements) {
346
- const endpoint = `/accounts/${client.getAccountId()}/d1/database/${databaseId}/query`;
347
- const queries = statements.map((stmt) => {
348
- const impl = stmt;
349
- return {
350
- sql: impl.query,
351
- params: impl.bindings
352
- };
488
+ const r2Bucket = {
489
+ async get(key, options) {
490
+ const headers = {};
491
+ if (options?.range) {
492
+ if (typeof options.range === "object") {
493
+ const range = options.range;
494
+ if ("suffix" in range && range.suffix) {
495
+ headers.Range = `bytes=-${range.suffix}`;
496
+ } else if ("offset" in range && range.offset !== void 0) {
497
+ headers.Range = "length" in range && range.length ? `bytes=${range.offset}-${range.offset + range.length - 1}` : `bytes=${range.offset}-`;
498
+ }
499
+ }
500
+ }
501
+ if (options?.onlyIf) {
502
+ const onlyIf = options.onlyIf;
503
+ if (onlyIf.etagMatches) {
504
+ headers["If-Match"] = onlyIf.etagMatches;
505
+ }
506
+ if (onlyIf.etagDoesNotMatch) {
507
+ headers["If-None-Match"] = onlyIf.etagDoesNotMatch;
508
+ }
509
+ if (onlyIf.uploadedBefore) {
510
+ headers["If-Unmodified-Since"] = onlyIf.uploadedBefore.toUTCString();
511
+ }
512
+ if (onlyIf.uploadedAfter) {
513
+ headers["If-Modified-Since"] = onlyIf.uploadedAfter.toUTCString();
514
+ }
515
+ }
516
+ const response = await signedFetch("GET", key, { headers });
517
+ if (response.status === 404) {
518
+ return null;
519
+ }
520
+ if (!response.ok) {
521
+ throw new Error(`R2 GET failed: ${response.status}`);
522
+ }
523
+ const customMetadata = {};
524
+ response.headers.forEach((value, key2) => {
525
+ if (key2.toLowerCase().startsWith("x-amz-meta-")) {
526
+ const metaKey = key2.slice("x-amz-meta-".length);
527
+ customMetadata[metaKey] = value;
528
+ }
353
529
  });
354
- const data = await client.post(endpoint, queries);
355
- return data.map((result) => ({
356
- success: result.success,
357
- results: result.results,
358
- meta: {
359
- served_by: result.meta.served_by || "",
360
- duration: result.meta.duration || 0,
361
- changes: result.meta.changes || 0,
362
- last_row_id: result.meta.last_row_id || 0,
363
- changed_db: result.meta.changed_db || false,
364
- size_after: result.meta.size_after || 0,
365
- rows_read: result.meta.rows_read || 0,
366
- rows_written: result.meta.rows_written || 0
530
+ const r2Object = {
531
+ key,
532
+ version: response.headers.get("x-amz-version-id") || "",
533
+ size: Number.parseInt(response.headers.get("content-length") || "0", 10),
534
+ etag: response.headers.get("etag") || "",
535
+ httpEtag: response.headers.get("etag") || "",
536
+ checksums: {
537
+ toJSON: () => ({})
538
+ },
539
+ uploaded: new Date(response.headers.get("last-modified") || Date.now()),
540
+ httpMetadata: {
541
+ contentType: response.headers.get("content-type") || void 0,
542
+ contentLanguage: response.headers.get("content-language") || void 0,
543
+ contentDisposition: response.headers.get("content-disposition") || void 0,
544
+ contentEncoding: response.headers.get("content-encoding") || void 0,
545
+ cacheControl: response.headers.get("cache-control") || void 0,
546
+ cacheExpiry: response.headers.get("expires") ? new Date(response.headers.get("expires")) : void 0
547
+ },
548
+ customMetadata,
549
+ storageClass: "Standard",
550
+ range: options?.range ? {
551
+ offset: 0,
552
+ length: Number.parseInt(response.headers.get("content-length") || "0", 10)
553
+ } : void 0,
554
+ body: response.body,
555
+ bodyUsed: false,
556
+ arrayBuffer: () => response.arrayBuffer(),
557
+ text: () => response.text(),
558
+ json: () => response.json(),
559
+ blob: () => response.blob(),
560
+ bytes: async () => new Uint8Array(await response.arrayBuffer()),
561
+ writeHttpMetadata: ((headers2) => {
562
+ const metadata = r2Object.httpMetadata;
563
+ if (metadata.contentType) {
564
+ headers2.set("content-type", metadata.contentType);
565
+ }
566
+ if (metadata.contentLanguage) {
567
+ headers2.set("content-language", metadata.contentLanguage);
568
+ }
569
+ if (metadata.contentDisposition) {
570
+ headers2.set("content-disposition", metadata.contentDisposition);
571
+ }
572
+ if (metadata.contentEncoding) {
573
+ headers2.set("content-encoding", metadata.contentEncoding);
574
+ }
575
+ if (metadata.cacheControl) {
576
+ headers2.set("cache-control", metadata.cacheControl);
577
+ }
578
+ if (metadata.cacheExpiry) {
579
+ headers2.set("expires", metadata.cacheExpiry.toUTCString());
580
+ }
581
+ })
582
+ };
583
+ return r2Object;
584
+ },
585
+ async put(key, value, options) {
586
+ const headers = {};
587
+ if (options?.httpMetadata) {
588
+ const metadata = options.httpMetadata;
589
+ if (metadata.contentType) {
590
+ headers["Content-Type"] = metadata.contentType;
367
591
  }
368
- }));
592
+ if (metadata.contentLanguage) {
593
+ headers["Content-Language"] = metadata.contentLanguage;
594
+ }
595
+ if (metadata.contentDisposition) {
596
+ headers["Content-Disposition"] = metadata.contentDisposition;
597
+ }
598
+ if (metadata.contentEncoding) {
599
+ headers["Content-Encoding"] = metadata.contentEncoding;
600
+ }
601
+ if (metadata.cacheControl) {
602
+ headers["Cache-Control"] = metadata.cacheControl;
603
+ }
604
+ if (metadata.cacheExpiry) {
605
+ headers.Expires = metadata.cacheExpiry.toUTCString();
606
+ }
607
+ }
608
+ if (options?.customMetadata) {
609
+ for (const [key2, value2] of Object.entries(options.customMetadata)) {
610
+ headers[`x-amz-meta-${key2}`] = value2;
611
+ }
612
+ }
613
+ let body;
614
+ if (value === null) {
615
+ body = "";
616
+ } else if (value instanceof Blob) {
617
+ body = await value.arrayBuffer();
618
+ } else if (ArrayBuffer.isView(value)) {
619
+ body = value.buffer.slice(value.byteOffset, value.byteOffset + value.byteLength);
620
+ } else {
621
+ body = value;
622
+ }
623
+ const response = await signedFetch("PUT", key, { headers, body });
624
+ if (!response.ok) {
625
+ throw new Error(`R2 PUT failed: ${response.status}`);
626
+ }
627
+ return {
628
+ key,
629
+ version: response.headers.get("x-amz-version-id") || "",
630
+ size: typeof value === "string" ? value.length : 0,
631
+ etag: response.headers.get("etag") || "",
632
+ httpEtag: response.headers.get("etag") || "",
633
+ checksums: {
634
+ toJSON: () => ({})
635
+ },
636
+ uploaded: /* @__PURE__ */ new Date(),
637
+ httpMetadata: options?.httpMetadata || {},
638
+ customMetadata: options?.customMetadata || {},
639
+ storageClass: "Standard",
640
+ writeHttpMetadata: () => {
641
+ }
642
+ };
369
643
  },
370
- async exec(query) {
371
- const endpoint = `/accounts/${client.getAccountId()}/d1/database/${databaseId}/query`;
372
- const data = await client.post(endpoint, {
373
- sql: query
644
+ async delete(keys) {
645
+ const keyArray = Array.isArray(keys) ? keys : [keys];
646
+ if (keyArray.length === 1) {
647
+ const response = await signedFetch("DELETE", keyArray[0]);
648
+ if (!response.ok && response.status !== 404) {
649
+ throw new Error(`R2 DELETE failed: ${response.status}`);
650
+ }
651
+ } else {
652
+ `<?xml version="1.0" encoding="UTF-8"?>
653
+ <Delete>
654
+ ${keyArray.map((key) => `<Object><Key>${key}</Key></Object>`).join("")}
655
+ </Delete>`;
656
+ const response = await signedFetchBucket("POST", {
657
+ queryParams: { delete: "" },
658
+ headers: { "Content-Type": "application/xml" }
659
+ });
660
+ if (!response.ok) {
661
+ throw new Error(`R2 DELETE BULK failed: ${response.status}`);
662
+ }
663
+ }
664
+ },
665
+ async head(key) {
666
+ const response = await signedFetch("HEAD", key);
667
+ if (response.status === 404) {
668
+ return null;
669
+ }
670
+ if (!response.ok) {
671
+ throw new Error(`R2 HEAD failed: ${response.status}`);
672
+ }
673
+ const customMetadata = {};
674
+ response.headers.forEach((value, key2) => {
675
+ if (key2.toLowerCase().startsWith("x-amz-meta-")) {
676
+ const metaKey = key2.slice("x-amz-meta-".length);
677
+ customMetadata[metaKey] = value;
678
+ }
374
679
  });
375
680
  return {
376
- count: data.length,
377
- duration: data.reduce((acc, r) => acc + (r.meta.duration || 0), 0)
681
+ key,
682
+ version: response.headers.get("x-amz-version-id") || "",
683
+ size: Number.parseInt(response.headers.get("content-length") || "0", 10),
684
+ etag: response.headers.get("etag") || "",
685
+ httpEtag: response.headers.get("etag") || "",
686
+ checksums: {
687
+ toJSON: () => ({})
688
+ },
689
+ uploaded: new Date(response.headers.get("last-modified") || Date.now()),
690
+ httpMetadata: {
691
+ contentType: response.headers.get("content-type") || void 0,
692
+ contentLanguage: response.headers.get("content-language") || void 0,
693
+ contentDisposition: response.headers.get("content-disposition") || void 0,
694
+ contentEncoding: response.headers.get("content-encoding") || void 0,
695
+ cacheControl: response.headers.get("cache-control") || void 0,
696
+ cacheExpiry: response.headers.get("expires") ? new Date(response.headers.get("expires")) : void 0
697
+ },
698
+ customMetadata,
699
+ storageClass: "Standard",
700
+ writeHttpMetadata: () => {
701
+ }
378
702
  };
379
703
  },
380
- dump() {
381
- throw new Error("D1 dump() is deprecated and not supported");
704
+ async list(options) {
705
+ const queryParams = {
706
+ "list-type": "2"
707
+ };
708
+ if (options?.limit) {
709
+ queryParams["max-keys"] = options.limit.toString();
710
+ }
711
+ if (options?.prefix) {
712
+ queryParams.prefix = options.prefix;
713
+ }
714
+ if (options?.cursor) {
715
+ queryParams["continuation-token"] = options.cursor;
716
+ }
717
+ if (options?.delimiter) {
718
+ queryParams.delimiter = options.delimiter;
719
+ }
720
+ if (options?.startAfter) {
721
+ queryParams["start-after"] = options.startAfter;
722
+ }
723
+ const response = await signedFetchBucket("GET", { queryParams });
724
+ if (!response.ok) {
725
+ throw new Error(`R2 LIST failed: ${response.status}`);
726
+ }
727
+ const xml = await response.text();
728
+ const objects = [];
729
+ const delimitedPrefixes = [];
730
+ const contentsRegex = /<Contents>(.*?)<\/Contents>/gs;
731
+ let match;
732
+ while ((match = contentsRegex.exec(xml)) !== null) {
733
+ const content = match[1];
734
+ const key = content.match(/<Key>(.*?)<\/Key>/)?.[1] || "";
735
+ const size = Number.parseInt(content.match(/<Size>(.*?)<\/Size>/)?.[1] || "0", 10);
736
+ const etag = content.match(/<ETag>(.*?)<\/ETag>/)?.[1] || "";
737
+ const lastModified = content.match(/<LastModified>(.*?)<\/LastModified>/)?.[1] || "";
738
+ objects.push({
739
+ key,
740
+ version: "",
741
+ size,
742
+ etag,
743
+ httpEtag: etag,
744
+ checksums: {
745
+ toJSON: () => ({})
746
+ },
747
+ uploaded: new Date(lastModified),
748
+ httpMetadata: {},
749
+ customMetadata: {},
750
+ storageClass: "Standard",
751
+ writeHttpMetadata: () => {
752
+ }
753
+ });
754
+ }
755
+ const prefixRegex = /<CommonPrefixes>.*?<Prefix>(.*?)<\/Prefix>.*?<\/CommonPrefixes>/gs;
756
+ while ((match = prefixRegex.exec(xml)) !== null) {
757
+ delimitedPrefixes.push(match[1]);
758
+ }
759
+ const isTruncated = xml.includes("<IsTruncated>true</IsTruncated>");
760
+ const nextContinuationToken = xml.match(/<NextContinuationToken>(.*?)<\/NextContinuationToken>/)?.[1];
761
+ return {
762
+ objects,
763
+ truncated: isTruncated,
764
+ cursor: nextContinuationToken,
765
+ delimitedPrefixes
766
+ };
767
+ },
768
+ createMultipartUpload(_key, _options) {
769
+ throw new Error("R2 multipart upload not yet implemented");
770
+ },
771
+ resumeMultipartUpload(_key, _uploadId) {
772
+ throw new Error("R2 multipart upload not yet implemented");
382
773
  }
383
774
  };
775
+ return r2Bucket;
776
+ }
777
+
778
+ class CloudflareAPIClient {
779
+ baseUrl = "https://api.cloudflare.com/client/v4";
780
+ accountId;
781
+ apiToken;
782
+ constructor(accountId, apiToken) {
783
+ this.accountId = accountId;
784
+ this.apiToken = apiToken;
785
+ }
786
+ async request(method, endpoint, body, headers) {
787
+ const url = `${this.baseUrl}${endpoint}`;
788
+ const response = await fetch(url, {
789
+ method,
790
+ headers: {
791
+ "Authorization": `Bearer ${this.apiToken}`,
792
+ "Content-Type": "application/json",
793
+ ...headers
794
+ },
795
+ body: body ? JSON.stringify(body) : void 0
796
+ });
797
+ const data = await response.json();
798
+ if (!response.ok || !data.success) {
799
+ const errorMessage = data.errors?.[0]?.message || `HTTP ${response.status}`;
800
+ throw new Error(`Cloudflare API Error: ${errorMessage}`);
801
+ }
802
+ return data.result;
803
+ }
804
+ async get(endpoint, headers) {
805
+ return this.request("GET", endpoint, void 0, headers);
806
+ }
807
+ async post(endpoint, body, headers) {
808
+ return this.request("POST", endpoint, body, headers);
809
+ }
810
+ async put(endpoint, body, headers) {
811
+ return this.request("PUT", endpoint, body, headers);
812
+ }
813
+ async delete(endpoint, body, headers) {
814
+ return this.request("DELETE", endpoint, body, headers);
815
+ }
816
+ getAccountId() {
817
+ return this.accountId;
818
+ }
384
819
  }
385
820
 
386
821
  function createR2Binding(config, bucketName) {
387
- throw new Error("R2 binding not yet implemented. Use KV or D1 instead.");
822
+ return createR2Binding$1(config, bucketName);
388
823
  }
389
824
  function createKVBinding(config, namespaceId) {
390
825
  const client = new CloudflareAPIClient(config.accountId, config.apiToken);
package/package.json CHANGED
@@ -1,11 +1,11 @@
  {
  "name": "openwrangler",
- "version": "0.0.3",
  "type": "module",
+ "version": "0.0.5",
  "exports": {
  ".": {
- "import": "./dist/index.mjs",
- "types": "./dist/index.d.ts"
+ "types": "./dist/index.d.ts",
+ "import": "./dist/index.mjs"
  }
  },
  "main": "./dist/index.mjs",
@@ -14,15 +14,19 @@
  "dist"
  ],
  "devDependencies": {
+ "@antfu/eslint-config": "^6.7.3",
  "@cloudflare/workers-types": "^4.20251231.0",
+ "eslint": "^9.39.2",
  "nuxt": "^3.15.0",
  "typescript": "^5.7.2",
  "unbuild": "^3.3.1",
- "wrangler": "^3.99.0",
- "@binochoi/nitro-cloudflare-dev": "npm:@bino0216/nitro-cloudflare-dev@0.2.5"
+ "wrangler": "^4.54.0",
+ "@bino0216/nitro-cloudflare-dev": "0.2.7"
  },
  "scripts": {
- "dev": "nuxi dev playground",
- "build": "unbuild"
+ "dev": "cd ./playground && pnpm dev",
+ "build": "unbuild",
+ "lint": "eslint .",
+ "lint:fix": "eslint . --fix"
  }
  }