@authhero/cloudflare-adapter 2.7.5 → 2.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +422 -2
- package/dist/cloudflare-adapter.cjs +14 -1
- package/dist/cloudflare-adapter.d.ts +282 -2
- package/dist/cloudflare-adapter.mjs +1520 -1314
- package/package.json +20 -20
package/README.md
CHANGED
# @authhero/cloudflare-adapter

Cloudflare-specific adapters for AuthHero, providing integrations with Cloudflare services.

## Features

This package provides three adapters:

- **Custom Domains** - Manage custom domains via the Cloudflare API
- **Cache** - Caching using Cloudflare's Cache API
- **Logs** (optional) - Write authentication logs to Cloudflare R2 using Pipelines and query them with R2 SQL

## Installation

```bash
npm install @authhero/cloudflare-adapter
```

## Usage

### HTTP Endpoint Mode (Default)

```typescript
import createAdapters from "@authhero/cloudflare-adapter";

const adapters = createAdapters({
  // Custom domains configuration
  zoneId: "your-cloudflare-zone-id",
  authKey: "your-cloudflare-api-key",
  authEmail: "your-cloudflare-email",
  customDomainAdapter: yourDatabaseCustomDomainsAdapter,

  // Cache configuration (optional)
  cacheName: "default",
  defaultTtlSeconds: 3600,
  keyPrefix: "authhero:",

  // R2 SQL logs configuration (optional) - HTTP mode
  r2SqlLogs: {
    pipelineEndpoint: "https://your-stream-id.ingest.cloudflare.com",
    authToken: process.env.R2_SQL_AUTH_TOKEN,
    warehouseName: process.env.R2_WAREHOUSE_NAME,
    namespace: "default",
    tableName: "logs",
  },
});

// Use the adapters
const { customDomains, cache, logs } = adapters;
```

### Service Binding Mode (Cloudflare Workers)

```typescript
import createAdapters from "@authhero/cloudflare-adapter";

interface Env {
  PIPELINE_SERVICE: { fetch: typeof fetch };
  R2_SQL_AUTH_TOKEN: string;
  R2_WAREHOUSE_NAME: string;
}

export default {
  async fetch(request: Request, env: Env) {
    const adapters = createAdapters({
      zoneId: "your-cloudflare-zone-id",
      authKey: "your-cloudflare-api-key",
      authEmail: "your-cloudflare-email",
      customDomainAdapter: yourDatabaseCustomDomainsAdapter,

      // R2 SQL logs with service binding
      r2SqlLogs: {
        pipelineBinding: env.PIPELINE_SERVICE,
        authToken: env.R2_SQL_AUTH_TOKEN,
        warehouseName: env.R2_WAREHOUSE_NAME,
      },
    });

    // Use adapters.logs
  },
};
```

### Passthrough Mode (Multiple Destinations)

```typescript
import createAdapters from "@authhero/cloudflare-adapter";
import { createOtherLogsAdapter } from "some-package";

// Create a base logs adapter
const baseAdapter = createOtherLogsAdapter();

const adapters = createAdapters({
  zoneId: "your-cloudflare-zone-id",
  authKey: "your-cloudflare-api-key",
  authEmail: "your-cloudflare-email",
  customDomainAdapter: yourDatabaseCustomDomainsAdapter,

  // R2 SQL logs in passthrough mode - sends to both adapters
  r2SqlLogs: {
    baseAdapter,
    pipelineEndpoint: "https://your-stream-id.ingest.cloudflare.com",
    authToken: process.env.R2_SQL_AUTH_TOKEN,
    warehouseName: process.env.R2_WAREHOUSE_NAME,
  },
});

// logs.create() will write to baseAdapter and the Pipeline
// logs.get() and logs.list() will read from baseAdapter
const { logs } = adapters;
```

## Adapters

### Custom Domains Adapter

Manages custom domains through the Cloudflare API.

#### Configuration

```typescript
{
  zoneId: string; // Your Cloudflare zone ID
  authKey: string; // Your Cloudflare API key
  authEmail: string; // Your Cloudflare account email
  enterprise?: boolean; // Whether using Cloudflare Enterprise (default: false)
  customDomainAdapter: CustomDomainsAdapter; // Database adapter for storing domain data
}
```

#### Methods

- `create(tenantId, domain)` - Create a custom domain
- `get(tenantId, domainId)` - Get a custom domain
- `getByDomain(domain)` - Get a domain by its domain name
- `list(tenantId, params)` - List custom domains
- `remove(tenantId, domainId)` - Remove a custom domain
- `update(tenantId, domainId, data)` - Update a custom domain

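For orientation, here is a usage sketch of these methods, using the `customDomains` adapter destructured in the Usage section above. The payload passed to `create()` and the `"domain-id"` placeholder are illustrative assumptions, not the documented contract; the exact shapes come from the package's `CustomDomainsAdapter` types.

```typescript
// Illustrative only - the create() payload fields and return shapes are assumptions;
// consult the package's TypeScript types for the exact contract.
const created = await customDomains.create("tenant-123", {
  domain: "auth.example.com", // assumed field name
});

const byName = await customDomains.getByDomain("auth.example.com");
const domains = await customDomains.list("tenant-123", {});

// "domain-id" stands in for whatever identifier create() returns
await customDomains.update("tenant-123", "domain-id", { /* fields to change */ });
await customDomains.remove("tenant-123", "domain-id");
```
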
### Cache Adapter

Provides caching using Cloudflare's Cache API.

#### Configuration

```typescript
{
  cacheName?: string; // Cache name (default: "default")
  defaultTtlSeconds?: number; // Default TTL in seconds (default: undefined)
  keyPrefix?: string; // Key prefix for namespacing (default: undefined)
}
```

#### Methods

- `get<T>(key)` - Get a value from cache
- `set<T>(key, value, ttl?)` - Set a value in cache with an optional TTL
- `delete(key)` - Delete a value from cache

#### Example

```typescript
// Set a value with a 1 hour TTL
await cache.set("user:123", userData, 3600);

// Get a value
const user = await cache.get("user:123");

// Delete a value
await cache.delete("user:123");
```

### Logs Adapter (R2 SQL + Pipelines)

Write authentication logs to Cloudflare R2 using Pipelines for ingestion and R2 SQL for querying.

#### Architecture

This adapter uses two Cloudflare services:

- **Pipelines**: HTTP endpoint for ingesting log data into R2
- **R2 SQL**: Query interface for analyzing logs stored in Apache Iceberg format

#### Prerequisites

1. **Create an R2 bucket and enable R2 Data Catalog**:

   ```bash
   npx wrangler r2 bucket create my-authhero-logs
   npx wrangler r2 bucket catalog enable my-authhero-logs
   ```

   Note the "Warehouse" name from the output - you'll need it later.

2. **Create an API token**

   In the Cloudflare Dashboard:

   - Go to R2 object storage
   - Select "Manage API tokens"
   - Create a token with Admin Read & Write permissions
   - Save the token value

3. **Create a schema file** (`schema.json`):

   ```json
   {
     "fields": [
       { "name": "id", "type": "string", "required": true },
       { "name": "tenant_id", "type": "string", "required": true },
       { "name": "type", "type": "string", "required": true },
       { "name": "date", "type": "string", "required": true },
       { "name": "description", "type": "string", "required": false },
       { "name": "ip", "type": "string", "required": false },
       { "name": "user_agent", "type": "string", "required": true },
       { "name": "details", "type": "string", "required": false },
       { "name": "isMobile", "type": "int64", "required": false },
       { "name": "user_id", "type": "string", "required": false },
       { "name": "user_name", "type": "string", "required": false },
       { "name": "connection", "type": "string", "required": false },
       { "name": "connection_id", "type": "string", "required": false },
       { "name": "client_id", "type": "string", "required": false },
       { "name": "client_name", "type": "string", "required": false },
       { "name": "audience", "type": "string", "required": false },
       { "name": "scope", "type": "string", "required": false },
       { "name": "strategy", "type": "string", "required": false },
       { "name": "strategy_type", "type": "string", "required": false },
       { "name": "hostname", "type": "string", "required": false },
       { "name": "auth0_client", "type": "string", "required": false },
       { "name": "log_id", "type": "string", "required": true }
     ]
   }
   ```

4. **Create a Pipeline**:

   ```bash
   npx wrangler pipelines setup
   ```

   Follow the prompts:

   - **Pipeline name**: `authhero-logs`
   - **Enable HTTP endpoint**: `yes`
   - **Require authentication**: `no` (or `yes` if you want additional security)
   - **Configure custom CORS origins**: `no`
   - **Schema definition**: `Load from file`
   - **Schema file path**: `schema.json`
   - **Destination type**: `Data Catalog Table`
   - **R2 bucket name**: `my-authhero-logs`
   - **Namespace**: `default`
   - **Table name**: `logs`
   - **Catalog API token**: Enter your token from step 2
   - **Compression**: `zstd`
   - **Roll file when size reaches (MB)**: `100`
   - **Roll file when time reaches (seconds)**: `300` (5 minutes)
   - **SQL transformation**: `Use simple ingestion query`

   **Important**: Save the HTTP endpoint URL from the output (e.g., `https://abc123.ingest.cloudflare.com`)

#### Configuration

The R2 SQL logs adapter supports three usage modes:

##### 1. HTTP Endpoint Mode (Default)

Use this mode when calling the Pipeline from outside a Cloudflare Worker:

```typescript
{
  pipelineEndpoint: string; // Pipeline HTTP endpoint URL for ingesting logs
  authToken: string; // R2 SQL API token for querying logs
  warehouseName: string; // R2 warehouse name
  namespace?: string; // Catalog namespace (default: "default")
  tableName?: string; // Table name (default: "logs")
  apiBaseUrl?: string; // R2 SQL API base URL
  timeout?: number; // HTTP timeout in ms (default: 30000)
}
```

##### 2. Service Binding Mode (Workers)

Use this mode when running inside a Cloudflare Worker with a service binding to the Pipeline:

```toml
# wrangler.toml
[[pipelines]]
binding = "PIPELINE_SERVICE"
pipeline = "my-pipeline"
```

```typescript
interface Env {
  PIPELINE_SERVICE: { fetch: typeof fetch };
  R2_SQL_AUTH_TOKEN: string;
  R2_WAREHOUSE_NAME: string;
}

const { logs } = createAdapters({
  r2SqlLogs: {
    pipelineBinding: env.PIPELINE_SERVICE,
    authToken: env.R2_SQL_AUTH_TOKEN,
    warehouseName: env.R2_WAREHOUSE_NAME,
  },
});
```

This mode is more efficient as it avoids HTTP overhead for Worker-to-Worker communication.

##### 3. Passthrough Mode (Wrap Another Adapter)

Use this mode to send logs to both the R2 SQL Pipeline and another logs adapter:

```typescript
const baseAdapter = createSomeOtherLogsAdapter();

const { logs } = createAdapters({
  r2SqlLogs: {
    baseAdapter,
    pipelineEndpoint: "https://your-stream-id.ingest.cloudflare.com",
    authToken: env.R2_SQL_AUTH_TOKEN,
    warehouseName: env.R2_WAREHOUSE_NAME,
  },
});
```

In passthrough mode:

- `create()` calls the base adapter first, then sends to the Pipeline in the background
- `get()` and `list()` are delegated to the base adapter
- Pipeline ingestion errors are logged but don't fail the operation

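Conceptually, this behaviour corresponds to a wrapper along the lines of the sketch below. This is not the package's actual implementation; `baseAdapter` and `sendToPipeline` are stand-ins for the configured base adapter and the Pipeline ingestion call.

```typescript
// Illustrative sketch of the passthrough behaviour described above.
declare const baseAdapter: {
  create(tenantId: string, log: unknown): Promise<unknown>;
  get(tenantId: string, logId: string): Promise<unknown>;
  list(tenantId: string, params: unknown): Promise<unknown>;
};
declare function sendToPipeline(tenantId: string, log: unknown): Promise<void>;

const passthroughLogs = {
  async create(tenantId: string, log: unknown) {
    // The base adapter is written first and remains the source of truth.
    const created = await baseAdapter.create(tenantId, log);

    // Pipeline ingestion runs in the background; failures are logged, not thrown.
    sendToPipeline(tenantId, log).catch((err) =>
      console.error("Pipeline ingestion failed", err),
    );

    return created;
  },

  // Reads are delegated to the base adapter.
  get: (tenantId: string, logId: string) => baseAdapter.get(tenantId, logId),
  list: (tenantId: string, params: unknown) => baseAdapter.list(tenantId, params),
};
```
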
#### Methods

- `create(tenantId, log)` - Create a log entry (sends to the Pipeline)
- `get(tenantId, logId)` - Get a log entry (queries R2 SQL)
- `list(tenantId, params)` - List logs with pagination and filtering (queries R2 SQL)

#### Example

```typescript
// Create a log
const log = await logs.create("tenant-123", {
  type: "s", // Successful login
  date: new Date().toISOString(),
  ip: "192.168.1.100",
  user_agent: "Mozilla/5.0...",
  isMobile: false,
  user_id: "user-456",
  client_id: "app-789",
  description: "User logged in successfully",
});

// List logs with filtering
const result = await logs.list("tenant-123", {
  page: 0,
  per_page: 50,
  include_totals: true,
  sort: {
    sort_by: "date",
    sort_order: "desc",
  },
  q: "user_id:user-456", // Lucene-style filter
});
```

#### Querying Logs with R2 SQL

You can query logs directly using the Wrangler CLI:

```bash
# Set up authentication
export WRANGLER_R2_SQL_AUTH_TOKEN=your_api_token

# Query logs
npx wrangler r2 sql query "your_warehouse" "
  SELECT * FROM default.logs
  WHERE tenant_id = 'tenant-123'
    AND type = 's'
  ORDER BY date DESC
  LIMIT 100
"

# Count logs by type
npx wrangler r2 sql query "your_warehouse" "
  SELECT type, COUNT(*) as count
  FROM default.logs
  WHERE tenant_id = 'tenant-123'
  GROUP BY type
"
```

## Environment Variables

Recommended environment variables:

```env
# Custom Domains
CLOUDFLARE_ZONE_ID=your_zone_id
CLOUDFLARE_AUTH_KEY=your_api_key
CLOUDFLARE_AUTH_EMAIL=your_email

# R2 SQL Logs (optional)
PIPELINE_ENDPOINT=https://your-stream-id.ingest.cloudflare.com
R2_SQL_AUTH_TOKEN=your_r2_sql_token
R2_WAREHOUSE_NAME=your_warehouse_name
```

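These variables can be wired into `createAdapters` roughly as in the sketch below, for a Node-style environment. Reading from `process.env` (and the non-null assertions) are assumptions, and `yourDatabaseCustomDomainsAdapter` is the same placeholder used in the Usage examples; inside a Worker, pass the values from the `env` binding instead.

```typescript
// Sketch: mapping the environment variables above onto the adapter configuration.
import createAdapters from "@authhero/cloudflare-adapter";

const adapters = createAdapters({
  zoneId: process.env.CLOUDFLARE_ZONE_ID!,
  authKey: process.env.CLOUDFLARE_AUTH_KEY!,
  authEmail: process.env.CLOUDFLARE_AUTH_EMAIL!,
  customDomainAdapter: yourDatabaseCustomDomainsAdapter,

  r2SqlLogs: {
    pipelineEndpoint: process.env.PIPELINE_ENDPOINT!,
    authToken: process.env.R2_SQL_AUTH_TOKEN!,
    warehouseName: process.env.R2_WAREHOUSE_NAME!,
  },
});
```
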
## TypeScript

The package includes TypeScript definitions. Import types as needed:

```typescript
import type {
  CloudflareConfig,
  CloudflareAdapters,
  R2SQLLogsAdapterConfig,
} from "@authhero/cloudflare-adapter";
```

## Related Documentation

- [Cloudflare API](https://developers.cloudflare.com/api/)
- [Cloudflare Cache API](https://developers.cloudflare.com/workers/runtime-apis/cache/)
- [Cloudflare R2 SQL](https://developers.cloudflare.com/r2-sql/)
- [R2 Data Catalog](https://developers.cloudflare.com/r2/data-catalog/)

## License

MIT