@authhero/cloudflare-adapter 2.7.6 → 2.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,5 +1,580 @@
  # @authhero/cloudflare-adapter
 
- An adapter for the cloudflare api handling:
+ Cloudflare-specific adapters for AuthHero, providing integrations with Cloudflare services.
 
- - custom domains
+ ## Features
+
+ This package provides four adapters:
+
+ - **Custom Domains** - Manage custom domains via Cloudflare API
+ - **Cache** - Caching using Cloudflare's Cache API
+ - **Geo** (optional) - Extract geographic information from Cloudflare request headers
+ - **Logs** (optional) - Write authentication logs to Cloudflare R2 using Pipelines and query with R2 SQL
+
+ ## Installation
+
+ ```bash
+ npm install @authhero/cloudflare-adapter
+ ```
+
+ ## Usage
+
+ ### HTTP Endpoint Mode (Default)
+
+ ```typescript
+ import createAdapters from "@authhero/cloudflare-adapter";
+
+ const adapters = createAdapters({
+   // Custom domains configuration
+   zoneId: "your-cloudflare-zone-id",
+   authKey: "your-cloudflare-api-key",
+   authEmail: "your-cloudflare-email",
+   customDomainAdapter: yourDatabaseCustomDomainsAdapter,
+
+   // Cache configuration (optional)
+   cacheName: "default",
+   defaultTtlSeconds: 3600,
+   keyPrefix: "authhero:",
+
+   // Geo adapter configuration (optional) - automatically included when getHeaders is provided
+   getHeaders: () => {
+     // In Cloudflare Workers, you'd typically pass request headers
+     // Cloudflare automatically adds cf-ipcountry, cf-ipcity, etc.
+     return request.headers;
+   },
+
+   // R2 SQL logs configuration (optional) - HTTP mode
+   r2SqlLogs: {
+     pipelineEndpoint: "https://your-stream-id.ingest.cloudflare.com",
+     authToken: process.env.R2_SQL_AUTH_TOKEN,
+     warehouseName: process.env.R2_WAREHOUSE_NAME,
+     namespace: "default",
+     tableName: "logs",
+   },
+ });
+
+ // Use the adapters
+ const { customDomains, cache, geo, logs } = adapters;
+ ```
+
+ ### Service Binding Mode (Cloudflare Workers)
+
+ ```typescript
+ import createAdapters from "@authhero/cloudflare-adapter";
+
+ interface Env {
+   PIPELINE_SERVICE: { fetch: typeof fetch };
+   R2_SQL_AUTH_TOKEN: string;
+   R2_WAREHOUSE_NAME: string;
+ }
+
+ export default {
+   async fetch(request: Request, env: Env) {
+     const adapters = createAdapters({
+       zoneId: "your-cloudflare-zone-id",
+       authKey: "your-cloudflare-api-key",
+       authEmail: "your-cloudflare-email",
+       customDomainAdapter: yourDatabaseCustomDomainsAdapter,
+
+       // Geo adapter - extract location from Cloudflare headers
+       getHeaders: () => Object.fromEntries(request.headers),
+
+       // R2 SQL logs with service binding
+       r2SqlLogs: {
+         pipelineBinding: env.PIPELINE_SERVICE,
+         authToken: env.R2_SQL_AUTH_TOKEN,
+         warehouseName: env.R2_WAREHOUSE_NAME,
+       },
+     });
+
+     // Use adapters.logs and adapters.geo
+   },
+ };
+ ```
+
+ ### Passthrough Mode (Multiple Destinations)
+
+ ```typescript
+ import createAdapters from "@authhero/cloudflare-adapter";
+ import { createOtherLogsAdapter } from "some-package";
+
+ // Create a base logs adapter
+ const baseAdapter = createOtherLogsAdapter();
+
+ const adapters = createAdapters({
+   zoneId: "your-cloudflare-zone-id",
+   authKey: "your-cloudflare-api-key",
+   authEmail: "your-cloudflare-email",
+   customDomainAdapter: yourDatabaseCustomDomainsAdapter,
+
+   // R2 SQL logs in passthrough mode - sends to both adapters
+   r2SqlLogs: {
+     baseAdapter,
+     pipelineEndpoint: "https://your-stream-id.ingest.cloudflare.com",
+     authToken: process.env.R2_SQL_AUTH_TOKEN,
+     warehouseName: process.env.R2_WAREHOUSE_NAME,
+   },
+ });
+
+ // logs.create() will write to baseAdapter and Pipeline
+ // logs.get() and logs.list() will read from baseAdapter
+ const { logs } = adapters;
+ ```
+
+ ## Adapters
+
+ ### Custom Domains Adapter
+
+ Manages custom domains through the Cloudflare API.
+
+ #### Configuration
+
+ ```typescript
+ {
+   zoneId: string; // Your Cloudflare zone ID
+   authKey: string; // Your Cloudflare API key
+   authEmail: string; // Your Cloudflare account email
+   enterprise?: boolean; // Whether using Cloudflare Enterprise (default: false)
+   customDomainAdapter: CustomDomainsAdapter; // Database adapter for storing domain data
+ }
+ ```
+
+ #### Methods
+
+ - `create(tenantId, domain)` - Create a custom domain
+ - `get(tenantId, domainId)` - Get a custom domain
+ - `getByDomain(domain)` - Get domain by domain name
+ - `list(tenantId, params)` - List custom domains
+ - `remove(tenantId, domainId)` - Remove a custom domain
+ - `update(tenantId, domainId, data)` - Update a custom domain
+
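+ A minimal usage sketch of these methods; the payload fields and the id property on the returned object are placeholders, since their exact shape comes from your `CustomDomainsAdapter`:
+
+ ```typescript
+ // Hypothetical payload/return shapes - adjust to your CustomDomainsAdapter.
+ const created = await customDomains.create("tenant-123", {
+   domain: "auth.example.com",
+ });
+
+ // Look up by id or by domain name, then remove.
+ const byId = await customDomains.get("tenant-123", created.custom_domain_id);
+ const byName = await customDomains.getByDomain("auth.example.com");
+ await customDomains.remove("tenant-123", created.custom_domain_id);
+ ```
+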
+ ### Cache Adapter
+
+ Provides caching using Cloudflare's Cache API.
+
+ #### Configuration
+
+ ```typescript
+ {
+   cacheName?: string; // Cache name (default: "default")
+   defaultTtlSeconds?: number; // Default TTL in seconds (default: undefined)
+   keyPrefix?: string; // Key prefix for namespacing (default: undefined)
+ }
+ ```
+
+ #### Methods
+
+ - `get<T>(key)` - Get a value from cache
+ - `set<T>(key, value, ttl?)` - Set a value in cache with optional TTL
+ - `delete(key)` - Delete a value from cache
+
+ #### Example
+
+ ```typescript
+ // Set a value with 1 hour TTL
+ await cache.set("user:123", userData, 3600);
+
+ // Get a value
+ const user = await cache.get("user:123");
+
+ // Delete a value
+ await cache.delete("user:123");
+ ```
+
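+ Under the hood, the Workers Cache API stores `Request`/`Response` pairs rather than arbitrary values, so a key/value adapter has to synthesize both. A rough sketch of that pattern (not necessarily this package's exact implementation; the URL scheme and header choices are assumptions):
+
+ ```typescript
+ // Sketch of a key/value layer on top of the Workers Cache API.
+ function keyToRequest(key: string): Request {
+   // The Cache API is keyed by URL, so derive a stable, namespaced URL per key.
+   return new Request(`https://cache.local/${encodeURIComponent("authhero:" + key)}`);
+ }
+
+ async function setValue<T>(key: string, value: T, ttlSeconds = 3600): Promise<void> {
+   const cache = await caches.open("default");
+   const response = new Response(JSON.stringify(value), {
+     headers: {
+       "content-type": "application/json",
+       // The Cache API uses Cache-Control to decide how long to keep the entry.
+       "cache-control": `max-age=${ttlSeconds}`,
+     },
+   });
+   await cache.put(keyToRequest(key), response);
+ }
+
+ async function getValue<T>(key: string): Promise<T | null> {
+   const cache = await caches.open("default");
+   const hit = await cache.match(keyToRequest(key));
+   return hit ? ((await hit.json()) as T) : null;
+ }
+ ```
+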
+ ### Logs Adapter (R2 SQL + Pipelines)
+
+ Write authentication logs to Cloudflare R2 using Pipelines for ingestion and R2 SQL for querying.
+
+ #### Architecture
+
+ This adapter uses two Cloudflare services:
+
+ - **Pipelines**: HTTP endpoint for ingesting log data into R2
+ - **R2 SQL**: Query interface for analyzing logs stored in Apache Iceberg format
+
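+ Conceptually, ingestion is an HTTP POST of log records to the Pipeline's stream endpoint, and querying goes through R2 SQL. The adapter does all of this for you; the sketch below only illustrates the ingestion half, and the JSON-array body shape is an assumption for illustration:
+
+ ```typescript
+ // Illustrative only - the adapter performs ingestion internally.
+ const res = await fetch("https://your-stream-id.ingest.cloudflare.com", {
+   method: "POST",
+   headers: { "content-type": "application/json" },
+   body: JSON.stringify([
+     {
+       id: "log_123",
+       tenant_id: "tenant-123",
+       type: "s",
+       date: new Date().toISOString(),
+       user_agent: "Mozilla/5.0...",
+       log_id: "log_123",
+     },
+   ]),
+ });
+ if (!res.ok) {
+   console.error("Pipeline ingestion failed:", res.status);
+ }
+ ```
+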
+ #### Prerequisites
+
+ 1. **Create an R2 bucket and enable R2 Data Catalog**:
+
+    ```bash
+    npx wrangler r2 bucket create my-authhero-logs
+    npx wrangler r2 bucket catalog enable my-authhero-logs
+    ```
+
+    Note the "Warehouse" name from the output - you'll need it later.
+
+ 2. **Create an API token**
+
+    In the Cloudflare Dashboard:
+
+    - Go to R2 object storage
+    - Select "Manage API tokens"
+    - Create a token with Admin Read & Write permissions
+    - Save the token value
+
+ 3. **Create a schema file** (`schema.json`):
+
+    ```json
+    {
+      "fields": [
+        { "name": "id", "type": "string", "required": true },
+        { "name": "tenant_id", "type": "string", "required": true },
+        { "name": "type", "type": "string", "required": true },
+        { "name": "date", "type": "string", "required": true },
+        { "name": "description", "type": "string", "required": false },
+        { "name": "ip", "type": "string", "required": false },
+        { "name": "user_agent", "type": "string", "required": true },
+        { "name": "details", "type": "string", "required": false },
+        { "name": "isMobile", "type": "int64", "required": false },
+        { "name": "user_id", "type": "string", "required": false },
+        { "name": "user_name", "type": "string", "required": false },
+        { "name": "connection", "type": "string", "required": false },
+        { "name": "connection_id", "type": "string", "required": false },
+        { "name": "client_id", "type": "string", "required": false },
+        { "name": "client_name", "type": "string", "required": false },
+        { "name": "audience", "type": "string", "required": false },
+        { "name": "scope", "type": "string", "required": false },
+        { "name": "strategy", "type": "string", "required": false },
+        { "name": "strategy_type", "type": "string", "required": false },
+        { "name": "hostname", "type": "string", "required": false },
+        { "name": "auth0_client", "type": "string", "required": false },
+        { "name": "log_id", "type": "string", "required": true }
+      ]
+    }
+    ```
+
+ 4. **Create a Pipeline**:
+
+    ```bash
+    npx wrangler pipelines setup
+    ```
+
+    Follow the prompts:
+
+    - **Pipeline name**: `authhero-logs`
+    - **Enable HTTP endpoint**: `yes`
+    - **Require authentication**: `no` (or `yes` if you want additional security)
+    - **Configure custom CORS origins**: `no`
+    - **Schema definition**: `Load from file`
+    - **Schema file path**: `schema.json`
+    - **Destination type**: `Data Catalog Table`
+    - **R2 bucket name**: `my-authhero-logs`
+    - **Namespace**: `default`
+    - **Table name**: `logs`
+    - **Catalog API token**: Enter your token from step 2
+    - **Compression**: `zstd`
+    - **Roll file when size reaches (MB)**: `100`
+    - **Roll file when time reaches (seconds)**: `300` (5 minutes)
+    - **SQL transformation**: `Use simple ingestion query`
+
+    **Important**: Save the HTTP endpoint URL from the output (e.g., `https://abc123.ingest.cloudflare.com`).
+
+ #### Configuration
+
+ The R2 SQL logs adapter supports three usage modes:
+
+ ##### 1. HTTP Endpoint Mode (Default)
+
+ Use this mode when calling the Pipeline from outside a Cloudflare Worker:
+
+ ```typescript
+ {
+   pipelineEndpoint: string; // Pipeline HTTP endpoint URL for ingesting logs
+   authToken: string; // R2 SQL API token for querying logs
+   warehouseName: string; // R2 warehouse name
+   namespace?: string; // Catalog namespace (default: "default")
+   tableName?: string; // Table name (default: "logs")
+   apiBaseUrl?: string; // R2 SQL API base URL
+   timeout?: number; // HTTP timeout in ms (default: 30000)
+ }
+ ```
+
+ ##### 2. Service Binding Mode (Workers)
+
+ Use this mode when running inside a Cloudflare Worker with a service binding to the Pipeline:
+
+ ```toml
+ # wrangler.toml
+ [[pipelines]]
+ binding = "PIPELINE_SERVICE"
+ pipeline = "my-pipeline"
+ ```
+
+ ```typescript
+ interface Env {
+   PIPELINE_SERVICE: { fetch: typeof fetch };
+   R2_SQL_AUTH_TOKEN: string;
+   R2_WAREHOUSE_NAME: string;
+ }
+
+ const { logs } = createAdapters({
+   r2SqlLogs: {
+     pipelineBinding: env.PIPELINE_SERVICE,
+     authToken: env.R2_SQL_AUTH_TOKEN,
+     warehouseName: env.R2_WAREHOUSE_NAME,
+   },
+ });
+ ```
+
+ This mode is more efficient as it avoids HTTP overhead for Worker-to-Worker communication.
+
+ ##### 3. Passthrough Mode (Wrap Another Adapter)
+
+ Use this mode to send logs to both the R2 SQL Pipeline and another logs adapter:
+
+ ```typescript
+ const baseAdapter = createSomeOtherLogsAdapter();
+
+ const { logs } = createAdapters({
+   r2SqlLogs: {
+     baseAdapter,
+     pipelineEndpoint: "https://your-stream-id.ingest.cloudflare.com",
+     authToken: env.R2_SQL_AUTH_TOKEN,
+     warehouseName: env.R2_WAREHOUSE_NAME,
+   },
+ });
+ ```
+
+ In passthrough mode:
+
+ - `create()` calls the base adapter first, then sends to the Pipeline in the background
+ - `get()` and `list()` are delegated to the base adapter
+ - Pipeline ingestion errors are logged but don't fail the operation
+
+ #### Methods
+
+ - `create(tenantId, log)` - Create a log entry (sends to Pipeline)
+ - `get(tenantId, logId)` - Get a log entry (queries R2 SQL)
+ - `list(tenantId, params)` - List logs with pagination and filtering (queries R2 SQL)
+
+ #### Example
+
+ ```typescript
+ // Create a log
+ const log = await logs.create("tenant-123", {
+   type: "s", // Successful login
+   date: new Date().toISOString(),
+   ip: "192.168.1.100",
+   user_agent: "Mozilla/5.0...",
+   isMobile: false,
+   user_id: "user-456",
+   client_id: "app-789",
+   description: "User logged in successfully",
+ });
+
+ // List logs with filtering
+ const result = await logs.list("tenant-123", {
+   page: 0,
+   per_page: 50,
+   include_totals: true,
+   sort: {
+     sort_by: "date",
+     sort_order: "desc",
+   },
+   q: "user_id:user-456", // Lucene-style filter
+ });
+ ```
+
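+ `get()` retrieves a single entry by its id (the id value below is a placeholder):
+
+ ```typescript
+ // Fetch one log entry by id (placeholder id).
+ const single = await logs.get("tenant-123", "log_abc123");
+ ```
+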
+ #### Querying Logs with R2 SQL
+
+ You can query logs directly using the Wrangler CLI:
+
+ ```bash
+ # Set up authentication
+ export WRANGLER_R2_SQL_AUTH_TOKEN=your_api_token
+
+ # Query logs
+ npx wrangler r2 sql query "your_warehouse" "
+   SELECT * FROM default.logs
+   WHERE tenant_id = 'tenant-123'
+     AND type = 's'
+   ORDER BY date DESC
+   LIMIT 100
+ "
+
+ # Count logs by type
+ npx wrangler r2 sql query "your_warehouse" "
+   SELECT type, COUNT(*) as count
+   FROM default.logs
+   WHERE tenant_id = 'tenant-123'
+   GROUP BY type
+ "
+ ```
+
+ ### Geo Adapter
+
+ The Cloudflare Geo adapter extracts geographic location information from Cloudflare's automatic request headers. This is used to enrich authentication logs with location data.
+
+ #### Features
+
+ - **Zero Latency**: Uses headers already provided by Cloudflare Workers
+ - **No API Calls**: No external services or databases required
+ - **Comprehensive Data**: Includes country, city, coordinates, timezone, and continent
+ - **Automatic**: Cloudflare populates headers automatically for every request
+
+ #### Configuration
+
+ The geo adapter is automatically created when you provide the `getHeaders` function:
+
+ ```typescript
+ const adapters = createAdapters({
+   // ... other config
+   getHeaders: () => Object.fromEntries(request.headers),
+ });
+
+ // Access the geo adapter
+ const geoInfo = await adapters.geo?.getGeoInfo();
+ ```
+
+ #### Cloudflare Headers Used
+
+ The adapter reads these Cloudflare-provided headers:
+
+ | Header           | Description               | Example               |
+ | ---------------- | ------------------------- | --------------------- |
+ | `cf-ipcountry`   | 2-letter ISO country code | `US`                  |
+ | `cf-ipcity`      | City name                 | `San Francisco`       |
+ | `cf-iplatitude`  | Latitude coordinate       | `37.7749`             |
+ | `cf-iplongitude` | Longitude coordinate      | `-122.4194`           |
+ | `cf-timezone`    | IANA timezone identifier  | `America/Los_Angeles` |
+ | `cf-ipcontinent` | 2-letter continent code   | `NA`                  |
+
+ #### Response Format
+
+ ```typescript
+ interface GeoInfo {
+   country_code: string; // "US"
+   country_code3: string; // "USA"
+   country_name: string; // "United States"
+   city_name: string; // "San Francisco"
+   latitude: string; // "37.7749"
+   longitude: string; // "-122.4194"
+   time_zone: string; // "America/Los_Angeles"
+   continent_code: string; // "NA"
+ }
+ ```
+
+ #### Integration with AuthHero
+
+ When configured in AuthHero, the geo adapter automatically enriches authentication logs:
+
+ ```typescript
+ import { init } from "@authhero/authhero";
+ import createAdapters from "@authhero/cloudflare-adapter";
+
+ const cloudflareAdapters = createAdapters({
+   getHeaders: () => Object.fromEntries(request.headers),
+   // ... other config
+ });
+
+ const authhero = init({
+   data: yourDatabaseAdapter,
+   geo: cloudflareAdapters.geo, // Add geo adapter
+   // ... other config
+ });
+ ```
+
+ Logs will automatically include `location_info`:
+
+ ```json
+ {
+   "type": "s",
+   "date": "2025-11-28T12:00:00.000Z",
+   "location_info": {
+     "country_code": "US",
+     "country_code3": "USA",
+     "country_name": "United States",
+     "city_name": "San Francisco",
+     "latitude": "37.7749",
+     "longitude": "-122.4194",
+     "time_zone": "America/Los_Angeles",
+     "continent_code": "NA"
+   }
+ }
+ ```
+
+ #### Alternative: IP Geolocation Databases
+
+ If you're not using Cloudflare Workers or need more detailed location data, you can implement a custom `GeoAdapter` using IP geolocation databases like MaxMind GeoIP2:
+
+ ```typescript
+ import maxmind from "maxmind";
+ import { GeoAdapter, GeoInfo } from "@authhero/adapter-interfaces";
+
+ class MaxMindGeoAdapter implements GeoAdapter {
+   private reader: maxmind.Reader<maxmind.CityResponse>;
+
+   private constructor(reader: maxmind.Reader<maxmind.CityResponse>) {
+     this.reader = reader;
+   }
+
+   static async create(databasePath: string): Promise<MaxMindGeoAdapter> {
+     const reader = await maxmind.open<maxmind.CityResponse>(databasePath);
+     return new MaxMindGeoAdapter(reader);
+   }
+
+   async getGeoInfo(): Promise<GeoInfo | null> {
+     // getClientIP() is left to your implementation (e.g. read it from the incoming request)
+     const ip = this.getClientIP();
+     const lookup = this.reader.get(ip);
+
+     if (!lookup) return null;
+
+     return {
+       country_code: lookup.country?.iso_code || "",
+       country_code3: lookup.country?.iso_code3 || "",
+       country_name: lookup.country?.names?.en || "",
+       city_name: lookup.city?.names?.en || "",
+       latitude: lookup.location?.latitude?.toString() || "",
+       longitude: lookup.location?.longitude?.toString() || "",
+       time_zone: lookup.location?.time_zone || "",
+       continent_code: lookup.continent?.code || "",
+     };
+   }
+ }
+
+ // Usage:
+ const geoAdapter = await MaxMindGeoAdapter.create(
+   "/path/to/GeoLite2-City.mmdb",
+ );
+ ```
+
+ **Considerations for IP Databases**:
+
+ - Requires database downloads and regular updates
+ - Additional latency for lookups (1-5ms typically)
+ - May require licensing fees
+ - Works in any environment, not just edge platforms
+
+ ## Environment Variables
+
+ Recommended environment variables:
+
+ ```env
+ # Custom Domains
+ CLOUDFLARE_ZONE_ID=your_zone_id
+ CLOUDFLARE_AUTH_KEY=your_api_key
+ CLOUDFLARE_AUTH_EMAIL=your_email
+
+ # R2 SQL Logs (optional)
+ PIPELINE_ENDPOINT=https://your-stream-id.ingest.cloudflare.com
+ R2_SQL_AUTH_TOKEN=your_r2_sql_token
+ R2_WAREHOUSE_NAME=your_warehouse_name
+ ```
+
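+ For example, wiring these into `createAdapters` in a Node-style environment (the variable names mirror the list above; adapt to however your runtime exposes configuration):
+
+ ```typescript
+ import createAdapters from "@authhero/cloudflare-adapter";
+
+ // Assumes the environment variables listed above are set.
+ const adapters = createAdapters({
+   zoneId: process.env.CLOUDFLARE_ZONE_ID!,
+   authKey: process.env.CLOUDFLARE_AUTH_KEY!,
+   authEmail: process.env.CLOUDFLARE_AUTH_EMAIL!,
+   customDomainAdapter: yourDatabaseCustomDomainsAdapter,
+   r2SqlLogs: {
+     pipelineEndpoint: process.env.PIPELINE_ENDPOINT!,
+     authToken: process.env.R2_SQL_AUTH_TOKEN!,
+     warehouseName: process.env.R2_WAREHOUSE_NAME!,
+   },
+ });
+ ```
+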
+ ## TypeScript
+
+ The package includes TypeScript definitions. Import types as needed:
+
+ ```typescript
+ import type {
+   CloudflareConfig,
+   CloudflareAdapters,
+   R2SQLLogsAdapterConfig,
+ } from "@authhero/cloudflare-adapter";
+ ```
+
+ ## Related Documentation
+
+ - [Cloudflare API](https://developers.cloudflare.com/api/)
+ - [Cloudflare Cache API](https://developers.cloudflare.com/workers/runtime-apis/cache/)
+ - [Cloudflare R2 SQL](https://developers.cloudflare.com/r2-sql/)
+ - [R2 Data Catalog](https://developers.cloudflare.com/r2/data-catalog/)
+
+ ## License
+
+ MIT