@authhero/cloudflare-adapter 2.14.2 → 2.15.0

package/README.md CHANGED
@@ -4,12 +4,14 @@ Cloudflare-specific adapters for AuthHero, providing integrations with Cloudflar

  ## Features

- This package provides four adapters:
+ This package provides adapters for:

  - **Custom Domains** - Manage custom domains via Cloudflare API
  - **Cache** - Caching using Cloudflare's Cache API
  - **Geo** (optional) - Extract geographic information from Cloudflare request headers
- - **Logs** (optional) - Write authentication logs to Cloudflare R2 using Pipelines and query with R2 SQL
+ - **Logs** (optional) - Two options for authentication logs:
+   - **Analytics Engine** - Low-latency writes with SQL querying (90-day retention)
+   - **R2 SQL + Pipelines** - Long-term storage with unlimited retention

  ## Installation

@@ -19,7 +21,42 @@ npm install @authhero/cloudflare-adapter

  ## Usage

- ### HTTP Endpoint Mode (Default)
+ ### With Analytics Engine Logs (Recommended for Workers)
+
+ ```typescript
+ import createAdapters from "@authhero/cloudflare-adapter";
+
+ // In a Cloudflare Worker
+ interface Env {
+   AUTH_LOGS: AnalyticsEngineDataset;
+   CLOUDFLARE_ACCOUNT_ID: string;
+   ANALYTICS_ENGINE_API_TOKEN: string;
+ }
+
+ export default {
+   async fetch(request: Request, env: Env) {
+     const adapters = createAdapters({
+       // Custom domains configuration
+       zoneId: "your-cloudflare-zone-id",
+       authKey: "your-cloudflare-api-key",
+       authEmail: "your-cloudflare-email",
+       customDomainAdapter: yourDatabaseCustomDomainsAdapter,
+
+       // Analytics Engine logs (low-latency writes)
+       analyticsEngineLogs: {
+         analyticsEngineBinding: env.AUTH_LOGS,
+         accountId: env.CLOUDFLARE_ACCOUNT_ID,
+         apiToken: env.ANALYTICS_ENGINE_API_TOKEN,
+       },
+     });
+
+     const { customDomains, cache, geo, logs } = adapters;
+     // ...
+   },
+ };
+ ```
+
+ ### With R2 SQL Logs (HTTP Endpoint Mode)

  ```typescript
  import createAdapters from "@authhero/cloudflare-adapter";
@@ -92,31 +129,42 @@ export default {

  ### Passthrough Mode (Multiple Destinations)

+ Use the core `createPassthroughAdapter` utility to sync logs to multiple destinations:
+
  ```typescript
- import createAdapters from "@authhero/cloudflare-adapter";
- import { createOtherLogsAdapter } from "some-package";
+ import { createPassthroughAdapter } from "@authhero/adapter-interfaces";
+ import createAdapters, {
+   createR2SQLLogsAdapter,
+   createAnalyticsEngineLogsAdapter,
+ } from "@authhero/cloudflare-adapter";

- // Create a base logs adapter
- const baseAdapter = createOtherLogsAdapter();
+ // Primary adapter (e.g., existing database)
+ const databaseAdapter = createDatabaseLogsAdapter();

- const adapters = createAdapters({
-   zoneId: "your-cloudflare-zone-id",
-   authKey: "your-cloudflare-api-key",
-   authEmail: "your-cloudflare-email",
-   customDomainAdapter: yourDatabaseCustomDomainsAdapter,
+ // Cloudflare logs adapters for secondary syncing
+ const r2SqlAdapter = createR2SQLLogsAdapter({
+   pipelineEndpoint: "https://your-stream-id.ingest.cloudflare.com",
+   authToken: process.env.R2_SQL_AUTH_TOKEN,
+   warehouseName: process.env.R2_WAREHOUSE_NAME,
+ });

-   // R2 SQL logs in passthrough mode - sends to both adapters
-   r2SqlLogs: {
-     baseAdapter,
-     pipelineEndpoint: "https://your-stream-id.ingest.cloudflare.com",
-     authToken: process.env.R2_SQL_AUTH_TOKEN,
-     warehouseName: process.env.R2_WAREHOUSE_NAME,
-   },
+ const analyticsEngineAdapter = createAnalyticsEngineLogsAdapter({
+   analyticsEngineBinding: env.AUTH_LOGS,
+   accountId: env.CLOUDFLARE_ACCOUNT_ID,
+   apiToken: env.ANALYTICS_ENGINE_API_TOKEN,
  });

- // logs.create() will write to baseAdapter and Pipeline
- // logs.get() and logs.list() will read from baseAdapter
- const { logs } = adapters;
+ // Create passthrough adapter - writes to primary and all secondaries
+ const logsAdapter = createPassthroughAdapter({
+   primary: databaseAdapter,
+   secondaries: [
+     { adapter: { create: r2SqlAdapter.create } },
+     { adapter: { create: analyticsEngineAdapter.create } },
+   ],
+ });
+
+ // logsAdapter.create() writes to database, R2 SQL Pipeline, and Analytics Engine
+ // logsAdapter.get() and logsAdapter.list() read from database only
  ```

  ## Adapters
@@ -334,45 +382,43 @@ export default {
  };
  ```

- **With Base Adapter (Passthrough Mode):**
-
- ```typescript
- const baseAdapter = createKyselyLogsAdapter(db);
-
- const adapters = createAdapters({
-   // ... other config
-   r2SqlLogs: {
-     baseAdapter, // Logs written to base adapter first
-     pipelineBinding: env.AUTH_LOGS_STREAM, // Then sent to Pipeline in background
-     // authToken and warehouseName not needed when using baseAdapter
-   },
- });
- ```
-
  The Pipeline binding uses the `.send()` method for direct data ingestion.
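A minimal sketch of what direct ingestion through a Pipeline binding can look like — the `AUTH_LOGS_STREAM` binding name, the `wrangler.toml` stanza, and the record shape are illustrative assumptions, not part of this package's API:

```typescript
// Assumed wrangler.toml configuration (names are hypothetical):
// [[pipelines]]
// pipeline = "authhero-logs"
// binding = "AUTH_LOGS_STREAM"

interface Env {
  // Pipelines bindings expose send() for batches of JSON-serializable records
  AUTH_LOGS_STREAM: { send(records: object[]): Promise<void> };
}

export async function ingestLog(env: Env): Promise<void> {
  await env.AUTH_LOGS_STREAM.send([
    {
      tenant_id: "tenant-123",
      type: "s",
      date: new Date().toISOString(),
    },
  ]);
}
```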

  ##### 3. Passthrough Mode (Wrap Another Adapter)

- Use this mode to send logs to both the R2 SQL Pipeline and another logs adapter:
+ Use the core `createPassthroughAdapter` utility to send logs to both the R2 SQL Pipeline and another logs adapter:

  ```typescript
- const baseAdapter = createSomeOtherLogsAdapter();
+ import { createPassthroughAdapter } from "@authhero/adapter-interfaces";
+ import { createR2SQLLogsAdapter } from "@authhero/cloudflare-adapter";

- const { logs } = createAdapters({
-   r2SqlLogs: {
-     baseAdapter,
-     pipelineEndpoint: "https://your-stream-id.ingest.cloudflare.com",
-     authToken: env.R2_SQL_AUTH_TOKEN,
-     warehouseName: env.R2_WAREHOUSE_NAME,
-   },
+ // Primary adapter (e.g., existing database)
+ const databaseAdapter = createDatabaseLogsAdapter();
+
+ // R2 SQL Pipeline adapter
+ const r2SqlAdapter = createR2SQLLogsAdapter({
+   pipelineEndpoint: "https://your-stream-id.ingest.cloudflare.com",
+   authToken: env.R2_SQL_AUTH_TOKEN,
+   warehouseName: env.R2_WAREHOUSE_NAME,
+ });
+
+ // Create passthrough adapter
+ const logsAdapter = createPassthroughAdapter({
+   primary: databaseAdapter,
+   secondaries: [
+     {
+       adapter: { create: r2SqlAdapter.create },
+       onError: (err) => console.error("R2 SQL sync failed:", err),
+     },
+   ],
  });
  ```

  In passthrough mode:

- - `create()` calls the base adapter first, then sends to the Pipeline in the background
- - `get()` and `list()` are delegated to the base adapter
- - Pipeline ingestion errors are logged but don't fail the operation
+ - `create()` calls the primary adapter first, then sends to secondaries in the background (see the sketch after this list)
+ - `get()` and `list()` read from the primary adapter only
+ - Secondary errors are logged but don't fail the operation
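To make those guarantees concrete, here is a minimal sketch of the passthrough semantics under illustrative local types — the real `createPassthroughAdapter` ships in `@authhero/adapter-interfaces` and may differ in detail:

```typescript
// Illustrative types only; the actual interfaces live in @authhero/adapter-interfaces
type LogsAdapter = {
  create: (tenantId: string, log: Record<string, unknown>) => Promise<unknown>;
  get: (tenantId: string, logId: string) => Promise<unknown>;
  list: (tenantId: string, query?: Record<string, unknown>) => Promise<unknown>;
};

type Secondary = {
  adapter: Pick<LogsAdapter, "create">;
  onError?: (err: unknown) => void;
};

function passthroughSketch(
  primary: LogsAdapter,
  secondaries: Secondary[],
): LogsAdapter {
  return {
    ...primary, // get() and list() are served by the primary alone
    async create(tenantId, log) {
      const result = await primary.create(tenantId, log); // primary first
      for (const { adapter, onError } of secondaries) {
        // Secondaries are fire-and-forget: failures are reported, never thrown
        adapter.create(tenantId, log).catch((err) => onError?.(err));
      }
      return result;
    },
  };
}
```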

  #### Methods

@@ -434,6 +480,202 @@ npx wrangler r2 sql query "your_warehouse" "
  "
  ```

+ ### Logs Adapter (Analytics Engine)
+
+ Write authentication logs to Cloudflare Workers Analytics Engine for low-latency writes and SQL-based querying.
+
+ #### Architecture
+
+ This adapter uses Cloudflare's Workers Analytics Engine:
+
+ - **Write**: Fire-and-forget writes using `writeDataPoint()` (no HTTP latency)
+ - **Query**: SQL API for analyzing logs stored in Analytics Engine
+
+ #### When to Use Analytics Engine vs R2 SQL
+
+ | Feature        | Analytics Engine                 | R2 SQL + Pipelines            |
+ | -------------- | -------------------------------- | ----------------------------- |
+ | Write Latency  | ~0ms (fire-and-forget)           | ~50-100ms (HTTP)              |
+ | Data Retention | 90 days (free), configurable     | Unlimited                     |
+ | Query Language | SQL (ClickHouse-like)            | SQL (Iceberg)                 |
+ | Best For       | Real-time analytics, recent logs | Long-term storage, compliance |
+ | Pricing        | Free tier available              | Pay per storage + queries     |
+
+ #### Prerequisites
+
+ 1. **Create an Analytics Engine Dataset**:
+
+    Configure in `wrangler.toml`:
+
+    ```toml
+    [[analytics_engine_datasets]]
+    binding = "AUTH_LOGS"
+    dataset = "authhero_logs"
+    ```
+
+ 2. **Create an API Token**:
+
+    Create a Cloudflare API token with `Account Analytics: Read` permission for querying logs.
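Before wiring the token in, it can be sanity-checked against Cloudflare's generic token verification endpoint (a standard API call shown as a convenience; the environment variable name matches the examples below):

```bash
curl "https://api.cloudflare.com/client/v4/user/tokens/verify" \
  -H "Authorization: Bearer $ANALYTICS_ENGINE_API_TOKEN"
# A valid token returns {"result":{...,"status":"active"},"success":true,...}
```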

+ #### Configuration
+
+ ```typescript
+ interface AnalyticsEngineLogsAdapterConfig {
+   // Analytics Engine dataset binding (for Workers)
+   analyticsEngineBinding?: AnalyticsEngineDataset;
+
+   // Cloudflare account ID (required for SQL queries)
+   accountId: string;
+
+   // API token with Analytics Engine read permission
+   apiToken: string;
+
+   // Dataset name (default: "authhero_logs")
+   dataset?: string;
+
+   // HTTP timeout in ms (default: 30000)
+   timeout?: number;
+ }
+ ```
+
+ #### Usage
+
+ ```typescript
+ import createAdapters, {
+   createAnalyticsEngineLogsAdapter,
+ } from "@authhero/cloudflare-adapter";
+
+ // Option 1: Use via createAdapters
+ const adapters = createAdapters({
+   zoneId: "your-zone-id",
+   authKey: "your-api-key",
+   authEmail: "your-email",
+   customDomainAdapter: yourDbAdapter,
+
+   analyticsEngineLogs: {
+     analyticsEngineBinding: env.AUTH_LOGS,
+     accountId: env.CLOUDFLARE_ACCOUNT_ID,
+     apiToken: env.ANALYTICS_ENGINE_API_TOKEN,
+     dataset: "authhero_logs",
+   },
+ });
+
+ // Option 2: Use the adapter directly
+ const logsAdapter = createAnalyticsEngineLogsAdapter({
+   analyticsEngineBinding: env.AUTH_LOGS,
+   accountId: env.CLOUDFLARE_ACCOUNT_ID,
+   apiToken: env.ANALYTICS_ENGINE_API_TOKEN,
+ });
+ ```
+
+ #### Worker Example
+
+ ```typescript
+ import createAdapters from "@authhero/cloudflare-adapter";
+
+ interface Env {
+   AUTH_LOGS: AnalyticsEngineDataset;
+   CLOUDFLARE_ACCOUNT_ID: string;
+   ANALYTICS_ENGINE_API_TOKEN: string;
+ }
+
+ export default {
+   async fetch(request: Request, env: Env) {
+     const { logs } = createAdapters({
+       // ... other config
+       analyticsEngineLogs: {
+         analyticsEngineBinding: env.AUTH_LOGS,
+         accountId: env.CLOUDFLARE_ACCOUNT_ID,
+         apiToken: env.ANALYTICS_ENGINE_API_TOKEN,
+       },
+     });
+
+     // Write a log (the underlying writeDataPoint() call is fire-and-forget)
+     await logs.create("tenant-123", {
+       type: "s",
+       date: new Date().toISOString(),
+       ip: request.headers.get("cf-connecting-ip") || "",
+       user_agent: request.headers.get("user-agent") || "",
+       isMobile: false,
+       user_id: "user-456",
+       description: "User logged in",
+     });
+
+     // Query logs
+     const recentLogs = await logs.list("tenant-123", {
+       per_page: 50,
+       q: "type:s",
+     });
+
+     return new Response("OK");
+   },
+ };
+ ```
+
+ #### Passthrough Mode
+
+ Use Analytics Engine alongside another logs adapter via the core `createPassthroughAdapter` utility:
+
+ ```typescript
+ import { createPassthroughAdapter } from "@authhero/adapter-interfaces";
+ import { createAnalyticsEngineLogsAdapter } from "@authhero/cloudflare-adapter";
+
+ // Primary adapter (e.g., database)
+ const databaseAdapter = createDatabaseLogsAdapter();
+
+ // Analytics Engine adapter for write syncing
+ const analyticsEngineAdapter = createAnalyticsEngineLogsAdapter({
+   analyticsEngineBinding: env.AUTH_LOGS,
+   accountId: env.CLOUDFLARE_ACCOUNT_ID,
+   apiToken: env.ANALYTICS_ENGINE_API_TOKEN,
+ });
+
+ // Create passthrough adapter - writes to both, reads from primary
+ const logsAdapter = createPassthroughAdapter({
+   primary: databaseAdapter,
+   secondaries: [
+     {
+       adapter: { create: analyticsEngineAdapter.create },
+       onError: (err) => console.error("Analytics sync failed:", err),
+     },
+   ],
+ });
+
+ // logsAdapter.create() writes to both adapters (database first, then Analytics Engine)
+ // logsAdapter.get() and logsAdapter.list() read from database only
+ ```
+
+ #### Data Schema
+
+ Analytics Engine stores logs using blob and double fields:
+
+ | Field     | Type   | Description                               |
+ | --------- | ------ | ----------------------------------------- |
+ | blob1     | string | log_id                                    |
+ | blob2     | string | tenant_id                                 |
+ | blob3     | string | type (e.g., "s", "f")                     |
+ | blob4     | string | date (ISO string)                         |
+ | blob5     | string | description                               |
+ | blob6     | string | ip                                        |
+ | blob7     | string | user_agent                                |
+ | blob8-18  | string | user_id, connection, client_id, etc.      |
+ | blob19-20 | string | JSON stringified (details, auth0_client)  |
+ | double1   | number | isMobile (0 or 1)                         |
+ | double2   | number | timestamp (epoch ms)                      |
+ | index1    | string | tenant_id (for efficient filtering)       |
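For orientation, a sketch of how a log entry could be mapped onto this schema with the Workers `writeDataPoint()` API — the helper name is hypothetical, and the adapter's real mapping (especially across blob8-18) may differ:

```typescript
// Hypothetical helper illustrating the schema table above
function writeLogSketch(
  dataset: AnalyticsEngineDataset,
  tenantId: string,
  log: {
    log_id: string;
    type: string;
    date: string;
    description?: string;
    ip?: string;
    user_agent?: string;
    isMobile?: boolean;
  },
): void {
  dataset.writeDataPoint({
    blobs: [
      log.log_id,             // blob1
      tenantId,               // blob2
      log.type,               // blob3
      log.date,               // blob4
      log.description ?? "",  // blob5
      log.ip ?? "",           // blob6
      log.user_agent ?? "",   // blob7
    ],
    doubles: [
      log.isMobile ? 1 : 0,   // double1
      Date.parse(log.date),   // double2: epoch ms
    ],
    indexes: [tenantId],      // index1: efficient tenant filtering
  });
}
```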

+ #### Querying with SQL API
+
+ ```bash
+ # List recent logs for a tenant
+ curl "https://api.cloudflare.com/client/v4/accounts/{account_id}/analytics_engine/sql" \
+   -H "Authorization: Bearer $API_TOKEN" \
+   -d "SELECT * FROM authhero_logs WHERE index1 = 'tenant-123' ORDER BY timestamp DESC LIMIT 50"
+
+ # Count log events by type
+ curl "https://api.cloudflare.com/client/v4/accounts/{account_id}/analytics_engine/sql" \
+   -H "Authorization: Bearer $API_TOKEN" \
+   -d "SELECT blob3 AS type, count() AS count FROM authhero_logs WHERE index1 = 'tenant-123' GROUP BY blob3"
+ ```
+
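The same queries can be issued from TypeScript with a plain `fetch` — a minimal sketch assuming only the endpoint shown above (with ClickHouse-style `FORMAT JSON`, the response is a JSON object with a `data` array):

```typescript
async function queryLogs(
  accountId: string,
  apiToken: string,
  sql: string,
): Promise<unknown> {
  const res = await fetch(
    `https://api.cloudflare.com/client/v4/accounts/${accountId}/analytics_engine/sql`,
    {
      method: "POST",
      headers: { Authorization: `Bearer ${apiToken}` },
      body: sql, // the SQL statement is the raw request body
    },
  );
  if (!res.ok) {
    throw new Error(`Analytics Engine SQL query failed: ${res.status}`);
  }
  return res.json();
}

// Example (inside a Worker handler where `env` is available):
// const byType = await queryLogs(
//   env.CLOUDFLARE_ACCOUNT_ID,
//   env.ANALYTICS_ENGINE_API_TOKEN,
//   "SELECT blob3 AS type, count() AS count FROM authhero_logs WHERE index1 = 'tenant-123' GROUP BY blob3 FORMAT JSON",
// );
```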
  ## Geo Adapter

  The Cloudflare Geo adapter extracts geographic location information from Cloudflare's automatic request headers. This is used to enrich authentication logs with location data.