getu-attribution-v2-sdk 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +775 -0
- package/dist/core/AttributionSDK.d.ts +63 -0
- package/dist/core/AttributionSDK.d.ts.map +1 -0
- package/dist/core/AttributionSDK.js +709 -0
- package/dist/getuai-attribution.min.js +1 -0
- package/dist/index.d.ts +85 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.esm.js +1859 -0
- package/dist/index.esm.js.map +1 -0
- package/dist/index.js +2017 -0
- package/dist/queue/index.d.ts +51 -0
- package/dist/queue/index.d.ts.map +1 -0
- package/dist/queue/index.js +215 -0
- package/dist/storage/index.d.ts +46 -0
- package/dist/storage/index.d.ts.map +1 -0
- package/dist/storage/index.js +363 -0
- package/dist/types/index.d.ts +101 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/index.js +32 -0
- package/dist/utils/index.d.ts +36 -0
- package/dist/utils/index.d.ts.map +1 -0
- package/dist/utils/index.js +297 -0
- package/package.json +63 -0
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import { QueueInterface, EventData, LoggerInterface } from "../types";
|
|
2
|
+
/**
 * Batching event queue. Events are pushed via `add`; events whose type is in
 * IMMEDIATE_EVENTS are dispatched at once, all others are flushed in batches
 * of `batchSize` after `batchInterval` ms through the injected `sendEvents`
 * transport callback.
 */
export declare class EventQueueManager implements QueueInterface {
    private queue;
    private processing;
    private batchTimer;
    private logger;
    private apiKey;
    private apiEndpoint;
    private batchSize;
    private batchInterval;
    private maxRetries;
    private retryDelay;
    private sendEvents;
    /**
     * @param sendEvents - transport callback that delivers a batch of events;
     *   a rejected promise causes the batch to be requeued and retried.
     */
    constructor(logger: LoggerInterface, apiKey: string, apiEndpoint: string, batchSize: number | undefined, batchInterval: number | undefined, maxRetries: number | undefined, retryDelay: number | undefined, sendEvents: (events: EventData[]) => Promise<void>);
    /** Enqueue an event; immediate event types bypass batching. */
    add(event: EventData): void;
    /** Send up to `batchSize` queued events; schedules a retry after `retryDelay` ms on failure. */
    process(): Promise<void>;
    private processImmediate;
    private scheduleBatchProcessing;
    /** Drop all queued events, cancel any pending batch timer, and reset state. */
    clear(): void;
    /** Number of events currently queued. */
    size(): number;
    /** Snapshot of queue length and whether a send is in flight. */
    getStats(): {
        size: number;
        processing: boolean;
    };
    /** Keep processing until the queue is empty. */
    flush(): Promise<void>;
    private debouncedProcess;
}
|
|
28
|
+
/**
 * HTTP transport: POSTs event batches to `${apiEndpoint}/attribution/events`
 * with a bearer `apiKey`, retrying failed requests.
 */
export declare class EventHttpClient {
    private logger;
    private apiKey;
    private apiEndpoint;
    private maxRetries;
    private retryDelay;
    constructor(logger: LoggerInterface, apiKey: string, apiEndpoint: string, maxRetries?: number, retryDelay?: number);
    /** Send a batch (no-op for an empty array); retried up to `maxRetries` times. */
    sendEvents(events: EventData[]): Promise<void>;
    /** Convenience wrapper around `sendEvents` for a single event. */
    sendSingleEvent(event: EventData): Promise<void>;
    /** GET `${apiEndpoint}/health`; resolves true when the endpoint responds OK, false on any error. */
    testConnection(): Promise<boolean>;
}
|
|
39
|
+
/**
 * In-memory FIFO fallback queue (no persistence); the oldest event is
 * discarded once `maxSize` is reached.
 */
export declare class MemoryQueueManager implements QueueInterface {
    private queue;
    private maxSize;
    private logger;
    constructor(logger: LoggerInterface, maxSize?: number);
    /** Append an event, evicting the oldest entry when the queue is full. */
    add(event: EventData): void;
    /** No-op: the memory queue only stores events. */
    process(): Promise<void>;
    /** Remove all queued events. */
    clear(): void;
    /** Number of events currently held. */
    size(): number;
    /** Shallow copy of every queued event. */
    getAllEvents(): EventData[];
    /** Remove queued events whose `event_id` is in `eventIds`. */
    removeEvents(eventIds: string[]): void;
}
|
|
51
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/queue/index.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,cAAc,EACd,SAAS,EAET,eAAe,EAEhB,MAAM,UAAU,CAAC;AAGlB,qBAAa,iBAAkB,YAAW,cAAc;IACtD,OAAO,CAAC,KAAK,CAAmB;IAChC,OAAO,CAAC,UAAU,CAAkB;IACpC,OAAO,CAAC,UAAU,CAA8C;IAChE,OAAO,CAAC,MAAM,CAAkB;IAChC,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,WAAW,CAAS;IAC5B,OAAO,CAAC,SAAS,CAAS;IAC1B,OAAO,CAAC,aAAa,CAAS;IAC9B,OAAO,CAAC,UAAU,CAAS;IAC3B,OAAO,CAAC,UAAU,CAAS;IAC3B,OAAO,CAAC,UAAU,CAAyC;gBAGzD,MAAM,EAAE,eAAe,EACvB,MAAM,EAAE,MAAM,EACd,WAAW,EAAE,MAAM,EACnB,SAAS,EAAE,MAAM,YAAM,EACvB,aAAa,EAAE,MAAM,YAAO,EAC5B,UAAU,EAAE,MAAM,YAAI,EACtB,UAAU,EAAE,MAAM,YAAO,EACzB,UAAU,EAAE,CAAC,MAAM,EAAE,SAAS,EAAE,KAAK,OAAO,CAAC,IAAI,CAAC;IAepD,GAAG,CAAC,KAAK,EAAE,SAAS,GAAG,IAAI;IAgBrB,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;YA6ChB,gBAAgB;IAmB9B,OAAO,CAAC,uBAAuB;IAU/B,KAAK,IAAI,IAAI;IAUb,IAAI,IAAI,MAAM;IAKd,QAAQ,IAAI;QAAE,IAAI,EAAE,MAAM,CAAC;QAAC,UAAU,EAAE,OAAO,CAAA;KAAE;IAQ3C,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAQ5B,OAAO,CAAC,gBAAgB,CAAa;CACtC;AAGD,qBAAa,eAAe;IAC1B,OAAO,CAAC,MAAM,CAAkB;IAChC,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,WAAW,CAAS;IAC5B,OAAO,CAAC,UAAU,CAAS;IAC3B,OAAO,CAAC,UAAU,CAAS;gBAGzB,MAAM,EAAE,eAAe,EACvB,MAAM,EAAE,MAAM,EACd,WAAW,EAAE,MAAM,EACnB,UAAU,GAAE,MAAU,EACtB,UAAU,GAAE,MAAa;IASrB,UAAU,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAmD9C,eAAe,CAAC,KAAK,EAAE,SAAS,GAAG,OAAO,CAAC,IAAI,CAAC;IAKhD,cAAc,IAAI,OAAO,CAAC,OAAO,CAAC;CAezC;AAGD,qBAAa,kBAAmB,YAAW,cAAc;IACvD,OAAO,CAAC,KAAK,CAAmB;IAChC,OAAO,CAAC,OAAO,CAAS;IACxB,OAAO,CAAC,MAAM,CAAkB;gBAEpB,MAAM,EAAE,eAAe,EAAE,OAAO,GAAE,MAAa;IAK3D,GAAG,CAAC,KAAK,EAAE,SAAS,GAAG,IAAI;IAUrB,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IAM9B,KAAK,IAAI,IAAI;IAKb,IAAI,IAAI,MAAM;IAKd,YAAY,IAAI,SAAS,EAAE;IAK3B,YAAY,CAAC,QAAQ,EAAE,MAAM,EAAE,GAAG,IAAI;CAKvC"}
|
|
@@ -0,0 +1,215 @@
|
|
|
1
|
+
import { IMMEDIATE_EVENTS, } from "../types";
|
|
2
|
+
import { retry, debounce, getTimestamp } from "../utils";
|
|
3
|
+
export class EventQueueManager {
    /**
     * Batching event queue. `add` enqueues events; types listed in
     * IMMEDIATE_EVENTS are dispatched at once, everything else is flushed in
     * batches of `batchSize` after `batchInterval` ms via `sendEvents`.
     */
    constructor(logger, apiKey, apiEndpoint, batchSize = 100, batchInterval = 5000, maxRetries = 3, retryDelay = 1000, sendEvents) {
        this.queue = [];
        this.processing = false;
        this.batchTimer = null;
        this.logger = logger;
        this.apiKey = apiKey;
        this.apiEndpoint = apiEndpoint;
        this.batchSize = batchSize;
        this.batchInterval = batchInterval;
        this.maxRetries = maxRetries;
        this.retryDelay = retryDelay;
        this.sendEvents = sendEvents;
        // Debounced process function to avoid excessive calls
        this.debouncedProcess = debounce(this.process.bind(this), 100);
    }
    /** Enqueue an event; immediate event types bypass batching. */
    add(event) {
        this.queue.push(event);
        this.logger.debug(`Event added to queue: ${event.event_type}`);
        // Check if this is an immediate event
        if (IMMEDIATE_EVENTS.includes(event.event_type)) {
            this.logger.debug(`Immediate event detected: ${event.event_type}, processing immediately`);
            this.processImmediate(event);
        }
        else {
            // Schedule batch processing
            this.scheduleBatchProcessing();
        }
    }
    /**
     * Send up to `batchSize` queued events. On failure the SAME events are
     * requeued at the front and a retry is scheduled after `retryDelay` ms.
     */
    async process() {
        if (this.processing || this.queue.length === 0) {
            return;
        }
        this.processing = true;
        // BUG FIX: hoisted out of the try so the catch block can requeue
        // exactly the events that failed. Previously `eventsToProcess` was
        // scoped to the try block and the catch spliced the NEXT batch and
        // immediately unshifted it back (a no-op), silently dropping the
        // failed batch.
        const eventsToProcess = this.queue.splice(0, this.batchSize);
        try {
            this.logger.debug(`Processing ${eventsToProcess.length} events from queue`);
            await this.sendEvents(eventsToProcess);
            this.logger.info(`Successfully processed ${eventsToProcess.length} events`);
            // Events were already spliced from the queue; nothing to clean up.
        }
        catch (error) {
            this.logger.error("Failed to process events:", error);
            // Put the failed events back in queue for retry
            this.queue.unshift(...eventsToProcess);
            // Schedule retry; `processing` stays true until the timer fires so
            // concurrent process() calls return immediately in the meantime.
            setTimeout(() => {
                this.processing = false;
                this.debouncedProcess();
            }, this.retryDelay);
            return;
        }
        this.processing = false;
        // Process remaining events if any
        if (this.queue.length > 0) {
            this.debouncedProcess();
        }
    }
    /** Send a single high-priority event now; requeued at the front on failure. */
    async processImmediate(event) {
        try {
            this.logger.debug(`Processing immediate event: ${event.event_type}`);
            await this.sendEvents([event]);
            this.logger.info(`Immediate event processed successfully: ${event.event_type}`);
        }
        catch (error) {
            this.logger.error(`Failed to process immediate event: ${event.event_type}`, error);
            // Add to queue for retry
            this.queue.unshift(event);
        }
    }
    /** (Re)arm the batch timer; any pending timer is replaced. */
    scheduleBatchProcessing() {
        if (this.batchTimer) {
            clearTimeout(this.batchTimer);
        }
        this.batchTimer = setTimeout(() => {
            this.debouncedProcess();
        }, this.batchInterval);
    }
    /** Drop all queued events, cancel the batch timer, and reset state. */
    clear() {
        this.queue = [];
        if (this.batchTimer) {
            clearTimeout(this.batchTimer);
            this.batchTimer = null;
        }
        this.processing = false;
        this.logger.info("Event queue cleared");
    }
    /** Number of events currently queued. */
    size() {
        return this.queue.length;
    }
    // Get queue statistics
    getStats() {
        return {
            size: this.queue.length,
            processing: this.processing,
        };
    }
    // Force process all events in queue
    // NOTE(review): if a send keeps failing, process() requeues and returns
    // while `processing` is still true, so this loop can spin until the retry
    // timer clears the flag — consider awaiting the retry delay here.
    async flush() {
        this.logger.info("Flushing event queue");
        while (this.queue.length > 0) {
            await this.process();
        }
    }
}
|
|
111
|
+
// HTTP client for sending events
|
|
112
|
+
// HTTP client for sending events
export class EventHttpClient {
    /**
     * Transport that POSTs event batches to `${apiEndpoint}/attribution/events`
     * with a bearer-token Authorization header, retrying failed requests.
     */
    constructor(logger, apiKey, apiEndpoint, maxRetries = 3, retryDelay = 1000) {
        this.logger = logger;
        this.apiKey = apiKey;
        this.apiEndpoint = apiEndpoint;
        this.maxRetries = maxRetries;
        this.retryDelay = retryDelay;
    }
    /**
     * Send a batch of events (no-op for an empty array).
     * Retried up to `maxRetries` times with `retryDelay` ms between attempts;
     * non-2xx responses are surfaced as errors so `retry` can re-attempt them.
     */
    async sendEvents(events) {
        if (events.length === 0) {
            return;
        }
        // Transform events to match server format
        const transformedEvents = events.map((event) => ({
            event_id: event.event_id,
            event_type: event.event_type,
            tracking_user_id: event.tracking_user_id,
            // `||` is intentional for UTM strings: an empty string is
            // normalized to null.
            utm_source: event.utm_source || null,
            utm_medium: event.utm_medium || null,
            utm_campaign: event.utm_campaign || null,
            utm_term: event.utm_term || null,
            utm_content: event.utm_content || null,
            // BUG FIX: use `??` so a legitimate zero-amount revenue is not
            // coerced to null (`0 || null` evaluates to null).
            revenue: event.revenue ?? null,
            currency: event.currency || null,
            event_data: event.event_data || null,
            context: event.context || null,
            // Falls back to the current time when the event lacks a timestamp.
            // NOTE(review): presumably getTimestamp() matches the server's
            // expected unit — confirm against ../utils.
            timestamp: event.timestamp || getTimestamp(),
        }));
        const batchRequest = { events: transformedEvents };
        await retry(async () => {
            const response = await fetch(`${this.apiEndpoint}/attribution/events`, {
                method: "POST",
                headers: {
                    "Content-Type": "application/json",
                    Authorization: `Bearer ${this.apiKey}`,
                },
                body: JSON.stringify(batchRequest),
            });
            if (!response.ok) {
                const errorText = await response.text();
                throw new Error(`HTTP ${response.status}: ${errorText}`);
            }
            const result = await response.json();
            this.logger.debug("Events sent successfully:", result);
            // Return the sent events for cleanup
            return { result, sentEvents: events };
        }, this.maxRetries, this.retryDelay);
    }
    /** Convenience wrapper: send one event as a batch of one. */
    async sendSingleEvent(event) {
        await this.sendEvents([event]);
    }
    // Test connection
    /** GET `${this.apiEndpoint}/health`; true when the response is OK, false on any error. */
    async testConnection() {
        try {
            const response = await fetch(`${this.apiEndpoint}/health`, {
                method: "GET",
                headers: {
                    Authorization: `Bearer ${this.apiKey}`,
                },
            });
            return response.ok;
        }
        catch (error) {
            this.logger.error("Connection test failed:", error);
            return false;
        }
    }
}
|
|
180
|
+
// Memory queue fallback
|
|
181
|
+
// Memory queue fallback
export class MemoryQueueManager {
    /**
     * Bounded in-memory FIFO used when persistent storage is unavailable.
     * Once `maxSize` entries are held, the oldest event is discarded to make
     * room for new ones.
     */
    constructor(logger, maxSize = 1000) {
        this.queue = [];
        this.logger = logger;
        this.maxSize = maxSize;
    }
    /** Append `event`, evicting the oldest entry when at capacity. */
    add(event) {
        const atCapacity = this.queue.length >= this.maxSize;
        if (atCapacity) {
            this.logger.warn("Memory queue full, removing oldest event");
            this.queue.shift();
        }
        this.queue.push(event);
        this.logger.debug(`Event added to memory queue: ${event.event_type}`);
    }
    /** Intentionally does nothing — events are only held in memory. */
    async process() {
        // Memory queue doesn't process automatically
        // Events are stored in memory only
        this.logger.debug("Memory queue process called (no-op)");
    }
    /** Discard every queued event. */
    clear() {
        this.queue = [];
        this.logger.info("Memory queue cleared");
    }
    /** Current number of queued events. */
    size() {
        return this.queue.length;
    }
    // Get all events from memory queue
    /** Shallow copy of the queue contents. */
    getAllEvents() {
        return this.queue.slice();
    }
    // Remove events from memory queue
    /** Drop queued events whose `event_id` appears in `eventIds`. */
    removeEvents(eventIds) {
        const ids = new Set(eventIds);
        this.queue = this.queue.filter((event) => !ids.has(event.event_id));
    }
}
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import { StorageInterface, AttributionData, EventData, LoggerInterface } from "../types";
|
|
2
|
+
/**
 * JSON (de)serializing wrapper around `window.localStorage` that degrades
 * gracefully when the API is unavailable: every failure is logged, never
 * thrown.
 */
export declare class LocalStorageManager implements StorageInterface {
    private logger;
    constructor(logger: LoggerInterface);
    /** Read and JSON-parse `key`; null when unsupported, absent, or unparsable. */
    get(key: string): any;
    /** JSON-stringify and store `value`; attempts a quota cleanup on write failure. */
    set(key: string, value: any): void;
    /** Remove `key`; errors are logged, never thrown. */
    remove(key: string): void;
    /** Clear all of localStorage; errors are logged, never thrown. */
    clear(): void;
    private handleQuotaExceeded;
}
|
|
11
|
+
/**
 * Persistent event queue backed by IndexedDB (database "attribution_events",
 * object store "events"). Records are the event plus queueing bookkeeping
 * (`queued_at`, `sent`).
 */
export declare class IndexedDBManager {
    private dbName;
    private dbVersion;
    private storeName;
    private logger;
    private db;
    constructor(logger: LoggerInterface);
    /** Open (and create/upgrade) the database; resolves without effect when IndexedDB is unsupported. */
    init(): Promise<void>;
    /** Persist an event as unsent, stamped with the queueing time. Rejects if `init` has not completed. */
    addEvent(event: EventData): Promise<void>;
    /** Up to `limit` events not yet marked as sent (default 100). */
    getUnsentEvents(limit?: number): Promise<EventData[]>;
    /** Flag the given events as sent so they are excluded from `getUnsentEvents`. */
    markEventsAsSent(eventIds: string[]): Promise<void>;
    /** Delete events queued longer ago than `maxAge` ms (default 7 days). */
    cleanupOldEvents(maxAge?: number): Promise<void>;
    /** Total number of records in the store. */
    getQueueSize(): Promise<number>;
    /** Delete every record in the store. */
    clear(): Promise<void>;
}
|
|
26
|
+
/**
 * Facade combining localStorage (UTM attribution + session data) with an
 * IndexedDB-backed event queue.
 */
export declare class AttributionStorageManager {
    private localStorage;
    private indexedDB;
    private logger;
    private readonly UTM_STORAGE_KEY;
    private readonly SESSION_STORAGE_KEY;
    constructor(logger: LoggerInterface);
    /** Initialize the underlying IndexedDB queue. */
    init(): Promise<void>;
    /** Merge `utmData` into stored attribution and reset its 30-day expiry. */
    storeUTMData(utmData: Partial<AttributionData>): void;
    /** Stored attribution data, or null when absent/expired (expired data is removed). */
    getUTMData(): AttributionData | null;
    /** Persist the current session object in localStorage. */
    storeSession(session: any): void;
    /** The stored session object, or null when none. */
    getSession(): any;
    /** Queue an event in IndexedDB; rethrows on failure. */
    queueEvent(event: EventData): Promise<void>;
    /** Up to `limit` unsent events from the IndexedDB queue. */
    getUnsentEvents(limit?: number): Promise<EventData[]>;
    /** Mark queued events as sent. */
    markEventsAsSent(eventIds: string[]): Promise<void>;
    /** Number of events in the IndexedDB queue. */
    getQueueSize(): Promise<number>;
    /** Remove stale queued events (uses the IndexedDB default max age). */
    cleanupOldEvents(): Promise<void>;
    /** Empty the IndexedDB queue. */
    clearQueue(): Promise<void>;
    /** Drop expired UTM data (delegates to `getUTMData`'s expiry check). */
    cleanupExpiredData(): void;
}
|
|
46
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/storage/index.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,gBAAgB,EAChB,eAAe,EACf,SAAS,EACT,eAAe,EAEhB,MAAM,UAAU,CAAC;AASlB,qBAAa,mBAAoB,YAAW,gBAAgB;IAC1D,OAAO,CAAC,MAAM,CAAkB;gBAEpB,MAAM,EAAE,eAAe;IAInC,GAAG,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG;IAmBrB,GAAG,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,GAAG,GAAG,IAAI;IAelC,MAAM,CAAC,GAAG,EAAE,MAAM,GAAG,IAAI;IAUzB,KAAK,IAAI,IAAI;IAUb,OAAO,CAAC,mBAAmB;CA4B5B;AAGD,qBAAa,gBAAgB;IAC3B,OAAO,CAAC,MAAM,CAAgC;IAC9C,OAAO,CAAC,SAAS,CAAa;IAC9B,OAAO,CAAC,SAAS,CAAoB;IACrC,OAAO,CAAC,MAAM,CAAkB;IAChC,OAAO,CAAC,EAAE,CAA4B;gBAE1B,MAAM,EAAE,eAAe;IAI7B,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IAoCrB,QAAQ,CAAC,KAAK,EAAE,SAAS,GAAG,OAAO,CAAC,IAAI,CAAC;IA6BzC,eAAe,CAAC,KAAK,GAAE,MAAY,GAAG,OAAO,CAAC,SAAS,EAAE,CAAC;IA2B1D,gBAAgB,CAAC,QAAQ,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAuDnD,gBAAgB,CACpB,MAAM,GAAE,MAAgC,GACvC,OAAO,CAAC,IAAI,CAAC;IAgCV,YAAY,IAAI,OAAO,CAAC,MAAM,CAAC;IAqB/B,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;CAqB7B;AAGD,qBAAa,yBAAyB;IACpC,OAAO,CAAC,YAAY,CAAsB;IAC1C,OAAO,CAAC,SAAS,CAAmB;IACpC,OAAO,CAAC,MAAM,CAAkB;IAChC,OAAO,CAAC,QAAQ,CAAC,eAAe,CAA0B;IAC1D,OAAO,CAAC,QAAQ,CAAC,mBAAmB,CAAyB;gBAEjD,MAAM,EAAE,eAAe;IAM7B,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IAK3B,YAAY,CAAC,OAAO,EAAE,OAAO,CAAC,eAAe,CAAC,GAAG,IAAI;IAkCrD,UAAU,IAAI,eAAe,GAAG,IAAI;IAepC,YAAY,CAAC,OAAO,EAAE,GAAG,GAAG,IAAI;IAIhC,UAAU,IAAI,GAAG;IAKX,UAAU,CAAC,KAAK,EAAE,SAAS,GAAG,OAAO,CAAC,IAAI,CAAC;IAS3C,eAAe,CAAC,KAAK,GAAE,MAAY,GAAG,OAAO,CAAC,SAAS,EAAE,CAAC;IAI1D,gBAAgB,CAAC,QAAQ,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAInD,YAAY,IAAI,OAAO,CAAC,MAAM,CAAC;IAI/B,gBAAgB,IAAI,OAAO,CAAC,IAAI,CAAC;IAIjC,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;IAKjC,kBAAkB,IAAI,IAAI;CAG3B"}
|
|
@@ -0,0 +1,363 @@
|
|
|
1
|
+
import { isLocalStorageSupported, isIndexedDBSupported, } from "../utils";
|
|
2
|
+
// LocalStorage implementation
|
|
3
|
+
// LocalStorage implementation
export class LocalStorageManager {
    /**
     * JSON (de)serializing wrapper around window.localStorage.
     * Every failure is logged and swallowed so callers never throw on storage.
     */
    constructor(logger) {
        this.logger = logger;
    }
    /**
     * Read and JSON-parse `key`.
     * @returns the parsed value, or null when localStorage is unsupported,
     *          the key is absent, or the stored value is not valid JSON.
     */
    get(key) {
        try {
            if (!isLocalStorageSupported()) {
                this.logger.warn("LocalStorage not supported");
                return null;
            }
            const item = localStorage.getItem(key);
            if (item === null) {
                return null;
            }
            return JSON.parse(item);
        }
        catch (error) {
            this.logger.error("Error reading from localStorage:", error);
            return null;
        }
    }
    /**
     * JSON-stringify and store `value` under `key`.
     * BUG FIX: the destructive quota cleanup now runs only when the failure
     * is an actual quota error. Previously ANY write failure (e.g. a
     * circular value that makes JSON.stringify throw) deleted stored
     * attribution data.
     */
    set(key, value) {
        try {
            if (!isLocalStorageSupported()) {
                this.logger.warn("LocalStorage not supported");
                return;
            }
            localStorage.setItem(key, JSON.stringify(value));
        }
        catch (error) {
            this.logger.error("Error writing to localStorage:", error);
            // Handle quota exceeded
            if (this.isQuotaError(error)) {
                this.handleQuotaExceeded();
            }
        }
    }
    /** True when `error` is the browser's storage-quota-exceeded error. */
    isQuotaError(error) {
        return (typeof DOMException !== "undefined" &&
            error instanceof DOMException &&
            (error.name === "QuotaExceededError" ||
                // Legacy Firefox name
                error.name === "NS_ERROR_DOM_QUOTA_REACHED"));
    }
    /** Remove `key`; errors are logged, never thrown. */
    remove(key) {
        try {
            if (isLocalStorageSupported()) {
                localStorage.removeItem(key);
            }
        }
        catch (error) {
            this.logger.error("Error removing from localStorage:", error);
        }
    }
    /** Clear all of localStorage; errors are logged, never thrown. */
    clear() {
        try {
            if (isLocalStorageSupported()) {
                localStorage.clear();
            }
        }
        catch (error) {
            this.logger.error("Error clearing localStorage:", error);
        }
    }
    /**
     * Best-effort quota recovery: removes the 20% of "attribution_"-prefixed
     * entries that expire soonest (smallest `expiresAt`).
     */
    handleQuotaExceeded() {
        // Try to clean up old data
        try {
            const keys = Object.keys(localStorage);
            const attributionKeys = keys.filter((key) => key.startsWith("attribution_"));
            if (attributionKeys.length > 0) {
                // Remove oldest data first
                attributionKeys.sort((a, b) => {
                    const aData = this.get(a);
                    const bData = this.get(b);
                    return (aData?.expiresAt || 0) - (bData?.expiresAt || 0);
                });
                // Remove oldest 20% of data
                const toRemove = Math.ceil(attributionKeys.length * 0.2);
                attributionKeys.slice(0, toRemove).forEach((key) => {
                    this.remove(key);
                });
                this.logger.info(`Cleaned up ${toRemove} old attribution records`);
            }
        }
        catch (error) {
            this.logger.error("Error during quota cleanup:", error);
        }
    }
}
|
|
83
|
+
// IndexedDB implementation
|
|
84
|
+
// IndexedDB implementation
export class IndexedDBManager {
    /**
     * Persistent event queue backed by IndexedDB.
     * Records are the event augmented with `queued_at` (ms epoch) and a
     * `sent` flag stored as 0/1 — booleans are not valid IndexedDB keys, so
     * a boolean `sent` value could never be indexed or queried.
     */
    constructor(logger) {
        this.dbName = "attribution_events";
        this.dbVersion = 1;
        this.storeName = "events";
        this.db = null;
        this.logger = logger;
    }
    /** Open (creating/upgrading) the database; no-op when IndexedDB is unsupported. */
    async init() {
        if (!isIndexedDBSupported()) {
            this.logger.warn("IndexedDB not supported");
            return;
        }
        return new Promise((resolve, reject) => {
            const request = indexedDB.open(this.dbName, this.dbVersion);
            request.onerror = () => {
                this.logger.error("Failed to open IndexedDB:", request.error);
                reject(request.error);
            };
            request.onsuccess = () => {
                this.db = request.result;
                this.logger.info("IndexedDB initialized successfully");
                resolve();
            };
            request.onupgradeneeded = (event) => {
                const db = event.target.result;
                if (!db.objectStoreNames.contains(this.storeName)) {
                    const store = db.createObjectStore(this.storeName, {
                        keyPath: "id",
                        autoIncrement: true,
                    });
                    store.createIndex("timestamp", "timestamp", { unique: false });
                    // Indexes only see records whose `sent` value is a valid
                    // key, hence the 0/1 representation (see addEvent).
                    store.createIndex("sent", "sent", { unique: false });
                    store.createIndex("queued_at", "queued_at", { unique: false });
                }
            };
        });
    }
    /**
     * Persist `event` as unsent.
     * @throws Error when init() has not opened the database.
     */
    async addEvent(event) {
        if (!this.db) {
            throw new Error("IndexedDB not initialized");
        }
        return new Promise((resolve, reject) => {
            const transaction = this.db.transaction([this.storeName], "readwrite");
            const store = transaction.objectStore(this.storeName);
            const eventRecord = {
                ...event,
                queued_at: Date.now(),
                // BUG FIX: was `sent: false` — booleans cannot be IndexedDB
                // keys, so unsent records were invisible to the "sent" index.
                sent: 0,
            };
            const request = store.add(eventRecord);
            request.onsuccess = () => {
                this.logger.debug("Event added to IndexedDB queue");
                resolve();
            };
            request.onerror = () => {
                this.logger.error("Failed to add event to IndexedDB:", request.error);
                reject(request.error);
            };
        });
    }
    /** Up to `limit` unsent events, with the bookkeeping fields stripped. */
    async getUnsentEvents(limit = 100) {
        if (!this.db) {
            return [];
        }
        return new Promise((resolve, reject) => {
            const transaction = this.db.transaction([this.storeName], "readonly");
            const store = transaction.objectStore(this.storeName);
            const index = store.index("sent");
            // BUG FIX: was IDBKeyRange.only(false) — an invalid (boolean) key
            // that makes the query throw a DataError.
            const request = index.getAll(IDBKeyRange.only(0), limit);
            request.onsuccess = () => {
                const events = request.result.map((record) => {
                    const { queued_at, sent, ...event } = record;
                    return event;
                });
                resolve(events);
            };
            request.onerror = () => {
                this.logger.error("Failed to get unsent events:", request.error);
                reject(request.error);
            };
        });
    }
    /**
     * Flag events as sent by their `event_id`.
     * BUG FIX: the store's primary key is the auto-incremented `id`, not
     * `event_id`, so the previous `store.get(eventId)` lookups never matched
     * and events were re-sent forever. A cursor scan matches on `event_id`.
     */
    async markEventsAsSent(eventIds) {
        if (!this.db || eventIds.length === 0) {
            return;
        }
        const ids = new Set(eventIds);
        return new Promise((resolve, reject) => {
            const transaction = this.db.transaction([this.storeName], "readwrite");
            const store = transaction.objectStore(this.storeName);
            const request = store.openCursor();
            request.onsuccess = () => {
                const cursor = request.result;
                if (cursor) {
                    if (ids.has(cursor.value.event_id)) {
                        cursor.update({ ...cursor.value, sent: 1 });
                    }
                    cursor.continue();
                }
            };
            request.onerror = () => {
                this.logger.error("Failed to mark events as sent:", request.error);
                reject(request.error);
            };
            // Resolve only once every update in the transaction has committed.
            transaction.oncomplete = () => resolve();
            transaction.onerror = () => reject(transaction.error);
        });
    }
    /** Delete records queued more than `maxAge` ms ago (default 7 days). */
    async cleanupOldEvents(maxAge = 7 * 24 * 60 * 60 * 1000) {
        if (!this.db) {
            return;
        }
        const cutoffTime = Date.now() - maxAge;
        return new Promise((resolve, reject) => {
            const transaction = this.db.transaction([this.storeName], "readwrite");
            const store = transaction.objectStore(this.storeName);
            const index = store.index("queued_at");
            const request = index.openCursor(IDBKeyRange.upperBound(cutoffTime));
            request.onsuccess = () => {
                const cursor = request.result;
                if (cursor) {
                    cursor.delete();
                    cursor.continue();
                }
                else {
                    this.logger.info("Old events cleanup completed");
                    resolve();
                }
            };
            request.onerror = () => {
                this.logger.error("Failed to cleanup old events:", request.error);
                reject(request.error);
            };
        });
    }
    /** Total number of records (sent and unsent) in the store. */
    async getQueueSize() {
        if (!this.db) {
            return 0;
        }
        return new Promise((resolve, reject) => {
            const transaction = this.db.transaction([this.storeName], "readonly");
            const store = transaction.objectStore(this.storeName);
            const request = store.count();
            request.onsuccess = () => {
                resolve(request.result);
            };
            request.onerror = () => {
                this.logger.error("Failed to get queue size:", request.error);
                reject(request.error);
            };
        });
    }
    /** Delete every record in the store. */
    async clear() {
        if (!this.db) {
            return;
        }
        return new Promise((resolve, reject) => {
            const transaction = this.db.transaction([this.storeName], "readwrite");
            const store = transaction.objectStore(this.storeName);
            const request = store.clear();
            request.onsuccess = () => {
                this.logger.info("IndexedDB queue cleared");
                resolve();
            };
            request.onerror = () => {
                this.logger.error("Failed to clear IndexedDB queue:", request.error);
                reject(request.error);
            };
        });
    }
}
|
|
272
|
+
// Attribution storage manager
|
|
273
|
+
// Attribution storage manager
export class AttributionStorageManager {
    /**
     * Facade that keeps UTM attribution and session data in localStorage and
     * queues analytics events in IndexedDB.
     */
    constructor(logger) {
        this.UTM_STORAGE_KEY = "attribution_utm_data";
        this.SESSION_STORAGE_KEY = "attribution_session";
        this.logger = logger;
        this.localStorage = new LocalStorageManager(logger);
        this.indexedDB = new IndexedDBManager(logger);
    }
    /** Prepare the IndexedDB-backed event queue. */
    async init() {
        await this.indexedDB.init();
    }
    // UTM data management
    /** Merge `utmData` over existing attribution and refresh the 30-day expiry. */
    storeUTMData(utmData) {
        try {
            const existingData = this.getUTMData();
            // Blank attribution used when no prior data exists.
            const defaultUTMData = {
                utm_source: "",
                utm_medium: "",
                utm_campaign: "",
                utm_term: "",
                utm_content: "",
                timestamp: Date.now(),
            };
            const thirtyDaysMs = 30 * 24 * 60 * 60 * 1000;
            const newData = {
                firstTouch: existingData?.firstTouch || defaultUTMData,
                lastTouch: existingData?.lastTouch || defaultUTMData,
                touchpoints: existingData?.touchpoints || [],
                ...utmData,
                expiresAt: Date.now() + thirtyDaysMs, // 30 days
            };
            this.localStorage.set(this.UTM_STORAGE_KEY, newData);
            this.logger.debug("UTM data stored successfully:", {
                firstTouch: newData.firstTouch,
                lastTouch: newData.lastTouch,
                touchpointsCount: newData.touchpoints.length,
                expiresAt: new Date(newData.expiresAt).toISOString(),
            });
        }
        catch (error) {
            this.logger.error("Failed to store UTM data:", error);
        }
    }
    /** Stored attribution, or null when missing/expired (expired data is purged). */
    getUTMData() {
        const data = this.localStorage.get(this.UTM_STORAGE_KEY);
        const isLive = Boolean(data && data.expiresAt && data.expiresAt > Date.now());
        if (isLive) {
            return data;
        }
        // Purge anything present but expired.
        if (data) {
            this.localStorage.remove(this.UTM_STORAGE_KEY);
        }
        return null;
    }
    // Session management
    /** Persist the current session object. */
    storeSession(session) {
        this.localStorage.set(this.SESSION_STORAGE_KEY, session);
    }
    /** The stored session object, or null when none. */
    getSession() {
        return this.localStorage.get(this.SESSION_STORAGE_KEY);
    }
    // Event queue management
    /** Queue an event in IndexedDB; the error is logged and rethrown. */
    async queueEvent(event) {
        try {
            await this.indexedDB.addEvent(event);
        }
        catch (error) {
            this.logger.error("Failed to queue event:", error);
            throw error;
        }
    }
    /** Up to `limit` unsent events from the IndexedDB queue. */
    async getUnsentEvents(limit = 100) {
        return await this.indexedDB.getUnsentEvents(limit);
    }
    /** Mark queued events as sent. */
    async markEventsAsSent(eventIds) {
        await this.indexedDB.markEventsAsSent(eventIds);
    }
    /** Number of events in the IndexedDB queue. */
    async getQueueSize() {
        return await this.indexedDB.getQueueSize();
    }
    /** Remove stale queued events using the IndexedDB default max age. */
    async cleanupOldEvents() {
        await this.indexedDB.cleanupOldEvents();
    }
    /** Empty the IndexedDB queue. */
    async clearQueue() {
        await this.indexedDB.clear();
    }
    // Cleanup expired data
    cleanupExpiredData() {
        this.getUTMData(); // This will automatically clean up expired UTM data
    }
}
|