@powerhousedao/analytics-engine-browser 6.0.0-dev.105 → 6.0.0-dev.107
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +45 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +414 -0
- package/dist/index.js.map +1 -0
- package/package.json +11 -14
- package/dist/src/BrowserAnalyticsStore.d.ts +0 -8
- package/dist/src/BrowserAnalyticsStore.d.ts.map +0 -1
- package/dist/src/MemoryAnalyticsStore.d.ts +0 -24
- package/dist/src/MemoryAnalyticsStore.d.ts.map +0 -1
- package/dist/src/PgLiteExecutor.d.ts +0 -16
- package/dist/src/PgLiteExecutor.d.ts.map +0 -1
- package/dist/src/index.d.ts +0 -5
- package/dist/src/index.d.ts.map +0 -1
- package/dist/src/index.js +0 -54517
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import { AnalyticsDimension, AnalyticsPath, AnalyticsSeries, AnalyticsSeriesInput, AnalyticsSeriesQuery, AnalyticsUpdateCallback, IAnalyticsProfiler, IAnalyticsStore, SqlQueryLogger, SqlResultsLogger } from "@powerhousedao/analytics-engine-core";
|
|
2
|
+
import { PGlite } from "@electric-sql/pglite";
|
|
3
|
+
import { PGliteWorker } from "@electric-sql/pglite/worker";
|
|
4
|
+
|
|
5
|
+
//#region src/BrowserAnalyticsStore.d.ts
/** Construction options for {@link BrowserAnalyticsStore}. */
type BrowserAnalyticsStoreOptions = {
  /** PGlite (main thread) or PGliteWorker instance backing the store. */
  pgLite: PGlite | PGliteWorker;
  /** Optional logger invoked with (index, sql) before each query runs. */
  queryLogger?: SqlQueryLogger;
  /** Optional logger invoked with (index, results) after each query. */
  resultsLogger?: SqlResultsLogger;
  /** Optional profiler; a passthrough profiler is used when omitted. */
  profiler?: IAnalyticsProfiler;
};
/**
 * IAnalyticsStore implementation that persists analytics series, dimensions,
 * and their link table in a PGlite (Postgres-in-WASM) database in the browser.
 * Call {@link BrowserAnalyticsStore.init} before any other method: it wires
 * the executor and creates the schema if it does not exist yet.
 */
declare class BrowserAnalyticsStore implements IAnalyticsStore {
  private _queryLogger;
  private _resultsLogger;
  private _pgExecutor;
  private _profiler;
  private readonly _subscriptionManager;
  private _pgLite;
  constructor({
    pgLite,
    queryLogger,
    resultsLogger,
    profiler
  }: BrowserAnalyticsStoreOptions);
  /** Creates tables/indexes if missing; must run before other calls. */
  init(): Promise<void>;
  /** Executes arbitrary SQL and returns flattened row objects. */
  raw(sql: string): Promise<any[]>;
  /** Closes the underlying PGlite connection. */
  destroy(): Promise<void>;
  /** Returns all non-empty dimensions grouped by dimension name. */
  getDimensions(): Promise<any>;
  /** Returns series rows matching the query's metrics/currency/select filters. */
  getMatchingSeries(query: AnalyticsSeriesQuery): Promise<AnalyticsSeries<string | AnalyticsDimension>[]>;
  /** Deletes dimensions with no linked series; returns the deleted count. */
  clearEmptyAnalyticsDimensions(): Promise<number>;
  /** Deletes all series under a source path; returns the deleted count. */
  clearSeriesBySource(source: AnalyticsPath, cleanUpDimensions?: boolean): Promise<number>;
  private _addDimensionMetadata;
  private _formatQueryRecords;
  /** Inserts series rows, links their dimensions, and notifies subscribers. */
  addSeriesValues(inputs: AnalyticsSeriesInput[]): Promise<void>;
  private _createDimensionPath;
  private _linkDimensions;
  /** Convenience wrapper around {@link addSeriesValues} for a single input. */
  addSeriesValue(input: AnalyticsSeriesInput): Promise<void>;
  /** Subscribes to updates under a source path; returns an unsubscribe fn. */
  subscribeToSource(source: AnalyticsPath, callback: AnalyticsUpdateCallback): () => void;
}
//#endregion
//#region src/utils.d.ts
/** Creates a PGlite instance persisted to IndexedDB under `databaseName`. */
declare function createFsPglite(databaseName: string): Promise<PGlite & Record<string, never>>;
//#endregion
export { BrowserAnalyticsStore, BrowserAnalyticsStoreOptions, createFsPglite };
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","names":[],"sources":["../src/BrowserAnalyticsStore.ts","../src/utils.ts"],"mappings":";;;;;KAsHY,4BAAA;EACV,MAAA,EAAQ,MAAA,GAAS,YAAA;EACjB,WAAA,GAAc,cAAA;EACd,aAAA,GAAgB,gBAAA;EAChB,QAAA,GAAW,kBAAA;AAAA;AAAA,cAGA,qBAAA,YAAiC,eAAA;EAAA,QACpC,YAAA;EAAA,QACA,cAAA;EAAA,QACA,WAAA;EAAA,QACA,SAAA;EAAA,iBACS,oBAAA;EAAA,QACT,OAAA;;IAGN,MAAA;IACA,WAAA;IACA,aAAA;IACA;EAAA,GACC,4BAAA;EAkBU,IAAA,CAAA,GAAI,OAAA;EAQJ,GAAA,CAAI,GAAA,WAAW,OAAA;EAYf,OAAA,CAAA,GAAO,OAAA;EAIP,aAAA,CAAA,GAAiB,OAAA;EAwDjB,iBAAA,CACX,KAAA,EAAO,oBAAA,GACN,OAAA,CAAQ,eAAA,UAAyB,kBAAA;EA+FvB,6BAAA,CAAA,GAAiC,OAAA;EAgBjC,mBAAA,CACX,MAAA,EAAQ,aAAA,EACR,iBAAA,aACC,OAAA;EAAA,QAqBW,qBAAA;EAAA,QA4BN,mBAAA;EAuCK,eAAA,CAAgB,MAAA,EAAQ,oBAAA,KAAyB,OAAA;EAAA,QA4EhD,oBAAA;EAAA,QAsBA,eAAA;EA+CD,cAAA,CAAe,KAAA,EAAO,oBAAA,GAAuB,OAAA;EAInD,iBAAA,CACL,MAAA,EAAQ,aAAA,EACR,QAAA,EAAU,uBAAA;AAAA;;;iBC7kBQ,cAAA,CAAe,YAAA,WAAoB,OAAA,CAAA,MAAA,GAAA,MAAA"}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,414 @@
|
|
|
1
|
+
import { AnalyticsPath, AnalyticsSubscriptionManager, PassthroughAnalyticsProfiler } from "@powerhousedao/analytics-engine-core";
|
|
2
|
+
import { pascalCase } from "change-case";
|
|
3
|
+
import { DateTime } from "luxon";
|
|
4
|
+
import { IdbFs, PGlite } from "@electric-sql/pglite";
|
|
5
|
+
//#region src/PgLiteExecutor.ts
|
|
6
|
+
/**
 * Flattens PGlite `exec` results — an array of per-statement results, each
 * carrying `fields` metadata and `rows` — into one flat array of plain row
 * objects keyed by column name.
 *
 * Values are copied through untouched; the upstream source carries a
 * "todo: switch on dataTypeID", i.e. no per-type conversion happens yet.
 *
 * Fixes over the previous revision (no behavior change):
 * - dropped the unused `dataTypeID` destructured binding;
 * - dropped the dead `|| []` fallback (`Array.prototype.values()` returns an
 *   iterator, which is never falsy);
 * - iterate `fields` directly instead of indexing.
 */
const parseRawResults = (rawResults) => {
  const allValues = [];
  for (const returnValue of rawResults.values()) {
    const { fields, rows } = returnValue;
    const values = new Array(rows.length);
    for (let i = 0, iLen = values.length; i < iLen; i++) {
      const row = rows[i];
      const value = {};
      for (const { name } of fields) {
        value[name] = row[name];
      }
      values[i] = value;
    }
    allValues.push(...values);
  }
  return allValues;
};
|
|
24
|
+
var PGLiteQueryExecutor = class {
  // Monotonic counter correlating query-log and result-log entries.
  _index = 0;
  // Underlying PGlite / PGliteWorker handle; assigned via init().
  _sql = null;
  /**
   * @param _profiler      profiler used to time each executed query
   * @param _queryLogger   optional logger called with (index, sql) beforehand
   * @param _resultsLogger optional logger called with (index, parsed results)
   */
  constructor(_profiler, _queryLogger, _resultsLogger) {
    this._profiler = _profiler;
    this._queryLogger = _queryLogger;
    this._resultsLogger = _resultsLogger;
  }
  /** Attaches the database handle that subsequent queries run against. */
  init(sql) {
    this._sql = sql;
  }
  /**
   * Executes a raw SQL string under the "Query" profiler label, logging the
   * statement and parsed results when loggers were provided.
   * Throws if init() has not been called yet.
   */
  async execute(raw) {
    const db = this._sql;
    if (!db) throw new Error("PGLiteQueryExecutor not initialized");
    const queryIndex = this._index++;
    if (this._queryLogger) {
      this._queryLogger(queryIndex, raw);
    }
    const parsed = await this._profiler.record("Query", async () => {
      const rawResults = await db.exec(raw);
      return rawResults ? parseRawResults(rawResults) : undefined;
    });
    if (this._resultsLogger) {
      this._resultsLogger(queryIndex, parsed);
    }
    return parsed;
  }
};
|
|
48
|
+
//#endregion
|
|
49
|
+
//#region src/BrowserAnalyticsStore.ts
|
|
50
|
+
const initSql = `
|
|
51
|
+
|
|
52
|
+
create table if not exists "AnalyticsSeries"
|
|
53
|
+
(
|
|
54
|
+
id serial primary key,
|
|
55
|
+
source varchar(255) not null,
|
|
56
|
+
start timestamp not null,
|
|
57
|
+
"end" timestamp,
|
|
58
|
+
metric varchar(255) not null,
|
|
59
|
+
value real not null,
|
|
60
|
+
unit varchar(255),
|
|
61
|
+
fn varchar(255) not null,
|
|
62
|
+
params json
|
|
63
|
+
);
|
|
64
|
+
|
|
65
|
+
create unique index if not exists "AnalyticsSeries_pkey"
|
|
66
|
+
on "AnalyticsSeries" (id);
|
|
67
|
+
|
|
68
|
+
create index if not exists analyticsseries_end_index
|
|
69
|
+
on "AnalyticsSeries" ("end");
|
|
70
|
+
|
|
71
|
+
create index if not exists analyticsseries_fn_index
|
|
72
|
+
on "AnalyticsSeries" (fn);
|
|
73
|
+
|
|
74
|
+
create index if not exists analyticsseries_metric_index
|
|
75
|
+
on "AnalyticsSeries" (metric);
|
|
76
|
+
|
|
77
|
+
create index if not exists analyticsseries_source_index
|
|
78
|
+
on "AnalyticsSeries" (source);
|
|
79
|
+
|
|
80
|
+
create index if not exists analyticsseries_start_index
|
|
81
|
+
on "AnalyticsSeries" (start);
|
|
82
|
+
|
|
83
|
+
create index if not exists analyticsseries_unit_index
|
|
84
|
+
on "AnalyticsSeries" (unit);
|
|
85
|
+
|
|
86
|
+
create index if not exists analyticsseries_value_index
|
|
87
|
+
on "AnalyticsSeries" (value);
|
|
88
|
+
|
|
89
|
+
create table if not exists "AnalyticsDimension"
|
|
90
|
+
(
|
|
91
|
+
id serial primary key,
|
|
92
|
+
dimension varchar(255) not null,
|
|
93
|
+
path varchar(255) not null,
|
|
94
|
+
label varchar(255),
|
|
95
|
+
icon varchar(1000),
|
|
96
|
+
description text
|
|
97
|
+
);
|
|
98
|
+
|
|
99
|
+
create unique index if not exists "AnalyticsDimension_pkey"
|
|
100
|
+
on "AnalyticsDimension" (id);
|
|
101
|
+
|
|
102
|
+
create index if not exists analyticsdimension_dimension_index
|
|
103
|
+
on "AnalyticsDimension" (dimension);
|
|
104
|
+
|
|
105
|
+
create index if not exists analyticsdimension_path_index
|
|
106
|
+
on "AnalyticsDimension" (path);
|
|
107
|
+
|
|
108
|
+
create table if not exists "AnalyticsSeries_AnalyticsDimension"
|
|
109
|
+
(
|
|
110
|
+
"seriesId" integer not null
|
|
111
|
+
constraint analyticsseries_analyticsdimension_seriesid_foreign
|
|
112
|
+
references "AnalyticsSeries"
|
|
113
|
+
on delete cascade,
|
|
114
|
+
"dimensionId" integer not null
|
|
115
|
+
constraint analyticsseries_analyticsdimension_dimensionid_foreign
|
|
116
|
+
references "AnalyticsDimension"
|
|
117
|
+
on delete cascade
|
|
118
|
+
);
|
|
119
|
+
|
|
120
|
+
create index if not exists analyticsseries_analyticsdimension_dimensionid_index
|
|
121
|
+
on "AnalyticsSeries_AnalyticsDimension" ("dimensionId");
|
|
122
|
+
|
|
123
|
+
create index if not exists analyticsseries_analyticsdimension_seriesid_index
|
|
124
|
+
on "AnalyticsSeries_AnalyticsDimension" ("seriesId");
|
|
125
|
+
|
|
126
|
+
`;
|
|
127
|
+
/**
 * IAnalyticsStore implementation persisting analytics data in a PGlite
 * (Postgres-in-WASM) database. init() must run before any query method:
 * it wires the executor and creates the schema via `initSql`.
 */
var BrowserAnalyticsStore = class {
  _queryLogger;
  _resultsLogger;
  _pgExecutor;
  _profiler;
  // Fans source-change notifications out to subscribeToSource() listeners.
  _subscriptionManager = new AnalyticsSubscriptionManager();
  _pgLite;
  /**
   * @param pgLite        PGlite or PGliteWorker instance backing the store
   * @param queryLogger   optional (index, sql) logger; -1 marks raw() calls
   * @param resultsLogger optional (index, results) logger
   * @param profiler      optional profiler; defaults to a passthrough one
   */
  constructor({ pgLite, queryLogger, resultsLogger, profiler }) {
    if (!profiler) profiler = new PassthroughAnalyticsProfiler();
    const executor = new PGLiteQueryExecutor(profiler, queryLogger, resultsLogger);
    this._pgLite = pgLite;
    // Loggers default to no-ops so call sites can invoke them unconditionally.
    this._queryLogger = queryLogger || (() => {});
    this._resultsLogger = resultsLogger || (() => {});
    this._profiler = profiler;
    this._pgExecutor = executor;
  }
  /** Attaches the executor and creates tables/indexes if they don't exist. */
  async init() {
    this._pgExecutor.init(this._pgLite);
    await this._pgLite.exec(initSql);
  }
  /** Runs an arbitrary SQL string and returns flattened row objects. */
  async raw(sql) {
    this._queryLogger(-1, sql);
    return await this._profiler.record("QueryRaw", async () => {
      const results = await this._pgLite.exec(sql);
      this._resultsLogger(-1, results);
      return parseRawResults(results || []);
    });
  }
  /** Closes the underlying database connection. */
  async destroy() {
    // NOTE(review): close() returns a promise that is not awaited here, so
    // destroy() may resolve before the connection has actually closed.
    this._pgLite.close();
  }
  /**
   * Returns all dimensions that have a meaningful path (non-null, non-empty,
   * not "/"), grouped as [{ name, values: [{ path, icon, label, description }] }].
   */
  async getDimensions() {
    const result = await this._pgLite.query(`
      select "dimension", "path", "icon", "label", "description"
      from "AnalyticsDimension"
      where "path" is not null
        and "path" <> ''
        and "path" <> '/'
    `);
    if (!Array.isArray(result.rows)) return [];
    // Group rows by dimension name, accumulating their path metadata.
    const grouped = result.rows.reduce((acc, row) => {
      if (!acc[row.dimension]) acc[row.dimension] = {
        name: row.dimension,
        values: []
      };
      acc[row.dimension]?.values.push({
        path: row.path,
        icon: row.icon,
        label: row.label,
        description: row.description
      });
      return acc;
    }, {});
    return Object.values(grouped);
  }
  /**
   * Builds and runs a two-layer query: the inner select filters series by
   * metric/unit/end-date and resolves one path per requested dimension as a
   * correlated subquery ("dim_<name>" columns); the outer layer joins
   * dimension metadata and applies LIKE filters from query.select.
   *
   * NOTE(review): dimension names from query.select are interpolated directly
   * into SQL identifiers/aliases (not parameterized) — safe only if callers
   * never pass untrusted dimension names.
   * NOTE(review): each selected dimension pushes "dim_icon"/"dim_label"/
   * "dim_description" aliases, so with multiple dimensions those output
   * columns collide — presumably the last one wins; verify against callers.
   */
  async getMatchingSeries(query) {
    const units = query.currency ? query.currency.firstSegment().filters : null;
    const dimensions = Object.keys(query.select);
    const params = [];
    const innerSelects = [
      `"AS_inner"."id"`,
      `"AS_inner"."source"`,
      `"AS_inner"."start"`,
      `"AS_inner"."end"`,
      `"AS_inner"."metric"`,
      `"AS_inner"."value"`,
      `"AS_inner"."unit"`,
      `"AS_inner"."fn"`,
      `"AS_inner"."params"`
    ];
    const innerWheres = [];
    const outerSelects = [`"AV".*`];
    const outerJoins = [];
    const outerWheres = [];
    // Placeholders are numbered by params.length immediately after each push.
    params.push(query.metrics);
    innerWheres.push(`"AS_inner"."metric" = any($${params.length}::text[])`);
    if (units && units.length > 0 && units[0] !== "") {
      params.push(units);
      innerWheres.push(`"AS_inner"."unit" = any($${params.length}::text[])`);
    }
    if (query.end) {
      params.push(query.end.toISO());
      innerWheres.push(`"AS_inner"."start" < $${params.length}`);
    }
    // One correlated subquery per dimension resolves at most one path.
    for (const dimension of dimensions) {
      params.push(dimension);
      innerSelects.push(`
        (
          select "AD"."path"
          from "AnalyticsSeries_AnalyticsDimension" as "ASAD"
          left join "AnalyticsDimension" as "AD"
            on "AD"."id" = "ASAD"."dimensionId"
          where "ASAD"."seriesId" = "AS_inner"."id"
            and "AD"."dimension" = $${params.length}
          limit 1
        ) as "dim_${dimension}"
      `);
    }
    for (const [dimension, paths] of Object.entries(query.select)) {
      outerJoins.push(`
        left join "AnalyticsDimension" as "${dimension}"
          on "${dimension}"."path" = "AV"."dim_${dimension}"
      `);
      outerSelects.push(`"${dimension}"."icon" as "dim_icon"`, `"${dimension}"."description" as "dim_description"`, `"${dimension}"."label" as "dim_label"`);
      // Path filters use LIKE with a "/%"-suffixed prefix (subtree match).
      if (paths.length === 1) {
        params.push(paths[0].toString("/%"));
        outerWheres.push(`"AV"."dim_${dimension}" like $${params.length}`);
      } else if (paths.length > 1) {
        const orParts = [];
        for (const path of paths) {
          params.push(path.toString("/%"));
          orParts.push(`"AV"."dim_${dimension}" like $${params.length}`);
        }
        outerWheres.push(`(${orParts.join(" or ")})`);
      }
    }
    const sql = `
      select
        ${outerSelects.join(",\n        ")}
      from (
        select
          ${innerSelects.join(",\n          ")}
        from "AnalyticsSeries" as "AS_inner"
        where ${innerWheres.join("\n          and ")}
      ) as "AV"
      ${outerJoins.join("\n")}
      ${outerWheres.length ? `where ${outerWheres.join("\n        and ")}` : ""}
      order by "AV"."start"
    `;
    const result = await this._pgLite.query(sql, params);
    return this._formatQueryRecords(result.rows, dimensions);
  }
  /** Deletes dimensions with no linked series; returns the deleted count. */
  async clearEmptyAnalyticsDimensions() {
    return (await this._pgLite.query(`
      delete from "AnalyticsDimension" as "AD"
      where not exists (
        select 1
        from "AnalyticsSeries_AnalyticsDimension" as "ASAD"
        where "ASAD"."dimensionId" = "AD"."id"
      )
      returning "id"
    `)).rows.length;
  }
  /**
   * Deletes every series whose source is under the given path (LIKE on the
   * "/%"-suffixed prefix), optionally purging now-empty dimensions, then
   * notifies subscribers of the source. Returns the combined deleted count.
   */
  async clearSeriesBySource(source, cleanUpDimensions = false) {
    let deletedCount = (await this._pgLite.query(`
      delete from "AnalyticsSeries"
      where "source" like $1
      returning "id"
    `, [source.toString("/%")])).rows.length;
    if (cleanUpDimensions) deletedCount += await this.clearEmptyAnalyticsDimensions();
    this._subscriptionManager.notifySubscribers([source]);
    return deletedCount;
  }
  /**
   * Writes icon/label/description onto the dimension row matching the path
   * (with trailing "/"). No-op when all three metadata values are falsy.
   * NOTE(review): matches on "path" alone, without a dimension-name filter —
   * presumably paths are unique across dimensions; verify.
   */
  async _addDimensionMetadata(path, icon, label, description) {
    if (!icon && !label && !description) return;
    await this._pgLite.query(`
      update "AnalyticsDimension"
      set
        "icon" = $1,
        "label" = $2,
        "description" = $3
      where "path" = $4
    `, [
      icon ? icon : "",
      label ? label : "",
      description ? description : "",
      `${path.toString()}/`
    ]);
  }
  /**
   * Maps raw query rows to AnalyticsSeries objects: parses start/end into
   * Luxon DateTimes, strips the trailing separator from stored paths
   * (slice(0, -1)), and attaches per-dimension metadata. Returns the result
   * sorted by ascending id.
   */
  _formatQueryRecords(records, dimensions) {
    return records.map((r) => {
      // Stored timestamps may come back as Date or string depending on driver.
      const start = r.start instanceof Date ? r.start : new Date(r.start);
      const end = r.end == null ? null : r.end instanceof Date ? r.end : new Date(r.end);
      const result = {
        id: r.id,
        source: AnalyticsPath.fromString(r.source.slice(0, -1)),
        start: DateTime.fromJSDate(start),
        end: end ? DateTime.fromJSDate(end) : null,
        metric: r.metric,
        value: r.value,
        unit: r.unit,
        fn: r.fn,
        params: r.params,
        dimensions: {}
      };
      dimensions.forEach((dimension) => {
        const dimPath = r[`dim_${dimension}`];
        result.dimensions[dimension] = {
          // "?" is the sentinel path when the series has no such dimension.
          path: AnalyticsPath.fromString(dimPath ? dimPath.slice(0, -1) : "?"),
          icon: r.dim_icon ? r.dim_icon : "",
          label: r.dim_label ? r.dim_label : "",
          description: r.dim_description ? r.dim_description : ""
        };
      });
      return result;
    }).sort((a, b) => a.id - b.id);
  }
  /**
   * Inserts one series row per input, accumulates dimension->path->seriesIds,
   * links dimensions, applies optional dimension metadata, and finally
   * notifies subscribers of every input source.
   * NOTE(review): inserts run sequentially and are not wrapped in a
   * transaction, so a mid-batch failure leaves earlier rows committed.
   */
  async addSeriesValues(inputs) {
    const dimensionsMap = {};
    for (const input of inputs) {
      const seriesId = (await this._pgLite.query(`
        insert into "AnalyticsSeries" (
          "start",
          "end",
          "source",
          "metric",
          "value",
          "unit",
          "fn",
          "params"
        )
        values ($1, $2, $3, $4, $5, $6, $7, $8)
        returning "id"
      `, [
        input.start.toJSDate(),
        input.end ? input.end.toJSDate() : null,
        input.source.toString("/"),
        pascalCase(input.metric),
        input.value,
        input.unit || null,
        input.fn || "Single",
        input.params || null
      ])).rows[0]?.id;
      if (seriesId == null) throw new Error("Failed to insert AnalyticsSeries row");
      // Record which series ids belong to each (dimension, path) pair.
      for (const [dim, path] of Object.entries(input.dimensions || {})) {
        if (!dimensionsMap[dim]) dimensionsMap[dim] = {};
        const pathKey = path.toString("/");
        if (!dimensionsMap[dim][pathKey]) dimensionsMap[dim][pathKey] = [];
        dimensionsMap[dim][pathKey].push(seriesId);
      }
    }
    for (const [dim, pathMap] of Object.entries(dimensionsMap)) await this._linkDimensions(dim, pathMap);
    for (const input of inputs) {
      const metaDimension = input.dimensionMetadata;
      if (!metaDimension) continue;
      await this._addDimensionMetadata(metaDimension.path, metaDimension.icon, metaDimension.label, metaDimension.description);
    }
    this._subscriptionManager.notifySubscribers(inputs.map((input) => input.source));
  }
  /** Inserts a new (dimension, path) row and returns its id; throws on failure. */
  async _createDimensionPath(dimension, path) {
    const id = (await this._pgLite.query(`
      insert into "AnalyticsDimension" ("dimension", "path")
      values ($1, $2)
      returning "id"
    `, [dimension, path])).rows[0]?.id;
    if (id == null) throw new Error("Failed to create AnalyticsDimension");
    return id;
  }
  /**
   * Links series ids to dimension rows: looks up existing (dimension, path)
   * rows in one query, creates missing ones, then inserts one link-table row
   * per series id.
   */
  async _linkDimensions(dimension, pathMap) {
    const paths = Object.keys(pathMap);
    if (paths.length === 0) return;
    const existing = (await this._pgLite.query(`
      select "path", "id"
      from "AnalyticsDimension"
      where "dimension" = $1
        and "path" = any($2::text[])
    `, [dimension, paths])).rows;
    for (const [path, ids] of Object.entries(pathMap)) {
      const existingRecord = existing.find((record) => record.path === path);
      const dimensionId = existingRecord ? existingRecord.id : await this._createDimensionPath(dimension, path);
      for (const seriesId of ids) await this._pgLite.query(`
        insert into "AnalyticsSeries_AnalyticsDimension" (
          "seriesId",
          "dimensionId"
        )
        values ($1, $2)
      `, [seriesId, dimensionId]);
    }
  }
  /** Convenience wrapper: inserts a single series input. */
  async addSeriesValue(input) {
    await this.addSeriesValues([input]);
  }
  /** Subscribes to updates under a source path; returns an unsubscribe fn. */
  subscribeToSource(source, callback) {
    return this._subscriptionManager.subscribeToPath(source, callback);
  }
};
|
|
403
|
+
//#endregion
|
|
404
|
+
//#region src/utils.ts
|
|
405
|
+
/**
 * Creates a PGlite instance persisted to IndexedDB under the given database
 * name, with relaxed durability enabled.
 */
async function createFsPglite(databaseName) {
  const options = {
    fs: new IdbFs(databaseName),
    relaxedDurability: true
  };
  return await PGlite.create(options);
}
|
|
411
|
+
//#endregion
|
|
412
|
+
export { BrowserAnalyticsStore, createFsPglite };
|
|
413
|
+
|
|
414
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","names":[],"sources":["../src/PgLiteExecutor.ts","../src/BrowserAnalyticsStore.ts","../src/utils.ts"],"sourcesContent":["import type { PGlite } from \"@electric-sql/pglite\";\nimport type { PGliteWorker } from \"@electric-sql/pglite/worker\";\nimport {\n type IAnalyticsProfiler,\n type ISqlExecutor,\n type SqlQueryLogger,\n type SqlResultsLogger,\n} from \"@powerhousedao/analytics-engine-core\";\n\nexport const parseRawResults = (rawResults: any[]) => {\n const allValues = [];\n for (const returnValue of rawResults.values() || []) {\n const { fields, rows } = returnValue;\n const values = new Array(rows.length);\n for (let i = 0, iLen = values.length; i < iLen; i++) {\n const row = rows[i];\n const value: any = {};\n for (let j = 0, jLen = fields.length; j < jLen; j++) {\n // todo: switch on dataTypeID\n const { name, dataTypeID } = fields[j];\n value[name] = row[name];\n }\n\n values[i] = value;\n }\n\n allValues.push(...values);\n }\n\n return allValues;\n};\n\nexport class PGLiteQueryExecutor implements ISqlExecutor {\n private _index: number = 0;\n private _sql: PGlite | PGliteWorker | null = null;\n\n constructor(\n private readonly _profiler: IAnalyticsProfiler,\n private readonly _queryLogger?: SqlQueryLogger,\n private readonly _resultsLogger?: SqlResultsLogger,\n ) {\n //\n }\n\n init(sql: PGlite | PGliteWorker) {\n this._sql = sql;\n }\n\n async execute(raw: string) {\n if (!this._sql) {\n throw new Error(\"PGLiteQueryExecutor not initialized\");\n }\n\n const index = this._index++;\n\n if (this._queryLogger) {\n this._queryLogger(index, raw);\n }\n\n const results: any = await this._profiler.record(\"Query\", async () => {\n const rawResults = await this._sql?.exec(raw);\n if (!rawResults) {\n return;\n }\n\n const allValues = parseRawResults(rawResults);\n return allValues;\n });\n\n if (this._resultsLogger) {\n this._resultsLogger(index, results);\n }\n\n return results;\n }\n}\n","import type { PGlite } from 
\"@electric-sql/pglite\";\nimport type { PGliteWorker } from \"@electric-sql/pglite/worker\";\nimport type {\n AnalyticsDimension,\n AnalyticsSeries,\n AnalyticsSeriesInput,\n AnalyticsSeriesQuery,\n AnalyticsUpdateCallback,\n SqlQueryLogger,\n SqlResultsLogger,\n} from \"@powerhousedao/analytics-engine-core\";\nimport {\n AnalyticsPath,\n AnalyticsSubscriptionManager,\n type IAnalyticsProfiler,\n type IAnalyticsStore,\n PassthroughAnalyticsProfiler,\n} from \"@powerhousedao/analytics-engine-core\";\nimport { pascalCase } from \"change-case\";\nimport { DateTime } from \"luxon\";\nimport { parseRawResults, PGLiteQueryExecutor } from \"./PgLiteExecutor.js\";\n\ntype DimensionsMap = Record<string, Record<string, number[]>>;\n\ntype AnalyticsSeriesRecord = {\n id: number;\n source: string;\n start: Date | string;\n end: Date | string | null;\n metric: string;\n value: number;\n unit: string | null;\n fn: string;\n params: Record<string, any> | null;\n [dimension: `dim_${string}`]: string | null | undefined;\n dim_icon?: string | null;\n dim_label?: string | null;\n dim_description?: string | null;\n};\n\nconst initSql = `\n\n create table if not exists \"AnalyticsSeries\"\n (\n id serial primary key,\n source varchar(255) not null,\n start timestamp not null,\n \"end\" timestamp,\n metric varchar(255) not null,\n value real not null,\n unit varchar(255),\n fn varchar(255) not null,\n params json\n );\n\n create unique index if not exists \"AnalyticsSeries_pkey\"\n on \"AnalyticsSeries\" (id);\n\n create index if not exists analyticsseries_end_index\n on \"AnalyticsSeries\" (\"end\");\n\n create index if not exists analyticsseries_fn_index\n on \"AnalyticsSeries\" (fn);\n\n create index if not exists analyticsseries_metric_index\n on \"AnalyticsSeries\" (metric);\n\n create index if not exists analyticsseries_source_index\n on \"AnalyticsSeries\" (source);\n\n create index if not exists analyticsseries_start_index\n on \"AnalyticsSeries\" (start);\n\n create index if 
not exists analyticsseries_unit_index\n on \"AnalyticsSeries\" (unit);\n\n create index if not exists analyticsseries_value_index\n on \"AnalyticsSeries\" (value);\n\n create table if not exists \"AnalyticsDimension\"\n (\n id serial primary key,\n dimension varchar(255) not null,\n path varchar(255) not null,\n label varchar(255),\n icon varchar(1000),\n description text\n );\n\n create unique index if not exists \"AnalyticsDimension_pkey\"\n on \"AnalyticsDimension\" (id);\n\n create index if not exists analyticsdimension_dimension_index\n on \"AnalyticsDimension\" (dimension);\n\n create index if not exists analyticsdimension_path_index\n on \"AnalyticsDimension\" (path);\n\n create table if not exists \"AnalyticsSeries_AnalyticsDimension\"\n (\n \"seriesId\" integer not null\n constraint analyticsseries_analyticsdimension_seriesid_foreign\n references \"AnalyticsSeries\"\n on delete cascade,\n \"dimensionId\" integer not null\n constraint analyticsseries_analyticsdimension_dimensionid_foreign\n references \"AnalyticsDimension\"\n on delete cascade\n );\n\n create index if not exists analyticsseries_analyticsdimension_dimensionid_index\n on \"AnalyticsSeries_AnalyticsDimension\" (\"dimensionId\");\n\n create index if not exists analyticsseries_analyticsdimension_seriesid_index\n on \"AnalyticsSeries_AnalyticsDimension\" (\"seriesId\");\n\n`;\n\nexport type BrowserAnalyticsStoreOptions = {\n pgLite: PGlite | PGliteWorker;\n queryLogger?: SqlQueryLogger;\n resultsLogger?: SqlResultsLogger;\n profiler?: IAnalyticsProfiler;\n};\n\nexport class BrowserAnalyticsStore implements IAnalyticsStore {\n private _queryLogger: SqlQueryLogger;\n private _resultsLogger: SqlResultsLogger;\n private _pgExecutor: PGLiteQueryExecutor;\n private _profiler: IAnalyticsProfiler;\n private readonly _subscriptionManager = new AnalyticsSubscriptionManager();\n private _pgLite: PGlite | PGliteWorker;\n\n public constructor({\n pgLite,\n queryLogger,\n resultsLogger,\n profiler,\n }: 
BrowserAnalyticsStoreOptions) {\n if (!profiler) {\n profiler = new PassthroughAnalyticsProfiler();\n }\n\n const executor = new PGLiteQueryExecutor(\n profiler,\n queryLogger,\n resultsLogger,\n );\n\n this._pgLite = pgLite;\n this._queryLogger = queryLogger || (() => {});\n this._resultsLogger = resultsLogger || (() => {});\n this._profiler = profiler;\n this._pgExecutor = executor;\n }\n\n public async init() {\n // init executor\n this._pgExecutor.init(this._pgLite);\n\n // create tables if they do not exist\n await this._pgLite.exec(initSql);\n }\n\n public async raw(sql: string) {\n this._queryLogger(-1, sql);\n\n return await this._profiler.record(\"QueryRaw\", async () => {\n const results = await this._pgLite.exec(sql);\n\n this._resultsLogger(-1, results);\n\n return parseRawResults(results || []);\n });\n }\n\n public async destroy() {\n this._pgLite.close();\n }\n\n public async getDimensions(): Promise<any> {\n const result = await this._pgLite.query<{\n dimension: string;\n path: string;\n icon: string | null;\n label: string | null;\n description: string | null;\n }>(`\n select \"dimension\", \"path\", \"icon\", \"label\", \"description\"\n from \"AnalyticsDimension\"\n where \"path\" is not null\n and \"path\" <> ''\n and \"path\" <> '/'\n `);\n\n if (!Array.isArray(result.rows)) return [];\n\n const grouped = result.rows.reduce(\n (\n acc: Record<\n string,\n | {\n name: string;\n values: {\n path: string;\n icon: string | null;\n label: string | null;\n description: string | null;\n }[];\n }\n | undefined\n >,\n row,\n ) => {\n if (!acc[row.dimension]) {\n acc[row.dimension] = {\n name: row.dimension,\n values: [],\n };\n }\n\n acc[row.dimension]?.values.push({\n path: row.path,\n icon: row.icon,\n label: row.label,\n description: row.description,\n });\n\n return acc;\n },\n {},\n );\n\n return Object.values(grouped);\n }\n\n public async getMatchingSeries(\n query: AnalyticsSeriesQuery,\n ): Promise<AnalyticsSeries<string | 
AnalyticsDimension>[]> {\n const units = query.currency ? query.currency.firstSegment().filters : null;\n const dimensions = Object.keys(query.select);\n\n const params: unknown[] = [];\n const innerSelects: string[] = [\n `\"AS_inner\".\"id\"`,\n `\"AS_inner\".\"source\"`,\n `\"AS_inner\".\"start\"`,\n `\"AS_inner\".\"end\"`,\n `\"AS_inner\".\"metric\"`,\n `\"AS_inner\".\"value\"`,\n `\"AS_inner\".\"unit\"`,\n `\"AS_inner\".\"fn\"`,\n `\"AS_inner\".\"params\"`,\n ];\n const innerWheres: string[] = [];\n const outerSelects: string[] = [`\"AV\".*`];\n const outerJoins: string[] = [];\n const outerWheres: string[] = [];\n\n params.push(query.metrics);\n innerWheres.push(`\"AS_inner\".\"metric\" = any($${params.length}::text[])`);\n\n if (units && units.length > 0 && units[0] !== \"\") {\n params.push(units);\n innerWheres.push(`\"AS_inner\".\"unit\" = any($${params.length}::text[])`);\n }\n\n if (query.end) {\n params.push(query.end.toISO());\n innerWheres.push(`\"AS_inner\".\"start\" < $${params.length}`);\n }\n\n for (const dimension of dimensions) {\n params.push(dimension);\n innerSelects.push(`\n (\n select \"AD\".\"path\"\n from \"AnalyticsSeries_AnalyticsDimension\" as \"ASAD\"\n left join \"AnalyticsDimension\" as \"AD\"\n on \"AD\".\"id\" = \"ASAD\".\"dimensionId\"\n where \"ASAD\".\"seriesId\" = \"AS_inner\".\"id\"\n and \"AD\".\"dimension\" = $${params.length}\n limit 1\n ) as \"dim_${dimension}\"\n `);\n }\n\n for (const [dimension, paths] of Object.entries(query.select)) {\n outerJoins.push(`\n left join \"AnalyticsDimension\" as \"${dimension}\"\n on \"${dimension}\".\"path\" = \"AV\".\"dim_${dimension}\"\n `);\n\n outerSelects.push(\n `\"${dimension}\".\"icon\" as \"dim_icon\"`,\n `\"${dimension}\".\"description\" as \"dim_description\"`,\n `\"${dimension}\".\"label\" as \"dim_label\"`,\n );\n\n if (paths.length === 1) {\n params.push(paths[0].toString(\"/%\"));\n outerWheres.push(`\"AV\".\"dim_${dimension}\" like $${params.length}`);\n } else if 
(paths.length > 1) {\n const orParts: string[] = [];\n\n for (const path of paths) {\n params.push(path.toString(\"/%\"));\n orParts.push(`\"AV\".\"dim_${dimension}\" like $${params.length}`);\n }\n\n outerWheres.push(`(${orParts.join(\" or \")})`);\n }\n }\n\n const sql = `\n select\n ${outerSelects.join(\",\\n \")}\n from (\n select\n ${innerSelects.join(\",\\n \")}\n from \"AnalyticsSeries\" as \"AS_inner\"\n where ${innerWheres.join(\"\\n and \")}\n ) as \"AV\"\n ${outerJoins.join(\"\\n\")}\n ${outerWheres.length ? `where ${outerWheres.join(\"\\n and \")}` : \"\"}\n order by \"AV\".\"start\"\n `;\n\n const result = await this._pgLite.query<AnalyticsSeriesRecord>(sql, params);\n\n return this._formatQueryRecords(result.rows, dimensions);\n }\n\n public async clearEmptyAnalyticsDimensions(): Promise<number> {\n const result = await this._pgLite.query<{ id: number }>(\n `\n delete from \"AnalyticsDimension\" as \"AD\"\n where not exists (\n select 1\n from \"AnalyticsSeries_AnalyticsDimension\" as \"ASAD\"\n where \"ASAD\".\"dimensionId\" = \"AD\".\"id\"\n )\n returning \"id\"\n `,\n );\n\n return result.rows.length;\n }\n\n public async clearSeriesBySource(\n source: AnalyticsPath,\n cleanUpDimensions: boolean = false,\n ): Promise<number> {\n const result = await this._pgLite.query<{ id: number }>(\n `\n delete from \"AnalyticsSeries\"\n where \"source\" like $1\n returning \"id\"\n `,\n [source.toString(\"/%\")],\n );\n\n let deletedCount = result.rows.length;\n\n if (cleanUpDimensions) {\n deletedCount += await this.clearEmptyAnalyticsDimensions();\n }\n\n this._subscriptionManager.notifySubscribers([source]);\n\n return deletedCount;\n }\n\n private async _addDimensionMetadata(\n path: string,\n icon: string | null | undefined,\n label: string | null | undefined,\n description: string | null | undefined,\n ): Promise<void> {\n if (!icon && !label && !description) {\n return;\n }\n\n await this._pgLite.query(\n `\n update \"AnalyticsDimension\"\n set\n 
\"icon\" = $1,\n \"label\" = $2,\n \"description\" = $3\n where \"path\" = $4\n `,\n [\n icon ? icon : \"\",\n label ? label : \"\",\n description ? description : \"\",\n `${path.toString()}/`,\n ],\n );\n }\n\n private _formatQueryRecords(\n records: AnalyticsSeriesRecord[],\n dimensions: string[],\n ): AnalyticsSeries<string | AnalyticsDimension>[] {\n const formatted = records.map((r) => {\n const start = r.start instanceof Date ? r.start : new Date(r.start);\n const end =\n r.end == null ? null : r.end instanceof Date ? r.end : new Date(r.end);\n\n const result = {\n id: r.id,\n source: AnalyticsPath.fromString(r.source.slice(0, -1)),\n start: DateTime.fromJSDate(start),\n end: end ? DateTime.fromJSDate(end) : null,\n metric: r.metric,\n value: r.value,\n unit: r.unit,\n fn: r.fn,\n params: r.params,\n dimensions: {} as Record<string, AnalyticsDimension>,\n };\n\n dimensions.forEach((dimension) => {\n const dimPath = r[`dim_${dimension}`];\n\n result.dimensions[dimension] = {\n path: AnalyticsPath.fromString(dimPath ? dimPath.slice(0, -1) : \"?\"),\n icon: r.dim_icon ? r.dim_icon : \"\",\n label: r.dim_label ? r.dim_label : \"\",\n description: r.dim_description ? r.dim_description : \"\",\n };\n });\n\n return result;\n });\n\n return formatted.sort((a, b) => a.id - b.id);\n }\n\n public async addSeriesValues(inputs: AnalyticsSeriesInput[]): Promise<void> {\n const dimensionsMap: DimensionsMap = {};\n\n for (const input of inputs) {\n const result = await this._pgLite.query<{ id: number }>(\n `\n insert into \"AnalyticsSeries\" (\n \"start\",\n \"end\",\n \"source\",\n \"metric\",\n \"value\",\n \"unit\",\n \"fn\",\n \"params\"\n )\n values ($1, $2, $3, $4, $5, $6, $7, $8)\n returning \"id\"\n `,\n [\n input.start.toJSDate(),\n input.end ? 
input.end.toJSDate() : null,\n input.source.toString(\"/\"),\n pascalCase(input.metric),\n input.value,\n input.unit || null,\n input.fn || \"Single\",\n input.params || null,\n ],\n );\n\n const seriesId = result.rows[0]?.id;\n\n if (seriesId == null) {\n throw new Error(\"Failed to insert AnalyticsSeries row\");\n }\n\n for (const [dim, path] of Object.entries(input.dimensions || {})) {\n if (!dimensionsMap[dim]) {\n dimensionsMap[dim] = {};\n }\n\n const pathKey = path.toString(\"/\");\n\n if (!dimensionsMap[dim][pathKey]) {\n dimensionsMap[dim][pathKey] = [];\n }\n\n dimensionsMap[dim][pathKey].push(seriesId);\n }\n }\n\n for (const [dim, pathMap] of Object.entries(dimensionsMap)) {\n await this._linkDimensions(dim, pathMap);\n }\n\n for (const input of inputs) {\n const metaDimension: any = input.dimensionMetadata;\n\n if (!metaDimension) {\n continue;\n }\n\n await this._addDimensionMetadata(\n metaDimension.path,\n metaDimension.icon,\n metaDimension.label,\n metaDimension.description,\n );\n }\n\n this._subscriptionManager.notifySubscribers(\n inputs.map((input) => input.source),\n );\n }\n\n private async _createDimensionPath(\n dimension: string,\n path: string,\n ): Promise<number> {\n const result = await this._pgLite.query<{ id: number | null }>(\n `\n insert into \"AnalyticsDimension\" (\"dimension\", \"path\")\n values ($1, $2)\n returning \"id\"\n `,\n [dimension, path],\n );\n\n const id = result.rows[0]?.id;\n\n if (id == null) {\n throw new Error(\"Failed to create AnalyticsDimension\");\n }\n\n return id;\n }\n\n private async _linkDimensions(\n dimension: string,\n pathMap: Record<string, number[]>,\n ): Promise<void> {\n const paths = Object.keys(pathMap);\n\n if (paths.length === 0) {\n return;\n }\n\n const existingResult = await this._pgLite.query<{\n path: string;\n id: number;\n }>(\n `\n select \"path\", \"id\"\n from \"AnalyticsDimension\"\n where \"dimension\" = $1\n and \"path\" = any($2::text[])\n `,\n [dimension, paths],\n );\n\n 
const existing = existingResult.rows;\n\n for (const [path, ids] of Object.entries(pathMap)) {\n const existingRecord = existing.find((record) => record.path === path);\n\n const dimensionId = existingRecord\n ? existingRecord.id\n : await this._createDimensionPath(dimension, path);\n\n for (const seriesId of ids) {\n await this._pgLite.query(\n `\n insert into \"AnalyticsSeries_AnalyticsDimension\" (\n \"seriesId\",\n \"dimensionId\"\n )\n values ($1, $2)\n `,\n [seriesId, dimensionId],\n );\n }\n }\n }\n\n public async addSeriesValue(input: AnalyticsSeriesInput): Promise<void> {\n await this.addSeriesValues([input]);\n }\n\n public subscribeToSource(\n source: AnalyticsPath,\n callback: AnalyticsUpdateCallback,\n ): () => void {\n return this._subscriptionManager.subscribeToPath(source, callback);\n }\n}\n","import { IdbFs, PGlite } from \"@electric-sql/pglite\";\n\nexport async function createFsPglite(databaseName: string) {\n return await PGlite.create({\n fs: new IdbFs(databaseName),\n relaxedDurability: true,\n 
});\n}\n"],"mappings":";;;;;AASA,MAAa,mBAAmB,eAAsB;CACpD,MAAM,YAAY,EAAE;AACpB,MAAK,MAAM,eAAe,WAAW,QAAQ,IAAI,EAAE,EAAE;EACnD,MAAM,EAAE,QAAQ,SAAS;EACzB,MAAM,SAAS,IAAI,MAAM,KAAK,OAAO;AACrC,OAAK,IAAI,IAAI,GAAG,OAAO,OAAO,QAAQ,IAAI,MAAM,KAAK;GACnD,MAAM,MAAM,KAAK;GACjB,MAAM,QAAa,EAAE;AACrB,QAAK,IAAI,IAAI,GAAG,OAAO,OAAO,QAAQ,IAAI,MAAM,KAAK;IAEnD,MAAM,EAAE,MAAM,eAAe,OAAO;AACpC,UAAM,QAAQ,IAAI;;AAGpB,UAAO,KAAK;;AAGd,YAAU,KAAK,GAAG,OAAO;;AAG3B,QAAO;;AAGT,IAAa,sBAAb,MAAyD;CACvD,SAAyB;CACzB,OAA6C;CAE7C,YACE,WACA,cACA,gBACA;AAHiB,OAAA,YAAA;AACA,OAAA,eAAA;AACA,OAAA,iBAAA;;CAKnB,KAAK,KAA4B;AAC/B,OAAK,OAAO;;CAGd,MAAM,QAAQ,KAAa;AACzB,MAAI,CAAC,KAAK,KACR,OAAM,IAAI,MAAM,sCAAsC;EAGxD,MAAM,QAAQ,KAAK;AAEnB,MAAI,KAAK,aACP,MAAK,aAAa,OAAO,IAAI;EAG/B,MAAM,UAAe,MAAM,KAAK,UAAU,OAAO,SAAS,YAAY;GACpE,MAAM,aAAa,MAAM,KAAK,MAAM,KAAK,IAAI;AAC7C,OAAI,CAAC,WACH;AAIF,UADkB,gBAAgB,WAAW;IAE7C;AAEF,MAAI,KAAK,eACP,MAAK,eAAe,OAAO,QAAQ;AAGrC,SAAO;;;;;ACjCX,MAAM,UAAU;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAqFhB,IAAa,wBAAb,MAA8D;CAC5D;CACA;CACA;CACA;CACA,uBAAwC,IAAI,8BAA8B;CAC1E;CAEA,YAAmB,EACjB,QACA,aACA,eACA,YAC+B;AAC/B,MAAI,CAAC,SACH,YAAW,IAAI,8BAA8B;EAG/C,MAAM,WAAW,IAAI,oBACnB,UACA,aACA,cACD;AAED,OAAK,UAAU;AACf,OAAK,eAAe,sBAAsB;AAC1C,OAAK,iBAAiB,wBAAwB;AAC9C,OAAK,YAAY;AACjB,OAAK,cAAc;;CAGrB,MAAa,OAAO;AAElB,OAAK,YAAY,KAAK,KAAK,QAAQ;AAGnC,QAAM,KAAK,QAAQ,KAAK,QAAQ;;CAGlC,MAAa,IAAI,KAAa;AAC5B,OAAK,aAAa,IAAI,IAAI;AAE1B,SAAO,MAAM,KAAK,UAAU,OAAO,YAAY,YAAY;GACzD,MAAM,UAAU,MAAM,KAAK,QAAQ,KAAK,IAAI;AAE5C,QAAK,eAAe,IAAI,QAAQ;AAEhC,UAAO,gBAAgB,WAAW,EAAE,CAAC;IACrC;;CAGJ,MAAa,UAAU;AACrB,OAAK,QAAQ,OAAO;;CAGtB,MAAa,gBAA8B;EACzC,MAAM,SAAS,MAAM,KAAK,QAAQ,MAM/B;;;;;;IAMH;AAEA,MAAI,CAAC,MAAM,QAAQ,OAAO,KAAK,CAAE,QAAO,EAAE;EAE1C,MAAM,UAAU,OAAO,KAAK,QAExB,KAaA,QACG;AACH,OAAI,CAAC,IAAI,IAAI,WACX,KAAI,IAAI,aAAa;IACnB,MAAM,IAAI;IACV,QAAQ,EAAE;IACX;AAGH,OAAI,IAAI,YAAY,OAAO,KAAK;IAC9B,MAAM,IAAI;IACV,MAAM,IAAI;IACV,OAAO,IAAI;IACX,aAAa,IAAI;IAClB,CAAC;AAEF,UAAO;KAET,EAAE,CACH;AAED,SAAO,OAAO,OAAO,QAAQ
;;CAG/B,MAAa,kBACX,OACyD;EACzD,MAAM,QAAQ,MAAM,WAAW,MAAM,SAAS,cAAc,CAAC,UAAU;EACvE,MAAM,aAAa,OAAO,KAAK,MAAM,OAAO;EAE5C,MAAM,SAAoB,EAAE;EAC5B,MAAM,eAAyB;GAC7B;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACD;EACD,MAAM,cAAwB,EAAE;EAChC,MAAM,eAAyB,CAAC,SAAS;EACzC,MAAM,aAAuB,EAAE;EAC/B,MAAM,cAAwB,EAAE;AAEhC,SAAO,KAAK,MAAM,QAAQ;AAC1B,cAAY,KAAK,8BAA8B,OAAO,OAAO,WAAW;AAExE,MAAI,SAAS,MAAM,SAAS,KAAK,MAAM,OAAO,IAAI;AAChD,UAAO,KAAK,MAAM;AAClB,eAAY,KAAK,4BAA4B,OAAO,OAAO,WAAW;;AAGxE,MAAI,MAAM,KAAK;AACb,UAAO,KAAK,MAAM,IAAI,OAAO,CAAC;AAC9B,eAAY,KAAK,yBAAyB,OAAO,SAAS;;AAG5D,OAAK,MAAM,aAAa,YAAY;AAClC,UAAO,KAAK,UAAU;AACtB,gBAAa,KAAK;;;;;;;oCAOY,OAAO,OAAO;;kBAEhC,UAAU;MACtB;;AAGF,OAAK,MAAM,CAAC,WAAW,UAAU,OAAO,QAAQ,MAAM,OAAO,EAAE;AAC7D,cAAW,KAAK;2CACqB,UAAU;cACvC,UAAU,uBAAuB,UAAU;MACnD;AAEA,gBAAa,KACX,IAAI,UAAU,yBACd,IAAI,UAAU,uCACd,IAAI,UAAU,0BACf;AAED,OAAI,MAAM,WAAW,GAAG;AACtB,WAAO,KAAK,MAAM,GAAG,SAAS,KAAK,CAAC;AACpC,gBAAY,KAAK,aAAa,UAAU,UAAU,OAAO,SAAS;cACzD,MAAM,SAAS,GAAG;IAC3B,MAAM,UAAoB,EAAE;AAE5B,SAAK,MAAM,QAAQ,OAAO;AACxB,YAAO,KAAK,KAAK,SAAS,KAAK,CAAC;AAChC,aAAQ,KAAK,aAAa,UAAU,UAAU,OAAO,SAAS;;AAGhE,gBAAY,KAAK,IAAI,QAAQ,KAAK,OAAO,CAAC,GAAG;;;EAIjD,MAAM,MAAM;;QAER,aAAa,KAAK,YAAY,CAAC;;;UAG7B,aAAa,KAAK,cAAc,CAAC;;cAE7B,YAAY,KAAK,iBAAiB,CAAC;;MAE3C,WAAW,KAAK,KAAK,CAAC;MACtB,YAAY,SAAS,SAAS,YAAY,KAAK,eAAe,KAAK,GAAG;;;EAIxE,MAAM,SAAS,MAAM,KAAK,QAAQ,MAA6B,KAAK,OAAO;AAE3E,SAAO,KAAK,oBAAoB,OAAO,MAAM,WAAW;;CAG1D,MAAa,gCAAiD;AAa5D,UAZe,MAAM,KAAK,QAAQ,MAChC;;;;;;;;MASD,EAEa,KAAK;;CAGrB,MAAa,oBACX,QACA,oBAA6B,OACZ;EAUjB,IAAI,gBATW,MAAM,KAAK,QAAQ,MAChC;;;;OAKA,CAAC,OAAO,SAAS,KAAK,CAAC,CACxB,EAEyB,KAAK;AAE/B,MAAI,kBACF,iBAAgB,MAAM,KAAK,+BAA+B;AAG5D,OAAK,qBAAqB,kBAAkB,CAAC,OAAO,CAAC;AAErD,SAAO;;CAGT,MAAc,sBACZ,MACA,MACA,OACA,aACe;AACf,MAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,YACtB;AAGF,QAAM,KAAK,QAAQ,MACjB;;;;;;;OAQA;GACE,OAAO,OAAO;GACd,QAAQ,QAAQ;GAChB,cAAc,cAAc;GAC5B,GAAG,KAAK,UAAU,CAAC;GACpB,CACF;;CAGH,oBACE,SACA,YACgD;AAiChD,SAhCkB,QAAQ,KAAK,MAAM;GACnC,MAAM,QAAQ,EAAE,iBAAiB,OAAO,EAAE,QAAQ,IAAI,KAAK,EAAE,MAAM;
GACnE,MAAM,MACJ,EAAE,OAAO,OAAO,OAAO,EAAE,eAAe,OAAO,EAAE,MAAM,IAAI,KAAK,EAAE,IAAI;GAExE,MAAM,SAAS;IACb,IAAI,EAAE;IACN,QAAQ,cAAc,WAAW,EAAE,OAAO,MAAM,GAAG,GAAG,CAAC;IACvD,OAAO,SAAS,WAAW,MAAM;IACjC,KAAK,MAAM,SAAS,WAAW,IAAI,GAAG;IACtC,QAAQ,EAAE;IACV,OAAO,EAAE;IACT,MAAM,EAAE;IACR,IAAI,EAAE;IACN,QAAQ,EAAE;IACV,YAAY,EAAE;IACf;AAED,cAAW,SAAS,cAAc;IAChC,MAAM,UAAU,EAAE,OAAO;AAEzB,WAAO,WAAW,aAAa;KAC7B,MAAM,cAAc,WAAW,UAAU,QAAQ,MAAM,GAAG,GAAG,GAAG,IAAI;KACpE,MAAM,EAAE,WAAW,EAAE,WAAW;KAChC,OAAO,EAAE,YAAY,EAAE,YAAY;KACnC,aAAa,EAAE,kBAAkB,EAAE,kBAAkB;KACtD;KACD;AAEF,UAAO;IACP,CAEe,MAAM,GAAG,MAAM,EAAE,KAAK,EAAE,GAAG;;CAG9C,MAAa,gBAAgB,QAA+C;EAC1E,MAAM,gBAA+B,EAAE;AAEvC,OAAK,MAAM,SAAS,QAAQ;GA4B1B,MAAM,YA3BS,MAAM,KAAK,QAAQ,MAChC;;;;;;;;;;;;;SAcA;IACE,MAAM,MAAM,UAAU;IACtB,MAAM,MAAM,MAAM,IAAI,UAAU,GAAG;IACnC,MAAM,OAAO,SAAS,IAAI;IAC1B,WAAW,MAAM,OAAO;IACxB,MAAM;IACN,MAAM,QAAQ;IACd,MAAM,MAAM;IACZ,MAAM,UAAU;IACjB,CACF,EAEuB,KAAK,IAAI;AAEjC,OAAI,YAAY,KACd,OAAM,IAAI,MAAM,uCAAuC;AAGzD,QAAK,MAAM,CAAC,KAAK,SAAS,OAAO,QAAQ,MAAM,cAAc,EAAE,CAAC,EAAE;AAChE,QAAI,CAAC,cAAc,KACjB,eAAc,OAAO,EAAE;IAGzB,MAAM,UAAU,KAAK,SAAS,IAAI;AAElC,QAAI,CAAC,cAAc,KAAK,SACtB,eAAc,KAAK,WAAW,EAAE;AAGlC,kBAAc,KAAK,SAAS,KAAK,SAAS;;;AAI9C,OAAK,MAAM,CAAC,KAAK,YAAY,OAAO,QAAQ,cAAc,CACxD,OAAM,KAAK,gBAAgB,KAAK,QAAQ;AAG1C,OAAK,MAAM,SAAS,QAAQ;GAC1B,MAAM,gBAAqB,MAAM;AAEjC,OAAI,CAAC,cACH;AAGF,SAAM,KAAK,sBACT,cAAc,MACd,cAAc,MACd,cAAc,OACd,cAAc,YACf;;AAGH,OAAK,qBAAqB,kBACxB,OAAO,KAAK,UAAU,MAAM,OAAO,CACpC;;CAGH,MAAc,qBACZ,WACA,MACiB;EAUjB,MAAM,MATS,MAAM,KAAK,QAAQ,MAChC;;;;OAKA,CAAC,WAAW,KAAK,CAClB,EAEiB,KAAK,IAAI;AAE3B,MAAI,MAAM,KACR,OAAM,IAAI,MAAM,sCAAsC;AAGxD,SAAO;;CAGT,MAAc,gBACZ,WACA,SACe;EACf,MAAM,QAAQ,OAAO,KAAK,QAAQ;AAElC,MAAI,MAAM,WAAW,EACnB;EAgBF,MAAM,YAbiB,MAAM,KAAK,QAAQ,MAIxC;;;;;OAMA,CAAC,WAAW,MAAM,CACnB,EAE+B;AAEhC,OAAK,MAAM,CAAC,MAAM,QAAQ,OAAO,QAAQ,QAAQ,EAAE;GACjD,MAAM,iBAAiB,SAAS,MAAM,WAAW,OAAO,SAAS,KAAK;GAEtE,MAAM,cAAc,iBAChB,eAAe,KACf,MAAM,KAAK,qBAAqB,WAAW,KAAK;AAEpD,QAAK,MAAM,YAAY,IACrB,OAAM,KAAK,QAAQ,MACjB;;;;;;WAOA,CAAC,
UAAU,YAAY,CACxB;;;CAKP,MAAa,eAAe,OAA4C;AACtE,QAAM,KAAK,gBAAgB,CAAC,MAAM,CAAC;;CAGrC,kBACE,QACA,UACY;AACZ,SAAO,KAAK,qBAAqB,gBAAgB,QAAQ,SAAS;;;;;AC/kBtE,eAAsB,eAAe,cAAsB;AACzD,QAAO,MAAM,OAAO,OAAO;EACzB,IAAI,IAAI,MAAM,aAAa;EAC3B,mBAAmB;EACpB,CAAC"}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@powerhousedao/analytics-engine-browser",
|
|
3
|
-
"version": "6.0.0-dev.
|
|
3
|
+
"version": "6.0.0-dev.107",
|
|
4
4
|
"license": "AGPL-3.0-only",
|
|
5
5
|
"repository": {
|
|
6
6
|
"type": "git",
|
|
@@ -12,35 +12,32 @@
|
|
|
12
12
|
"type": "module",
|
|
13
13
|
"exports": {
|
|
14
14
|
".": {
|
|
15
|
-
"types": "./dist/
|
|
16
|
-
"import": "./dist/
|
|
15
|
+
"types": "./dist/index.d.ts",
|
|
16
|
+
"import": "./dist/index.js"
|
|
17
17
|
}
|
|
18
18
|
},
|
|
19
19
|
"files": [
|
|
20
|
-
"dist
|
|
20
|
+
"dist"
|
|
21
21
|
],
|
|
22
|
-
"
|
|
22
|
+
"dependencies": {
|
|
23
23
|
"@electric-sql/pglite": "0.3.15",
|
|
24
24
|
"date-fns": "4.1.0",
|
|
25
25
|
"events": "3.3.0",
|
|
26
26
|
"util": "0.12.5",
|
|
27
|
-
"knex": "3.1.0",
|
|
28
27
|
"luxon": "3.7.2",
|
|
28
|
+
"change-case": "5.4.4",
|
|
29
|
+
"@powerhousedao/analytics-engine-core": "6.0.0-dev.107"
|
|
30
|
+
},
|
|
31
|
+
"devDependencies": {
|
|
29
32
|
"@vitest/browser": "4.0.18",
|
|
30
33
|
"@vitest/browser-playwright": "4.0.18",
|
|
31
34
|
"@types/luxon": "3.7.1",
|
|
32
35
|
"playwright": "1.58.2",
|
|
33
36
|
"vitest": "4.0.18",
|
|
34
|
-
"
|
|
35
|
-
"@powerhousedao/analytics-engine-knex": "6.0.0-dev.105"
|
|
36
|
-
},
|
|
37
|
-
"peerDependencies": {
|
|
38
|
-
"react": ">=19.0.0",
|
|
39
|
-
"react-dom": ">=19.0.0",
|
|
40
|
-
"@electric-sql/pglite": ">=0.3.0"
|
|
37
|
+
"tsdown": "0.21.0"
|
|
41
38
|
},
|
|
42
39
|
"scripts": {
|
|
43
|
-
"build:bundle": "
|
|
40
|
+
"build:bundle": "tsdown",
|
|
44
41
|
"test:e2e": "vitest --run ./**/*.test.ts"
|
|
45
42
|
}
|
|
46
43
|
}
|
|
@@ -1,8 +0,0 @@
|
|
|
1
|
-
import { MemoryAnalyticsStore, type MemoryAnalyticsStoreOptions } from "./MemoryAnalyticsStore.js";
|
|
2
|
-
export type BrowserAnalyticsStoreOptions = MemoryAnalyticsStoreOptions & {
|
|
3
|
-
databaseName: string;
|
|
4
|
-
};
|
|
5
|
-
export declare class BrowserAnalyticsStore extends MemoryAnalyticsStore {
|
|
6
|
-
constructor(options?: BrowserAnalyticsStoreOptions);
|
|
7
|
-
}
|
|
8
|
-
//# sourceMappingURL=BrowserAnalyticsStore.d.ts.map
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"BrowserAnalyticsStore.d.ts","sourceRoot":"","sources":["../../src/BrowserAnalyticsStore.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,oBAAoB,EACpB,KAAK,2BAA2B,EACjC,MAAM,2BAA2B,CAAC;AAGnC,MAAM,MAAM,4BAA4B,GAAG,2BAA2B,GAAG;IACvE,YAAY,EAAE,MAAM,CAAC;CACtB,CAAC;AAEF,qBAAa,qBAAsB,SAAQ,oBAAoB;gBAE3D,OAAO,GAAE,4BAA4D;CAYxE"}
|
|
@@ -1,24 +0,0 @@
|
|
|
1
|
-
import { type IAnalyticsProfiler } from "@powerhousedao/analytics-engine-core";
|
|
2
|
-
import { KnexAnalyticsStore, type SqlQueryLogger, type SqlResultsLogger } from "@powerhousedao/analytics-engine-knex";
|
|
3
|
-
import type { Knex } from "knex";
|
|
4
|
-
import { PGlite } from "@electric-sql/pglite";
|
|
5
|
-
export type MemoryAnalyticsStoreOptions = {
|
|
6
|
-
pgLiteFactory?: () => Promise<PGlite>;
|
|
7
|
-
knex?: Knex;
|
|
8
|
-
queryLogger?: SqlQueryLogger;
|
|
9
|
-
resultsLogger?: SqlResultsLogger;
|
|
10
|
-
profiler?: IAnalyticsProfiler;
|
|
11
|
-
};
|
|
12
|
-
export declare class MemoryAnalyticsStore extends KnexAnalyticsStore {
|
|
13
|
-
private _pgLiteFactory;
|
|
14
|
-
private _queryLogger;
|
|
15
|
-
private _resultsLogger;
|
|
16
|
-
private _pgExecutor;
|
|
17
|
-
private _profiler;
|
|
18
|
-
private _sql;
|
|
19
|
-
constructor({ knex, pgLiteFactory, queryLogger, resultsLogger, profiler, }?: MemoryAnalyticsStoreOptions);
|
|
20
|
-
init(): Promise<void>;
|
|
21
|
-
raw(sql: string): Promise<any[]>;
|
|
22
|
-
destroy(): Promise<void>;
|
|
23
|
-
}
|
|
24
|
-
//# sourceMappingURL=MemoryAnalyticsStore.d.ts.map
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"MemoryAnalyticsStore.d.ts","sourceRoot":"","sources":["../../src/MemoryAnalyticsStore.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,KAAK,kBAAkB,EAExB,MAAM,sCAAsC,CAAC;AAC9C,OAAO,EACL,kBAAkB,EAClB,KAAK,cAAc,EACnB,KAAK,gBAAgB,EACtB,MAAM,sCAAsC,CAAC;AAE9C,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAEjC,OAAO,EAAE,MAAM,EAAE,MAAM,sBAAsB,CAAC;AAgF9C,MAAM,MAAM,2BAA2B,GAAG;IACxC,aAAa,CAAC,EAAE,MAAM,OAAO,CAAC,MAAM,CAAC,CAAC;IACtC,IAAI,CAAC,EAAE,IAAI,CAAC;IACZ,WAAW,CAAC,EAAE,cAAc,CAAC;IAC7B,aAAa,CAAC,EAAE,gBAAgB,CAAC;IACjC,QAAQ,CAAC,EAAE,kBAAkB,CAAC;CAC/B,CAAC;AAEF,qBAAa,oBAAqB,SAAQ,kBAAkB;IAC1D,OAAO,CAAC,cAAc,CAAwB;IAC9C,OAAO,CAAC,YAAY,CAAiB;IACrC,OAAO,CAAC,cAAc,CAAmB;IACzC,OAAO,CAAC,WAAW,CAAsB;IACzC,OAAO,CAAC,SAAS,CAAqB;IACtC,OAAO,CAAC,IAAI,CAAuB;gBAEhB,EACjB,IAAI,EACJ,aAAa,EACb,WAAW,EACX,aAAa,EACb,QAAQ,GACT,GAAE,2BAAgC;IA4BtB,IAAI;IAUJ,GAAG,CAAC,GAAG,EAAE,MAAM;IAYf,OAAO;CAKrB"}
|
|
@@ -1,16 +0,0 @@
|
|
|
1
|
-
import type { PGlite } from "@electric-sql/pglite";
|
|
2
|
-
import { type IAnalyticsProfiler } from "@powerhousedao/analytics-engine-core";
|
|
3
|
-
import type { IKnexQueryExecutor, SqlQueryLogger, SqlResultsLogger } from "@powerhousedao/analytics-engine-knex";
|
|
4
|
-
import type { Knex } from "knex";
|
|
5
|
-
export declare const parseRawResults: (rawResults: any[]) => any[];
|
|
6
|
-
export declare class PGLiteQueryExecutor implements IKnexQueryExecutor {
|
|
7
|
-
private readonly _profiler;
|
|
8
|
-
private readonly _queryLogger?;
|
|
9
|
-
private readonly _resultsLogger?;
|
|
10
|
-
private _index;
|
|
11
|
-
private _sql;
|
|
12
|
-
constructor(_profiler: IAnalyticsProfiler, _queryLogger?: SqlQueryLogger | undefined, _resultsLogger?: SqlResultsLogger | undefined);
|
|
13
|
-
init(sql: PGlite): void;
|
|
14
|
-
execute<T extends {}, U>(query: Knex.QueryBuilder<T, U>): Promise<any>;
|
|
15
|
-
}
|
|
16
|
-
//# sourceMappingURL=PgLiteExecutor.d.ts.map
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"PgLiteExecutor.d.ts","sourceRoot":"","sources":["../../src/PgLiteExecutor.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,sBAAsB,CAAC;AACnD,OAAO,EAAE,KAAK,kBAAkB,EAAE,MAAM,sCAAsC,CAAC;AAC/E,OAAO,KAAK,EACV,kBAAkB,EAClB,cAAc,EACd,gBAAgB,EACjB,MAAM,sCAAsC,CAAC;AAC9C,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAEjC,eAAO,MAAM,eAAe,GAAI,YAAY,GAAG,EAAE,UAqBhD,CAAC;AAEF,qBAAa,mBAAoB,YAAW,kBAAkB;IAK1D,OAAO,CAAC,QAAQ,CAAC,SAAS;IAC1B,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAC;IAC9B,OAAO,CAAC,QAAQ,CAAC,cAAc,CAAC;IANlC,OAAO,CAAC,MAAM,CAAa;IAC3B,OAAO,CAAC,IAAI,CAAuB;gBAGhB,SAAS,EAAE,kBAAkB,EAC7B,YAAY,CAAC,EAAE,cAAc,YAAA,EAC7B,cAAc,CAAC,EAAE,gBAAgB,YAAA;IAKpD,IAAI,CAAC,GAAG,EAAE,MAAM;IAIV,OAAO,CAAC,CAAC,SAAS,EAAE,EAAE,CAAC,EAAE,KAAK,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,CAAC;CA4B9D"}
|
package/dist/src/index.d.ts
DELETED
|
@@ -1,5 +0,0 @@
|
|
|
1
|
-
export { MemoryAnalyticsStore } from "./MemoryAnalyticsStore.js";
|
|
2
|
-
export { BrowserAnalyticsStore } from "./BrowserAnalyticsStore.js";
|
|
3
|
-
export type { MemoryAnalyticsStoreOptions } from "./MemoryAnalyticsStore.js";
|
|
4
|
-
export type { BrowserAnalyticsStoreOptions } from "./BrowserAnalyticsStore.js";
|
|
5
|
-
//# sourceMappingURL=index.d.ts.map
|
package/dist/src/index.d.ts.map
DELETED
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,oBAAoB,EAAE,MAAM,2BAA2B,CAAC;AACjE,OAAO,EAAE,qBAAqB,EAAE,MAAM,4BAA4B,CAAC;AACnE,YAAY,EAAE,2BAA2B,EAAE,MAAM,2BAA2B,CAAC;AAC7E,YAAY,EAAE,4BAA4B,EAAE,MAAM,4BAA4B,CAAC"}
|