querymon-builder 1.0.0

package/README.md ADDED
@@ -0,0 +1,139 @@
1
+ # Querymon
2
+
3
+ A powerful, schema-driven query builder for MongoDB and Express. Automatically generate analytics APIs with advanced filtering, aggregations, time-series analysis, and more.
4
+
5
+ ## Installation
6
+
7
+ ```bash
8
+ npm install querymon-builder mongoose
9
+ ```
10
+
11
+ ## Features
12
+
13
+ - 🔍 **Complex Filtering**: Logical operators ($and, $or), comparison operators, and deeply nested fields.
14
+ - 📊 **Aggregations**: Sum, Average, Count, Min, Max, Median, Percentile.
15
+ - 📈 **Time-Series**: Automatic grouping by Day, Week, Month, Year.
16
+ - 🔄 **Pivot Tables**: Transform data into matrix formats.
17
+ - 🔗 **Joins**: Query across collections with automatic lookups.
18
+ - 👥 **Cohort Analysis**: Analyze user retention and behavior over time.
19
+ - 📉 **Funnel Analysis**: Track conversion rates across defined steps.
20
+ - 📅 **Period Comparisons**: Compare current vs previous period metrics.
21
+ - 📂 **Export**: Built-in CSV and JSON export capabilities.
22
+
23
+ ## Quick Start
24
+
25
+ ```typescript
26
+ import express from 'express';
27
+ import mongoose from 'mongoose';
28
+ import { Querymon } from 'querymon-builder';
29
+
30
+ const app = express();
31
+ app.use(express.json());
32
+
33
+ // 1. Connect to MongoDB
34
+ mongoose.connect(process.env.MONGO_URI);
35
+
36
+ // 2. Define your Mongoose Schema
37
+ const SalesSchema = new mongoose.Schema({
38
+ product: String,
39
+ amount: Number,
40
+ date: Date,
41
+ category: String,
42
+ });
43
+
44
+ // 3. Initialize Querymon
45
+ const querymon = new Querymon({ connection: mongoose.connection });
46
+
47
+ // 4. Register a Collection
48
+ querymon.query('sales', {
49
+ schema: SalesSchema,
50
+ dimensions: ['product', 'category'],
51
+ metrics: ['amount'],
52
+ timeFields: ['date'],
53
+ });
54
+
55
+ // 5. Mount the Router
56
+ app.use('/api', querymon.router());
57
+
58
+ app.listen(3000, () => console.log('Querymon API running on port 3000'));
59
+ ```
60
+
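+ ## Joins
+
+ Joins are declared when a collection is registered. The `joins` option and its fields follow the `JoinConfig` type in `dist/types.d.ts`; the `customer` join below and its collection/field names are illustrative and not part of the Quick Start schema:
+
+ ```typescript
+ querymon.query('sales', {
+   schema: SalesSchema,
+   dimensions: ['product', 'category'],
+   metrics: ['amount'],
+   timeFields: ['date'],
+   joins: {
+     // Hypothetical join: each sale references a customer document.
+     customer: {
+       collection: 'customers',   // foreign collection name
+       localField: 'customerId',  // field on the sales document
+       foreignField: '_id',       // field on the customers document
+       type: 'oneToOne',          // anything other than 'oneToMany' is unwound to a single object
+     },
+   },
+ });
+ ```
+
+ A `$lookup` stage is only added when a query actually references the join with dot notation, for example a filter or `groupBy` on `customer.segment`.
+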
61
+ ## API Usage
62
+
63
+ ### Basic Query
64
+
65
+ ```http
66
+ POST /api/sales/query
67
+ {
68
+ "filters": { "category": "Electronics" },
69
+ "groupBy": ["product"],
70
+ "aggregations": {
71
+ "totalRevenue": { "field": "amount", "operation": "sum" }
72
+ }
73
+ }
74
+ ```
75
+
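+ The route handler in `dist/Querymon.js` responds with the aggregated rows plus basic metadata; the values below are illustrative:
+
+ ```json
+ {
+   "data": [
+     { "product": "Laptop", "totalRevenue": 125000 },
+     { "product": "Monitor", "totalRevenue": 43000 }
+   ],
+   "meta": { "total": 2, "executionTime": "12ms" }
+ }
+ ```
+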
76
+ ### Time-Series
77
+
78
+ ```http
79
+ POST /api/sales/query
80
+ {
81
+ "timeSeriesBy": {
82
+ "field": "date",
83
+ "interval": "month",
84
+ "start": "2024-01-01",
85
+ "end": "2024-12-31"
86
+ },
87
+ "aggregations": {
88
+ "revenue": { "field": "amount", "operation": "sum" }
89
+ }
90
+ }
91
+ ```
92
+
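+ ### Period Comparison
+
+ `compare.current` and `compare.previous` are filter fragments that are merged into the base `filters` and executed as two `$facet` branches of a single pipeline (see `dist/CompareBuilder.js`). Unlike `timeSeriesBy`, these values are passed to MongoDB verbatim, so use values that compare directly against your stored fields. The sketch below assumes the documents carry a pre-computed string `month` field:
+
+ ```http
+ POST /api/sales/query
+ {
+   "compare": {
+     "current": { "month": "2024-06" },
+     "previous": { "month": "2024-05" }
+   },
+   "groupBy": ["category"],
+   "aggregations": {
+     "revenue": { "field": "amount", "operation": "sum" }
+   }
+ }
+ ```
+
+ The two result sets come back under `current` and `previous` keys; merging them per group is left to the caller.
+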
93
+ ### Cohort Analysis
94
+
95
+ ```http
96
+ POST /api/users/query
97
+ {
98
+ "cohort": {
99
+ "identityField": "userId",
100
+ "cohortDateField": "createdAt",
101
+ "interval": "month",
102
+ "start": "2024-01-01",
103
+ "end": "2024-06-01",
104
+ "eventDateField": "lastLogin"
105
+ }
106
+ }
107
+ ```
108
+
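+ Each result row groups one cohort with its retention periods. Based on the final stages in `dist/CohortBuilder.js`, rows have roughly the shape below (numbers illustrative):
+
+ ```json
+ {
+   "cohort": "2024-01",
+   "periods": [
+     { "period": 0, "customers": 120 },
+     { "period": 1, "customers": 80 },
+     { "period": 2, "customers": 64 }
+   ]
+ }
+ ```
+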
109
+ ### Funnel Analysis
110
+
111
+ ```http
112
+ POST /api/events/query
113
+ {
114
+ "funnel": {
115
+ "identityField": "userId",
116
+ "dateField": "timestamp",
117
+ "steps": [
118
+ { "name": "View", "filters": { "event": "view_item" } },
119
+ { "name": "Cart", "filters": { "event": "add_to_cart" } },
120
+ { "name": "Purchase", "filters": { "event": "purchase" } }
121
+ ]
122
+ }
123
+ }
124
+ ```
125
+
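+ ### Pivot Tables
+
+ Pivot requests follow the `pivot` shape defined in `dist/types.d.ts`. The current builder (`dist/PivotBuilder.js`) supports a single column dimension and expects the column values to be strings:
+
+ ```http
+ POST /api/sales/query
+ {
+   "pivot": {
+     "rows": ["category"],
+     "columns": ["product"],
+     "values": { "field": "amount", "operation": "sum" }
+   }
+ }
+ ```
+
+ Each result row carries the row keys plus one property per distinct column value.
+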
126
+ ### Export to CSV
127
+
128
+ ```http
129
+ POST /api/sales/query
130
+ {
131
+ "groupBy": ["category"],
132
+ "aggregations": { "revenue": { "field": "amount", "operation": "sum" } },
133
+ "export": { "format": "csv", "filename": "report" }
134
+ }
135
+ ```
136
+
137
+ ## License
138
+
139
+ MIT
package/dist/CohortBuilder.d.ts ADDED
@@ -0,0 +1,4 @@
1
+ import { QueryBody } from './types';
2
+ export declare class CohortBuilder {
3
+ static build(query: QueryBody): any[];
4
+ }
package/dist/CohortBuilder.js ADDED
@@ -0,0 +1,121 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.CohortBuilder = void 0;
4
+ class CohortBuilder {
5
+ static build(query) {
6
+ if (!query.cohort)
7
+ return [];
8
+ const { identityField, cohortDateField, interval, start, end, eventDateField } = query.cohort;
9
+ // 1. Initial Match (filter events to the analysis window)
10
+ // Cohort analysis here works in two steps:
11
+ //    a. assign each user to a cohort by their first action within [start, end);
12
+ //    b. count how many of those users remain active in each subsequent period.
13
+ // Because events are filtered to [start, end), only activity inside the window is
14
+ // visible, so retention for cohorts that start late in the window is truncated.
15
+ // A stricter implementation would look before `start` for a user's true first action
16
+ // and beyond `end` to follow late cohorts through more periods; this version
17
+ // treats `end` as the hard boundary of the analysis window for both cohort
18
+ // assignment and retention tracking.
19
+ // The pipeline is assembled stage by stage below.
20
+ const pipeline = [];
21
+ // Filter to relevant time window
22
+ const dateFilter = {};
23
+ if (start)
24
+ dateFilter.$gte = new Date(start);
25
+ if (end)
26
+ dateFilter.$lt = new Date(end);
27
+ if (start || end) {
28
+ pipeline.push({
29
+ $match: { [eventDateField]: dateFilter },
30
+ });
31
+ }
32
+ // 2. Determine First Action Date (Cohort Date) for each User
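+ // Note: $setWindowFields (and $dateDiff below) require MongoDB 5.0 or newer.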
33
+ pipeline.push({
34
+ $setWindowFields: {
35
+ partitionBy: `$${identityField}`,
36
+ sortBy: { [cohortDateField]: 1 },
37
+ output: {
38
+ firstActionDate: {
39
+ $min: `$${cohortDateField}`,
40
+ window: { documents: ['unbounded', 'current'] },
41
+ },
42
+ },
43
+ },
44
+ });
45
+ // 3. Define Cohort Label and Period Index
46
+ // Cohort Label = Format(firstActionDate)
47
+ // Period = Diff(currentDate, firstActionDate) in interval
48
+ let dateFormat = '%Y-%m';
49
+ if (interval === 'day')
50
+ dateFormat = '%Y-%m-%d';
51
+ if (interval === 'year')
52
+ dateFormat = '%Y';
53
+ pipeline.push({
54
+ $project: {
55
+ identityField: `$${identityField}`,
56
+ cohort: { $dateToString: { format: dateFormat, date: '$firstActionDate' } },
57
+ currentDate: { $dateToString: { format: dateFormat, date: `$${eventDateField}` } },
58
+ // Calculate period difference
59
+ // Computing this by hand from date parts would be awkward.
60
+ // Using $dateDiff (Mongo 5.0+)
61
+ period: {
62
+ $dateDiff: {
63
+ startDate: '$firstActionDate',
64
+ endDate: `$${eventDateField}`,
65
+ unit: interval,
66
+ },
67
+ },
68
+ // Pass other aggregation fields through if needed
69
+ amount: 1, // hardcoded for now or dynamic
70
+ },
71
+ });
72
+ // 4. Group by Cohort and Period
73
+ pipeline.push({
74
+ $group: {
75
+ _id: {
76
+ cohort: '$cohort',
77
+ period: '$period',
78
+ },
79
+ count: { $addToSet: `$identityField` }, // Distinct users
80
+ // metrics...
81
+ // revenue: { $sum: "$amount" }
82
+ },
83
+ });
84
+ // 5. Final Count & Format
85
+ pipeline.push({
86
+ $project: {
87
+ _id: 0,
88
+ cohort: '$_id.cohort',
89
+ period: '$_id.period',
90
+ customers: { $size: '$count' },
91
+ },
92
+ });
93
+ // 6. Sort
94
+ pipeline.push({
95
+ $sort: { cohort: 1, period: 1 },
96
+ });
97
+ // 7. Group to nested structure (Optional, to match example)
98
+ // Example: { cohorts: [ { cohort: "2024-01", periods: [ ... ] } ] }
99
+ pipeline.push({
100
+ $group: {
101
+ _id: '$cohort',
102
+ periods: {
103
+ $push: {
104
+ period: '$period',
105
+ customers: '$customers',
106
+ },
107
+ },
108
+ },
109
+ });
110
+ pipeline.push({
111
+ $project: {
112
+ _id: 0,
113
+ cohort: '$_id',
114
+ periods: 1,
115
+ },
116
+ });
117
+ pipeline.push({ $sort: { cohort: 1 } });
118
+ return pipeline;
119
+ }
120
+ }
121
+ exports.CohortBuilder = CohortBuilder;
package/dist/CompareBuilder.d.ts ADDED
@@ -0,0 +1,4 @@
1
+ import { QueryBody, CollectionConfig } from './types';
2
+ export declare class CompareBuilder {
3
+ static build(query: QueryBody, config: CollectionConfig): any[];
4
+ }
package/dist/CompareBuilder.js ADDED
@@ -0,0 +1,40 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.CompareBuilder = void 0;
4
+ const PipelineBuilder_1 = require("./PipelineBuilder");
5
+ class CompareBuilder {
6
+ static build(query, config) {
7
+ // Create clean copies for current and previous
8
+ const currentQuery = {
9
+ ...query,
10
+ filters: { ...query.filters, ...query.compare?.current },
11
+ compare: undefined,
12
+ };
13
+ const previousQuery = {
14
+ ...query,
15
+ filters: { ...query.filters, ...query.compare?.previous },
16
+ compare: undefined,
17
+ };
18
+ // Important: the queries handed to PipelineBuilder must not carry the 'compare'
19
+ // property (to avoid recursion); it was stripped in the copies above.
20
+ const currentPipeline = PipelineBuilder_1.PipelineBuilder.build(currentQuery, config);
21
+ const previousPipeline = PipelineBuilder_1.PipelineBuilder.build(previousQuery, config);
22
+ // Facet Stage
23
+ const pipeline = [
24
+ {
25
+ $facet: {
26
+ current: currentPipeline,
27
+ previous: previousPipeline,
28
+ },
29
+ },
30
+ ];
31
+ // The frontend or response handler can merge these results.
32
+ // To strictly match the example output:
33
+ // { data: [ { category: "Electronics", current: { ... }, previous: { ... } } ] }
34
+ // We would need to unwind both arrays and group by the groupBy key.
35
+ // This is complex as some keys might be missing in one period.
36
+ // For the initial implementation, returning { current: [], previous: [] } is an acceptable MVP.
37
+ return pipeline;
38
+ }
39
+ }
40
+ exports.CompareBuilder = CompareBuilder;
package/dist/FunnelBuilder.d.ts ADDED
@@ -0,0 +1,4 @@
1
+ import { QueryBody } from './types';
2
+ export declare class FunnelBuilder {
3
+ static build(query: QueryBody): any[];
4
+ }
package/dist/FunnelBuilder.js ADDED
@@ -0,0 +1,157 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.FunnelBuilder = void 0;
4
+ class FunnelBuilder {
5
+ static build(query) {
6
+ if (!query.funnel)
7
+ return [];
8
+ const { identityField, steps, dateField } = query.funnel;
9
+ // 1. Match relevant events (optimize early)
10
+ // Construct an $or filter of all step filters
11
+ const allFilters = steps.map((s) => s.filters);
12
+ const pipeline = [{ $match: { $or: allFilters } }];
13
+ // 2. Identify which step each event belongs to
14
+ // We'll create a field 'stepIndex'
15
+ // If an event matches more than one step's filter, the first matching step wins.
16
+ // Note: a $switch 'case' expects an aggregation expression, not query syntax, so a
17
+ // simple filter like { status: 'A' } must be rewritten as { $eq: ['$status', 'A'] }.
18
+ // Converting arbitrary query filters is non-trivial; the buildExpr helper below only
19
+ // converts simple equality filters, and comparison operators ($gt, $lt, ...) are not
20
+ // handled yet (a step whose filter has no convertible condition becomes an always-true case).
21
+ // The initial $match above already restricts the stream to events that match at least
22
+ // one step filter, so the $switch only has to decide which step an event belongs to.
32
+ // Helper to build conditional expression from filter
33
+ function buildExpr(filter) {
34
+ const conds = [];
35
+ for (const [k, v] of Object.entries(filter)) {
36
+ // Handle simple equality
37
+ if (typeof v !== 'object') {
38
+ conds.push({ $eq: [`$${k}`, v] });
39
+ }
40
+ else {
41
+ // Operator filters ({ $gt: ... }, { $in: ... }, etc.) are not converted yet
42
+ // and are silently ignored here.
43
+ }
44
+ }
45
+ if (conds.length === 0)
46
+ return true; // no convertible conditions: treat as an always-true case
47
+ if (conds.length === 1)
48
+ return conds[0];
49
+ return { $and: conds };
50
+ }
51
+ const switchBranches = steps.map((s, idx) => ({
52
+ case: buildExpr(s.filters),
53
+ then: idx,
54
+ }));
55
+ pipeline.push({
56
+ $project: {
57
+ identityField: `$${identityField}`,
58
+ date: `$${dateField}`,
59
+ stepIndex: {
60
+ $switch: {
61
+ branches: switchBranches,
62
+ default: -1,
63
+ },
64
+ },
65
+ },
66
+ });
67
+ // Filter out -1 (shouldn't happen due to initial match, but good for safety)
68
+ pipeline.push({ $match: { stepIndex: { $gte: 0 } } });
69
+ // 3. Sort by date
70
+ pipeline.push({ $sort: { date: 1 } });
71
+ // 4. Group by User to collect event stream
72
+ pipeline.push({
73
+ $group: {
74
+ _id: '$identityField',
75
+ events: { $push: { step: '$stepIndex', date: '$date' } },
76
+ },
77
+ });
78
+ // 5. Calculate Max Step Reached (Strict Order)
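+ // Example: a user whose events hit steps [0, 2, 1] (in time order) ends with maxStep 1;
+ // the step-2 event is ignored because it arrived before step 1 was completed.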
79
+ pipeline.push({
80
+ $project: {
81
+ maxStep: {
82
+ $reduce: {
83
+ input: '$events',
84
+ initialValue: { nextStep: 0, maxStep: -1 },
85
+ in: {
86
+ $cond: {
87
+ if: { $eq: ['$$this.step', '$$value.nextStep'] },
88
+ then: {
89
+ nextStep: { $add: ['$$value.nextStep', 1] },
90
+ maxStep: '$$this.step',
91
+ },
92
+ else: '$$value', // Keep state if step doesn't match expected next step
93
+ },
94
+ },
95
+ },
96
+ },
97
+ },
98
+ });
99
+ // Extract scalar from object
100
+ pipeline.push({
101
+ $project: {
102
+ maxStepReached: '$maxStep.maxStep',
103
+ },
104
+ });
105
+ // 6. Aggregate counts per step
106
+ // We have { _id: user, maxStepReached: 1 }
107
+ // We want: Step 0 count, Step 1 count...
108
+ // Step N count = count of users where maxStepReached >= N
109
+ pipeline.push({
110
+ $group: {
111
+ _id: '$maxStepReached',
112
+ count: { $sum: 1 },
113
+ },
114
+ });
115
+ // 7. Reshape to Cumulative Counts
116
+ // At this point we have rows like [ { _id: 0, count: 10 }, { _id: 1, count: 5 } ],
117
+ // where count is the number of users whose journey stopped at that step.
118
+ // The funnel count for step N is the number of users who reached step N or beyond,
119
+ // so we compute a reverse cumulative sum with $setWindowFields.
120
+ pipeline.push({ $sort: { _id: -1 } }); // Sort descending by step index
121
+ pipeline.push({
122
+ $setWindowFields: {
123
+ sortBy: { _id: -1 },
124
+ output: {
125
+ cumulativeCount: {
126
+ $sum: '$count',
127
+ window: { documents: ['unbounded', 'current'] }, // Sum from top (highest step) down to current
128
+ },
129
+ },
130
+ },
131
+ });
132
+ pipeline.push({ $sort: { _id: 1 } });
133
+ // 8. Final Format
134
+ // Map each step index back to its configured name.
135
+ // The steps array only exists in JS, so the mapping is baked into the pipeline
136
+ // as another $switch over the step indexes.
137
+ const nameBranches = steps.map((s, idx) => ({
138
+ case: { $eq: ['$_id', idx] },
139
+ then: s.name,
140
+ }));
141
+ pipeline.push({
142
+ $project: {
143
+ _id: 0,
144
+ stepIndex: '$_id',
145
+ step: {
146
+ $switch: {
147
+ branches: nameBranches,
148
+ default: 'Unknown',
149
+ },
150
+ },
151
+ count: '$cumulativeCount',
152
+ },
153
+ });
154
+ return pipeline;
155
+ }
156
+ }
157
+ exports.FunnelBuilder = FunnelBuilder;
package/dist/PipelineBuilder.d.ts ADDED
@@ -0,0 +1,6 @@
1
+ import { QueryBody, CollectionConfig } from './types';
2
+ export declare class PipelineBuilder {
3
+ static build(query: QueryBody, config: CollectionConfig): any[];
4
+ private static buildGroupStage;
5
+ private static buildMatchStage;
6
+ }
package/dist/PipelineBuilder.js ADDED
@@ -0,0 +1,229 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.PipelineBuilder = void 0;
4
+ const PivotBuilder_1 = require("./PivotBuilder");
5
+ class PipelineBuilder {
6
+ static build(query, config) {
7
+ // Handle timeSeriesBy first: it contributes to both the match and group stages.
8
+ // If timeSeriesBy is present, we need to:
9
+ // a. Add date range filter to match stage (if start/end provided)
10
+ // b. Add date grouping to group stage
11
+ if (query.timeSeriesBy) {
12
+ // Validation check for allowed time fields
13
+ if (config.timeFields && !config.timeFields.includes(query.timeSeriesBy.field)) {
14
+ throw new Error(`Field '${query.timeSeriesBy.field}' is not a valid time field.`);
15
+ }
16
+ // Add date range to filters if not already present
17
+ // We merge into filters object below
18
+ }
19
+ // The stages below are built in dependency order: joins, match, then group/project.
20
+ const pipeline = [];
21
+ // 0. Handle Joins
22
+ // We check if any configured join is used in the query (filters, groupBy, select, aggregations)
23
+ if (config.joins) {
24
+ const usedJoins = new Set();
25
+ const checkUsage = (str) => {
26
+ if (!str)
27
+ return;
28
+ const parts = str.split('.');
29
+ if (parts.length > 1 && config.joins[parts[0]]) {
30
+ usedJoins.add(parts[0]);
31
+ }
32
+ };
33
+ // Check select
34
+ query.select?.forEach(checkUsage);
35
+ // Check groupBy
36
+ query.groupBy?.forEach(checkUsage);
37
+ // Check filters (keys) - simple recursive check for dot notation or nested objects
38
+ const checkFilters = (obj) => {
39
+ if (!obj)
40
+ return;
41
+ Object.keys(obj).forEach((k) => {
42
+ checkUsage(k); // Check top level keys like "customer.segment"
43
+ if (typeof obj[k] === 'object' && obj[k] !== null && !Array.isArray(obj[k])) {
44
+ checkFilters(obj[k]);
45
+ }
46
+ });
47
+ };
48
+ if (query.filters)
49
+ checkFilters(query.filters);
50
+ // Add Lookup stages
51
+ usedJoins.forEach((joinKey) => {
52
+ const joinConfig = config.joins[joinKey];
53
+ pipeline.push({
54
+ $lookup: {
55
+ from: joinConfig.collection,
56
+ localField: joinConfig.localField,
57
+ foreignField: joinConfig.foreignField,
58
+ as: joinKey,
59
+ },
60
+ });
61
+ // If oneToOne, unwind to make it a single object instead of array
62
+ if (joinConfig.type !== 'oneToMany') {
63
+ pipeline.push({
64
+ $unwind: {
65
+ path: `$${joinKey}`,
66
+ preserveNullAndEmptyArrays: true,
67
+ },
68
+ });
69
+ }
70
+ });
71
+ }
72
+ // Merge time series filters
73
+ const filters = { ...query.filters };
74
+ if (query.timeSeriesBy && (query.timeSeriesBy.start || query.timeSeriesBy.end)) {
75
+ const field = query.timeSeriesBy.field;
76
+ filters[field] = filters[field] || {};
77
+ if (query.timeSeriesBy.start)
78
+ filters[field].$gte = new Date(query.timeSeriesBy.start);
79
+ if (query.timeSeriesBy.end)
80
+ filters[field].$lt = new Date(query.timeSeriesBy.end);
81
+ }
82
+ // 1. Match Stage
83
+ let matchStage = null;
84
+ if (Object.keys(filters).length > 0) {
85
+ matchStage = { $match: this.buildMatchStage(filters) };
86
+ pipeline.push(matchStage);
87
+ }
88
+ // 5. Handle Pivot
89
+ // If pivot is requested, we override the normal group/project stages
90
+ if (query.pivot) {
91
+ return PivotBuilder_1.PivotBuilder.build(query, matchStage); // Delegate to specific pivot builder
92
+ }
93
+ // 2. Group Stage
94
+ if (query.groupBy || query.aggregations || query.timeSeriesBy) {
95
+ pipeline.push({ $group: this.buildGroupStage(query) });
96
+ // Flatten _id
97
+ const project = { _id: 0 };
98
+ if (query.groupBy) {
99
+ query.groupBy.forEach((field) => {
100
+ const safeKey = field.replace(/\./g, '_');
101
+ project[field] = `$_id.${safeKey}`;
102
+ });
103
+ }
104
+ if (query.timeSeriesBy) {
105
+ project.date = `$_id.date_interval`; // Standardize output name
106
+ }
107
+ if (query.aggregations) {
108
+ Object.entries(query.aggregations).forEach(([key, config]) => {
109
+ if (config.operation === 'median' || config.operation === 'percentile') {
110
+ // $percentile returns an array, extract the first element
111
+ project[key] = { $arrayElemAt: [`$${key}`, 0] };
112
+ }
113
+ else {
114
+ project[key] = 1;
115
+ }
116
+ });
117
+ }
118
+ pipeline.push({ $project: project });
119
+ }
120
+ // 3. Project Stage (Select) - If no grouping, generic projection
121
+ if (query.select && !query.groupBy && !query.aggregations) {
122
+ const projection = {};
123
+ query.select.forEach((field) => (projection[field] = 1));
124
+ pipeline.push({ $project: projection });
125
+ }
126
+ // 4. Sort
127
+ if (query.sort) {
128
+ const sortStage = {};
129
+ query.sort.forEach((s) => {
130
+ Object.entries(s).forEach(([k, v]) => (sortStage[k] = v === 'asc' ? 1 : -1));
131
+ });
132
+ pipeline.push({ $sort: sortStage });
133
+ }
134
+ // 5. Limit/Skip
135
+ if (query.skip)
136
+ pipeline.push({ $skip: query.skip });
137
+ if (query.limit)
138
+ pipeline.push({ $limit: query.limit });
139
+ return pipeline;
140
+ }
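+ // Example of the stage this produces (illustrative):
+ //   groupBy: ['category', 'customer.segment'] with revenue: { field: 'amount', operation: 'sum' }
+ //   -> { _id: { category: '$category', customer_segment: '$customer.segment' },
+ //        revenue: { $sum: '$amount' } }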
141
+ static buildGroupStage(query) {
142
+ const group = { _id: {} };
143
+ // Handle GroupBy
144
+ if (query.groupBy) {
145
+ query.groupBy.forEach((field) => {
146
+ const safeKey = field.replace(/\./g, '_');
147
+ group._id[safeKey] = `$${field}`;
148
+ });
149
+ }
150
+ // Handle TimeSeriesBy
151
+ if (query.timeSeriesBy) {
152
+ const { field, interval } = query.timeSeriesBy;
153
+ let format = '%Y-%m-%d';
154
+ switch (interval) {
155
+ case 'year':
156
+ format = '%Y-01-01';
157
+ break;
158
+ case 'month':
159
+ format = '%Y-%m-01';
160
+ break;
161
+ case 'week':
162
+ format = '%Y-%U';
163
+ break; // %U is a Sunday-based week number (00-53); ISO weeks would need %V/%G
164
+ case 'day':
165
+ format = '%Y-%m-%d';
166
+ break;
167
+ }
168
+ group._id.date_interval = {
169
+ $dateToString: { format, date: `$${field}` },
170
+ };
171
+ }
172
+ if (!query.groupBy && !query.timeSeriesBy) {
173
+ group._id = null; // Grand total if no groupBy
174
+ }
175
+ // Handle Aggregations
176
+ if (query.aggregations) {
177
+ for (const [key, config] of Object.entries(query.aggregations)) {
178
+ const op = config.operation.toLowerCase();
179
+ const field = config.field ? `$${config.field}` : null;
180
+ switch (op) {
181
+ case 'sum':
182
+ group[key] = { $sum: field };
183
+ break;
184
+ case 'avg':
185
+ group[key] = { $avg: field };
186
+ break;
187
+ case 'min':
188
+ group[key] = { $min: field };
189
+ break;
190
+ case 'max':
191
+ group[key] = { $max: field };
192
+ break;
193
+ case 'count':
194
+ group[key] = { $sum: 1 };
195
+ break;
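+ // Note: the $percentile accumulator (used for median and percentile) requires MongoDB 7.0 or newer.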
196
+ case 'median':
197
+ group[key] = { $percentile: { input: field, p: [0.5], method: 'approximate' } };
198
+ break;
199
+ case 'percentile':
200
+ const p = config.value ? config.value / 100 : 0.95;
201
+ group[key] = { $percentile: { input: field, p: [p], method: 'approximate' } };
202
+ break;
203
+ }
204
+ }
205
+ }
206
+ return group;
207
+ }
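+ // Example of the filter conversion (illustrative):
+ //   { category: ['A', 'B'], amount: { $gte: 10 }, region: 'EU' }
+ //   -> { category: { $in: ['A', 'B'] }, amount: { $gte: 10 }, region: 'EU' }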
208
+ static buildMatchStage(filters) {
209
+ const match = {};
210
+ for (const [key, value] of Object.entries(filters)) {
211
+ if (key === '$and' || key === '$or') {
212
+ match[key] = value.map((v) => this.buildMatchStage(v));
213
+ }
214
+ else if (Array.isArray(value)) {
215
+ // Implicit $in for arrays
216
+ match[key] = { $in: value };
217
+ }
218
+ else if (typeof value === 'object' && value !== null) {
219
+ // Handle operators like gte, lt, etc.
220
+ match[key] = value;
221
+ }
222
+ else {
223
+ match[key] = value;
224
+ }
225
+ }
226
+ return match;
227
+ }
228
+ }
229
+ exports.PipelineBuilder = PipelineBuilder;
package/dist/PivotBuilder.d.ts ADDED
@@ -0,0 +1,4 @@
1
+ import { QueryBody } from './types';
2
+ export declare class PivotBuilder {
3
+ static build(query: QueryBody, matchStage: any): any[];
4
+ }
package/dist/PivotBuilder.js ADDED
@@ -0,0 +1,72 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.PivotBuilder = void 0;
4
+ class PivotBuilder {
5
+ static build(query, matchStage) {
6
+ const pipeline = [];
7
+ if (matchStage) {
8
+ pipeline.push(matchStage);
9
+ }
10
+ if (!query.pivot)
11
+ return pipeline;
12
+ const { rows, columns, values } = query.pivot;
13
+ // 1. Group by Rows + Columns
14
+ const firstGroup = {
15
+ _id: {},
16
+ };
17
+ rows.forEach((r) => (firstGroup._id[r] = `$${r}`));
18
+ columns.forEach((c) => (firstGroup._id[c] = `$${c}`));
19
+ // Value aggregation
20
+ const op = values.operation.toLowerCase();
21
+ const field = values.field ? `$${values.field}` : null;
22
+ let accumulator = { $sum: 1 };
23
+ switch (op) {
24
+ case 'sum':
25
+ accumulator = { $sum: field };
26
+ break;
27
+ case 'avg':
28
+ accumulator = { $avg: field };
29
+ break;
30
+ case 'min':
31
+ accumulator = { $min: field };
32
+ break;
33
+ case 'max':
34
+ accumulator = { $max: field };
35
+ break;
36
+ case 'count':
37
+ accumulator = { $sum: 1 };
38
+ break;
39
+ }
40
+ firstGroup.value = accumulator;
41
+ pipeline.push({ $group: firstGroup });
42
+ // 2. Group by Rows and push columns
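+ // Note: $arrayToObject in step 3 requires string keys, so the column dimension is expected to hold string values.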
43
+ const secondGroup = {
44
+ _id: {},
45
+ data: {
46
+ $push: {
47
+ k: `$_id.${columns[0]}`, // Assuming single column dimension for now
48
+ v: '$value',
49
+ },
50
+ },
51
+ };
52
+ rows.forEach((r) => (secondGroup._id[r] = `$_id.${r}`));
53
+ pipeline.push({ $group: secondGroup });
54
+ // 3. Reshape (ArrayToObject)
55
+ const project = {
56
+ _id: 0,
57
+ };
58
+ // Include row keys
59
+ rows.forEach((r) => (project[r] = `$_id.${r}`));
60
+ // Convert data array to object (Pivot)
61
+ // Merging row keys with pivot values
62
+ pipeline.push({
63
+ $replaceRoot: {
64
+ newRoot: {
65
+ $mergeObjects: [project, { $arrayToObject: '$data' }],
66
+ },
67
+ },
68
+ });
69
+ return pipeline;
70
+ }
71
+ }
72
+ exports.PivotBuilder = PivotBuilder;
package/dist/Querymon.d.ts ADDED
@@ -0,0 +1,17 @@
1
+ import { Router } from 'express';
2
+ import { QuerymonConfig, CollectionConfig } from './types';
3
+ export declare class Querymon {
4
+ private connection;
5
+ private models;
6
+ private configs;
7
+ constructor(config?: QuerymonConfig);
8
+ /**
9
+ * Register a collection for querying
10
+ */
11
+ query(collectionName: string, config: CollectionConfig): void;
12
+ /**
13
+ * Generate Express router
14
+ */
15
+ router(): Router;
16
+ private toCSV;
17
+ }
package/dist/Querymon.js ADDED
@@ -0,0 +1,147 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.Querymon = void 0;
7
+ const express_1 = require("express");
8
+ const mongoose_1 = __importDefault(require("mongoose"));
9
+ const PipelineBuilder_1 = require("./PipelineBuilder");
10
+ const CompareBuilder_1 = require("./CompareBuilder");
11
+ const CohortBuilder_1 = require("./CohortBuilder");
12
+ const FunnelBuilder_1 = require("./FunnelBuilder");
13
+ class Querymon {
14
+ constructor(config = {}) {
15
+ this.models = new Map();
16
+ this.configs = new Map();
17
+ this.connection = config.connection || mongoose_1.default.connection;
18
+ }
19
+ /**
20
+ * Register a collection for querying
21
+ */
22
+ query(collectionName, config) {
23
+ // Ensure model exists or create it
24
+ let model;
25
+ if (this.connection.models[collectionName]) {
26
+ model = this.connection.models[collectionName];
27
+ }
28
+ else {
29
+ model = this.connection.model(collectionName, config.schema);
30
+ }
31
+ this.models.set(collectionName, model);
32
+ this.configs.set(collectionName, config);
33
+ }
34
+ /**
35
+ * Generate Express router
36
+ */
37
+ router() {
38
+ const router = (0, express_1.Router)();
39
+ // Query endpoint: POST /:collection/query
40
+ router.post('/:collection/query', async (req, res) => {
41
+ try {
42
+ const { collection } = req.params;
43
+ const collectionName = collection;
44
+ const body = req.body;
45
+ console.log('Query Body:', JSON.stringify(body, null, 2));
46
+ const model = this.models.get(collectionName);
47
+ const config = this.configs.get(collectionName);
48
+ if (!model || !config) {
49
+ res.status(404).json({ error: `Collection '${collectionName}' not found or not registered with Querymon` });
50
+ return;
51
+ }
52
+ let pipeline;
53
+ if (body.compare) {
54
+ pipeline = CompareBuilder_1.CompareBuilder.build(body, config);
55
+ }
56
+ else if (body.cohort) {
57
+ pipeline = CohortBuilder_1.CohortBuilder.build(body);
58
+ }
59
+ else if (body.funnel) {
60
+ pipeline = FunnelBuilder_1.FunnelBuilder.build(body);
61
+ }
62
+ else {
63
+ pipeline = PipelineBuilder_1.PipelineBuilder.build(body, config);
64
+ }
65
+ console.log('Pipeline:', JSON.stringify(pipeline, null, 2));
66
+ const startTime = Date.now();
67
+ const data = await model.aggregate(pipeline).exec();
68
+ const executionTime = `${Date.now() - startTime}ms`;
69
+ // Handle Export
70
+ if (body.export) {
71
+ const { format, filename = 'export' } = body.export;
72
+ if (format === 'csv') {
73
+ const csv = this.toCSV(data);
74
+ res.header('Content-Type', 'text/csv');
75
+ res.attachment(`${filename}.csv`);
76
+ res.send(csv);
77
+ return;
78
+ }
79
+ else if (format === 'json') {
80
+ res.header('Content-Type', 'application/json');
81
+ res.attachment(`${filename}.json`);
82
+ res.send(JSON.stringify(data, null, 2));
83
+ return;
84
+ }
85
+ }
86
+ res.json({
87
+ data,
88
+ meta: {
89
+ total: data.length, // accurate count requires a separate count query or facet
90
+ executionTime,
91
+ },
92
+ });
93
+ }
94
+ catch (error) {
95
+ res.status(500).json({ error: error.message, body: req.body });
96
+ }
97
+ });
98
+ return router;
99
+ }
100
+ toCSV(data) {
101
+ if (!data || data.length === 0)
102
+ return '';
103
+ // Flatten logic helper
104
+ const flatten = (obj, prefix = '', res = {}) => {
105
+ for (const [key, value] of Object.entries(obj)) {
106
+ const newKey = prefix ? `${prefix}.${key}` : key;
107
+ if (value && typeof value === 'object' && !Array.isArray(value) && !(value instanceof Date)) {
108
+ flatten(value, newKey, res);
109
+ }
110
+ else {
111
+ res[newKey] = value;
112
+ }
113
+ }
114
+ return res;
115
+ };
116
+ // Flatten all rows
117
+ const flatData = data.map((row) => flatten(row));
118
+ // Get all unique keys for header
119
+ const headers = Array.from(new Set(flatData.flatMap((row) => Object.keys(row))));
120
+ // Build CSV string
121
+ const csvRows = [
122
+ headers.join(','), // Header row
123
+ ...flatData.map((row) => {
124
+ return headers
125
+ .map((header) => {
126
+ const val = row[header];
127
+ // Handle strings with commas, nulls, dates
128
+ if (val === null || val === undefined)
129
+ return '';
130
+ if (val instanceof Date)
131
+ return val.toISOString();
132
+ if (typeof val === 'string') {
133
+ // Escape quotes and wrap in quotes if contains comma or quote
134
+ if (val.includes(',') || val.includes('"') || val.includes('\n')) {
135
+ return `"${val.replace(/"/g, '""')}"`;
136
+ }
137
+ return val;
138
+ }
139
+ return String(val);
140
+ })
141
+ .join(',');
142
+ }),
143
+ ];
144
+ return csvRows.join('\n');
145
+ }
146
+ }
147
+ exports.Querymon = Querymon;
package/dist/index.d.ts ADDED
@@ -0,0 +1,7 @@
1
+ export * from './types';
2
+ export * from './Querymon';
3
+ export * from './PipelineBuilder';
4
+ export * from './PivotBuilder';
5
+ export * from './CompareBuilder';
6
+ export * from './CohortBuilder';
7
+ export * from './FunnelBuilder';
package/dist/index.js ADDED
@@ -0,0 +1,23 @@
1
+ "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __exportStar = (this && this.__exportStar) || function(m, exports) {
14
+ for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
15
+ };
16
+ Object.defineProperty(exports, "__esModule", { value: true });
17
+ __exportStar(require("./types"), exports);
18
+ __exportStar(require("./Querymon"), exports);
19
+ __exportStar(require("./PipelineBuilder"), exports);
20
+ __exportStar(require("./PivotBuilder"), exports);
21
+ __exportStar(require("./CompareBuilder"), exports);
22
+ __exportStar(require("./CohortBuilder"), exports);
23
+ __exportStar(require("./FunnelBuilder"), exports);
package/dist/types.d.ts ADDED
@@ -0,0 +1,70 @@
1
+ import mongoose, { Schema } from 'mongoose';
2
+ export interface QuerymonConfig {
3
+ connection?: mongoose.Connection;
4
+ }
5
+ export interface JoinConfig {
6
+ collection: string;
7
+ localField: string;
8
+ foreignField: string;
9
+ type?: 'oneToOne' | 'oneToMany';
10
+ fields?: string[];
11
+ }
12
+ export interface CollectionConfig {
13
+ schema: Schema;
14
+ dimensions?: string[];
15
+ metrics?: string[];
16
+ timeFields?: string[];
17
+ allowedAggregations?: string[];
18
+ joins?: Record<string, JoinConfig>;
19
+ }
20
+ export interface QueryBody {
21
+ filters?: Record<string, any>;
22
+ select?: string[];
23
+ groupBy?: string[];
24
+ timeSeriesBy?: {
25
+ field: string;
26
+ interval: 'day' | 'week' | 'month' | 'year';
27
+ start?: string | Date;
28
+ end?: string | Date;
29
+ };
30
+ pivot?: {
31
+ rows: string[];
32
+ columns: string[];
33
+ values: {
34
+ field: string;
35
+ operation: string;
36
+ };
37
+ };
38
+ compare?: {
39
+ current: Record<string, any>;
40
+ previous: Record<string, any>;
41
+ };
42
+ cohort?: {
43
+ identityField: string;
44
+ cohortDateField: string;
45
+ interval: 'day' | 'week' | 'month' | 'year';
46
+ start: string | Date;
47
+ end: string | Date;
48
+ eventDateField: string;
49
+ };
50
+ funnel?: {
51
+ identityField: string;
52
+ steps: {
53
+ name: string;
54
+ filters: Record<string, any>;
55
+ }[];
56
+ dateField: string;
57
+ };
58
+ export?: {
59
+ format: 'csv' | 'json';
60
+ filename?: string;
61
+ };
62
+ aggregations?: Record<string, {
63
+ field?: string;
64
+ operation: string;
65
+ value?: number;
66
+ }>;
67
+ sort?: Record<string, 'asc' | 'desc'>[];
68
+ limit?: number;
69
+ skip?: number;
70
+ }
package/dist/types.js ADDED
@@ -0,0 +1,2 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
package/package.json ADDED
@@ -0,0 +1,36 @@
1
+ {
2
+ "name": "querymon-builder",
3
+ "version": "1.0.0",
4
+ "description": "Schema-driven query builder for analytics APIs",
5
+ "main": "dist/index.js",
6
+ "types": "dist/index.d.ts",
7
+ "keywords": [
8
+ "mongodb",
9
+ "mongoose",
10
+ "analytics",
11
+ "query-builder",
12
+ "api",
13
+ "pivot",
14
+ "cohort",
15
+ "funnel"
16
+ ],
17
+ "author": "Mukesh",
18
+ "license": "MIT",
19
+ "scripts": {
20
+ "build": "tsc",
21
+ "test": "echo \"Error: no test specified\" && exit 1"
22
+ },
26
+ "type": "commonjs",
27
+ "devDependencies": {
28
+ "@types/express": "^5.0.6",
29
+ "@types/node": "^25.2.3",
30
+ "express": "^5.2.1",
31
+ "mongodb-memory-server": "^11.0.1",
32
+ "mongoose": "^9.2.1",
33
+ "ts-node": "^10.9.2",
34
+ "typescript": "^5.9.3"
35
+ }
36
+ }