aiden-shared-calculations-unified 1.0.16 → 1.0.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,92 @@
+ /**
+  * @fileoverview Meta-calculation (Pass 3) that correlates the asset flow
+  * of the "In Profit" cohort vs. the "In Loss" cohort to find
+  * powerful divergence signals (e.g., profit-taking, capitulation).
+  */
+
+ class ProfitCohortDivergence {
+   constructor() {
+     this.flowThreshold = 0.5; // Min abs flow % to be considered a signal
+   }
+
+   /**
+    * @param {string} dateStr The date to run the analysis for (e.g., "2025-10-31").
+    * @param {object} dependencies The shared dependencies (db, logger).
+    * @param {object} config The computation system configuration.
+    * @returns {Promise<object|null>} The analysis result or null.
+    */
+   async process(dateStr, dependencies, config) {
+     const { db, logger } = dependencies;
+     const collection = config.resultsCollection;
+     const resultsSub = config.resultsSubcollection || 'results';
+     const compsSub = config.computationsSubcollection || 'computations';
+
+     // 1. Define dependencies
+     const refsToGet = [
+       {
+         key: 'profit_flow',
+         ref: db.collection(collection).doc(dateStr).collection(resultsSub).doc('behavioural').collection(compsSub).doc('in-profit-asset-crowd-flow')
+       },
+       {
+         key: 'loss_flow',
+         ref: db.collection(collection).doc(dateStr).collection(resultsSub).doc('behavioural').collection(compsSub).doc('in-loss-asset-crowd-flow')
+       }
+     ];
+
+     // 2. Fetch
+     const snapshots = await db.getAll(...refsToGet.map(r => r.ref));
+     const profitFlowData = snapshots[0].exists ? snapshots[0].data() : null;
+     const lossFlowData = snapshots[1].exists ? snapshots[1].data() : null;
+
+     // 3. Handle "day-delay"
+     if (!profitFlowData || !lossFlowData) {
+       logger.log('WARN', `[ProfitCohortDivergence] Missing cohort flow data for ${dateStr}. Allowing backfill.`);
+       return null; // Let backfill handle it
+     }
+
+     const results = {};
+     const allTickers = new Set([...Object.keys(profitFlowData), ...Object.keys(lossFlowData)]);
+
+     // 4. Correlate
+     for (const ticker of allTickers) {
+       const profitFlow = profitFlowData[ticker]?.net_crowd_flow_pct || 0;
+       const lossFlow = lossFlowData[ticker]?.net_crowd_flow_pct || 0;
+
+       const profitSells = profitFlow <= -this.flowThreshold;
+       const profitBuys = profitFlow >= this.flowThreshold;
+       const lossSells = lossFlow <= -this.flowThreshold;
+       const lossBuys = lossFlow >= this.flowThreshold;
+
+       let status = 'No Divergence';
+       let detail = 'Both cohorts are acting similarly or flow is insignificant.';
+
+       if (profitSells && lossBuys) {
+         status = 'Profit Taking';
+         detail = 'The "in-profit" cohort is selling to the "in-loss" cohort, who are averaging down.';
+       } else if (profitBuys && lossSells) {
+         status = 'Capitulation';
+         detail = 'The "in-loss" cohort is panic-selling, and the "in-profit" cohort is buying the dip.';
+       } else if (profitBuys && lossBuys) {
+         status = 'High Conviction Buy';
+         detail = 'All cohorts are net-buying.';
+       } else if (profitSells && lossSells) {
+         status = 'High Conviction Sell';
+         detail = 'All cohorts are net-selling.';
+       }
+
+       results[ticker] = {
+         status: status,
+         detail: detail,
+         profit_cohort_flow: profitFlow,
+         loss_cohort_flow: lossFlow
+       };
+     }
+
+     return results;
+   }
+
+   async getResult() { return null; }
+   reset() {}
+ }
+
+ module.exports = ProfitCohortDivergence;
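For orientation, the per-ticker classification above reduces to two threshold flags per cohort. A minimal sketch of that branch logic outside the class (the 0.5 threshold and the four status strings come from the file above; the helper name and sample values are hypothetical):

// Illustrative sketch only — mirrors the ProfitCohortDivergence branches.
const FLOW_THRESHOLD = 0.5; // same value as this.flowThreshold above

function classifyProfitCohortDivergence(profitFlow, lossFlow) {
  const profitSells = profitFlow <= -FLOW_THRESHOLD;
  const profitBuys = profitFlow >= FLOW_THRESHOLD;
  const lossSells = lossFlow <= -FLOW_THRESHOLD;
  const lossBuys = lossFlow >= FLOW_THRESHOLD;

  if (profitSells && lossBuys) return 'Profit Taking';
  if (profitBuys && lossSells) return 'Capitulation';
  if (profitBuys && lossBuys) return 'High Conviction Buy';
  if (profitSells && lossSells) return 'High Conviction Sell';
  return 'No Divergence';
}

// e.g. in-profit cohort selling (-1.2%) while the in-loss cohort buys (+0.8%)
console.log(classifyProfitCohortDivergence(-1.2, 0.8)); // -> 'Profit Taking'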
@@ -0,0 +1,128 @@
+ /**
+  * @fileoverview Meta-calculation (Pass 3) that correlates the asset/sector flow
+  * of the "Smart Cohort" vs. the "Dumb Cohort" to find divergence signals.
+  *
+  * This identifies:
+  * 1. "Capitulation": Smart cohort is buying what the dumb cohort is panic-selling.
+  * 2. "Euphoria": Smart cohort is selling what the dumb cohort is FOMO-buying.
+  */
+
+ class SmartDumbDivergenceIndex {
+   constructor() {
+     // Minimum net flow (as a percentage) to be considered a signal
+     this.FLOW_THRESHOLD = 0.5;
+   }
+
+   /**
+    * @param {string} dateStr The date to run the analysis for (e.g., "2025-10-31").
+    * @param {object} dependencies The shared dependencies (db, logger).
+    * @param {object} config The computation system configuration.
+    * @returns {Promise<object|null>} The analysis result or null.
+    */
+   async process(dateStr, dependencies, config) {
+     const { db, logger } = dependencies;
+     const collection = config.resultsCollection;
+     const resultsSub = config.resultsSubcollection || 'results';
+     const compsSub = config.computationsSubcollection || 'computations';
+
+     // 1. Define dependencies
+     const refsToGet = [
+       {
+         key: 'smart_flow',
+         ref: db.collection(collection).doc(dateStr).collection(resultsSub).doc('behavioural').collection(compsSub).doc('smart-cohort-flow')
+       },
+       {
+         key: 'dumb_flow',
+         ref: db.collection(collection).doc(dateStr).collection(resultsSub).doc('behavioural').collection(compsSub).doc('dumb-cohort-flow')
+       }
+     ];
+
+     // 2. Fetch
+     const snapshots = await db.getAll(...refsToGet.map(r => r.ref));
+     const smartData = snapshots[0].exists ? snapshots[0].data() : null;
+     const dumbData = snapshots[1].exists ? snapshots[1].data() : null;
+
+     // 3. Handle "day-delay"
+     if (!smartData || !dumbData) {
+       logger.log('WARN', `[SmartDumbDivergence] Missing cohort flow data for ${dateStr}. Allowing backfill.`);
+       return null; // Let backfill handle it
+     }
+
+     const results = {
+       assets: {},
+       sectors: {}
+     };
+
+     const smartAssetFlow = smartData.asset_flow || {};
+     const dumbAssetFlow = dumbData.asset_flow || {};
+     const smartSectorFlow = smartData.sector_rotation || {};
+     const dumbSectorFlow = dumbData.sector_rotation || {};
+
+     // 4. Correlate Assets
+     const allTickers = new Set([...Object.keys(smartAssetFlow), ...Object.keys(dumbAssetFlow)]);
+     for (const ticker of allTickers) {
+       const sFlow = smartAssetFlow[ticker]?.net_crowd_flow_pct || 0;
+       const dFlow = dumbAssetFlow[ticker]?.net_crowd_flow_pct || 0;
+
+       const smartBuys = sFlow >= this.FLOW_THRESHOLD;
+       const smartSells = sFlow <= -this.FLOW_THRESHOLD;
+       const dumbBuys = dFlow >= this.FLOW_THRESHOLD;
+       const dumbSells = dFlow <= -this.FLOW_THRESHOLD;
+
+       let status = 'No_Divergence';
+       let detail = 'Cohorts are aligned or flow is insignificant.';
+
+       if (smartBuys && dumbSells) {
+         status = 'Capitulation';
+         detail = 'Smart cohort is buying the dip from the panic-selling dumb cohort.';
+       } else if (smartSells && dumbBuys) {
+         status = 'Euphoria';
+         detail = 'Smart cohort is selling into the FOMO-buying dumb cohort.';
+       } else if (smartBuys && dumbBuys) {
+         status = 'Aligned_Buy';
+       } else if (smartSells && dumbSells) {
+         status = 'Aligned_Sell';
+       }
+
+       if (status !== 'No_Divergence') {
+         results.assets[ticker] = {
+           status: status,
+           detail: detail,
+           smart_cohort_flow_pct: sFlow,
+           dumb_cohort_flow_pct: dFlow
+         };
+       }
+     }
+
+     // 5. Correlate Sectors (Note: flow is total $, not %)
+     // We can just check for opposing signs
+     const allSectors = new Set([...Object.keys(smartSectorFlow), ...Object.keys(dumbSectorFlow)]);
+     for (const sector of allSectors) {
+       const sFlow = smartSectorFlow[sector] || 0;
+       const dFlow = dumbSectorFlow[sector] || 0;
+
+       let status = 'No_Divergence';
+
+       if (sFlow > 0 && dFlow < 0) {
+         status = 'Capitulation';
+       } else if (sFlow < 0 && dFlow > 0) {
+         status = 'Euphoria';
+       }
+
+       if (status !== 'No_Divergence') {
+         results.sectors[sector] = {
+           status: status,
+           smart_cohort_flow_usd: sFlow,
+           dumb_cohort_flow_usd: dFlow
+         };
+       }
+     }
+
+     return results;
+   }
+
+   async getResult() { return null; }
+   reset() {}
+ }
+
+ module.exports = SmartDumbDivergenceIndex;
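The returned object keeps assets and sectors separate, and only records entries where the two cohorts diverge or align strongly. A hypothetical example of the shape (field names come from the code above; the ticker, sector, and numbers are invented):

// Hypothetical result shape from SmartDumbDivergenceIndex.process()
const exampleResult = {
  assets: {
    NVDA: {
      status: 'Euphoria',
      detail: 'Smart cohort is selling into the FOMO-buying dumb cohort.',
      smart_cohort_flow_pct: -0.9,
      dumb_cohort_flow_pct: 1.4
    }
  },
  sectors: {
    // Sector flows are compared by sign only, since they are total $ rather than %
    Technology: {
      status: 'Capitulation',
      smart_cohort_flow_usd: 1250000,
      dumb_cohort_flow_usd: -480000
    }
  }
};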
@@ -0,0 +1,123 @@
+ /**
+  * @fileoverview Meta-calculation (Pass 3) to correlate daily social sentiment with
+  * the actual crowd asset flow. It identifies divergences between what the crowd
+  * says and what they do.
+  */
+
+ class SocialFlowCorrelation {
+   constructor() {
+     // Define sensitivity thresholds
+     this.bullishSentimentThreshold = 70.0; // % ratio
+     this.bearishSentimentThreshold = 30.0; // % ratio
+     this.positiveFlowThreshold = 0.5; // net_crowd_flow_pct
+     this.negativeFlowThreshold = -0.5; // net_crowd_flow_pct
+   }
+
+   /**
+    * @param {string} dateStr The date to run the analysis for (e.g., "2025-10-31").
+    * @param {object} dependencies The shared dependencies (db, logger).
+    * @param {object} config The computation system configuration.
+    * @returns {Promise<object|null>} The analysis result or null.
+    */
+   async process(dateStr, dependencies, config) {
+     const { db, logger } = dependencies;
+     const collection = config.resultsCollection;
+
+     // 1. Define dependencies for the *same day*
+     const dependenciesToFetch = [
+       { category: 'socialPosts', computation: 'social_sentiment_aggregation' },
+       { category: 'behavioural', computation: 'asset_crowd_flow' }
+     ];
+
+     // 2. Build refs and fetch
+     const refs = dependenciesToFetch.map(d =>
+       db.collection(collection).doc(dateStr)
+         .collection('results').doc(d.category)
+         .collection('computations').doc(d.computation)
+     );
+
+     const snapshots = await db.getAll(...refs);
+
+     // 3. Check for data and handle the "day-delay"
+     const socialData = snapshots[0].exists ? snapshots[0].data() : null;
+     const flowData = snapshots[1].exists ? snapshots[1].data() : null;
+
+     if (!socialData || !flowData) {
+       logger.log('WARN', `[SocialFlowCorrelation] Missing dependency data for ${dateStr}. Allowing backfill.`);
+       // Return null. This stops execution and does not save an empty doc.
+       // The Pass 3 backfill will pick this up tomorrow.
+       return null;
+     }
+
+     // 4. If data exists, perform the correlation
+     const sentimentMap = socialData.tickerSentiment || {};
+     const correlationResults = {};
+
+     // Use all tickers from the flow data as the primary loop
+     for (const ticker in flowData) {
+       if (!flowData[ticker] || typeof flowData[ticker].net_crowd_flow_pct === 'undefined') {
+         continue;
+       }
+
+       const flow = flowData[ticker].net_crowd_flow_pct;
+       const sentiment = sentimentMap[ticker]?.sentimentRatio; // e.g., 85.0 or 22.5
+
+       if (typeof sentiment === 'undefined') {
+         // No sentiment found, just record flow
+         correlationResults[ticker] = {
+           status: 'no_social_sentiment',
+           net_crowd_flow_pct: flow
+         };
+         continue;
+       }
+
+       // --- The "Jaw-Drop" Logic ---
+       if (sentiment >= this.bullishSentimentThreshold && flow <= this.negativeFlowThreshold) {
+         // Crowd is very bullish but is actively selling
+         correlationResults[ticker] = {
+           status: 'Bullish Divergence',
+           detail: 'Crowd is publicly bullish but is net-selling the asset.',
+           sentiment_ratio: sentiment,
+           net_crowd_flow_pct: flow
+         };
+       } else if (sentiment <= this.bearishSentimentThreshold && flow >= this.positiveFlowThreshold) {
+         // Crowd is very bearish but is actively buying
+         correlationResults[ticker] = {
+           status: 'Bearish Divergence',
+           detail: 'Crowd is publicly bearish but is net-buying the asset.',
+           sentiment_ratio: sentiment,
+           net_crowd_flow_pct: flow
+         };
+       } else if (sentiment >= this.bullishSentimentThreshold && flow >= this.positiveFlowThreshold) {
+         // Crowd is bullish and is buying
+         correlationResults[ticker] = {
+           status: 'High Conviction Buy',
+           sentiment_ratio: sentiment,
+           net_crowd_flow_pct: flow
+         };
+       } else if (sentiment <= this.bearishSentimentThreshold && flow <= this.negativeFlowThreshold) {
+         // Crowd is bearish and is selling
+         correlationResults[ticker] = {
+           status: 'High Conviction Sell',
+           sentiment_ratio: sentiment,
+           net_crowd_flow_pct: flow
+         };
+       } else {
+         // No strong signal or divergence
+         correlationResults[ticker] = {
+           status: 'No Clear Signal',
+           sentiment_ratio: sentiment,
+           net_crowd_flow_pct: flow
+         };
+       }
+     }
+
+     return correlationResults;
+   }
+
+   // Must exist for the meta-computation runner
+   async getResult() { return null; }
+   reset() {}
+ }
+
+ module.exports = SocialFlowCorrelation;
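The sentiment-versus-flow logic above can be read as four quadrants around the 70/30 sentiment thresholds and the +/-0.5% flow thresholds. A standalone sketch of the same decision table (constants taken from the constructor; the helper name and the example call are illustrative):

// Illustrative helper mirroring the SocialFlowCorrelation branches.
function classifySocialFlow(sentimentRatio, netCrowdFlowPct) {
  const bullish = sentimentRatio >= 70.0;   // bullishSentimentThreshold
  const bearish = sentimentRatio <= 30.0;   // bearishSentimentThreshold
  const buying = netCrowdFlowPct >= 0.5;    // positiveFlowThreshold
  const selling = netCrowdFlowPct <= -0.5;  // negativeFlowThreshold

  if (bullish && selling) return 'Bullish Divergence';  // says bullish, acts bearish
  if (bearish && buying) return 'Bearish Divergence';   // says bearish, acts bullish
  if (bullish && buying) return 'High Conviction Buy';
  if (bearish && selling) return 'High Conviction Sell';
  return 'No Clear Signal';
}

console.log(classifySocialFlow(85.0, -1.1)); // -> 'Bullish Divergence'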
@@ -1,47 +1,68 @@
  /**
   * @fileoverview Tracks user profitability over a 7-day rolling window.
-  * This version shards the output to avoid Firestore's "too many index entries" error.
+  * This version shards the output AND calculates the user's *weighted average daily PNL (as a decimal %)*.
   */

  const { Firestore } = require('@google-cloud/firestore');
  const firestore = new Firestore();

- // Define a constant for the number of shards
  const NUM_SHARDS = 50;

  class UserProfitabilityTracker {
    constructor() {
-     this.dailyProfits = {};
+     // This will store { userId: { weightedPnlSum: 0, totalInvested: 0 } }
+     this.dailyData = {};
    }

-   process(todayPortfolio, yesterdayPortfolio, userId) {
-     if (!todayPortfolio) return;
+   /**
+    * Calculates the weighted PNL for the day.
+    * NetProfit is a decimal % return (e.g., 0.03)
+    * Invested is a decimal % weight (e.g., 0.05)
+    */
+   calculateWeightedDailyPnl(portfolio) {
+     if (!portfolio || !portfolio.AggregatedPositions || portfolio.AggregatedPositions.length === 0) {
+       return { weightedPnlSum: 0, totalInvested: 0 };
+     }
+
+     let weightedPnlSum = 0;
+     let totalInvested = 0;

-     const todayPnl = this.calculateTotalPnl(todayPortfolio);
-     if (todayPnl !== null) {
-       this.dailyProfits[userId] = todayPnl;
+     for (const pos of portfolio.AggregatedPositions) {
+       // Use NetProfit (the % return)
+       const netProfit = ('NetProfit' in pos) ? pos.NetProfit : (pos.ProfitAndLoss || 0);
+       // Use InvestedAmount (the % portfolio weight)
+       const invested = pos.InvestedAmount || pos.Amount || 0;
+
+       if (invested > 0) {
+         weightedPnlSum += netProfit * invested;
+         totalInvested += invested;
+       }
      }
+
+     return { weightedPnlSum, totalInvested };
    }

-   calculateTotalPnl(portfolio) {
-     if (portfolio && portfolio.AggregatedPositions) {
-       return portfolio.AggregatedPositions.reduce((sum, pos) => sum + pos.NetProfit, 0);
-     } else if (portfolio && portfolio.PublicPositions) {
-       return portfolio.PublicPositions.reduce((sum, pos) => sum + pos.NetProfit, 0);
+   process(todayPortfolio, yesterdayPortfolio, userId) {
+     if (!todayPortfolio) return;
+
+     const { weightedPnlSum, totalInvested } = this.calculateWeightedDailyPnl(todayPortfolio);
+
+     if (totalInvested > 0) {
+       this.dailyData[userId] = { weightedPnlSum, totalInvested };
      }
-     return null;
    }

    async getResult() {
      const today = new Date().toISOString().slice(0, 10);
-     const results = {};
+     const results = {}; // For sharded history
+     const dailyPnlMap = {}; // For the new profile calc

      // Prepare sharded data structure
      for (let i = 0; i < NUM_SHARDS; i++) {
        results[`user_profitability_shard_${i}`] = {};
      }

-     // Fetch all existing shards in parallel for efficiency
+     // ... (Fetch existing shards logic, same as your file) ...
      const shardPromises = [];
      for (let i = 0; i < NUM_SHARDS; i++) {
        const docRef = firestore.collection('historical_insights').doc(`user_profitability_shard_${i}`);
@@ -50,18 +71,38 @@ class UserProfitabilityTracker {
      const shardSnapshots = await Promise.all(shardPromises);
      const existingData = shardSnapshots.map(snap => (snap.exists ? snap.data().profits : {}));

-     // Process users and assign them to shards
-     for (const userId in this.dailyProfits) {
+
+     for (const userId in this.dailyData) {
+       const { weightedPnlSum, totalInvested } = this.dailyData[userId];
+
+       // Calculate the final weighted average % return for the day
+       // We cap totalInvested at 1.0 (100%) in case of data issues
+       const totalWeight = Math.min(1.0, totalInvested);
+       const dailyAvgPnl = (totalWeight > 0) ? (weightedPnlSum / totalWeight) : 0;
+
+       // Store this for the profile calc dependency
+       dailyPnlMap[userId] = dailyAvgPnl;
+
+       // --- Now, update the sharded history ---
        const shardIndex = parseInt(userId, 10) % NUM_SHARDS;
        const userHistory = existingData[shardIndex][userId] || [];

-       userHistory.push({ date: today, pnl: this.dailyProfits[userId] });
+       // Store the decimal % pnl in the history
+       userHistory.push({ date: today, pnl: dailyAvgPnl });

-       // Ensure we only keep the last 7 days of data
-       results[`user_profitability_shard_${shardIndex}`][userId] = userHistory.slice(-7);
+       const shardKey = `user_profitability_shard_${shardIndex}`;
+       if (!results[shardKey]) results[shardKey] = {};
+       results[shardKey][userId] = userHistory.slice(-7);
      }

-     return { sharded_user_profitability: results };
+     return {
+       sharded_user_profitability: results,
+       daily_pnl_map: dailyPnlMap // <-- This now correctly outputs the weighted avg % PNL
+     };
+   }
+
+   reset() {
+     this.dailyData = {};
    }
  }

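To make the new math concrete, the calculateWeightedDailyPnl / getResult path above boils down to the arithmetic below. A hypothetical worked example (the positions and their values are invented; NetProfit and InvestedAmount are the fields the tracker reads):

// Hypothetical positions, using the NetProfit / InvestedAmount fields read above.
const positions = [
  { NetProfit: 0.03, InvestedAmount: 0.40 },  // +3% return on 40% of the portfolio
  { NetProfit: -0.01, InvestedAmount: 0.10 }  // -1% return on 10% of the portfolio
];

// Same accumulation as calculateWeightedDailyPnl():
const weightedPnlSum = positions.reduce((s, p) => s + p.NetProfit * p.InvestedAmount, 0); // 0.011
const totalInvested = positions.reduce((s, p) => s + p.InvestedAmount, 0);                // 0.50

// Same normalisation as getResult(): cap the weight at 100%, then divide.
const totalWeight = Math.min(1.0, totalInvested);
const dailyAvgPnl = weightedPnlSum / totalWeight; // ~0.022, i.e. a +2.2% weighted-average day

console.log(dailyAvgPnl);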
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "aiden-shared-calculations-unified",
-   "version": "1.0.16",
+   "version": "1.0.18",
    "description": "Shared calculation modules for the BullTrackers Computation System.",
    "main": "index.js",
    "files": [