aiden-shared-calculations-unified 1.0.16 → 1.0.17
- package/calculations/behavioural/historical/in_loss_asset_crowd_flow.js +99 -0
- package/calculations/behavioural/historical/in_profit_asset_crowd_flow.js +100 -0
- package/calculations/meta/capital_deployment_strategy.js +138 -0
- package/calculations/meta/crowd_sharpe_ratio_proxy.js +92 -0
- package/calculations/meta/profit_cohort_divergence.js +92 -0
- package/calculations/meta/social_flow_correlation.js +123 -0
- package/package.json +1 -1

package/calculations/behavioural/historical/in_loss_asset_crowd_flow.js
@@ -0,0 +1,99 @@
+const { loadAllPriceData, getDailyPriceChange } = require('../../../utils/price_data_provider');
+const { loadInstrumentMappings } = require('../../../utils/sector_mapping_provider');
+
+/**
+ * @fileoverview Calculates "Net Crowd Flow" for each asset, BUT
+ * *only* for the cohort of users who are currently IN LOSS
+ * on their positions for that asset.
+ */
+class InLossAssetCrowdFlow {
+  constructor() {
+    this.asset_values = {}; // Stores { day1_value_sum: 0, day2_value_sum: 0 }
+    this.user_count = 0;
+    this.priceMap = null;
+    this.mappings = null;
+    this.dates = {};
+  }
+
+  _initAsset(instrumentId) {
+    if (!this.asset_values[instrumentId]) {
+      this.asset_values[instrumentId] = { day1_value_sum: 0, day2_value_sum: 0 };
+    }
+  }
+
+  process(todayPortfolio, yesterdayPortfolio, userId, context) {
+    if (!todayPortfolio || !yesterdayPortfolio || !todayPortfolio.AggregatedPositions || !yesterdayPortfolio.AggregatedPositions) {
+      return;
+    }
+
+    if (!this.dates.today && context.todayDateStr && context.yesterdayDateStr) {
+      this.dates.today = context.todayDateStr;
+      this.dates.yesterday = context.yesterdayDateStr;
+    }
+
+    const yesterdayPositions = new Map(yesterdayPortfolio.AggregatedPositions.map(p => [p.InstrumentID, p]));
+    const todayPositions = new Map(todayPortfolio.AggregatedPositions.map(p => [p.InstrumentID, p]));
+
+    const allInstrumentIds = new Set([
+      ...yesterdayPositions.keys(),
+      ...todayPositions.keys()
+    ]);
+
+    for (const instrumentId of allInstrumentIds) {
+      const yPos = yesterdayPositions.get(instrumentId);
+      const tPos = todayPositions.get(instrumentId);
+
+      // --- COHORT LOGIC ---
+      // Only aggregate if the user is in LOSS on this asset.
+      const tNetProfit = tPos?.NetProfit || 0;
+      if (tNetProfit >= 0) { // Note: >= 0 (includes zero profit)
+        continue; // Skip this asset for this user
+      }
+      // --- END COHORT LOGIC ---
+
+      this._initAsset(instrumentId);
+      this.asset_values[instrumentId].day1_value_sum += (yPos?.Value || 0);
+      this.asset_values[instrumentId].day2_value_sum += (tPos?.Value || 0);
+    }
+    this.user_count++;
+  }
+
+  async getResult() {
+    if (this.user_count === 0 || !this.dates.today) return {};
+    if (!this.priceMap || !this.mappings) {
+      const [priceData, mappingData] = await Promise.all([
+        loadAllPriceData(),
+        loadInstrumentMappings()
+      ]);
+      this.priceMap = priceData;
+      this.mappings = mappingData;
+    }
+
+    const finalResults = {};
+    const todayStr = this.dates.today;
+    const yesterdayStr = this.dates.yesterday;
+
+    for (const instrumentId in this.asset_values) {
+      const ticker = this.mappings.instrumentToTicker[instrumentId] || `id_${instrumentId}`;
+
+      const avg_day1_value = this.asset_values[instrumentId].day1_value_sum / this.user_count;
+      const avg_day2_value = this.asset_values[instrumentId].day2_value_sum / this.user_count;
+      const priceChangePct = getDailyPriceChange(instrumentId, yesterdayStr, todayStr, this.priceMap);
+
+      if (priceChangePct === null) continue;
+
+      const expected_day2_value = avg_day1_value * (1 + priceChangePct);
+      const net_crowd_flow_pct = avg_day2_value - expected_day2_value;
+
+      finalResults[ticker] = {
+        net_crowd_flow_pct: net_crowd_flow_pct,
+        avg_value_day1_pct: avg_day1_value,
+        avg_value_day2_pct: avg_day2_value
+      };
+    }
+    return finalResults;
+  }
+
+  reset() { /*...reset all properties...*/ }
+}
+module.exports = InLossAssetCrowdFlow;
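
Worked example of the flow formula above (invented numbers; `Value` is assumed to be each position's percentage allocation, which is what the `_pct` result keys suggest):

    // Yesterday the in-loss cohort's average allocation to an asset was 4.0%,
    // and the asset's price then rose 2% (priceChangePct = 0.02).
    const avg_day1_value = 4.0;
    const priceChangePct = 0.02;
    const expected_day2_value = avg_day1_value * (1 + priceChangePct); // 4.08
    // Today's observed average allocation is only 3.5%, so the cohort sold
    // beyond what price drift alone explains:
    const avg_day2_value = 3.5;
    const net_crowd_flow_pct = avg_day2_value - expected_day2_value;   // ≈ -0.58 (net outflow)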

package/calculations/behavioural/historical/in_profit_asset_crowd_flow.js
@@ -0,0 +1,100 @@
+const { loadAllPriceData, getDailyPriceChange } = require('../../../utils/price_data_provider');
+const { loadInstrumentMappings } = require('../../../utils/sector_mapping_provider');
+
+/**
+ * @fileoverview Calculates "Net Crowd Flow" for each asset, BUT
+ * *only* for the cohort of users who are currently IN PROFIT
+ * on their positions for that asset.
+ */
+class InProfitAssetCrowdFlow {
+  constructor() {
+    this.asset_values = {}; // Stores { day1_value_sum: 0, day2_value_sum: 0 }
+    this.user_count = 0;
+    this.priceMap = null;
+    this.mappings = null;
+    this.dates = {};
+  }
+
+  _initAsset(instrumentId) {
+    if (!this.asset_values[instrumentId]) {
+      this.asset_values[instrumentId] = { day1_value_sum: 0, day2_value_sum: 0 };
+    }
+  }
+
+  process(todayPortfolio, yesterdayPortfolio, userId, context) {
+    if (!todayPortfolio || !yesterdayPortfolio || !todayPortfolio.AggregatedPositions || !yesterdayPortfolio.AggregatedPositions) {
+      return;
+    }
+
+    if (!this.dates.today && context.todayDateStr && context.yesterdayDateStr) {
+      this.dates.today = context.todayDateStr;
+      this.dates.yesterday = context.yesterdayDateStr;
+    }
+
+    const yesterdayPositions = new Map(yesterdayPortfolio.AggregatedPositions.map(p => [p.InstrumentID, p]));
+    const todayPositions = new Map(todayPortfolio.AggregatedPositions.map(p => [p.InstrumentID, p]));
+
+    const allInstrumentIds = new Set([
+      ...yesterdayPositions.keys(),
+      ...todayPositions.keys()
+    ]);
+
+    for (const instrumentId of allInstrumentIds) {
+      const yPos = yesterdayPositions.get(instrumentId);
+      const tPos = todayPositions.get(instrumentId);
+
+      // --- COHORT LOGIC ---
+      // Only aggregate if the user is in PROFIT on this asset.
+      // We check *today's* profit status as the primary signal.
+      const tNetProfit = tPos?.NetProfit || 0;
+      if (tNetProfit <= 0) {
+        continue; // Skip this asset for this user
+      }
+      // --- END COHORT LOGIC ---
+
+      this._initAsset(instrumentId);
+      this.asset_values[instrumentId].day1_value_sum += (yPos?.Value || 0);
+      this.asset_values[instrumentId].day2_value_sum += (tPos?.Value || 0);
+    }
+    this.user_count++; // Note: This is user_count of *all* users, which is fine for avg.
+  }
+
+  async getResult() {
+    if (this.user_count === 0 || !this.dates.today) return {};
+    if (!this.priceMap || !this.mappings) {
+      const [priceData, mappingData] = await Promise.all([
+        loadAllPriceData(),
+        loadInstrumentMappings()
+      ]);
+      this.priceMap = priceData;
+      this.mappings = mappingData;
+    }
+
+    const finalResults = {};
+    const todayStr = this.dates.today;
+    const yesterdayStr = this.dates.yesterday;
+
+    for (const instrumentId in this.asset_values) {
+      const ticker = this.mappings.instrumentToTicker[instrumentId] || `id_${instrumentId}`;
+
+      const avg_day1_value = this.asset_values[instrumentId].day1_value_sum / this.user_count;
+      const avg_day2_value = this.asset_values[instrumentId].day2_value_sum / this.user_count;
+      const priceChangePct = getDailyPriceChange(instrumentId, yesterdayStr, todayStr, this.priceMap);
+
+      if (priceChangePct === null) continue;
+
+      const expected_day2_value = avg_day1_value * (1 + priceChangePct);
+      const net_crowd_flow_pct = avg_day2_value - expected_day2_value;
+
+      finalResults[ticker] = {
+        net_crowd_flow_pct: net_crowd_flow_pct,
+        avg_value_day1_pct: avg_day1_value,
+        avg_value_day2_pct: avg_day2_value
+      };
+    }
+    return finalResults;
+  }
+
+  reset() { /*...reset all properties...*/ }
+}
+module.exports = InProfitAssetCrowdFlow;
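
A note on usage: the per-user runner that drives these two cohort classes is not part of this diff. The sketch below is a hypothetical harness inferred from the `process()`/`getResult()` signatures above; the require path and the `portfolioPairs` shape are assumptions, not package API.

    // Hypothetical driver (not shipped in this package) for either cohort class.
    const InProfitAssetCrowdFlow = require('./in_profit_asset_crowd_flow'); // path assumed

    async function runCohortFlow(portfolioPairs, context) {
      // context is expected to carry { todayDateStr, yesterdayDateStr }.
      const calc = new InProfitAssetCrowdFlow();
      for (const { userId, today, yesterday } of portfolioPairs) {
        // One call per user; assets where the user is not in profit are skipped internally.
        calc.process(today, yesterday, userId, context);
      }
      // Resolves to { [ticker]: { net_crowd_flow_pct, avg_value_day1_pct, avg_value_day2_pct } }
      return calc.getResult();
    }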

package/calculations/meta/capital_deployment_strategy.js
@@ -0,0 +1,138 @@
+/**
+ * @fileoverview Meta-calculation (Pass 3) that analyzes "what" the crowd does
+ * following a net deposit signal. It determines if the new capital is used to
+ * buy *new* assets or *add* to existing ones.
+ */
+
+const { FieldValue } = require('@google-cloud/firestore');
+
+class CapitalDeploymentStrategy {
+  constructor() {
+    this.lookbackDays = 7;
+    this.correlationWindow = 3; // How many days after a signal to link behavior
+    this.depositSignalThreshold = -1.0; // From crowd_cash_flow_proxy
+  }
+
+  _getDateStr(baseDate, daysAgo) {
+    const date = new Date(baseDate + 'T00:00:00Z');
+    date.setUTCDate(date.getUTCDate() - daysAgo);
+    return date.toISOString().slice(0, 10);
+  }
+
+  /**
+   * @param {string} dateStr The date to run the analysis for (e.g., "2025-10-31").
+   * @param {object} dependencies The shared dependencies (db, logger).
+   * @param {object} config The computation system configuration.
+   * @returns {Promise<object|null>} The analysis result or null.
+   */
+  async process(dateStr, dependencies, config) {
+    const { db, logger } = dependencies;
+    const collection = config.resultsCollection;
+    const resultsSub = config.resultsSubcollection || 'results';
+    const compsSub = config.computationsSubcollection || 'computations';
+
+    // 1. Find the most recent deposit signal
+    let depositSignal = null;
+    let depositSignalDay = null;
+    let refsToGet = [];
+
+    for (let i = 1; i <= this.lookbackDays; i++) {
+      const checkDate = this._getDateStr(dateStr, i);
+      refsToGet.push({
+        date: checkDate,
+        key: `signal_${checkDate}`,
+        ref: db.collection(collection).doc(checkDate).collection(resultsSub).doc('capital_flow').collection(compsSub).doc('crowd-cash-flow-proxy')
+      });
+    }
+
+    const signalSnapshots = await db.getAll(...refsToGet.map(r => r.ref));
+    const dataMap = new Map();
+    signalSnapshots.forEach((snap, idx) => {
+      if (snap.exists) dataMap.set(refsToGet[idx].key, snap.data());
+    });
+
+    for (let i = 1; i <= this.lookbackDays; i++) {
+      const checkDate = this._getDateStr(dateStr, i);
+      const flowData = dataMap.get(`signal_${checkDate}`);
+      if (flowData && flowData.cash_flow_effect_proxy < this.depositSignalThreshold) {
+        depositSignal = flowData;
+        depositSignalDay = checkDate;
+        break; // Found the most recent signal
+      }
+    }
+
+    if (!depositSignal) {
+      return {
+        status: 'no_deposit_signal_found',
+        lookback_days: this.lookbackDays
+      };
+    }
+
+    // 2. Check if today is within the correlation window
+    const daysSinceSignal = (new Date(dateStr) - new Date(depositSignalDay)) / (1000 * 60 * 60 * 24);
+
+    if (daysSinceSignal <= 0 || daysSinceSignal > this.correlationWindow) {
+      return {
+        status: 'outside_correlation_window',
+        signal_day: depositSignalDay,
+        days_since_signal: daysSinceSignal
+      };
+    }
+
+    // 3. Fetch deployment data for *today*
+    // We are correlating the *past signal* with *today's action*
+    refsToGet = [
+      {
+        key: 'new_alloc',
+        ref: db.collection(collection).doc(dateStr).collection(resultsSub).doc('capital_flow').collection(compsSub).doc('new-allocation-percentage')
+      },
+      {
+        key: 're_alloc',
+        ref: db.collection(collection).doc(dateStr).collection(resultsSub).doc('capital_flow').collection(compsSub).doc('reallocation-increase-percentage')
+      }
+    ];
+
+    const deploymentSnapshots = await db.getAll(...refsToGet.map(r => r.ref));
+    const newAllocData = deploymentSnapshots[0].exists ? deploymentSnapshots[0].data() : null;
+    const reAllocData = deploymentSnapshots[1].exists ? deploymentSnapshots[1].data() : null;
+
+    // 4. Handle "day-delay" for *this* data
+    if (!newAllocData || !reAllocData) {
+      logger.log('WARN', `[CapitalDeploymentStrategy] Missing deployment data for ${dateStr}. Allowing backfill.`);
+      // This is a "same-day" meta-calc, so we return null to let backfill run
+      return null;
+    }
+
+    // 5. Calculate deployment bias
+    const newAlloc = newAllocData.average_new_allocation_percentage || 0;
+    const reAlloc = reAllocData.average_reallocation_increase_percentage || 0;
+
+    const totalDeployment = newAlloc + reAlloc;
+    let newAssetBias = 0;
+    let existingAssetBias = 0;
+
+    if (totalDeployment > 0) {
+      newAssetBias = (newAlloc / totalDeployment) * 100;
+      existingAssetBias = (reAlloc / totalDeployment) * 100;
+    }
+
+    return {
+      status: 'analysis_complete',
+      analysis_date: dateStr,
+      signal_date: depositSignalDay,
+      days_since_signal: daysSinceSignal,
+      signal_deposit_proxy_pct: Math.abs(depositSignal.cash_flow_effect_proxy),
+      deployment_new_alloc_pct: newAlloc,
+      deployment_existing_alloc_pct: reAlloc,
+      total_deployment_pct: totalDeployment,
+      new_asset_bias: newAssetBias,
+      existing_asset_bias: existingAssetBias
+    };
+  }
+
+  // Must exist for the meta-computation runner
+  async getResult() { return null; }
+  reset() {}
+}
+
+module.exports = CapitalDeploymentStrategy;
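
The deployment-bias arithmetic in step 5 is a straight proportional split of the two fetched averages. A quick check with invented numbers:

    // Invented inputs: the crowd put an average of 1.2% of portfolios into
    // brand-new positions and 3.6% into topping up existing ones.
    const newAlloc = 1.2;
    const reAlloc = 3.6;
    const totalDeployment = newAlloc + reAlloc;                   // 4.8
    const newAssetBias = (newAlloc / totalDeployment) * 100;      // 25  -> "buy new"
    const existingAssetBias = (reAlloc / totalDeployment) * 100;  // 75  -> "add to existing"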

package/calculations/meta/crowd_sharpe_ratio_proxy.js
@@ -0,0 +1,92 @@
+/**
+ * @fileoverview Meta-calculation (Pass 3) to calculate a proxy for the
+ * crowd's "Sharpe Ratio" (risk-adjusted return) on a per-asset basis.
+ * It uses the components from 'pnl_distribution_per_stock' to calculate
+ * the standard deviation of P/L, which serves as the "risk".
+ */
+
+class CrowdSharpeRatioProxy {
+  constructor() {}
+
+  /**
+   * @param {string} dateStr The date to run the analysis for (e.g., "2025-10-31").
+   * @param {object} dependencies The shared dependencies (db, logger).
+   * @param {object} config The computation system configuration.
+   * @returns {Promise<object|null>} The analysis result or null.
+   */
+  async process(dateStr, dependencies, config) {
+    const { db, logger } = dependencies;
+    const collection = config.resultsCollection;
+
+    // 1. Define dependency
+    const dependency = { category: 'pnl', computation: 'pnl-distribution-per-stock' };
+
+    // 2. Build ref and fetch
+    const docRef = db.collection(collection).doc(dateStr)
+      .collection('results').doc(dependency.category)
+      .collection('computations').doc(dependency.computation);
+
+    const snapshot = await docRef.get();
+
+    // 3. Handle the "day-delay"
+    if (!snapshot.exists) {
+      logger.log('WARN', `[CrowdSharpeRatioProxy] Missing dependency 'pnl-distribution-per-stock' for ${dateStr}. Allowing backfill.`);
+      return null; // Let backfill handle it
+    }
+
+    const data = snapshot.data();
+    const pnlDistribution = data.pnl_distribution_by_asset;
+
+    if (!pnlDistribution) {
+      logger.log('WARN', `[CrowdSharpeRatioProxy] Dependency data for ${dateStr} is empty. Skipping.`);
+      return null;
+    }
+
+    const results = {};
+
+    // 4. Calculate Sharpe Proxy for each asset
+    for (const ticker in pnlDistribution) {
+      const stats = pnlDistribution[ticker];
+      const N = stats.position_count;
+
+      // Need at least 2 data points to calculate variance
+      if (N < 2) continue;
+
+      const mean = stats.pnl_sum / N; // E(x)
+      const mean_sq = stats.pnl_sum_sq / N; // E(x^2)
+
+      const variance = mean_sq - (mean * mean);
+
+      // If variance is negative (floating point error) or zero, we can't get std_dev
+      if (variance <= 0) {
+        results[ticker] = {
+          average_pnl: mean,
+          std_dev_pnl: 0,
+          sharpe_ratio_proxy: 0,
+          position_count: N
+        };
+        continue;
+      }
+
+      const std_dev = Math.sqrt(variance); // "Risk"
+
+      // Calculate Sharpe Ratio (Return / Risk)
+      // (Assuming 0 risk-free rate)
+      const sharpe_proxy = mean / std_dev;
+
+      results[ticker] = {
+        average_pnl: mean,
+        std_dev_pnl: std_dev,
+        sharpe_ratio_proxy: sharpe_proxy,
+        position_count: N
+      };
+    }
+
+    return results;
+  }
+
+  async getResult() { return null; }
+  reset() {}
+}
+
+module.exports = CrowdSharpeRatioProxy;
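
The "risk" term comes from the one-pass variance identity Var(x) = E[x^2] - E[x]^2, computed from the `pnl_sum` / `pnl_sum_sq` accumulators the dependency document is assumed to carry; note this is the population variance (divide by N, not N - 1). A self-contained sanity check with invented P/L values:

    // Invented per-position P/L sample for one ticker.
    const pnls = [2, -1, 4, 3];
    const N = pnls.length;
    const pnl_sum = pnls.reduce((a, x) => a + x, 0);        // 8
    const pnl_sum_sq = pnls.reduce((a, x) => a + x * x, 0); // 30
    const mean = pnl_sum / N;                // 2    (E[x])
    const mean_sq = pnl_sum_sq / N;          // 7.5  (E[x^2])
    const variance = mean_sq - mean * mean;  // 3.5
    const sharpe_proxy = mean / Math.sqrt(variance); // ≈ 1.07, with a 0 risk-free rate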

package/calculations/meta/profit_cohort_divergence.js
@@ -0,0 +1,92 @@
+/**
+ * @fileoverview Meta-calculation (Pass 3) that correlates the asset flow
+ * of the "In Profit" cohort vs. the "In Loss" cohort to find
+ * powerful divergence signals (e.g., profit-taking, capitulation).
+ */
+
+class ProfitCohortDivergence {
+  constructor() {
+    this.flowThreshold = 0.5; // Min abs flow % to be considered a signal
+  }
+
+  /**
+   * @param {string} dateStr The date to run the analysis for (e.g., "2025-10-31").
+   * @param {object} dependencies The shared dependencies (db, logger).
+   * @param {object} config The computation system configuration.
+   * @returns {Promise<object|null>} The analysis result or null.
+   */
+  async process(dateStr, dependencies, config) {
+    const { db, logger } = dependencies;
+    const collection = config.resultsCollection;
+    const resultsSub = config.resultsSubcollection || 'results';
+    const compsSub = config.computationsSubcollection || 'computations';
+
+    // 1. Define dependencies
+    const refsToGet = [
+      {
+        key: 'profit_flow',
+        ref: db.collection(collection).doc(dateStr).collection(resultsSub).doc('behavioural').collection(compsSub).doc('in-profit-asset-crowd-flow')
+      },
+      {
+        key: 'loss_flow',
+        ref: db.collection(collection).doc(dateStr).collection(resultsSub).doc('behavioural').collection(compsSub).doc('in-loss-asset-crowd-flow')
+      }
+    ];
+
+    // 2. Fetch
+    const snapshots = await db.getAll(...refsToGet.map(r => r.ref));
+    const profitFlowData = snapshots[0].exists ? snapshots[0].data() : null;
+    const lossFlowData = snapshots[1].exists ? snapshots[1].data() : null;
+
+    // 3. Handle "day-delay"
+    if (!profitFlowData || !lossFlowData) {
+      logger.log('WARN', `[ProfitCohortDivergence] Missing cohort flow data for ${dateStr}. Allowing backfill.`);
+      return null; // Let backfill handle it
+    }
+
+    const results = {};
+    const allTickers = new Set([...Object.keys(profitFlowData), ...Object.keys(lossFlowData)]);
+
+    // 4. Correlate
+    for (const ticker of allTickers) {
+      const profitFlow = profitFlowData[ticker]?.net_crowd_flow_pct || 0;
+      const lossFlow = lossFlowData[ticker]?.net_crowd_flow_pct || 0;
+
+      const profitSells = profitFlow <= -this.flowThreshold;
+      const profitBuys = profitFlow >= this.flowThreshold;
+      const lossSells = lossFlow <= -this.flowThreshold;
+      const lossBuys = lossFlow >= this.flowThreshold;
+
+      let status = 'No Divergence';
+      let detail = 'Both cohorts are acting similarly or flow is insignificant.';
+
+      if (profitSells && lossBuys) {
+        status = 'Profit Taking';
+        detail = 'The "in-profit" cohort is selling to the "in-loss" cohort, who are averaging down.';
+      } else if (profitBuys && lossSells) {
+        status = 'Capitulation';
+        detail = 'The "in-loss" cohort is panic-selling, and the "in-profit" cohort is buying the dip.';
+      } else if (profitBuys && lossBuys) {
+        status = 'High Conviction Buy';
+        detail = 'All cohorts are net-buying.';
+      } else if (profitSells && lossSells) {
+        status = 'High Conviction Sell';
+        detail = 'All cohorts are net-selling.';
+      }
+
+      results[ticker] = {
+        status: status,
+        detail: detail,
+        profit_cohort_flow: profitFlow,
+        loss_cohort_flow: lossFlow
+      };
+    }
+
+    return results;
+  }
+
+  async getResult() { return null; }
+  reset() {}
+}
+
+module.exports = ProfitCohortDivergence;
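
The four labels reduce to a sign-and-threshold truth table on the two cohort flows. This standalone restatement (a hypothetical helper, not shipped in the package) mirrors the branch order above:

    function classifyDivergence(profitFlow, lossFlow, threshold = 0.5) {
      if (profitFlow <= -threshold && lossFlow >= threshold) return 'Profit Taking';
      if (profitFlow >= threshold && lossFlow <= -threshold) return 'Capitulation';
      if (profitFlow >= threshold && lossFlow >= threshold) return 'High Conviction Buy';
      if (profitFlow <= -threshold && lossFlow <= -threshold) return 'High Conviction Sell';
      return 'No Divergence';
    }

    console.log(classifyDivergence(-0.8, 0.6));  // "Profit Taking"
    console.log(classifyDivergence(0.7, -1.2));  // "Capitulation"
    console.log(classifyDivergence(0.2, -0.1));  // "No Divergence" (below threshold)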

package/calculations/meta/social_flow_correlation.js
@@ -0,0 +1,123 @@
+/**
+ * @fileoverview Meta-calculation (Pass 3) to correlate daily social sentiment with
+ * the actual crowd asset flow. It identifies divergences between what the crowd
+ * says and what they do.
+ */
+
+class SocialFlowCorrelation {
+  constructor() {
+    // Define sensitivity thresholds
+    this.bullishSentimentThreshold = 70.0; // % ratio
+    this.bearishSentimentThreshold = 30.0; // % ratio
+    this.positiveFlowThreshold = 0.5; // net_crowd_flow_pct
+    this.negativeFlowThreshold = -0.5; // net_crowd_flow_pct
+  }
+
+  /**
+   * @param {string} dateStr The date to run the analysis for (e.g., "2025-10-31").
+   * @param {object} dependencies The shared dependencies (db, logger).
+   * @param {object} config The computation system configuration.
+   * @returns {Promise<object|null>} The analysis result or null.
+   */
+  async process(dateStr, dependencies, config) {
+    const { db, logger } = dependencies;
+    const collection = config.resultsCollection;
+
+    // 1. Define dependencies for the *same day*
+    const dependenciesToFetch = [
+      { category: 'socialPosts', computation: 'social_sentiment_aggregation' },
+      { category: 'behavioural', computation: 'asset_crowd_flow' }
+    ];
+
+    // 2. Build refs and fetch
+    const refs = dependenciesToFetch.map(d =>
+      db.collection(collection).doc(dateStr)
+        .collection('results').doc(d.category)
+        .collection('computations').doc(d.computation)
+    );
+
+    const snapshots = await db.getAll(...refs);
+
+    // 3. Check for data and handle the "day-delay"
+    const socialData = snapshots[0].exists ? snapshots[0].data() : null;
+    const flowData = snapshots[1].exists ? snapshots[1].data() : null;
+
+    if (!socialData || !flowData) {
+      logger.log('WARN', `[SocialFlowCorrelation] Missing dependency data for ${dateStr}. Allowing backfill.`);
+      // Return null. This stops execution and does not save an empty doc.
+      // The Pass 3 backfill will pick this up tomorrow.
+      return null;
+    }
+
+    // 4. If data exists, perform the correlation
+    const sentimentMap = socialData.tickerSentiment || {};
+    const correlationResults = {};
+
+    // Use all tickers from the flow data as the primary loop
+    for (const ticker in flowData) {
+      if (!flowData[ticker] || typeof flowData[ticker].net_crowd_flow_pct === 'undefined') {
+        continue;
+      }
+
+      const flow = flowData[ticker].net_crowd_flow_pct;
+      const sentiment = sentimentMap[ticker]?.sentimentRatio; // e.g., 85.0 or 22.5
+
+      if (typeof sentiment === 'undefined') {
+        // No sentiment found, just record flow
+        correlationResults[ticker] = {
+          status: 'no_social_sentiment',
+          net_crowd_flow_pct: flow
+        };
+        continue;
+      }
+
+      // --- The "Jaw-Drop" Logic ---
+      if (sentiment >= this.bullishSentimentThreshold && flow <= this.negativeFlowThreshold) {
+        // Crowd is very bullish but is actively selling
+        correlationResults[ticker] = {
+          status: 'Bullish Divergence',
+          detail: 'Crowd is publicly bullish but is net-selling the asset.',
+          sentiment_ratio: sentiment,
+          net_crowd_flow_pct: flow
+        };
+      } else if (sentiment <= this.bearishSentimentThreshold && flow >= this.positiveFlowThreshold) {
+        // Crowd is very bearish but is actively buying
+        correlationResults[ticker] = {
+          status: 'Bearish Divergence',
+          detail: 'Crowd is publicly bearish but is net-buying the asset.',
+          sentiment_ratio: sentiment,
+          net_crowd_flow_pct: flow
+        };
+      } else if (sentiment >= this.bullishSentimentThreshold && flow >= this.positiveFlowThreshold) {
+        // Crowd is bullish and is buying
+        correlationResults[ticker] = {
+          status: 'High Conviction Buy',
+          sentiment_ratio: sentiment,
+          net_crowd_flow_pct: flow
+        };
+      } else if (sentiment <= this.bearishSentimentThreshold && flow <= this.negativeFlowThreshold) {
+        // Crowd is bearish and is selling
+        correlationResults[ticker] = {
+          status: 'High Conviction Sell',
+          sentiment_ratio: sentiment,
+          net_crowd_flow_pct: flow
+        };
+      } else {
+        // No strong signal or divergence
+        correlationResults[ticker] = {
+          status: 'No Clear Signal',
+          sentiment_ratio: sentiment,
+          net_crowd_flow_pct: flow
+        };
+      }
+    }
+
+    return correlationResults;
+  }
+
+  // Must exist for the meta-computation runner
+  async getResult() { return null; }
+  reset() {}
+}
+
+module.exports = SocialFlowCorrelation;
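
The say-vs-do logic is a two-axis quadrant check: sentiment against the 70/30 bands, flow against the ±0.5 bands. This hypothetical condensation (not shipped in the package) mirrors the constructor thresholds and shows a few invented inputs:

    function labelSayVsDo(sentiment, flow) {
      const bullish = sentiment >= 70.0, bearish = sentiment <= 30.0;
      const buying = flow >= 0.5, selling = flow <= -0.5;
      if (bullish && selling) return 'Bullish Divergence'; // talking it up, selling it
      if (bearish && buying) return 'Bearish Divergence';  // talking it down, buying it
      if (bullish && buying) return 'High Conviction Buy';
      if (bearish && selling) return 'High Conviction Sell';
      return 'No Clear Signal';
    }

    console.log(labelSayVsDo(85.0, -0.9)); // "Bullish Divergence"
    console.log(labelSayVsDo(22.5, 0.7));  // "Bearish Divergence"
    console.log(labelSayVsDo(55.0, 1.4));  // "No Clear Signal" (sentiment in the neutral band)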