aiden-shared-calculations-unified 1.0.95 → 1.0.97
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/calculations/capitulation/asset-volatility-estimator.js +1 -2
- package/calculations/capitulation/retail-capitulation-risk-forecast.js +1 -2
- package/calculations/core/Insights-total-long-per-stock +56 -0
- package/calculations/core/insights-daily-bought-vs-sold-count.js +74 -0
- package/calculations/core/insights-daily-ownership-delta.js +70 -0
- package/calculations/core/insights-sentimet-per-stock.js +68 -0
- package/calculations/core/insights-total-long-per-sector +73 -0
- package/calculations/core/insights-total-positions-held.js +49 -0
- package/calculations/ghost-book/cost-basis-density.js +1 -2
- package/calculations/ghost-book/liquidity-vacuum.js +4 -4
- package/calculations/ghost-book/retail-gamma-exposure.js +0 -1
- package/calculations/helix/winner-loser-flow.js +1 -1
- package/calculations/predicative-alpha/cognitive-dissonance.js +1 -2
- package/calculations/predicative-alpha/diamond-hand-fracture.js +1 -2
- package/calculations/predicative-alpha/mimetic-latency.js +1 -2
- package/package.json +1 -1
- package/calculations/legacy/activity_by_pnl_status.js +0 -119
- package/calculations/legacy/asset_crowd_flow.js +0 -163
- package/calculations/legacy/capital_deployment_strategy.js +0 -108
- package/calculations/legacy/capital_liquidation_performance.js +0 -139
- package/calculations/legacy/capital_vintage_performance.js +0 -136
- package/calculations/legacy/cash-flow-deployment.js +0 -144
- package/calculations/legacy/cash-flow-liquidation.js +0 -146
- package/calculations/legacy/crowd-cash-flow-proxy.js +0 -128
- package/calculations/legacy/crowd_conviction_score.js +0 -261
- package/calculations/legacy/crowd_sharpe_ratio_proxy.js +0 -137
- package/calculations/legacy/daily_asset_activity.js +0 -128
- package/calculations/legacy/daily_user_activity_tracker.js +0 -182
- package/calculations/legacy/deposit_withdrawal_percentage.js +0 -125
- package/calculations/legacy/diversification_pnl.js +0 -115
- package/calculations/legacy/drawdown_response.js +0 -137
- package/calculations/legacy/dumb-cohort-flow.js +0 -238
- package/calculations/legacy/gain_response.js +0 -137
- package/calculations/legacy/historical_performance_aggregator.js +0 -85
- package/calculations/legacy/in_loss_asset_crowd_flow.js +0 -168
- package/calculations/legacy/in_profit_asset_crowd_flow.js +0 -168
- package/calculations/legacy/negative_expectancy_cohort_flow.js +0 -232
- package/calculations/legacy/new_allocation_percentage.js +0 -98
- package/calculations/legacy/paper_vs_diamond_hands.js +0 -107
- package/calculations/legacy/position_count_pnl.js +0 -120
- package/calculations/legacy/positive_expectancy_cohort_flow.js +0 -232
- package/calculations/legacy/profit_cohort_divergence.js +0 -115
- package/calculations/legacy/profitability_migration.js +0 -104
- package/calculations/legacy/reallocation_increase_percentage.js +0 -104
- package/calculations/legacy/risk_appetite_change.js +0 -97
- package/calculations/legacy/sector_rotation.js +0 -117
- package/calculations/legacy/shark_attack_signal.js +0 -112
- package/calculations/legacy/smart-cohort-flow.js +0 -238
- package/calculations/legacy/smart-dumb-divergence-index.js +0 -143
- package/calculations/legacy/smart_dumb_divergence_index_v2.js +0 -138
- package/calculations/legacy/smart_money_flow.js +0 -198
- package/calculations/legacy/social-predictive-regime-state.js +0 -102
- package/calculations/legacy/social-topic-driver-index.js +0 -147
- package/calculations/legacy/social-topic-predictive-potential.js +0 -461
- package/calculations/legacy/social_flow_correlation.js +0 -112
- package/calculations/legacy/speculator_adjustment_activity.js +0 -103
- package/calculations/legacy/strategy-performance.js +0 -265
- package/calculations/legacy/tsl_effectiveness.js +0 -85
- package/calculations/legacy/user-investment-profile.js +0 -313
- package/calculations/legacy/user_expectancy_score.js +0 -106
- package/calculations/legacy/user_profitability_tracker.js +0 -131
|
/**
 * @fileoverview Calculation (Pass 5) for the social topic driver index.
 *
 * Answers: "Which social media topics are driving the most asset price
 * movement or capital flow?"
 *
 * Consumes the Pass-5 output of 'social-topic-predictive-potential' and
 * aggregates each topic's `predictivePotential` across every ticker into
 * a single global `driver_score`.
 */
class SocialTopicDriverIndex {
  constructor() {
    // Stateless calculation: no per-user accumulation is required.
  }

  /**
   * Describes the shape of this calculation's output.
   * @returns {object} JSON Schema object
   */
  static getSchema() {
    const topicSchema = {
      "type": "object",
      "properties": {
        "topic": { "type": "string" },
        "driver_score": { "type": "number" },
        // Legacy fields retained for schema compatibility; the current
        // logic always emits them as null.
        "correlation_flow_30d": { "type": ["number", "null"] },
        "correlation_price_30d": { "type": ["number", "null"] }
      },
      "required": ["topic", "driver_score"]
    };

    return {
      "type": "object",
      "description": "Ranks social media topics by their 'driver_score' (predictive potential for flow and price).",
      "properties": {
        "top_driving_topics": {
          "type": "array",
          "description": "Top 5 topics driving market activity.",
          "items": topicSchema
        },
        "all_topics": {
          "type": "array",
          "description": "Full list of all topics and their driver scores.",
          "items": topicSchema
        }
      },
      "required": ["top_driving_topics", "all_topics"]
    };
  }

  /**
   * Static dependency declaration for the computation scheduler.
   * @returns {string[]} Names of upstream calculations (Pass 5).
   */
  static getDependencies() {
    return [
      'social-topic-predictive-potential' // Pass 5
    ];
  }

  process() {
    // Intentionally empty; all work happens in getResult().
  }

  /**
   * Aggregates each topic's 'predictivePotential' across all tickers
   * reported by 'social-topic-predictive-potential' and ranks topics by
   * the resulting average ("driver score").
   *
   * @param {object} fetchedDependencies Map of dependency name → result.
   * @returns {{top_driving_topics: object[], all_topics: object[]}}
   */
  getResult(fetchedDependencies) {
    const empty = {
      top_driving_topics: [],
      all_topics: []
    };

    // The dependency's result nests the per-ticker signals under
    // 'daily_topic_signals'.
    const signals = fetchedDependencies['social-topic-predictive-potential']?.daily_topic_signals;
    if (!signals || Object.keys(signals).length === 0) {
      return empty;
    }

    // topic name -> { totalScore, count }, accumulated over every ticker.
    const totals = new Map();

    for (const tickerEntry of Object.values(signals)) {
      // A ticker reports bullish and bearish topic lists; both contribute.
      const combined = [
        ...(tickerEntry.topPredictiveBullishTopics || []),
        ...(tickerEntry.topPredictiveBearishTopics || [])
      ];

      for (const entry of combined) {
        const bucket = totals.get(entry.topic) ?? { totalScore: 0, count: 0 };
        // 'predictivePotential' comes straight from the dependency.
        bucket.totalScore += entry.predictivePotential || 0;
        bucket.count += 1;
        totals.set(entry.topic, bucket);
      }
    }

    const ranked = [];
    for (const [topic, { totalScore, count }] of totals.entries()) {
      if (count === 0) continue;

      ranked.push({
        topic: topic,
        // Average driver score across all tickers that mentioned the topic.
        driver_score: totalScore / count,
        // Legacy fields kept null to satisfy the published schema.
        correlation_flow_30d: null,
        correlation_price_30d: null
      });
    }

    // Highest driver score first.
    ranked.sort((a, b) => b.driver_score - a.driver_score);

    return {
      top_driving_topics: ranked.slice(0, 5),
      all_topics: ranked
    };
  }

  reset() {
    // Stateless: nothing to clear.
  }
}
|
|
146
|
-
|
|
147
|
-
module.exports = SocialTopicDriverIndex;
|
|
@@ -1,461 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* @fileoverview NEW CALCULATION (PASS 3 - META)
|
|
3
|
-
* This is the "Quant" signal discovery engine.
|
|
4
|
-
*
|
|
5
|
-
* This is a stateful, rolling calculation that implements several
|
|
6
|
-
* critical optimizations based on production-level review:
|
|
7
|
-
* 1. It uses `p-limit` to run concurrent transactions safely.
|
|
8
|
-
* 2. It returns state directly from transactions, avoiding costly re-reads.
|
|
9
|
-
* 3. It uses a forward-looking price helper (`_findPriceForward`)
|
|
10
|
-
* to be resilient to market holidays and missing data.
|
|
11
|
-
* 4. All logic is factored into testable helper functions.
|
|
12
|
-
*
|
|
13
|
-
* --- V3 MODIFICATION (Quant Upgrade) ---
|
|
14
|
-
* 5. Implements RECENCY WEIGHTING on the correlation calculation.
|
|
15
|
-
* Signals from recent days are given exponentially more weight
|
|
16
|
-
* than signals from 90 days ago, per standard quant practice.
|
|
17
|
-
* This is achieved by upgrading _calculatePearson to
|
|
18
|
-
* _calculateWeightedPearson.
|
|
19
|
-
*/
|
|
20
|
-
|
|
21
|
-
const { FieldValue } = require('@google-cloud/firestore');
|
|
22
|
-
// p-limit is a non-standard-lib, but is included in the bulltrackers-module
|
|
23
|
-
// and available via the root `index.js` dependency injection.
|
|
24
|
-
// We will assume it's passed in via dependencies.calculationUtils.pLimit
|
|
25
|
-
const pLimit = require('p-limit');
|
|
26
|
-
|
|
27
|
-
// Import all required utils
|
|
28
|
-
const { loadAllPriceData, getDailyPriceChange } = require('../../utils/price_data_provider');
|
|
29
|
-
const { loadInstrumentMappings } = require('../../utils/sector_mapping_provider');
|
|
30
|
-
|
|
31
|
-
// --- CONFIGURATION ---
// Firestore collection that holds the per-ticker rolling topic statistics.
const SHARD_COLLECTION_NAME = 'social_topic_rolling_stats';
// Length of the rolling window of daily signals kept per topic.
const ROLLING_HISTORY_DAYS = 90;
const MIN_SAMPLE_COUNT = 12; // Min samples needed to trust a correlation
const CORR_THRESHOLD = 0.25; // Min abs correlation to be considered "stable"
const FORWARD_WINDOWS = [1, 3, 7, 21]; // Forward-return horizons: [1d, 3d, 7d, 21d]
const MAX_CONCURRENT_TRANSACTIONS = 50; // Max parallel Firestore transactions
const MAX_LOOKAHEAD_DAYS = 7; // How far to scan for a non-holiday price
const WEIGHTING_DECAY_K = 3.0; // Decay factor for recency weighting (e.g., k=3.0)
|
|
40
|
-
|
|
41
|
-
// --- STATS HELPER ---
/**
 * Weighted Pearson correlation between two parallel arrays.
 *
 * Only index positions where both values are finite numbers and the weight
 * is strictly positive contribute; null entries are skipped gracefully.
 *
 * @param {Array<number|null>} vecX First series.
 * @param {Array<number|null>} vecY Second series (paired by index).
 * @param {number[]} vecWeights Per-index weights (recency weights).
 * @returns {{value: number, samples: number}} Correlation clamped to [-1, 1]
 *   plus the number of usable pairs; value is 0 when fewer than 2 usable
 *   pairs exist or a series has zero weighted variance.
 */
function _calculateWeightedPearson(vecX, vecY, vecWeights) {
  // Collect only the usable (x, y, w) triples.
  const usable = [];
  for (let i = 0; i < vecX.length; i++) {
    const x = vecX[i];
    const y = vecY[i];
    const w = vecWeights[i];
    if (x !== null && y !== null && isFinite(x) && isFinite(y) && w > 0) {
      usable.push({ x, y, w });
    }
  }

  const sumW = usable.reduce((acc, p) => acc + p.w, 0);

  // A correlation needs at least two points and a positive total weight.
  if (sumW === 0 || usable.length < 2) {
    return { value: 0, samples: usable.length };
  }

  // Weighted means of both series.
  const meanX = usable.reduce((acc, p) => acc + p.w * p.x, 0) / sumW;
  const meanY = usable.reduce((acc, p) => acc + p.w * p.y, 0) / sumW;

  // Weighted covariance and variances (the sumW factors cancel in the
  // final ratio, so they are omitted throughout).
  let cov = 0;
  let varX = 0;
  let varY = 0;
  for (const { x, y, w } of usable) {
    cov += w * (x - meanX) * (y - meanY);
    varX += w * (x - meanX) * (x - meanX);
    varY += w * (y - meanY) * (y - meanY);
  }

  const denom = Math.sqrt(varX * varY);
  if (denom === 0) {
    return { value: 0, samples: usable.length };
  }

  // Clamp to [-1, 1] to absorb floating-point error.
  const corr = cov / denom;
  return { value: Math.max(-1, Math.min(1, corr)), samples: usable.length };
}
|
|
102
|
-
|
|
103
|
-
// --- DATE HELPERS ---
/**
 * Returns the YYYY-MM-DD string for `baseDate` shifted by `daysOffset`
 * whole days in UTC.
 *
 * @param {Date|string} baseDate Date object or ISO date string.
 * @param {number} daysOffset Days to add (negative to go back); callers
 *   pass integers — fractional offsets are not supported.
 * @returns {string} ISO date (YYYY-MM-DD).
 */
function _getDateStr(baseDate, daysOffset) {
  const MS_PER_DAY = 24 * 60 * 60 * 1000;
  // UTC has no DST, so day arithmetic in milliseconds is exact.
  const shifted = new Date(new Date(baseDate).getTime() + daysOffset * MS_PER_DAY);
  return shifted.toISOString().slice(0, 10);
}
|
|
109
|
-
|
|
110
|
-
// --- PRICE HELPERS ---
/**
 * Resiliently resolves a price for `dateStr` by scanning *backward* from
 * that date until a positive price is found.
 *
 * NOTE(review): this scans MAX_LOOKAHEAD_DAYS days (`step <
 * MAX_LOOKAHEAD_DAYS`), whereas _findPriceForward scans
 * MAX_LOOKAHEAD_DAYS + 1 days (`<=`) — confirm the asymmetry is intentional.
 *
 * @param {string} instrumentId Instrument key into the price map.
 * @param {string} dateStr ISO date (YYYY-MM-DD) to start from.
 * @param {object} priceMap instrumentId -> { dateStr -> price }.
 * @returns {number|null} The last available positive price, or null.
 */
function _findPrice(instrumentId, dateStr, priceMap) {
  const history = priceMap ? priceMap[instrumentId] : null;
  if (!history) return null;

  const cursor = new Date(dateStr + 'T00:00:00Z');

  for (let step = 0; step < MAX_LOOKAHEAD_DAYS; step++) {
    const key = cursor.toISOString().slice(0, 10);
    const candidate = history[key];

    // Only a strictly positive, present price counts.
    if (candidate !== undefined && candidate !== null && candidate > 0) {
      return candidate;
    }
    cursor.setUTCDate(cursor.getUTCDate() - 1);
  }
  return null;
}
|
|
130
|
-
|
|
131
|
-
/**
 * Resiliently resolves the next available price *on or after* `dateStr`,
 * scanning forward to skip weekends and market holidays.
 *
 * @param {string} instrumentId Instrument key into the price map.
 * @param {string} dateStr ISO date (YYYY-MM-DD) to start from.
 * @param {object} priceMap instrumentId -> { dateStr -> price }.
 * @returns {number|null} First positive price found within the lookahead
 *   window (MAX_LOOKAHEAD_DAYS + 1 days, inclusive), or null.
 */
function _findPriceForward(instrumentId, dateStr, priceMap) {
  const history = priceMap ? priceMap[instrumentId] : null;
  if (!history) return null;

  const cursor = new Date(dateStr + 'T00:00:00Z');

  for (let step = 0; step <= MAX_LOOKAHEAD_DAYS; step++) {
    const key = cursor.toISOString().slice(0, 10);
    const candidate = history[key];

    // Only a strictly positive, present price counts.
    if (candidate !== undefined && candidate !== null && candidate > 0) {
      return candidate;
    }
    cursor.setUTCDate(cursor.getUTCDate() + 1);
  }
  return null;
}
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
/**
 * Stateful Pass-3 "Quant" signal-discovery engine.
 *
 * For each ticker it maintains a rolling (90-day) history of per-topic
 * social signals in a Firestore shard collection, backfills forward
 * returns as prices become available, and recomputes recency-weighted
 * correlations between topic sentiment and forward returns.
 */
class SocialTopicPredictivePotentialIndex {

  /**
   * Static dependency declaration for the computation scheduler.
   * @returns {string[]}
   */
  static getDependencies() {
    // --- FIX 1: Changed from 'social-topic-driver-index' to break the cycle ---
    return ['social-topic-sentiment-matrix'];
  }

  constructor() {
    this.priceMap = null;          // instrumentId -> { dateStr -> price }, lazily loaded
    this.tickerToIdMap = null;     // ticker symbol -> instrumentId, lazily loaded
    this.dependenciesLoaded = false;
    // Concurrency limiter for Firestore transactions
    this.pLimit = pLimit(MAX_CONCURRENT_TRANSACTIONS);
  }

  /**
   * Loads price data and instrument mappings once per instance and builds
   * the ticker -> instrumentId reverse map.
   * @param {object} calculationUtils Injected utils exposing the loaders.
   */
  async _loadDependencies(calculationUtils) {
    if (this.dependenciesLoaded) return;

    const [priceData, mappings] = await Promise.all([
      calculationUtils.loadAllPriceData(),
      calculationUtils.loadInstrumentMappings()
    ]);

    this.priceMap = priceData;
    this.tickerToIdMap = {};
    if (mappings && mappings.instrumentToTicker) {
      // Invert instrumentId -> ticker into ticker -> instrumentId.
      for (const [id, ticker] of Object.entries(mappings.instrumentToTicker)) {
        this.tickerToIdMap[ticker] = id;
      }
    }
    this.dependenciesLoaded = true;
  }

  /**
   * @param {string} dateStr The date to run the analysis for (e.g., "2025-11-05").
   * @param {object} dependencies The shared dependencies (db, logger, calculationUtils).
   * @param {object} config The computation system configuration.
   * @param {object} fetchedDependencies In-memory results from Pass 2.
   * @returns {Promise<object|null>} The analysis result or null.
   */
  async process(dateStr, dependencies, config, fetchedDependencies) {
    const { db, logger, calculationUtils } = dependencies;

    // 1. Load all dependencies
    // pLimit is not in calculationUtils by default, so we'll use our own
    // If it were, we'd use: this.pLimit = calculationUtils.pLimit(MAX_CONCURRENT_TRANSACTIONS);
    await this._loadDependencies(calculationUtils);

    // --- FIX 2: Read from the correct dependency ---
    const todaySignals = fetchedDependencies['social-topic-sentiment-matrix'];

    if (!todaySignals || Object.keys(todaySignals).length === 0) {
      // --- FIX 2.1: Updated log message ---
      logger.log('WARN', `[SocialTopicPredictive] Missing or empty dependency 'social-topic-sentiment-matrix' for ${dateStr}. Skipping.`);
      return null;
    }

    if (!this.priceMap || !this.tickerToIdMap) {
      logger.log('ERROR', `[SocialTopicPredictive] Price map or Ticker map failed to load. Aborting.`);
      return null;
    }

    // --- Prepare final output objects ---
    const shardedData = {};
    const dailyOutput = {};

    const allTickers = Object.keys(todaySignals);

    // 2. Run all ticker updates in parallel, limited by pLimit
    const transactionPromises = allTickers.map(ticker =>
      this.pLimit(() => this._processTickerTransaction(
        ticker,
        dateStr,
        todaySignals[ticker] || {},
        dependencies
      ))
    );

    const txResults = await Promise.all(transactionPromises);

    // 3. Collect results from transactions (FIX A)
    for (const res of txResults) {
      if (!res) continue; // Transaction may have failed and returned null
      shardedData[res.ticker] = res.state;
      dailyOutput[res.ticker] = res.dailyOutputForTicker;
    }

    // 4. Return both the state (for sharded write) and daily signal (for API)
    return {
      sharded_social_stats: { [SHARD_COLLECTION_NAME]: shardedData },
      daily_topic_signals: dailyOutput
    };
  }

  /**
   * This is the core logic, run for a single ticker *inside* a transaction.
   * It reads, modifies, and writes state for one ticker, then returns
   * the new state and daily signal.
   *
   * @param {string} ticker Ticker symbol being processed.
   * @param {string} dateStr Processing date (YYYY-MM-DD).
   * @param {object} todaySignal Today's signal object for this ticker.
   * @param {object} dependencies Shared dependencies (db, logger).
   * @returns {Promise<{ticker: string, state: object, dailyOutputForTicker: object}|null>}
   *   Null when the ticker is unmapped, has no price, or the transaction fails.
   */
  async _processTickerTransaction(ticker, dateStr, todaySignal, dependencies) {
    const { db, logger } = dependencies;

    try {
      return await db.runTransaction(async (transaction) => {
        const instrumentId = this.tickerToIdMap[ticker];
        if (!instrumentId) return null;

        const todayPrice = _findPrice(instrumentId, dateStr, this.priceMap);
        if (todayPrice === null) return null; // Cannot update returns

        // --- 4a. Load State ---
        const docRef = db.collection(SHARD_COLLECTION_NAME).doc(ticker);
        const doc = await transaction.get(docRef);
        const state = doc.exists ? doc.data() : {};

        // --- 4b. Update Forward Returns (Factored Helper) ---
        this._updateForwardReturns(state, instrumentId, dateStr, todayPrice, this.priceMap);

        // --- 4c. Add New Signals (Factored Helper) ---
        // We assume 'todaySignal' (from social-topic-sentiment-matrix)
        // has an 'allDrivers' property.
        this._addNewSignals(state, todaySignal.allDrivers || [], dateStr);

        // --- 4d. Recalculate Correlations (Factored Helper) ---
        const dailyOutputForTicker = this._recalculateAllTopics(state, logger);

        // --- 4e. Save State (FIX 3.2: Use merge) ---
        transaction.set(docRef, state, { merge: true });

        // --- 4f. Return (FIX A) ---
        return { ticker, state, dailyOutputForTicker };
      });
    } catch (error) {
      logger.log('ERROR', `[SocialTopicPredictive] Transaction failed for ticker ${ticker}`, { err: error.message, stack: error.stack });
      return null; // Return null on transaction failure
    }
  }

  /**
   * Helper to update past forward-return windows.
   * Modifies the `state` object in-place: fills each still-null
   * `fwd_{N}d` entry once its target date has passed and a price exists.
   * NOTE(review): `todayPrice` is accepted but not read here — the
   * forward price is re-resolved per window via _findPriceForward.
   */
  _updateForwardReturns(state, instrumentId, dateStr, todayPrice, priceMap) {
    for (const topic in state) {
      if (!state[topic].rolling_90d_history) continue;

      for (const historyEntry of state[topic].rolling_90d_history) {
        const signalPrice = _findPrice(instrumentId, historyEntry.date, priceMap);
        if (signalPrice === null) continue;

        // FIX D: Check all windows
        for (const window of FORWARD_WINDOWS) {
          // Skip if already filled (entries are initialized to null)
          if (historyEntry[`fwd_${window}d`] !== null) continue;

          const targetDateStr = _getDateStr(new Date(historyEntry.date + 'T00:00:00Z'), window);

          // Not yet time to fill this window
          if (dateStr < targetDateStr) continue;

          // FIX C: Use _findPriceForward
          const targetPrice = _findPriceForward(instrumentId, targetDateStr, priceMap);

          if (targetPrice !== null) {
            // Simple forward return over the window.
            historyEntry[`fwd_${window}d`] = (targetPrice / signalPrice) - 1;
          }
        }
      }
    }
  }

  /**
   * Helper to add the new day's signals to the state.
   * Modifies the `state` object in-place and prunes each topic's history
   * to the last ROLLING_HISTORY_DAYS entries.
   */
  _addNewSignals(state, newTickerSignals, dateStr) {
    for (const newSignal of newTickerSignals) {
      // FIX B: Normalize topic key
      const topic = (newSignal.topic || 'untagged').toLowerCase();

      if (!state[topic]) {
        state[topic] = {
          rolling_90d_history: [],
          correlations: {},
          predictivePotential: 0,
          avgDailyConviction: 0
        };
      }

      state[topic].rolling_90d_history.push({
        date: dateStr,
        sentimentScore: newSignal.sentimentScore,
        conviction: newSignal.convictionScore,
        // Forward returns start unknown; backfilled by _updateForwardReturns.
        fwd_1d: null, fwd_3d: null, fwd_7d: null, fwd_21d: null,
      });

      // Prune history
      state[topic].rolling_90d_history = state[topic].rolling_90d_history.slice(-ROLLING_HISTORY_DAYS);
    }
  }

  /**
   * Helper to recalculate all stats for a ticker's topics.
   * Modifies the `state` object in-place.
   * NOTE(review): assumes each loaded topic has a `correlations` object —
   * documents written by older versions might lack it; verify.
   * @returns {object} The dailyOutputForTicker
   */
  _recalculateAllTopics(state, logger) {
    const predictiveTopics = [];

    for (const topic in state) {
      const history = state[topic].rolling_90d_history;
      if (!history || history.length === 0) continue;

      // FIX G: Warn if doc size is at risk
      if (history.length > 500) { // arbitrary high number
        logger.log('WARN', `[SocialTopicPredictive] Topic "${topic}" has ${history.length} history entries. May approach doc size limit.`);
      }

      // FIX E: Ensure sort order
      history.sort((a, b) => new Date(a.date) - new Date(b.date));

      // --- START V3 MODIFICATION: RECENCY WEIGHTING ---
      const N = history.length;
      if (N === 0) continue;

      // Create recency weights (exponential decay)
      // Newest item (i=N-1) gets weight 1.0 (age=0)
      // Oldest item (i=0) gets weight exp(-K) (age=N-1)
      const vecWeights = history.map((h, i) => {
        const age_in_days = N - 1 - i; // 0 for newest, N-1 for oldest
        // Normalize age by history length so K is consistent
        return Math.exp(-WEIGHTING_DECAY_K * age_in_days / N);
      });
      // --- END V3 MODIFICATION ---

      const vecSentiment = history.map(h => h.sentimentScore);
      // NOTE(review): vecConviction is built but never read below
      // (avgConviction is recomputed via reduce) — dead data, confirm.
      const vecConviction = history.map(h => h.conviction); // Used for PP score

      let totalPP = 0;
      let windowsCounted = 0;
      let totalStableWindows = 0;

      for (const window of FORWARD_WINDOWS) {
        const vecForwardReturn = history.map(h => h[`fwd_${window}d`]);

        // --- V3 MODIFICATION ---
        // Use the new weighted Pearson calculation
        const corr = _calculateWeightedPearson(vecSentiment, vecForwardReturn, vecWeights);
        // --- END V3 MODIFICATION ---

        state[topic].correlations[`fwd_${window}d`] = corr; // Save { value, samples }

        if (corr.samples >= MIN_SAMPLE_COUNT) {
          totalPP += Math.abs(corr.value);
          windowsCounted++;
          if (Math.abs(corr.value) > CORR_THRESHOLD) {
            totalStableWindows++;
          }
        }
      }

      let ppScore = 0;
      if (windowsCounted > 0) {
        // Base score: mean |correlation| over trusted windows.
        ppScore = totalPP / windowsCounted;

        const avgConviction = state[topic].rolling_90d_history.reduce((acc, h) => acc + h.conviction, 0) / history.length;
        state[topic].avgDailyConviction = avgConviction;

        // FIX F: Clamp before log
        ppScore *= Math.log(1 + Math.max(0, avgConviction));

        // Scale by the fraction of windows that clear the stability bar.
        ppScore *= (totalStableWindows / windowsCounted);
      }

      state[topic].predictivePotential = ppScore;

      predictiveTopics.push({
        topic: topic,
        predictivePotential: ppScore,
        sentimentScore: vecSentiment[vecSentiment.length - 1], // Latest sentiment
        correlations: state[topic].correlations
      });
    }

    // --- Generate Daily Output ---
    predictiveTopics.sort((a, b) => b.predictivePotential - a.predictivePotential);
    const confidence = predictiveTopics.length > 0 ? predictiveTopics[0].predictivePotential : 0;
    const normalizedConfidence = Math.min(1, confidence / 10); // Simple normalization

    return {
      topPredictiveBullishTopics: predictiveTopics
        .filter(t => t.sentimentScore > 0.1)
        .slice(0, 5),
      topPredictiveBearishTopics: predictiveTopics
        .filter(t => t.sentimentScore < -0.1)
        .slice(0, 5),
      predictiveConfidenceScore: normalizedConfidence
    };
  }

  async getResult() { return null; } // Logic is in process()

  // Clears cached maps and rebuilds the concurrency limiter.
  reset() {
    this.priceMap = null;
    this.tickerToIdMap = null;
    this.dependenciesLoaded = false;
    this.pLimit = pLimit(MAX_CONCURRENT_TRANSACTIONS);
  }
}
|
|
460
|
-
|
|
461
|
-
module.exports = SocialTopicPredictivePotentialIndex;
|