bulltrackers-module 1.0.751 → 1.0.752
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,115 @@
This guide documents the **Admin Test Endpoint (`compute-admin-test`)**: the supported actions, the available overrides, and their specific impact on your infrastructure.

### **Overview**

The Admin Test Endpoint is a privileged tool designed for **safe, isolated testing** of computations in production. It allows you to trigger runs manually, bypass scheduling locks, and—most importantly—**divert execution to test infrastructure** (custom tables or worker pools) to prevent polluting production data.

---

### **Global Overrides (Infrastructure Testing)**

These parameters can be applied to the **`run`** and **`run_limited`** actions to redirect the execution flow.

| Parameter | Type | Description | Impact |
| --- | --- | --- | --- |
| **`outputTable`** | `string` | The BigQuery table where results will be written. | **Data Diversion**: Instead of writing to `computation_results_v3`, the Orchestrator writes to this table. Useful for validating logic without affecting production dashboards. |
| **`workerUrl`** | `string` | The URL of the worker Cloud Function to invoke. | **Traffic Diversion**: If the worker pool is used, the Orchestrator will send HTTP requests to this URL instead of the production worker. |
| **`useWorkerPool`** | `boolean` | Force enable/disable the remote worker pool. | **Execution Strategy**: <br>`true`: Forces remote execution (even for small batches).<br>`false`: Forces local execution (inside the Admin function).<br>`undefined`: Uses system default config. |
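
For reference, the sketch below shows how these overrides might be combined in a single request. The `ADMIN_URL` value and the use of `curl` with a POST JSON body are assumptions (the endpoint's trigger URL is not listed here); the field names are the ones documented in the table above, and `computation_results_test` / `worker-test` are illustrative targets.

```bash
# Sketch only: ADMIN_URL is a placeholder for the compute-admin-test HTTPS trigger URL.
ADMIN_URL="https://REGION-PROJECT.cloudfunctions.net/compute-admin-test"

# Divert data (outputTable) and traffic (workerUrl), and force the remote worker pool on.
curl -s -X POST "$ADMIN_URL" \
  -H "Content-Type: application/json" \
  -d '{
        "action": "run_limited",
        "computation": "UserPortfolioSummary",
        "outputTable": "computation_results_test",
        "workerUrl": "https://REGION-PROJECT.cloudfunctions.net/worker-test",
        "useWorkerPool": true
      }'
```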

---

### **Supported Actions**

#### **1. `status`**

Returns the current system manifest, listing all registered computations and their schedules.

* **Configurables:** None.
* **Data Written:** None.
* **Impact:** Read-only. Low impact.
* **Cloud Functions:** `compute-admin-test` (Local).
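
As an illustration, a status check could be issued as follows. This is a sketch assuming the same placeholder `ADMIN_URL` and a POST JSON interface (mirroring how the bundled test script builds its payloads).

```bash
# Read-only: returns the manifest of registered computations and their schedules.
curl -s -X POST "$ADMIN_URL" \
  -H "Content-Type: application/json" \
  -d '{"action": "status"}'
```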

#### **2. `analyze`**

Runs the "Scheduler Logic" for a specific date to determine what *would* run, without actually running it. Checks dependencies, hash changes, and locks.

* **Configurables:**
    * `date` (YYYY-MM-DD): The target date to analyze.
* **Data Written:** None.
* **Impact:** Read-only. Low impact.
* **Cloud Functions:** `compute-admin-test` (Local).
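
For example, an `analyze` call for a single date might look like this (placeholder `ADMIN_URL` as above; the date is illustrative):

```bash
# Read-only: reports what the scheduler *would* run on the given date.
curl -s -X POST "$ADMIN_URL" \
  -H "Content-Type: application/json" \
  -d '{"action": "analyze", "date": "2024-01-15"}'
```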

#### **3. `run`**

Executes a full computation for the specified date. This is the primary tool for manual triggers and backfills.

* **Configurables:**
    * `computation` (Required): Name of the computation (e.g., `UserPortfolioSummary`).
    * `date`: Target date (Default: Today).
    * `entityIds` (Array): Run only for these specific entities.
    * `force` (Boolean): If `true`, runs even if the code/data hasn't changed (Default: `true` for testing).
    * `dryRun` (Boolean): If `true`, computes results but **does not write to BigQuery**.
    * *Plus Global Overrides (`outputTable`, `workerUrl`, `useWorkerPool`)*.
* **Data Written:**
    * **Default:** Writes to `config.resultStore.table` (Production).
    * **With `outputTable`:** Writes to the specified custom table.
    * **With `dryRun`:** No data written.
* **Impact:** High. Can trigger heavy BigQuery queries and write large amounts of data.
* **Cloud Functions:**
    * **Local Mode:** `compute-admin-test` processes all logic.
    * **Worker Pool Mode:** `compute-admin-test` acts as the Orchestrator; `computation-worker` (or `workerUrl`) executes the logic.
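
For example, a manual backfill that validates logic without touching production data might look like the sketch below (placeholder `ADMIN_URL`; the entity IDs and date are illustrative, and the fields are the configurables listed above):

```bash
# Dry run: computes results for two specific entities but writes nothing to BigQuery.
curl -s -X POST "$ADMIN_URL" \
  -H "Content-Type: application/json" \
  -d '{
        "action": "run",
        "computation": "UserPortfolioSummary",
        "date": "2024-01-15",
        "entityIds": ["user_123", "user_456"],
        "force": true,
        "dryRun": true
      }'
```

Dropping `dryRun` (or adding `outputTable`) turns the same payload into a real write, so double-check those two fields before pointing it at production.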

#### **4. `run_limited`**

A safer version of `run` that automatically fetches a small sample of entities (e.g., 5 random users) and runs the computation only for them.

* **Configurables:**
    * `computation` (Required): Name of computation.
    * `limit` (Integer): Number of entities to test (Default: 10).
    * *Plus Global Overrides (`outputTable`, `workerUrl`, `useWorkerPool`)*.
* **Data Written:** Same behavior as `run`, but only for the sampled entities.
* **Impact:** Moderate. Executes real logic but on a strictly limited scope.
* **Cloud Functions:** Same as `run`.
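
A typical combination is a small sample diverted to a test table, roughly as follows (placeholder `ADMIN_URL`; `computation_results_test` is the same illustrative table name the test script defaults to):

```bash
# Sample 5 entities and write their results to a custom table instead of production.
curl -s -X POST "$ADMIN_URL" \
  -H "Content-Type: application/json" \
  -d '{
        "action": "run_limited",
        "computation": "UserPortfolioSummary",
        "limit": 5,
        "outputTable": "computation_results_test"
      }'
```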

#### **5. `test_worker`**

Directly tests the **logic** of a worker execution locally within the Admin function. This bypasses the Orchestrator's batching and storage logic, simulating exactly what happens inside a single worker instance.

* **Configurables:**
    * `computation` (Required): Name of computation.
    * `entityIds` (Required Array): Must provide at least one ID to test.
    * `date`: Target date.
* **Data Written:** **None**. The result is returned directly in the HTTP response body for inspection.
* **Impact:** Low. Fetches data for 1 entity and runs logic in-memory.
* **Cloud Functions:** `compute-admin-test` (Local only).
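
To inspect a single worker execution, a request along these lines returns the computed result directly in the response body (sketch only; the entity ID is hypothetical):

```bash
# Runs the worker logic in-process for one entity; nothing is written to BigQuery.
curl -s -X POST "$ADMIN_URL" \
  -H "Content-Type: application/json" \
  -d '{
        "action": "test_worker",
        "computation": "UserPortfolioSummary",
        "entityIds": ["user_123"],
        "date": "2024-01-15"
      }'
```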

---

### **Summary of Data Flow**

| Scenario | Execution Location | Data Destination |
| --- | --- | --- |
| **Standard Run** | `compute-admin-test` (Local) | `computation_results_v3` (Prod) |
| **Standard Run + Worker Pool** | `computation-worker` (Remote) | `computation_results_v3` (Prod) |
| **Run + `outputTable`** | `compute-admin-test` (Local) | **`YOUR_CUSTOM_TABLE`** |
| **Run + `workerUrl`** | **`worker-test`** (Remote) | `computation_results_v3` (Prod) |
| **Run + `dryRun`** | `compute-admin-test` (Local) | *None* |
| **Run + `outputTable` + `workerUrl`** | **`worker-test`** (Remote) | **`YOUR_CUSTOM_TABLE`** |

@@ -95,6 +95,7 @@ while true; do
 echo "5) Test Specific Entities (run with entityIds)"
 echo "6) Test Worker Logic Directly (test_worker)"
 echo "7) Test Worker Pool Offloading (run with useWorkerPool)"
+echo "8) Advanced Infrastructure Test (Custom Table/Worker)"
 echo "q) Quit"
 
 read -p "Enter choice: " choice
@@ -167,6 +168,45 @@ while true; do
 echo -e "${YELLOW}Running with Worker Pool enabled...${NC}"
 run_test "{\"action\": \"run\", \"computation\": \"$COMP_NAME\", \"date\": \"$TARGET_DATE\", \"useWorkerPool\": true, \"force\": true}"
 ;;
+
+8)
+# Advanced Infrastructure Test (Custom Table & Worker)
+echo -e "${YELLOW}--- Infrastructure Integration Test ---${NC}"
+ask_var "Enter Computation Name" "UserPortfolioSummary" "COMP_NAME"
+ask_var "Enter Date" "$DEFAULT_DATE" "TARGET_DATE"
+ask_var "Enter Output Table Name" "computation_results_test" "OUT_TABLE"
+ask_var "Enter Custom Worker URL (optional)" "" "WORKER_URL"
+ask_var "Use Worker Pool? (true/false)" "true" "USE_POOL"
+ask_var "Entity Limit (0 for Full Run)" "10" "LIMIT_NUM"
+
+# Determine Action (run vs run_limited)
+if [ "$LIMIT_NUM" -gt 0 ]; then
+ACTION="run_limited"
+LIMIT_PART=", \"limit\": $LIMIT_NUM"
+else
+ACTION="run"
+LIMIT_PART=", \"force\": true"
+fi
+
+# Build Optional JSON Parts
+TABLE_PART=""
+if [ ! -z "$OUT_TABLE" ]; then
+TABLE_PART=", \"outputTable\": \"$OUT_TABLE\""
+fi
+
+WORKER_PART=""
+if [ ! -z "$WORKER_URL" ]; then
+WORKER_PART=", \"workerUrl\": \"$WORKER_URL\""
+fi
+
+echo -e "${YELLOW}Running ${ACTION} on ${OUT_TABLE}...${NC}"
+
+# Construct Payload
+# We use loose concatenation here which works fine for JSON string building in bash
+JSON="{\"action\": \"$ACTION\", \"computation\": \"$COMP_NAME\", \"date\": \"$TARGET_DATE\", \"useWorkerPool\": $USE_POOL $LIMIT_PART $TABLE_PART $WORKER_PART}"
+
+run_test "$JSON"
+;;
 
 q|Q)
 echo "Exiting."
package/index.js
CHANGED
@@ -1,6 +1,7 @@
 /**
 * @fileoverview Main entry point for the Bulltrackers shared module.
 * CLEANED: Removed legacy V1 Dispatcher/Computation references.
+* UPDATED: Re-integrated Dispatcher (Task Throttler).
 */
 
 // Core utilities
@@ -21,7 +22,6 @@ const { checkDiscoveryNeed, getDiscoveryCandidates, dispatchDiscovery } = requir
 const { getUpdateTargets, dispatchUpdates } = require('./functions/orchestrator/helpers/update_helpers');
 
 // --- COMPUTATION SYSTEM V2 (The new standard) ---
-// We import the WHOLE package now, which includes handlers AND ManifestBuilder
 const computationSystemV2 = require('./functions/computation-system-v2/index');
 
 // Task Engine
@@ -30,6 +30,9 @@ const { handleDiscover } = require('./functions
 const { handleVerify } = require('./functions/task-engine/helpers/verify_helpers');
 const { handleUpdate } = require('./functions/task-engine/helpers/update_helpers');
 
+// Dispatcher (Task Throttler)
+const { handleRequest: dispatcherRequest } = require('./functions/dispatcher/index');
+
 const { createApiV2App } = require('./functions/api-v2/index');
 
 // Maintenance & Backfills
@@ -76,7 +79,9 @@ const orchestrator = {
 dispatchUpdates,
 };
 
-
+const dispatcher = {
+handleRequest: dispatcherRequest
+};
 
 const taskEngine = {
 handleRequest: taskRequest,
@@ -85,7 +90,7 @@ const taskEngine = {
 handleUpdate,
 };
 
-const computationSystem = computationSystemV2;
+const computationSystem = computationSystemV2;
 
 const api = {
 createApiV2App,
@@ -119,5 +124,5 @@ const alertSystem = {
 };
 
 module.exports = {
-pipe: { core, orchestrator, taskEngine, computationSystem, api, maintenance, proxy, alertSystem },
+pipe: { core, orchestrator, dispatcher, taskEngine, computationSystem, api, maintenance, proxy, alertSystem },
 };