@discomedia/utils 1.0.25 → 1.0.27
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/alpaca-trading-api-6NxNgQBn.js +1413 -0
- package/dist/alpaca-trading-api-6NxNgQBn.js.map +1 -0
- package/dist/index-frontend.cjs +105 -12
- package/dist/index-frontend.cjs.map +1 -1
- package/dist/index-frontend.mjs +105 -13
- package/dist/index-frontend.mjs.map +1 -1
- package/dist/index.cjs +257 -43
- package/dist/index.cjs.map +1 -1
- package/dist/index.mjs +257 -44
- package/dist/index.mjs.map +1 -1
- package/dist/package.json +3 -3
- package/dist/test.js +868 -5517
- package/dist/test.js.map +1 -1
- package/dist/types/alpaca-trading-api.d.ts +33 -0
- package/dist/types/alpaca-trading-api.d.ts.map +1 -1
- package/dist/types/index-frontend.d.ts +1 -1
- package/dist/types/index.d.ts +3 -1
- package/dist/types/index.d.ts.map +1 -1
- package/dist/types/json-tools.d.ts.map +1 -1
- package/dist/types/llm-deepseek.d.ts +1 -1
- package/dist/types/llm-deepseek.d.ts.map +1 -1
- package/dist/types/llm-images.d.ts.map +1 -1
- package/dist/types/llm-openai.d.ts +2 -2
- package/dist/types/llm-openai.d.ts.map +1 -1
- package/dist/types/llm-openrouter.d.ts +28 -0
- package/dist/types/llm-openrouter.d.ts.map +1 -0
- package/dist/types/misc-utils.d.ts.map +1 -1
- package/dist/types/types/llm-types.d.ts +26 -3
- package/dist/types/types/llm-types.d.ts.map +1 -1
- package/dist/types/types/logging-types.d.ts +1 -1
- package/dist/types/types/logging-types.d.ts.map +1 -1
- package/dist/types-frontend/alpaca-trading-api.d.ts +33 -0
- package/dist/types-frontend/alpaca-trading-api.d.ts.map +1 -1
- package/dist/types-frontend/index-frontend.d.ts +1 -1
- package/dist/types-frontend/index.d.ts +3 -1
- package/dist/types-frontend/index.d.ts.map +1 -1
- package/dist/types-frontend/json-tools.d.ts.map +1 -1
- package/dist/types-frontend/llm-deepseek.d.ts +1 -1
- package/dist/types-frontend/llm-deepseek.d.ts.map +1 -1
- package/dist/types-frontend/llm-images.d.ts.map +1 -1
- package/dist/types-frontend/llm-openai.d.ts +2 -2
- package/dist/types-frontend/llm-openai.d.ts.map +1 -1
- package/dist/types-frontend/llm-openrouter.d.ts +28 -0
- package/dist/types-frontend/llm-openrouter.d.ts.map +1 -0
- package/dist/types-frontend/misc-utils.d.ts.map +1 -1
- package/dist/types-frontend/types/llm-types.d.ts +26 -3
- package/dist/types-frontend/types/llm-types.d.ts.map +1 -1
- package/dist/types-frontend/types/logging-types.d.ts +1 -1
- package/dist/types-frontend/types/logging-types.d.ts.map +1 -1
- package/package.json +3 -3
package/dist/index.mjs
CHANGED
@@ -1019,8 +1019,28 @@ function dateTimeForGS(date) {
         .replace(/\./g, '/');
 }
 
+/**
+ * Type guard to check if a model is an OpenRouter model
+ */
+function isOpenRouterModel(model) {
+    const openRouterModels = [
+        'openai/gpt-5',
+        'openai/gpt-5-mini',
+        'openai/gpt-5-nano',
+        'openai/gpt-oss-120b',
+        'z.ai/glm-4.5',
+        'z.ai/glm-4.5-air',
+        'google/gemini-2.5-flash',
+        'google/gemini-2.5-flash-lite',
+        'deepseek/deepseek-r1-0528',
+        'deepseek/deepseek-chat-v3-0324',
+    ];
+    return openRouterModels.includes(model);
+}
+
 var Types = /*#__PURE__*/Object.freeze({
-    __proto__: null
+    __proto__: null,
+    isOpenRouterModel: isOpenRouterModel
 });
 
 // Utility function for debug logging
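
For reference, a minimal usage sketch of the new type guard. It is re-exported from the package entry point (see the export hunk at the end of this file); the model string below is just an example.

import { isOpenRouterModel } from '@discomedia/utils';

const candidate = 'openai/gpt-5-mini';
if (isOpenRouterModel(candidate)) {
    // candidate is one of the whitelisted OpenRouter model IDs listed above
    console.log(`${candidate} can be routed through OpenRouter`);
}
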
@@ -1112,29 +1132,31 @@ function hideApiKeyFromurl(url) {
  * @returns Structured error details.
  */
 function extractErrorDetails(error, response) {
-
+    const errMsg = error instanceof Error ? error.message : String(error);
+    const errName = error instanceof Error ? error.name : 'Error';
+    if (errName === 'TypeError' && errMsg.includes('fetch')) {
         return { type: 'NETWORK_ERROR', reason: 'Network connectivity issue', status: null };
     }
-    if (
-        const match =
+    if (errMsg.includes('HTTP error: 429')) {
+        const match = errMsg.match(/RATE_LIMIT: 429:(\d+)/);
         const retryAfter = match ? parseInt(match[1]) : undefined;
         return { type: 'RATE_LIMIT', reason: 'Rate limit exceeded', status: 429, retryAfter };
     }
-    if (
+    if (errMsg.includes('HTTP error: 401') || errMsg.includes('AUTH_ERROR: 401')) {
         return { type: 'AUTH_ERROR', reason: 'Authentication failed - invalid API key', status: 401 };
     }
-    if (
+    if (errMsg.includes('HTTP error: 403') || errMsg.includes('AUTH_ERROR: 403')) {
         return { type: 'AUTH_ERROR', reason: 'Access forbidden - insufficient permissions', status: 403 };
     }
-    if (
-        const status = parseInt(
+    if (errMsg.includes('SERVER_ERROR:')) {
+        const status = parseInt(errMsg.split('SERVER_ERROR: ')[1]) || 500;
         return { type: 'SERVER_ERROR', reason: `Server error (${status})`, status };
     }
-    if (
-        const status = parseInt(
+    if (errMsg.includes('CLIENT_ERROR:')) {
+        const status = parseInt(errMsg.split('CLIENT_ERROR: ')[1]) || 400;
         return { type: 'CLIENT_ERROR', reason: `Client error (${status})`, status };
     }
-    return { type: 'UNKNOWN', reason:
+    return { type: 'UNKNOWN', reason: errMsg || 'Unknown error', status: null };
 }
 /**
  * Fetches a resource with intelligent retry logic for handling transient errors.
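
The rewritten classifier branches on the error message text instead of accessing error.message directly. A rough illustration of the inputs the new branches recognize; extractErrorDetails is internal to the bundle, so these calls only work from inside the module, and the message strings are hypothetical examples of the formats matched above.

// Hypothetical message strings in the formats the new branches match.
extractErrorDetails(new TypeError('fetch failed'));
// -> { type: 'NETWORK_ERROR', reason: 'Network connectivity issue', status: null }

extractErrorDetails(new Error('HTTP error: 429 RATE_LIMIT: 429:30'));
// -> { type: 'RATE_LIMIT', reason: 'Rate limit exceeded', status: 429, retryAfter: 30 }

extractErrorDetails(new Error('SERVER_ERROR: 503'));
// -> { type: 'SERVER_ERROR', reason: 'Server error (503)', status: 503 }
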
@@ -1348,51 +1370,47 @@ function pLimit(concurrency) {
     let activeCount = 0;
 
     const resumeNext = () => {
+        // Process the next queued function if we're under the concurrency limit
         if (activeCount < concurrency && queue.size > 0) {
-            queue.dequeue()();
-            // Since `pendingCount` has been decreased by one, increase `activeCount` by one.
             activeCount++;
+            queue.dequeue()();
         }
     };
 
     const next = () => {
         activeCount--;
-
         resumeNext();
     };
 
     const run = async (function_, resolve, arguments_) => {
+        // Execute the function and capture the result promise
         const result = (async () => function_(...arguments_))();
 
+        // Resolve immediately with the promise (don't wait for completion)
         resolve(result);
 
+        // Wait for the function to complete (success or failure)
+        // We catch errors here to prevent unhandled rejections,
+        // but the original promise rejection is preserved for the caller
         try {
             await result;
         } catch {}
 
+        // Decrement active count and process next queued function
         next();
     };
 
     const enqueue = (function_, resolve, arguments_) => {
-        // Queue
-        // to preserve asynchronous context.
-        new Promise(internalResolve => {
+        // Queue the internal resolve function instead of the run function
+        // to preserve the asynchronous execution context.
+        new Promise(internalResolve => { // eslint-disable-line promise/param-names
            queue.enqueue(internalResolve);
-        }).then(
-
-
-
-
-
-            // `activeCount` to `concurrency`, because `activeCount` is updated asynchronously
-            // after the `internalResolve` function is dequeued and called. The comparison in the if-statement
-            // needs to happen asynchronously as well to get an up-to-date value for `activeCount`.
-            await Promise.resolve();
-
-            if (activeCount < concurrency) {
-                resumeNext();
-            }
-        })();
+        }).then(run.bind(undefined, function_, resolve, arguments_)); // eslint-disable-line promise/prefer-await-to-then
+
+        // Start processing immediately if we haven't reached the concurrency limit
+        if (activeCount < concurrency) {
+            resumeNext();
+        }
     };
 
     const generator = (function_, ...arguments_) => new Promise(resolve => {
@@ -1426,6 +1444,12 @@ function pLimit(concurrency) {
             });
         },
     },
+    map: {
+        async value(array, function_) {
+            const promises = array.map(value => this(function_, value));
+            return Promise.all(promises);
+        },
+    },
 });
 
     return generator;
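
The new map helper runs one mapper call per array element through the limiter. A small sketch of the behavior; pLimit here is the vendored internal factory shown above, not a public export of the package, so the names are for illustration only.

// Assuming a limiter created by the vendored factory above:
const limit = pLimit(2); // at most 2 mapper calls in flight at once

// Each element is dispatched as limit(fn, value); results preserve input order.
const doubled = await limit.map([1, 2, 3, 4], async (n) => n * 2);
// -> [2, 4, 6, 8]
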
@@ -2368,7 +2392,7 @@ const safeJSON = (text) => {
 // File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
 
-const VERSION = '5.12.
+const VERSION = '5.12.2'; // x-release-please-version
 
 // File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 const isRunningInBrowser = () => {
@@ -9163,7 +9187,8 @@ function fixBrokenJson(jsonStr) {
         return parse();
     }
     catch (error) {
-
+        const msg = error instanceof Error ? error.message : String(error);
+        console.error(`Error parsing JSON at position ${index}: ${msg}`);
         return null;
     }
 }
@@ -9707,7 +9732,13 @@ async function makeImagesCall(prompt, options = {}) {
         const enhancedResponse = {
             ...response,
             usage: {
-
+                // OpenAI Images response may not include usage details per image; preserve if present
+                ...(response.usage ?? {
+                    input_tokens: 0,
+                    input_tokens_details: { image_tokens: 0, text_tokens: 0 },
+                    output_tokens: 0,
+                    total_tokens: 0,
+                }),
                 provider: 'openai',
                 model: 'gpt-image-1',
                 cost,
@@ -9716,7 +9747,8 @@ async function makeImagesCall(prompt, options = {}) {
         return enhancedResponse;
     }
     catch (error) {
-
+        const message = error instanceof Error ? error.message : 'Unknown error';
+        throw new Error(`OpenAI Images API call failed: ${message}`);
     }
 }
 
@@ -9941,14 +9973,15 @@ const makeDeepseekCall = async (content, responseFormat = 'json', options = {})
     const completion = await createDeepseekCompletion(content, responseFormat, mergedOptions);
     // Handle tool calls similarly to OpenAI
     if (completion.tool_calls && completion.tool_calls.length > 0) {
+        const fnCalls = completion.tool_calls
+            .filter((tc) => tc.type === 'function')
+            .map((tc) => ({
+            id: tc.id,
+            name: tc.function.name,
+            arguments: JSON.parse(tc.function.arguments),
+        }));
         return {
-            response: {
-                tool_calls: completion.tool_calls.map((tc) => ({
-                    id: tc.id,
-                    name: tc.function.name,
-                    arguments: JSON.parse(tc.function.arguments),
-                })),
-            },
+            response: { tool_calls: fnCalls },
             usage: {
                 prompt_tokens: completion.usage.prompt_tokens,
                 completion_tokens: completion.usage.completion_tokens,
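
With the new filtering, only function-type tool calls are surfaced on the DeepSeek response. A hedged sketch of consuming them via disco.llm.seek; the prompt and tools configuration are placeholders, only the tool_calls shape is taken from the code above.

import { disco } from '@discomedia/utils';

// Hypothetical prompt and tools config; inspect any surfaced function tool calls.
const result = await disco.llm.seek('Get the latest quote for AAPL', 'json', { /* tools config */ });
if (result.response.tool_calls) {
    for (const call of result.response.tool_calls) {
        console.log(call.id, call.name, call.arguments); // arguments are already JSON.parse'd
    }
}
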
@@ -10003,6 +10036,122 @@ const makeDeepseekCall = async (content, responseFormat = 'json', options = {})
     }
 };
 
+// llm-openrouter.ts
+// Map our ContextMessage to OpenAI chat message
+function mapContextToMessages(context) {
+    return context.map((msg) => {
+        const role = msg.role === 'developer' ? 'system' : msg.role;
+        return { role, content: msg.content };
+    });
+}
+function toOpenRouterModel(model) {
+    if (model && model.includes('/'))
+        return model;
+    const base = normalizeModelName(model || DEFAULT_MODEL);
+    return `openai/${base}`;
+}
+// Normalize model name for pricing
+function normalizeModelForPricing(model) {
+    if (!model)
+        return { provider: 'openai', coreModel: normalizeModelName(DEFAULT_MODEL) };
+    const [maybeProvider, maybeModel] = model.includes('/') ? model.split('/') : ['openai', model];
+    const provider = (maybeProvider === 'deepseek' ? 'deepseek' : 'openai');
+    const coreModel = normalizeModelName(maybeModel || model);
+    return { provider, coreModel };
+}
+/**
+ * Make a call through OpenRouter using the OpenAI Chat Completions-compatible API.
+ * Supports: JSON mode, model selection, message history, and tools.
+ */
+async function makeOpenRouterCall(input, options = {}) {
+    const { apiKey = process.env.OPENROUTER_API_KEY, model, responseFormat = 'text', tools, toolChoice, context, developerPrompt, temperature = 0.2, max_tokens, top_p, frequency_penalty, presence_penalty, stop, seed, referer = process.env.OPENROUTER_SITE_URL, title = process.env.OPENROUTER_SITE_NAME, } = options;
+    if (!apiKey) {
+        throw new Error('OpenRouter API key is not provided and OPENROUTER_API_KEY is not set');
+    }
+    const client = new OpenAI({
+        apiKey,
+        baseURL: 'https://openrouter.ai/api/v1',
+        defaultHeaders: {
+            ...(referer ? { 'HTTP-Referer': referer } : {}),
+            ...(title ? { 'X-Title': title } : {}),
+        },
+    });
+    const messages = [];
+    if (developerPrompt && developerPrompt.trim()) {
+        messages.push({ role: 'system', content: developerPrompt });
+    }
+    if (context && context.length > 0) {
+        messages.push(...mapContextToMessages(context));
+    }
+    messages.push({ role: 'user', content: input });
+    // Configure response_format
+    let response_format;
+    let parsingFormat = 'text';
+    if (responseFormat === 'json') {
+        response_format = { type: 'json_object' };
+        parsingFormat = 'json';
+    }
+    else if (typeof responseFormat === 'object') {
+        response_format = { type: 'json_object' };
+        parsingFormat = responseFormat;
+    }
+    const modelId = toOpenRouterModel(model);
+    const completion = await client.chat.completions.create({
+        model: modelId,
+        messages,
+        response_format,
+        tools,
+        tool_choice: toolChoice,
+        temperature,
+        max_tokens,
+        top_p,
+        frequency_penalty,
+        presence_penalty,
+        stop,
+        seed,
+    });
+    const choice = completion.choices && completion.choices.length > 0 ? completion.choices[0] : undefined;
+    const message = (choice && 'message' in choice ? choice.message : undefined);
+    const { provider: pricingProvider, coreModel } = normalizeModelForPricing(modelId);
+    const promptTokens = completion.usage?.prompt_tokens ?? 0;
+    const completionTokens = completion.usage?.completion_tokens ?? 0;
+    const cost = calculateCost(pricingProvider, coreModel, promptTokens, completionTokens);
+    // Tool calls branch: return empty string response and expose tool_calls on LLMResponse
+    const hasToolCalls = Array.isArray(message?.tool_calls) && message.tool_calls.length > 0;
+    if (hasToolCalls) {
+        const usageModel = isOpenRouterModel(modelId) ? modelId : DEFAULT_MODEL;
+        return {
+            response: '',
+            usage: {
+                prompt_tokens: promptTokens,
+                completion_tokens: completionTokens,
+                provider: 'openrouter',
+                model: usageModel,
+                cost,
+            },
+            tool_calls: message.tool_calls,
+        };
+    }
+    const rawText = typeof message?.content === 'string' ? message.content : '';
+    const parsed = await parseResponse(rawText, parsingFormat);
+    if (parsed === null) {
+        throw new Error('Failed to parse OpenRouter response');
+    }
+    // Ensure the model value conforms to LLMModel; otherwise fall back to DEFAULT_MODEL
+    const usageModel = isOpenRouterModel(modelId) ? modelId : DEFAULT_MODEL;
+    return {
+        response: parsed,
+        usage: {
+            prompt_tokens: promptTokens,
+            completion_tokens: completionTokens,
+            provider: 'openrouter',
+            model: usageModel,
+            cost,
+        },
+        ...(hasToolCalls ? { tool_calls: message.tool_calls } : {}),
+    };
+}
+
 /**
  * A class to measure performance of code execution.
  *
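
For context, a minimal call through the new OpenRouter path, which the export hunk further down wires up as disco.llm.open. The prompt, model, and option values here are examples; OPENROUTER_API_KEY must be set in the environment or passed as apiKey.

import { disco } from '@discomedia/utils';

// Minimal sketch of the new OpenRouter path (exposed as disco.llm.open below).
const result = await disco.llm.open('Summarize the latest CPI print in two sentences.', {
    model: 'openai/gpt-5-mini',       // any ID accepted by isOpenRouterModel
    responseFormat: 'text',           // or 'json' for JSON mode
    developerPrompt: 'You are a concise financial analyst.',
    temperature: 0.2,
});
console.log(result.response, result.usage.cost);
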
@@ -16962,6 +17111,18 @@ Websocket example
     this.log(`Received trade update: event ${update.event} for an order to ${update.order.side} ${update.order.qty} of ${update.order.symbol}`);
 });
 alpacaAPI.connectWebsocket(); // necessary to connect to the WebSocket
+
+Portfolio History examples
+// Get standard portfolio history
+const portfolioHistory = await alpacaAPI.getPortfolioHistory({
+    timeframe: '1D',
+    period: '1M'
+});
+
+// Get daily portfolio history with current day included (if available from hourly data)
+const dailyHistory = await alpacaAPI.getPortfolioDailyHistory({
+    period: '1M'
+});
 */
 class AlpacaTradingAPI {
     static new(credentials) {
@@ -17661,6 +17822,57 @@ class AlpacaTradingAPI {
         const response = await this.makeRequest(`/account/portfolio/history?${queryParams.toString()}`);
         return response;
     }
+    /**
+     * Get portfolio daily history for the account, ensuring the most recent day is included
+     * by combining daily and hourly history if needed.
+     *
+     * This function performs two API calls:
+     * 1. Retrieves daily portfolio history
+     * 2. Retrieves hourly portfolio history to check for more recent data
+     *
+     * If hourly history has timestamps more recent than the last timestamp in daily history,
+     * it appends one additional day to the daily history using the most recent hourly values.
+     *
+     * @param params Parameters for the portfolio history request (same as getPortfolioHistory except timeframe is forced to '1D')
+     * @returns Portfolio history data with daily timeframe, including the most recent day if available from hourly data
+     */
+    async getPortfolioDailyHistory(params) {
+        // Get daily history
+        const dailyParams = { ...params, timeframe: '1D' };
+        const dailyHistory = await this.getPortfolioHistory(dailyParams);
+        // Get hourly history for the last day to check for more recent data
+        const hourlyParams = { timeframe: '1H', period: '1D' };
+        const hourlyHistory = await this.getPortfolioHistory(hourlyParams);
+        // If no hourly history, return daily as-is
+        if (!hourlyHistory.timestamp || hourlyHistory.timestamp.length === 0) {
+            return dailyHistory;
+        }
+        // Get the last timestamp from daily history
+        const lastDailyTimestamp = dailyHistory.timestamp[dailyHistory.timestamp.length - 1];
+        // Check if hourly history has more recent data
+        const recentHourlyData = hourlyHistory.timestamp
+            .map((timestamp, index) => ({ timestamp, index }))
+            .filter(({ timestamp }) => timestamp > lastDailyTimestamp);
+        // If no more recent hourly data, return daily history as-is
+        if (recentHourlyData.length === 0) {
+            return dailyHistory;
+        }
+        // Get the most recent hourly data point
+        const mostRecentHourly = recentHourlyData[recentHourlyData.length - 1];
+        const mostRecentIndex = mostRecentHourly.index;
+        // Calculate the timestamp for the new daily entry (most recent day + 1 day worth of seconds)
+        const oneDayInSeconds = 24 * 60 * 60;
+        const newDailyTimestamp = mostRecentHourly.timestamp + oneDayInSeconds;
+        // Create a new daily history entry with the most recent hourly values
+        const updatedDailyHistory = {
+            ...dailyHistory,
+            timestamp: [...dailyHistory.timestamp, newDailyTimestamp],
+            equity: [...dailyHistory.equity, hourlyHistory.equity[mostRecentIndex]],
+            profit_loss: [...dailyHistory.profit_loss, hourlyHistory.profit_loss[mostRecentIndex]],
+            profit_loss_pct: [...dailyHistory.profit_loss_pct, hourlyHistory.profit_loss_pct[mostRecentIndex]],
+        };
+        return updatedDailyHistory;
+    }
     /**
      * Get option contracts based on specified parameters
      * @param params Parameters to filter option contracts
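
A short worked example of the append logic above, assuming an alpacaAPI instance like the one in the class doc comment; the Unix-second timestamps are hypothetical.

// Suppose the last daily timestamp is 1717027200 (Thu 00:00 UTC) and the most
// recent hourly timestamp is 1717081200 (Thu 15:00 UTC). Since 1717081200 >
// 1717027200, one extra point is appended at 1717081200 + 86400 = 1717167600,
// carrying that hour's equity, profit_loss, and profit_loss_pct values.
const history = await alpacaAPI.getPortfolioDailyHistory({ period: '1M' });
console.log(history.timestamp.at(-1), history.equity.at(-1));
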
@@ -18312,6 +18524,7 @@ const disco = {
         call: makeLLMCall,
         seek: makeDeepseekCall,
         images: makeImagesCall,
+        open: makeOpenRouterCall,
     },
     polygon: {
         fetchTickerInfo: fetchTickerInfo,
@@ -18357,5 +18570,5 @@ const disco = {
     },
 };
 
-export { AlpacaMarketDataAPI, AlpacaTradingAPI, disco };
+export { AlpacaMarketDataAPI, AlpacaTradingAPI, disco, isOpenRouterModel };
 //# sourceMappingURL=index.mjs.map