@tradejs/infra 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +51 -0
- package/dist/chunk-LNFUOXDW.mjs +42 -0
- package/dist/files.d.mts +9 -0
- package/dist/files.d.ts +9 -0
- package/dist/files.js +103 -0
- package/dist/files.mjs +66 -0
- package/dist/http.d.mts +8 -0
- package/dist/http.d.ts +8 -0
- package/dist/http.js +79 -0
- package/dist/http.mjs +54 -0
- package/dist/logger.d.mts +5 -0
- package/dist/logger.d.ts +5 -0
- package/dist/logger.js +66 -0
- package/dist/logger.mjs +6 -0
- package/dist/ml.d.mts +135 -0
- package/dist/ml.d.ts +135 -0
- package/dist/ml.js +1604 -0
- package/dist/ml.mjs +1512 -0
- package/dist/redis.d.mts +50 -0
- package/dist/redis.d.ts +50 -0
- package/dist/redis.js +337 -0
- package/dist/redis.mjs +296 -0
- package/dist/timescale.d.mts +68 -0
- package/dist/timescale.d.ts +68 -0
- package/dist/timescale.js +508 -0
- package/dist/timescale.mjs +471 -0
- package/package.json +60 -0
- package/proto/ml_infer.proto +19 -0
package/README.md
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
# @tradejs/infra
|
|
2
|
+
|
|
3
|
+
Server-only infrastructure adapters for TradeJS.
|
|
4
|
+
|
|
5
|
+
- Homepage: https://tradejs.dev
|
|
6
|
+
- Documentation: https://docs.tradejs.dev
|
|
7
|
+
- Quickstart: https://docs.tradejs.dev/getting-started/quickstart
|
|
8
|
+
|
|
9
|
+
## Where It Fits
|
|
10
|
+
|
|
11
|
+
`@tradejs/infra` is not part of the basic authoring surface.
|
|
12
|
+
|
|
13
|
+
Most external users do not install it directly. It usually comes transitively through:
|
|
14
|
+
|
|
15
|
+
- `@tradejs/app`
|
|
16
|
+
- `@tradejs/cli`
|
|
17
|
+
- `@tradejs/node`
|
|
18
|
+
|
|
19
|
+
Install it directly only if you are building custom server/runtime integrations on top of TradeJS.
|
|
20
|
+
|
|
21
|
+
## Direct Install
|
|
22
|
+
|
|
23
|
+
```bash
|
|
24
|
+
npm i @tradejs/infra @tradejs/types
|
|
25
|
+
```
|
|
26
|
+
|
|
27
|
+
## Public Surface
|
|
28
|
+
|
|
29
|
+
Import only explicit subpaths:
|
|
30
|
+
|
|
31
|
+
- `@tradejs/infra/files`
|
|
32
|
+
- `@tradejs/infra/http`
|
|
33
|
+
- `@tradejs/infra/logger`
|
|
34
|
+
- `@tradejs/infra/ml`
|
|
35
|
+
- `@tradejs/infra/redis`
|
|
36
|
+
- `@tradejs/infra/timescale`
|
|
37
|
+
|
|
38
|
+
There is no root `@tradejs/infra` import surface.
|
|
39
|
+
|
|
40
|
+
## Usage
|
|
41
|
+
|
|
42
|
+
```ts
|
|
43
|
+
import { getData, setData } from '@tradejs/infra/redis';
|
|
44
|
+
import { logger } from '@tradejs/infra/logger';
|
|
45
|
+
```
|
|
46
|
+
|
|
47
|
+
## Notes
|
|
48
|
+
|
|
49
|
+
- `@tradejs/infra` is server-only. Do not import it into browser/client code.
|
|
50
|
+
- Environment loading should happen in your app/runtime entrypoint, not inside shared library code.
|
|
51
|
+
- If you are following the standard external quickstart, start with `@tradejs/app`, `@tradejs/core`, `@tradejs/node`, `@tradejs/base`, and `@tradejs/cli` instead of adding `@tradejs/infra` manually.
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
// src/logger.ts
import { createLogger, transports, format } from "winston";

// Factory for the shared "level: timestamp: message" line renderer.
// Each transport receives its own printf instance, exactly as the
// original inline calls produced.
var lineFormat = () => format.printf(
  ({ level, timestamp, message }) => `${level}: ${timestamp}: ${message}`
);

// Pipeline applied before any per-transport formatting: a readable
// timestamp plus winston's splat() argument interpolation.
var baseFormat = format.combine(
  format.timestamp({ format: "DD MMM HH:mm:ss" }),
  format.splat()
);

// Module-level logger: colorized console output, a plain-text
// service.log with every level, and an error.log limited to "error".
var logger = createLogger({
  format: baseFormat,
  transports: [
    new transports.Console({
      format: format.combine(format.colorize({ all: true }), lineFormat())
    }),
    new transports.File({
      filename: "service.log",
      format: format.combine(format.uncolorize(), lineFormat())
    }),
    new transports.File({
      filename: "error.log",
      format: format.combine(format.uncolorize(), lineFormat()),
      level: "error"
    })
  ]
});

export {
  logger
};
|
package/dist/files.d.mts
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
interface Options {
|
|
2
|
+
stringify?: boolean;
|
|
3
|
+
lock?: boolean;
|
|
4
|
+
}
|
|
5
|
+
declare const getFiles: (dir: string) => Promise<string[]>;
|
|
6
|
+
declare const getFile: (dir: string, file: string, fallback?: never[]) => Promise<any>;
|
|
7
|
+
declare const setFile: <T>(dir: string, file: string, data: T, options?: Options) => Promise<void>;
|
|
8
|
+
|
|
9
|
+
export { getFile, getFiles, setFile };
|
package/dist/files.d.ts
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
interface Options {
|
|
2
|
+
stringify?: boolean;
|
|
3
|
+
lock?: boolean;
|
|
4
|
+
}
|
|
5
|
+
declare const getFiles: (dir: string) => Promise<string[]>;
|
|
6
|
+
declare const getFile: (dir: string, file: string, fallback?: never[]) => Promise<any>;
|
|
7
|
+
declare const setFile: <T>(dir: string, file: string, data: T, options?: Options) => Promise<void>;
|
|
8
|
+
|
|
9
|
+
export { getFile, getFiles, setFile };
|
package/dist/files.js
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __create = Object.create;
|
|
3
|
+
var __defProp = Object.defineProperty;
|
|
4
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
5
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
6
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
7
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
8
|
+
var __export = (target, all) => {
|
|
9
|
+
for (var name in all)
|
|
10
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
11
|
+
};
|
|
12
|
+
var __copyProps = (to, from, except, desc) => {
|
|
13
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
14
|
+
for (let key of __getOwnPropNames(from))
|
|
15
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
16
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
17
|
+
}
|
|
18
|
+
return to;
|
|
19
|
+
};
|
|
20
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
21
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
22
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
23
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
24
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
25
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
26
|
+
mod
|
|
27
|
+
));
|
|
28
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
29
|
+
|
|
30
|
+
// src/files.ts
|
|
31
|
+
var files_exports = {};
|
|
32
|
+
__export(files_exports, {
|
|
33
|
+
getFile: () => getFile,
|
|
34
|
+
getFiles: () => getFiles,
|
|
35
|
+
setFile: () => setFile
|
|
36
|
+
});
|
|
37
|
+
module.exports = __toCommonJS(files_exports);
|
|
38
|
+
var import_crypto = require("crypto");
|
|
39
|
+
var import_promises = __toESM(require("fs/promises"));
|
|
40
|
+
var import_path = __toESM(require("path"));
|
|
41
|
+
var DEFAULT_OPTIONS = {
|
|
42
|
+
stringify: false,
|
|
43
|
+
lock: false
|
|
44
|
+
};
|
|
45
|
+
var toJson = (value, pretty = false) => JSON.stringify(value, null, pretty ? 2 : 0);
|
|
46
|
+
var logError = (message, ...args) => {
|
|
47
|
+
console.error(`[infra:files] ${message}`, ...args);
|
|
48
|
+
};
|
|
49
|
+
var getPath = (dir, file, lock = false) => {
|
|
50
|
+
if (!lock) {
|
|
51
|
+
return import_path.default.join(process.cwd(), dir, `${file}.json`);
|
|
52
|
+
}
|
|
53
|
+
return import_path.default.join(
|
|
54
|
+
process.cwd(),
|
|
55
|
+
"data",
|
|
56
|
+
"cache",
|
|
57
|
+
`${file}.lock.${(0, import_crypto.randomUUID)()}.json`
|
|
58
|
+
);
|
|
59
|
+
};
|
|
60
|
+
var getDir = (dir) => import_path.default.join(process.cwd(), dir);
|
|
61
|
+
var getFiles = async (dir) => {
|
|
62
|
+
return await import_promises.default.readdir(getDir(dir));
|
|
63
|
+
};
|
|
64
|
+
var getFile = async (dir, file, fallback = []) => {
|
|
65
|
+
const fullPath = getPath(dir, file);
|
|
66
|
+
try {
|
|
67
|
+
await import_promises.default.access(fullPath);
|
|
68
|
+
} catch {
|
|
69
|
+
return fallback;
|
|
70
|
+
}
|
|
71
|
+
try {
|
|
72
|
+
const fileContents = await import_promises.default.readFile(fullPath, "utf8");
|
|
73
|
+
return JSON.parse(fileContents);
|
|
74
|
+
} catch (error) {
|
|
75
|
+
logError("failed data file: %s", error);
|
|
76
|
+
await import_promises.default.unlink(fullPath);
|
|
77
|
+
return fallback;
|
|
78
|
+
}
|
|
79
|
+
};
|
|
80
|
+
var setFile = async (dir, file, data, options = {}) => {
|
|
81
|
+
const { stringify, lock } = {
|
|
82
|
+
...DEFAULT_OPTIONS,
|
|
83
|
+
...options
|
|
84
|
+
};
|
|
85
|
+
const fullPath = getPath(dir, file);
|
|
86
|
+
const lockFullPath = getPath(dir, file, true);
|
|
87
|
+
try {
|
|
88
|
+
if (!lock) {
|
|
89
|
+
await import_promises.default.writeFile(fullPath, toJson(data, stringify));
|
|
90
|
+
return;
|
|
91
|
+
}
|
|
92
|
+
await import_promises.default.writeFile(lockFullPath, toJson(data, stringify));
|
|
93
|
+
await import_promises.default.rename(lockFullPath, fullPath);
|
|
94
|
+
} catch (error) {
|
|
95
|
+
logError("failed to write file %s: %s", fullPath, error);
|
|
96
|
+
}
|
|
97
|
+
};
|
|
98
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
99
|
+
0 && (module.exports = {
|
|
100
|
+
getFile,
|
|
101
|
+
getFiles,
|
|
102
|
+
setFile
|
|
103
|
+
});
|
package/dist/files.mjs
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
// src/files.ts
import { randomUUID } from "crypto";
import fs from "fs/promises";
import path from "path";
var DEFAULT_OPTIONS = {
  stringify: false,
  lock: false
};
// Serialize to JSON; `pretty` switches on 2-space indentation.
var toJson = (value, pretty = false) => JSON.stringify(value, null, pretty ? 2 : 0);
var logError = (message, ...args) => {
  console.error(`[infra:files] ${message}`, ...args);
};
// Resolve `<cwd>/<dir>/<file>.json`, or — in lock mode — a uniquely named
// temp file. NOTE(review): lock files always land under data/cache
// regardless of `dir`; confirm that directory exists at runtime.
var getPath = (dir, file, lock = false) => {
  if (!lock) {
    return path.join(process.cwd(), dir, `${file}.json`);
  }
  return path.join(
    process.cwd(),
    "data",
    "cache",
    `${file}.lock.${randomUUID()}.json`
  );
};
var getDir = (dir) => path.join(process.cwd(), dir);
// List the entries of `dir` relative to the process working directory.
var getFiles = async (dir) => {
  return await fs.readdir(getDir(dir));
};
// Read and JSON-parse `<dir>/<file>.json`; return `fallback` when the file
// is missing or corrupt. Corrupt files are deleted best-effort.
var getFile = async (dir, file, fallback = []) => {
  const fullPath = getPath(dir, file);
  try {
    await fs.access(fullPath);
  } catch {
    return fallback;
  }
  try {
    const fileContents = await fs.readFile(fullPath, "utf8");
    return JSON.parse(fileContents);
  } catch (error) {
    logError("failed data file: %s", error);
    // Fix: the unlink was previously unguarded, so a failed removal
    // (e.g. EPERM) escaped the catch and masked the fallback return.
    try {
      await fs.unlink(fullPath);
    } catch (unlinkError) {
      logError("failed to remove corrupt file %s: %s", fullPath, unlinkError);
    }
    return fallback;
  }
};
// Write `data` as JSON. With `lock`, write to a unique temp file first and
// rename into place (atomic on POSIX when both paths share a filesystem —
// note the temp file lives under data/cache, which may differ from `dir`).
var setFile = async (dir, file, data, options = {}) => {
  const { stringify, lock } = {
    ...DEFAULT_OPTIONS,
    ...options
  };
  const fullPath = getPath(dir, file);
  const lockFullPath = getPath(dir, file, true);
  try {
    if (!lock) {
      await fs.writeFile(fullPath, toJson(data, stringify));
      return;
    }
    await fs.writeFile(lockFullPath, toJson(data, stringify));
    await fs.rename(lockFullPath, fullPath);
  } catch (error) {
    logError("failed to write file %s: %s", fullPath, error);
  }
};
export {
  getFile,
  getFiles,
  setFile
};
|
package/dist/http.d.mts
ADDED
package/dist/http.d.ts
ADDED
package/dist/http.js
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
9
|
+
};
|
|
10
|
+
var __copyProps = (to, from, except, desc) => {
|
|
11
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
12
|
+
for (let key of __getOwnPropNames(from))
|
|
13
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
14
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
15
|
+
}
|
|
16
|
+
return to;
|
|
17
|
+
};
|
|
18
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
|
|
20
|
+
// src/http.ts
|
|
21
|
+
var http_exports = {};
|
|
22
|
+
__export(http_exports, {
|
|
23
|
+
fetchWithRetry: () => fetchWithRetry
|
|
24
|
+
});
|
|
25
|
+
module.exports = __toCommonJS(http_exports);
|
|
26
|
+
var sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
|
|
27
|
+
var parseRetryAfterMs = (value) => {
|
|
28
|
+
if (!value) return 0;
|
|
29
|
+
const seconds = Number(value);
|
|
30
|
+
if (Number.isFinite(seconds)) {
|
|
31
|
+
return Math.max(0, seconds * 1e3);
|
|
32
|
+
}
|
|
33
|
+
const dateMs = Date.parse(value);
|
|
34
|
+
if (Number.isFinite(dateMs)) {
|
|
35
|
+
return Math.max(0, dateMs - Date.now());
|
|
36
|
+
}
|
|
37
|
+
return 0;
|
|
38
|
+
};
|
|
39
|
+
var fetchWithRetry = async (url, options = {}) => {
|
|
40
|
+
const {
|
|
41
|
+
attempts = 5,
|
|
42
|
+
baseDelayMs = 300,
|
|
43
|
+
maxDelayMs = 5e3,
|
|
44
|
+
...fetchOptions
|
|
45
|
+
} = options;
|
|
46
|
+
let lastError;
|
|
47
|
+
for (let i = 0; i < attempts; i++) {
|
|
48
|
+
try {
|
|
49
|
+
const response = await fetch(url, fetchOptions);
|
|
50
|
+
if (response.ok) {
|
|
51
|
+
return response;
|
|
52
|
+
}
|
|
53
|
+
const shouldRetry = response.status === 429 || response.status >= 500;
|
|
54
|
+
if (!shouldRetry || i === attempts - 1) {
|
|
55
|
+
return response;
|
|
56
|
+
}
|
|
57
|
+
const retryAfterMs = parseRetryAfterMs(
|
|
58
|
+
response.headers.get("retry-after")
|
|
59
|
+
);
|
|
60
|
+
const backoffMs = Math.min(maxDelayMs, baseDelayMs * 2 ** i);
|
|
61
|
+
await sleep(Math.max(retryAfterMs, backoffMs));
|
|
62
|
+
} catch (error) {
|
|
63
|
+
lastError = error;
|
|
64
|
+
if (i === attempts - 1) {
|
|
65
|
+
throw error;
|
|
66
|
+
}
|
|
67
|
+
const backoffMs = Math.min(maxDelayMs, baseDelayMs * 2 ** i);
|
|
68
|
+
await sleep(backoffMs);
|
|
69
|
+
}
|
|
70
|
+
}
|
|
71
|
+
if (lastError) {
|
|
72
|
+
throw lastError;
|
|
73
|
+
}
|
|
74
|
+
return fetch(url, fetchOptions);
|
|
75
|
+
};
|
|
76
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
77
|
+
0 && (module.exports = {
|
|
78
|
+
fetchWithRetry
|
|
79
|
+
});
|
package/dist/http.mjs
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
// src/http.ts

// Promise-based delay.
var sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

// Convert a Retry-After header value to milliseconds. Supports both header
// forms — delta-seconds ("120") and an HTTP date — and returns 0 when the
// value is absent, unparsable, or already in the past.
var parseRetryAfterMs = (value) => {
  if (!value) return 0;
  const asSeconds = Number(value);
  if (Number.isFinite(asSeconds)) {
    return Math.max(0, asSeconds * 1e3);
  }
  const asDateMs = Date.parse(value);
  return Number.isFinite(asDateMs) ? Math.max(0, asDateMs - Date.now()) : 0;
};

// fetch() with up to `attempts` tries and capped exponential backoff
// (baseDelayMs * 2^attempt, clamped to maxDelayMs). Retries network errors
// and 429 / 5xx responses, honoring Retry-After when it exceeds the backoff.
// Non-retryable statuses and the final attempt's response are returned
// unchanged; the final attempt's network error is rethrown.
var fetchWithRetry = async (url, options = {}) => {
  const {
    attempts = 5,
    baseDelayMs = 300,
    maxDelayMs = 5e3,
    ...fetchOptions
  } = options;
  let lastError;
  for (let attempt = 0; attempt < attempts; attempt++) {
    const isFinalAttempt = attempt === attempts - 1;
    const backoffMs = Math.min(maxDelayMs, baseDelayMs * 2 ** attempt);
    try {
      const response = await fetch(url, fetchOptions);
      if (response.ok) return response;
      const retryable = response.status === 429 || response.status >= 500;
      if (!retryable || isFinalAttempt) return response;
      const retryAfterMs = parseRetryAfterMs(
        response.headers.get("retry-after")
      );
      // Wait at least the backoff, longer if the server asked for it.
      await sleep(Math.max(retryAfterMs, backoffMs));
    } catch (error) {
      lastError = error;
      if (isFinalAttempt) throw error;
      await sleep(backoffMs);
    }
  }
  // Only reachable when attempts <= 0: fall back to a single plain fetch.
  if (lastError) throw lastError;
  return fetch(url, fetchOptions);
};

export {
  fetchWithRetry
};
|
package/dist/logger.d.ts
ADDED
package/dist/logger.js
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
9
|
+
};
|
|
10
|
+
var __copyProps = (to, from, except, desc) => {
|
|
11
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
12
|
+
for (let key of __getOwnPropNames(from))
|
|
13
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
14
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
15
|
+
}
|
|
16
|
+
return to;
|
|
17
|
+
};
|
|
18
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
|
|
20
|
+
// src/logger.ts
|
|
21
|
+
var logger_exports = {};
|
|
22
|
+
__export(logger_exports, {
|
|
23
|
+
logger: () => logger
|
|
24
|
+
});
|
|
25
|
+
module.exports = __toCommonJS(logger_exports);
|
|
26
|
+
var import_winston = require("winston");
|
|
27
|
+
var baseFormat = import_winston.format.combine(
|
|
28
|
+
import_winston.format.timestamp({ format: "DD MMM HH:mm:ss" }),
|
|
29
|
+
import_winston.format.splat()
|
|
30
|
+
);
|
|
31
|
+
var logger = (0, import_winston.createLogger)({
|
|
32
|
+
format: baseFormat,
|
|
33
|
+
transports: [
|
|
34
|
+
new import_winston.transports.Console({
|
|
35
|
+
format: import_winston.format.combine(
|
|
36
|
+
import_winston.format.colorize({ all: true }),
|
|
37
|
+
import_winston.format.printf(
|
|
38
|
+
({ level, timestamp, message }) => `${level}: ${timestamp}: ${message}`
|
|
39
|
+
)
|
|
40
|
+
)
|
|
41
|
+
}),
|
|
42
|
+
new import_winston.transports.File({
|
|
43
|
+
filename: "service.log",
|
|
44
|
+
format: import_winston.format.combine(
|
|
45
|
+
import_winston.format.uncolorize(),
|
|
46
|
+
import_winston.format.printf(
|
|
47
|
+
({ level, timestamp, message }) => `${level}: ${timestamp}: ${message}`
|
|
48
|
+
)
|
|
49
|
+
)
|
|
50
|
+
}),
|
|
51
|
+
new import_winston.transports.File({
|
|
52
|
+
filename: "error.log",
|
|
53
|
+
format: import_winston.format.combine(
|
|
54
|
+
import_winston.format.uncolorize(),
|
|
55
|
+
import_winston.format.printf(
|
|
56
|
+
({ level, timestamp, message }) => `${level}: ${timestamp}: ${message}`
|
|
57
|
+
)
|
|
58
|
+
),
|
|
59
|
+
level: "error"
|
|
60
|
+
})
|
|
61
|
+
]
|
|
62
|
+
});
|
|
63
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
64
|
+
0 && (module.exports = {
|
|
65
|
+
logger
|
|
66
|
+
});
|
package/dist/logger.mjs
ADDED
package/dist/ml.d.mts
ADDED
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
declare const toFileToken: (value: string) => string;
|
|
2
|
+
declare const getMlChunkFilePath: (strategyName: string, chunkId: string, outDir?: string) => string;
|
|
3
|
+
declare const appendMlDatasetRow: (params: {
|
|
4
|
+
strategyName: string;
|
|
5
|
+
chunkId: string;
|
|
6
|
+
row: Record<string, number | string | null>;
|
|
7
|
+
outDir?: string;
|
|
8
|
+
}) => Promise<string>;
|
|
9
|
+
declare const flushMlDatasetWriter: (filePath: string) => Promise<void>;
|
|
10
|
+
declare const closeMlDatasetWriter: (filePath: string) => Promise<void>;
|
|
11
|
+
declare const closeAllMlDatasetWriters: () => Promise<void>;
|
|
12
|
+
declare const listMlChunkFiles: (params: {
|
|
13
|
+
strategyName: string;
|
|
14
|
+
outDir?: string;
|
|
15
|
+
}) => Promise<string[]>;
|
|
16
|
+
declare const mergeJsonlFiles: (params: {
|
|
17
|
+
filePaths: string[];
|
|
18
|
+
outPath: string;
|
|
19
|
+
}) => Promise<void>;
|
|
20
|
+
|
|
21
|
+
type MlPredictResponse = {
|
|
22
|
+
probability: number;
|
|
23
|
+
threshold: number;
|
|
24
|
+
passed: boolean;
|
|
25
|
+
};
|
|
26
|
+
type MlPredictParams = {
|
|
27
|
+
strategy: string;
|
|
28
|
+
features: Record<string, number>;
|
|
29
|
+
threshold: number;
|
|
30
|
+
grpcAddress?: string;
|
|
31
|
+
};
|
|
32
|
+
declare const buildMlFeatures: (row: Record<string, unknown>) => Record<string, number>;
|
|
33
|
+
declare const fetchMlThreshold: ({ strategy, features, threshold, grpcAddress, }: MlPredictParams) => Promise<MlPredictResponse | null>;
|
|
34
|
+
|
|
35
|
+
type MlSignalRecord = {
|
|
36
|
+
signal: MlSignalPayload;
|
|
37
|
+
context?: {
|
|
38
|
+
strategyConfig?: Record<string, unknown>;
|
|
39
|
+
strategyName?: string;
|
|
40
|
+
symbol?: string;
|
|
41
|
+
entryTimestamp?: number;
|
|
42
|
+
};
|
|
43
|
+
};
|
|
44
|
+
type MlSignalPayload = {
|
|
45
|
+
signalId?: string;
|
|
46
|
+
strategy?: string;
|
|
47
|
+
symbol?: string;
|
|
48
|
+
direction?: string;
|
|
49
|
+
timestamp?: number;
|
|
50
|
+
interval?: number | string;
|
|
51
|
+
prices?: {
|
|
52
|
+
currentPrice?: number;
|
|
53
|
+
takeProfitPrice?: number;
|
|
54
|
+
stopLossPrice?: number;
|
|
55
|
+
riskRatio?: number;
|
|
56
|
+
};
|
|
57
|
+
indicators?: Record<string, unknown>;
|
|
58
|
+
additionalIndicators?: Record<string, unknown>;
|
|
59
|
+
figures?: {
|
|
60
|
+
trendLine?: {
|
|
61
|
+
mode?: 'highs' | 'lows' | string;
|
|
62
|
+
distance?: number;
|
|
63
|
+
alpha?: unknown[];
|
|
64
|
+
points?: Array<{
|
|
65
|
+
value?: number;
|
|
66
|
+
timestamp?: number;
|
|
67
|
+
}>;
|
|
68
|
+
touches?: Array<{
|
|
69
|
+
value?: number;
|
|
70
|
+
timestamp?: number;
|
|
71
|
+
}>;
|
|
72
|
+
};
|
|
73
|
+
};
|
|
74
|
+
};
|
|
75
|
+
type MlResultRecord = {
|
|
76
|
+
profit?: number;
|
|
77
|
+
direction?: 'LONG' | 'SHORT';
|
|
78
|
+
symbol?: string;
|
|
79
|
+
};
|
|
80
|
+
declare const buildMlTrainingRow: (signalRecord: MlSignalRecord, resultRecord: MlResultRecord | null) => Record<string, number | string | null>;
|
|
81
|
+
declare const trimMlTrainingRowWindows: (row: Record<string, number | string | null>, keep?: 5) => Record<string, number | string | null>;
|
|
82
|
+
|
|
83
|
+
declare const toIsoUtcOrNull: (value: number | null | undefined) => string | null;
|
|
84
|
+
declare const isDerivedDatasetFileName: (name: string) => boolean;
|
|
85
|
+
declare const computeWindowBoundaries: ({ maxLabeledTs, maxTrainTs, testDays, trainRecentDays, walkForwardFolds, }: {
|
|
86
|
+
maxLabeledTs: number;
|
|
87
|
+
maxTrainTs: number;
|
|
88
|
+
testDays: number;
|
|
89
|
+
trainRecentDays: number;
|
|
90
|
+
walkForwardFolds: number;
|
|
91
|
+
}) => {
|
|
92
|
+
holdoutCutoffMs: number;
|
|
93
|
+
holdoutTrainStartMs: number;
|
|
94
|
+
wfStartMs: number;
|
|
95
|
+
prodStartMs: number;
|
|
96
|
+
folds: {
|
|
97
|
+
fold: number;
|
|
98
|
+
startTs: number;
|
|
99
|
+
endTs: number;
|
|
100
|
+
}[];
|
|
101
|
+
};
|
|
102
|
+
|
|
103
|
+
type LookaheadViolation = {
|
|
104
|
+
key: string;
|
|
105
|
+
entryTimestampMs: number;
|
|
106
|
+
featureTimestampMs: number;
|
|
107
|
+
};
|
|
108
|
+
declare const isTimestampFeatureKey: (key: string) => boolean;
|
|
109
|
+
declare const findLookaheadViolations: (row: Record<string, unknown>) => LookaheadViolation[];
|
|
110
|
+
|
|
111
|
+
type MlSeriesAnalysisCandle = {
|
|
112
|
+
open: number;
|
|
113
|
+
high: number;
|
|
114
|
+
low: number;
|
|
115
|
+
close: number;
|
|
116
|
+
volume: number;
|
|
117
|
+
timestamp: number;
|
|
118
|
+
};
|
|
119
|
+
type MlSeriesAnalysisInput = {
|
|
120
|
+
candles: MlSeriesAnalysisCandle[];
|
|
121
|
+
benchmarkCandles?: MlSeriesAnalysisCandle[];
|
|
122
|
+
indicators?: {
|
|
123
|
+
atrPct?: number[];
|
|
124
|
+
price1hPcnt?: number[];
|
|
125
|
+
price24hPcnt?: number[];
|
|
126
|
+
macdHistogram?: number[];
|
|
127
|
+
maFast?: number[];
|
|
128
|
+
maSlow?: number[];
|
|
129
|
+
};
|
|
130
|
+
};
|
|
131
|
+
type MlSeriesAnalysisSummary = Record<string, number>;
|
|
132
|
+
declare const analyzeMlSeriesWindow: (input: MlSeriesAnalysisInput) => MlSeriesAnalysisSummary;
|
|
133
|
+
declare const buildMlSeriesAlignment: (left: MlSeriesAnalysisSummary | undefined, right: MlSeriesAnalysisSummary | undefined) => MlSeriesAnalysisSummary;
|
|
134
|
+
|
|
135
|
+
export { type LookaheadViolation, type MlPredictParams, type MlPredictResponse, type MlResultRecord, type MlSeriesAnalysisCandle, type MlSeriesAnalysisSummary, type MlSignalRecord, analyzeMlSeriesWindow, appendMlDatasetRow, buildMlFeatures, buildMlSeriesAlignment, buildMlTrainingRow, closeAllMlDatasetWriters, closeMlDatasetWriter, computeWindowBoundaries, fetchMlThreshold, findLookaheadViolations, flushMlDatasetWriter, getMlChunkFilePath, isDerivedDatasetFileName, isTimestampFeatureKey, listMlChunkFiles, mergeJsonlFiles, toFileToken, toIsoUtcOrNull, trimMlTrainingRowWindows };
|