dukascopy-node-plus 1.1.0 → 1.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/index.js +1 -1
- package/dist/esm/chunk-476J4KQE.js +35 -0
- package/dist/esm/{chunk-DWZJECKF.js → chunk-7MFC3GUP.js} +1 -1
- package/dist/esm/chunk-MTHVPJMW.js +113 -0
- package/dist/esm/chunk-NBUGCZCI.js +164 -0
- package/dist/esm/cli/index.js +1 -1
- package/dist/esm/index.example.nostream.js +38 -0
- package/dist/esm/index.example.stream.js +41 -0
- package/dist/esm/index.js +9 -99
- package/dist/index.d.mts +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.example.nostream.js +13855 -0
- package/dist/index.example.stream.d.mts +2 -0
- package/dist/index.example.stream.d.ts +2 -0
- package/dist/{index.example.js → index.example.stream.js} +149 -129
- package/dist/index.js +132 -125
- package/package.json +2 -2
- package/dist/esm/chunk-KWYYNULA.js +0 -184
- package/dist/esm/index.example.js +0 -27
- /package/dist/{index.example.d.mts → index.example.nostream.d.mts} +0 -0
- /package/dist/{index.example.d.ts → index.example.nostream.d.ts} +0 -0
|
@@ -1,184 +0,0 @@
|
|
|
1
|
-
import {
|
|
2
|
-
BufferFetcher,
|
|
3
|
-
CacheManager,
|
|
4
|
-
formatBytes,
|
|
5
|
-
generateUrls,
|
|
6
|
-
normaliseDates,
|
|
7
|
-
processData,
|
|
8
|
-
validateConfigNode
|
|
9
|
-
} from "./chunk-D3M3SVMW.js";
|
|
10
|
-
|
|
11
|
-
// src/output-formatter/index.ts
// Positional column names: candle rows are OHLCV, tick rows are bid/ask pairs.
var candleHeaders = ["timestamp", "open", "high", "low", "close", "volume"];
var tickHeaders = ["timestamp", "askPrice", "bidPrice", "askVolume", "bidVolume"];

/**
 * Converts rows of processed price data into the requested output format.
 *
 * @param {Object} params
 * @param {Array<Array<number>>} params.processedData - rows of positional values
 * @param {string} params.format - "json" | "csv" | anything else passes rows through
 * @param {string} params.timeframe - "tick" selects tick headers, otherwise candle headers
 * @returns {Array|string} objects for "json", a single CSV string for "csv",
 *                         or the untouched rows for any other format
 */
function formatOutput({
  processedData,
  format,
  timeframe
}) {
  // Nothing to format — always hand back an empty array regardless of format.
  if (processedData.length === 0) {
    return [];
  }
  const bodyHeaders = timeframe === "tick" /* tick */ ? tickHeaders : candleHeaders;
  if (format === "json" /* json */) {
    // Zip each row's values with their positional header names.
    return processedData.map(
      (row) => Object.fromEntries(row.map((value, i) => [bodyHeaders[i], value]))
    );
  }
  if (format === "csv" /* csv */) {
    // Only emit header columns actually present in the data (first row decides),
    // e.g. candles fetched without volumes have no "volume" column.
    const presentHeaders = bodyHeaders.filter((_, i) => processedData[0][i] !== void 0);
    const allRows = [presentHeaders, ...processedData];
    return allRows.map((row) => row.join(",")).join("\n");
  }
  // "array" (or unknown) format: return the rows untouched.
  return processedData;
}
|
|
40
|
-
|
|
41
|
-
// src/getHistoricalRatesToStream.ts
|
|
42
|
-
import debug from "debug";
|
|
43
|
-
import { Readable, Transform } from "stream";
|
|
44
|
-
import { pipeline } from "stream/promises";
|
|
45
|
-
var DEBUG_NAMESPACE = "dukascopy-node";
/**
 * Fetches historical rates and returns an object-mode Readable that emits
 * each row as soon as its source file has been fetched and processed.
 * Validation failures and fetch/processing errors are surfaced via the
 * stream's "error" event; the stream is then terminated with push(null).
 *
 * NOTE(review): all work happens inside read(), which Node's stream machinery
 * may invoke more than once as the consumer drains — there is no guard, so a
 * second call would restart validation, fetching and the pipeline. Looks
 * suspect; confirm intended usage before relying on this for multi-file ranges.
 */
async function getHistoricalRatesToStream(config) {
  const stream = new Readable({
    objectMode: true,
    async read(_size) {
      try {
        // Validate and normalise the user-supplied config first.
        const { input, isValid, validationErrors } = validateConfigNode(config);
        debug(`${DEBUG_NAMESPACE}:config`)("%O", {
          input,
          isValid,
          validationErrors
        });
        if (!isValid) {
          // Invalid config: report the errors and end the stream.
          this.emit("error", { validationErrors });
          this.push(null);
          return;
        }
        const {
          instrument,
          dates: { from, to },
          timeframe,
          priceType,
          volumes,
          volumeUnits,
          utcOffset,
          ignoreFlats,
          format,
          batchSize,
          pauseBetweenBatchesMs,
          useCache,
          cacheFolderPath,
          retryCount,
          pauseBetweenRetriesMs,
          retryOnEmpty
        } = input;
        // Clamp/align the requested range to what the instrument/timeframe supports.
        const [startDate, endDate] = normaliseDates({
          instrument,
          startDate: from,
          endDate: to,
          timeframe,
          utcOffset
        });
        // Millisecond bounds used below to filter rows to the requested window.
        const [startDateMs, endDateMs] = [+startDate, +endDate];
        // Per-fetch debug logging only when the DEBUG env var is set.
        const onItemFetch = process.env.DEBUG ? (url, buffer, isCacheHit) => {
          debug(`${DEBUG_NAMESPACE}:fetcher`)(
            url,
            `| ${formatBytes(buffer.length)} |`,
            `${isCacheHit ? "cache" : "network"}`
          );
        } : void 0;
        const bufferFetcher = new BufferFetcher({
          batchSize,
          pauseBetweenBatchesMs,
          cacheManager: useCache ? new CacheManager({ cacheFolderPath }) : void 0,
          retryCount,
          pauseBetweenRetriesMs,
          onItemFetch,
          retryOnEmpty
        });
        // firstLine: whether the CSV header row still needs to be emitted.
        let firstLine = true;
        let urlsProcessed = 0;
        const urlsforFetchingData = generateUrls({
          instrument: input.instrument,
          timeframe: input.timeframe,
          priceType: input.priceType,
          startDate,
          endDate
        });
        // Fetch and process each source URL in sequence, pushing rows as they arrive.
        await pipeline(
          Readable.from(urlsforFetchingData),
          new Transform({
            objectMode: true,
            // Arrow function so `this` inside refers to the Readable, not the Transform.
            transform: async (url, _, callback) => {
              const bufferObject = {
                url,
                buffer: await bufferFetcher.fetchBuffer(url),
                // NOTE(review): flags every item as a cache hit whenever caching is
                // enabled, regardless of whether this buffer actually came from cache.
                isCacheHit: useCache
              };
              try {
                const processedData = processData({
                  instrument: input.instrument,
                  requestedTimeframe: input.timeframe,
                  bufferObjects: [bufferObject],
                  priceType: input.priceType,
                  volumes: input.volumes,
                  volumeUnits: input.volumeUnits,
                  ignoreFlats: input.ignoreFlats
                });
                processedData.forEach((item) => {
                  const [timestamp] = item;
                  // Keep only rows inside the requested [start, end) window.
                  if (timestamp && timestamp >= startDateMs && timestamp < endDateMs) {
                    if (input.format === "array" /* array */) {
                      this.push(item);
                    } else {
                      const bodyHeaders = timeframe === "tick" /* tick */ ? tickHeaders : candleHeaders;
                      if (format === "json" /* json */) {
                        // Zip positional values with header names into an object.
                        const data = item.reduce((all, item2, i) => {
                          const name = bodyHeaders[i];
                          all[name] = item2;
                          return all;
                        }, {});
                        this.push(data);
                      } else if (format === "csv" /* csv */) {
                        if (firstLine) {
                          // Emit the CSV header once, before the first data row.
                          const csvHeaders = bodyHeaders.join(",");
                          this.push(csvHeaders);
                          firstLine = false;
                        }
                        this.push(item.join(","));
                      }
                    }
                  }
                });
                callback();
              } catch (err) {
                // Propagate processing errors through the pipeline.
                callback(err);
              }
            }
          })
        ).then(() => {
          // NOTE(review): pipeline() resolves once after ALL urls have flowed, so
          // urlsProcessed increments to 1 here; with more than one url this equality
          // never holds and push(null) is skipped — verify end-of-stream behaviour.
          if (++urlsProcessed === urlsforFetchingData.length) {
            this.push(null);
          }
        }).catch((err) => {
          this.emit("error", err);
          this.push(null);
        });
      } catch (err) {
        // Any synchronous/validation failure ends the stream with an error.
        this.emit("error", err);
        this.push(null);
      }
    }
  });
  return stream;
}
|
|
180
|
-
|
|
181
|
-
export {
|
|
182
|
-
formatOutput,
|
|
183
|
-
getHistoricalRatesToStream
|
|
184
|
-
};
|
|
@@ -1,27 +0,0 @@
|
|
|
1
|
-
import {
|
|
2
|
-
getHistoricalRatesToStream
|
|
3
|
-
} from "./chunk-KWYYNULA.js";
|
|
4
|
-
import "./chunk-D3M3SVMW.js";
|
|
5
|
-
|
|
6
|
-
// src/index.example.ts
// Demo script: stream one month of daily EUR/USD candles as CSV lines to stdout.
(async () => {
  try {
    const stream = await getHistoricalRatesToStream({
      instrument: "eurusd",
      dates: {
        from: /* @__PURE__ */ new Date("2021-02-01"),
        to: /* @__PURE__ */ new Date("2021-03-01")
      },
      timeframe: "d1",
      format: "csv"
    });
    // Print each emitted CSV line, then a marker when the stream finishes.
    stream.on("data", (chunk) => {
      console.log(chunk);
    });
    stream.on("end", () => {
      console.log("end");
    });
  } catch (error) {
    console.log("error", error);
  }
})();
|
|
File without changes
|
|
File without changes
|