tardis-dev 16.2.1 → 16.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. package/dist/computable/computable.js.map +1 -1
  2. package/dist/handy.d.ts +1 -0
  3. package/dist/handy.d.ts.map +1 -1
  4. package/dist/handy.js +5 -2
  5. package/dist/handy.js.map +1 -1
  6. package/dist/instrumentinfo.js +1 -1
  7. package/dist/instrumentinfo.js.map +1 -1
  8. package/dist/mappers/binance.d.ts.map +1 -1
  9. package/dist/mappers/binance.js.map +1 -1
  10. package/dist/mappers/bybit.d.ts.map +1 -1
  11. package/dist/mappers/bybit.js.map +1 -1
  12. package/dist/mappers/gateio.d.ts.map +1 -1
  13. package/dist/mappers/gateio.js.map +1 -1
  14. package/dist/mappers/gateiofutures.js.map +1 -1
  15. package/dist/mappers/huobi.d.ts.map +1 -1
  16. package/dist/mappers/huobi.js.map +1 -1
  17. package/dist/mappers/index.js.map +1 -1
  18. package/dist/mappers/kucoin.d.ts.map +1 -1
  19. package/dist/mappers/kucoin.js.map +1 -1
  20. package/dist/mappers/okex.d.ts.map +1 -1
  21. package/dist/mappers/okex.js.map +1 -1
  22. package/dist/mappers/phemex.d.ts.map +1 -1
  23. package/dist/mappers/phemex.js +11 -2
  24. package/dist/mappers/phemex.js.map +1 -1
  25. package/dist/realtimefeeds/bitnomial.d.ts.map +1 -1
  26. package/dist/realtimefeeds/bitnomial.js.map +1 -1
  27. package/dist/realtimefeeds/coinbase.d.ts.map +1 -1
  28. package/dist/realtimefeeds/coinbase.js.map +1 -1
  29. package/dist/realtimefeeds/hitbtc.d.ts.map +1 -1
  30. package/dist/realtimefeeds/hitbtc.js.map +1 -1
  31. package/dist/realtimefeeds/huobi.d.ts.map +1 -1
  32. package/dist/realtimefeeds/huobi.js.map +1 -1
  33. package/dist/realtimefeeds/kucoinfutures.d.ts.map +1 -1
  34. package/dist/realtimefeeds/kucoinfutures.js.map +1 -1
  35. package/dist/realtimefeeds/realtimefeed.d.ts.map +1 -1
  36. package/dist/realtimefeeds/realtimefeed.js.map +1 -1
  37. package/dist/realtimefeeds/serum.d.ts.map +1 -1
  38. package/dist/realtimefeeds/serum.js.map +1 -1
  39. package/dist/replay.d.ts.map +1 -1
  40. package/dist/replay.js +11 -7
  41. package/dist/replay.js.map +1 -1
  42. package/dist/worker.d.ts +1 -0
  43. package/dist/worker.d.ts.map +1 -1
  44. package/dist/worker.js +58 -24
  45. package/dist/worker.js.map +1 -1
  46. package/package.json +2 -2
  47. package/src/computable/computable.ts +14 -11
  48. package/src/handy.ts +34 -20
  49. package/src/instrumentinfo.ts +1 -1
  50. package/src/mappers/binance.ts +16 -8
  51. package/src/mappers/bybit.ts +21 -16
  52. package/src/mappers/gateio.ts +4 -1
  53. package/src/mappers/gateiofutures.ts +2 -2
  54. package/src/mappers/huobi.ts +8 -6
  55. package/src/mappers/index.ts +2 -2
  56. package/src/mappers/kucoin.ts +4 -1
  57. package/src/mappers/okex.ts +24 -6
  58. package/src/mappers/phemex.ts +14 -4
  59. package/src/realtimefeeds/bitnomial.ts +14 -11
  60. package/src/realtimefeeds/coinbase.ts +14 -11
  61. package/src/realtimefeeds/hitbtc.ts +10 -7
  62. package/src/realtimefeeds/huobi.ts +10 -2
  63. package/src/realtimefeeds/kucoinfutures.ts +4 -1
  64. package/src/realtimefeeds/realtimefeed.ts +5 -1
  65. package/src/realtimefeeds/serum.ts +14 -11
  66. package/src/replay.ts +14 -10
  67. package/src/worker.ts +68 -27
package/src/replay.ts CHANGED
@@ -35,14 +35,14 @@ export async function* replay<T extends Exchange, U extends boolean = false, Z e
35
35
  ? { localTimestamp: Buffer; message: Buffer } | undefined
36
36
  : { localTimestamp: Date; message: any } | undefined
37
37
  : U extends true
38
- ? { localTimestamp: Buffer; message: Buffer }
39
- : { localTimestamp: Date; message: any }
38
+ ? { localTimestamp: Buffer; message: Buffer }
39
+ : { localTimestamp: Date; message: any }
40
40
  > {
41
41
  validateReplayOptions(exchange, from, to, filters)
42
42
 
43
43
  const fromDate = parseAsUTCDate(from)
44
44
  const toDate = parseAsUTCDate(to)
45
- const cachedSlicePaths = new Map<string, string>()
45
+ const cachedSlicePaths = new Map<string, { slicePath: string; sliceSize: number }>()
46
46
  let replayError
47
47
  debug('replay for exchange: %s started - from: %s, to: %s, filters: %o', exchange, fromDate.toISOString(), toDate.toISOString(), filters)
48
48
 
@@ -65,7 +65,10 @@ export async function* replay<T extends Exchange, U extends boolean = false, Z e
65
65
  const worker = new ReliableWorker(payload)
66
66
 
67
67
  worker.on('message', (message: WorkerMessage) => {
68
- cachedSlicePaths.set(message.sliceKey, message.slicePath)
68
+ cachedSlicePaths.set(message.sliceKey, {
69
+ slicePath: message.slicePath,
70
+ sliceSize: message.sliceSize
71
+ })
69
72
  })
70
73
 
71
74
  worker.on('error', (err) => {
@@ -100,16 +103,16 @@ export async function* replay<T extends Exchange, U extends boolean = false, Z e
100
103
 
101
104
  debug('getting slice: %s, exchange: %s', sliceKey, exchange)
102
105
 
103
- let cachedSlicePath
104
- while (cachedSlicePath === undefined) {
105
- cachedSlicePath = cachedSlicePaths.get(sliceKey)
106
+ let cachedSlice
107
+ while (cachedSlice === undefined) {
108
+ cachedSlice = cachedSlicePaths.get(sliceKey)
106
109
 
107
110
  // if something went wrong(network issue, auth issue, gunzip issue etc)
108
111
  if (replayError !== undefined) {
109
112
  throw replayError
110
113
  }
111
114
 
112
- if (cachedSlicePath === undefined) {
115
+ if (cachedSlice === undefined) {
113
116
  // if response for requested date is not ready yet wait 100ms and try again
114
117
  debug('waiting for slice: %s, exchange: %s', sliceKey, exchange)
115
118
  await wait(100)
@@ -117,6 +120,7 @@ export async function* replay<T extends Exchange, U extends boolean = false, Z e
117
120
  }
118
121
 
119
122
  // response is a path to a file on disk, let's read it as a stream
123
+ const { slicePath: cachedSlicePath, sliceSize } = cachedSlice
120
124
  const isZstdSlice = cachedSlicePath.endsWith('.zst')
121
125
  const linesStream = createReadStream(cachedSlicePath, { highWaterMark: CHUNK_SIZE })
122
126
  // decompress it while preserving the on-disk cache in the negotiated wire format
@@ -185,8 +189,8 @@ export async function* replay<T extends Exchange, U extends boolean = false, Z e
185
189
  if (autoCleanup) {
186
190
  await cleanupSlice(cachedSlicePath)
187
191
  }
188
- // move one minute forward
189
- currentSliceDate.setUTCMinutes(currentSliceDate.getUTCMinutes() + 1)
192
+ // move by the number of minutes covered by this cached response
193
+ currentSliceDate.setUTCMinutes(currentSliceDate.getUTCMinutes() + sliceSize)
190
194
  }
191
195
 
192
196
  debug(
package/src/worker.ts CHANGED
@@ -7,6 +7,8 @@ import type { DataFeedCompression } from './options.ts'
7
7
  import { Exchange, Filter } from './types.ts'
8
8
  const debug = dbg('tardis-dev')
9
9
 
10
+ const DEFAULT_DATA_FEED_SLICE_SIZE = 1
11
+
10
12
  if (isMainThread) {
11
13
  debug('current worker is not meant to run in main thread')
12
14
  } else {
@@ -77,15 +79,32 @@ async function getDataFeedSlices(payload: WorkerJobPayload) {
77
79
  }
78
80
  } else {
79
81
  // fetch last slice - it will tell us if user has access to the end of requested date range and data is available
80
- await getDataFeedSlice(payload, minutesCountToFetch - 1, filters, cacheDir)
82
+ // also fetch it from API to get current suggested slice size headers
83
+ const lastSlice = await getDataFeedSlice(payload, minutesCountToFetch - 1, filters, cacheDir, DEFAULT_DATA_FEED_SLICE_SIZE, false)
81
84
 
82
85
  // fetch first slice - it will tell us if user has access to the beginning of requested date range
83
- await getDataFeedSlice(payload, 0, filters, cacheDir)
86
+ const firstSlice =
87
+ minutesCountToFetch === 1 ? lastSlice : await getDataFeedSlice(payload, 0, filters, cacheDir, DEFAULT_DATA_FEED_SLICE_SIZE, false)
88
+
89
+ const replaySliceSize =
90
+ filters.length === 0 ? DEFAULT_DATA_FEED_SLICE_SIZE : Math.max(firstSlice.suggestedSliceSize, lastSlice.suggestedSliceSize)
91
+ const sliceOffsets: number[] = []
92
+ for (let offset = 1; offset < minutesCountToFetch - 1; offset += replaySliceSize) {
93
+ sliceOffsets.push(offset)
94
+ }
84
95
 
85
96
  // if both beginning and end dates of the range are accessible, fetch all remaining slices concurrently with CONCURRENCY_LIMIT
86
97
  await pMap(
87
- sequence(minutesCountToFetch, 1), // this will produce Iterable sequence from 1 to minutesCountToFetch
88
- (offset) => getDataFeedSlice(payload, offset, filters, cacheDir),
98
+ sliceOffsets,
99
+ async (offset) => {
100
+ let currentOffset = offset
101
+ let remainingSliceSize = Math.min(replaySliceSize, minutesCountToFetch - 1 - offset)
102
+ while (remainingSliceSize > 0) {
103
+ const result = await getDataFeedSlice(payload, currentOffset, filters, cacheDir, remainingSliceSize)
104
+ currentOffset += result.sliceSize
105
+ remainingSliceSize -= result.sliceSize
106
+ }
107
+ },
89
108
  { concurrency: CONCURRENCY_LIMIT }
90
109
  )
91
110
  }
@@ -95,51 +114,73 @@ async function getDataFeedSlice(
95
114
  { exchange, fromDate, endpoint, apiKey, dataFeedCompression, userAgent }: WorkerJobPayload,
96
115
  offset: number,
97
116
  filters: object[],
98
- cacheDir: string
117
+ cacheDir: string,
118
+ requestedSliceSize = DEFAULT_DATA_FEED_SLICE_SIZE,
119
+ useCache = true
99
120
  ) {
100
121
  const sliceTimestamp = addMinutes(fromDate, offset)
101
122
  const sliceKey = sliceTimestamp.toISOString()
102
- const sliceBasePath = `${cacheDir}/${formatDateToPath(sliceTimestamp)}.json`
123
+ const sliceSizeSuffix = requestedSliceSize === DEFAULT_DATA_FEED_SLICE_SIZE ? '' : `.size-${requestedSliceSize}`
124
+ const sliceBasePath = `${cacheDir}/${formatDateToPath(sliceTimestamp)}${sliceSizeSuffix}.json`
103
125
  const zstdSlicePath = `${sliceBasePath}.zst`
104
126
  const gzipSlicePath = `${sliceBasePath}.gz`
105
- const cachedSlicePath = existsSync(zstdSlicePath) ? zstdSlicePath : existsSync(gzipSlicePath) ? gzipSlicePath : undefined
127
+ let cachedSlicePath
128
+ if (useCache) {
129
+ cachedSlicePath = existsSync(zstdSlicePath) ? zstdSlicePath : existsSync(gzipSlicePath) ? gzipSlicePath : undefined
130
+ }
131
+
132
+ if (cachedSlicePath !== undefined) {
133
+ debug('getDataFeedSlice already cached: %s, sliceSize: %d', sliceKey, requestedSliceSize)
134
+ const message: WorkerMessage = {
135
+ sliceKey,
136
+ slicePath: cachedSlicePath,
137
+ sliceSize: requestedSliceSize
138
+ }
139
+ parentPort!.postMessage(message)
140
+ return {
141
+ sliceSize: requestedSliceSize,
142
+ suggestedSliceSize: DEFAULT_DATA_FEED_SLICE_SIZE
143
+ }
144
+ }
106
145
 
107
146
  let url = `${endpoint}/data-feeds/${exchange}?from=${fromDate.toISOString()}&offset=${offset}&compression=${dataFeedCompression}`
147
+ if (requestedSliceSize > DEFAULT_DATA_FEED_SLICE_SIZE) {
148
+ url += `&sliceSize=${requestedSliceSize}`
149
+ }
108
150
 
109
151
  if (filters.length > 0) {
110
152
  url += `&filters=${encodeURIComponent(JSON.stringify(filters))}`
111
153
  }
112
154
 
113
- const slicePath =
114
- cachedSlicePath ||
115
- (
116
- await download({
117
- apiKey,
118
- downloadPath: sliceBasePath,
119
- url,
120
- userAgent,
121
- appendContentEncodingExtension: true,
122
- acceptEncoding: dataFeedCompression === 'gzip' ? 'gzip' : 'zstd, gzip'
123
- })
124
- ).downloadPath
125
-
126
- if (cachedSlicePath === undefined) {
127
- debug('getDataFeedSlice fetched from API and cached, %s', sliceKey)
128
- } else {
129
- debug('getDataFeedSlice already cached: %s', sliceKey)
130
- }
155
+ const downloadResult = await download({
156
+ apiKey,
157
+ downloadPath: sliceBasePath,
158
+ url,
159
+ userAgent,
160
+ appendContentEncodingExtension: true,
161
+ acceptEncoding: dataFeedCompression === 'gzip' ? 'gzip' : 'zstd, gzip'
162
+ })
163
+ const responseSliceSize = Number(downloadResult.headers['x-slice-size'])
164
+ const suggestedSliceSize = Number(downloadResult.headers['x-suggested-slice-size'] ?? DEFAULT_DATA_FEED_SLICE_SIZE)
131
165
 
132
- // everything went well (already cached or successfull cached) let's communicate it to parent thread
166
+ debug('getDataFeedSlice fetched from API and cached, %s, sliceSize: %d', sliceKey, responseSliceSize)
133
167
  const message: WorkerMessage = {
134
168
  sliceKey,
135
- slicePath
169
+ slicePath: downloadResult.downloadPath,
170
+ sliceSize: responseSliceSize
136
171
  }
137
172
  parentPort!.postMessage(message)
173
+
174
+ return {
175
+ sliceSize: responseSliceSize,
176
+ suggestedSliceSize
177
+ }
138
178
  }
139
179
 
140
180
  export type WorkerMessage = {
141
181
  sliceKey: string
142
182
  slicePath: string
183
+ sliceSize: number
143
184
  }
144
185
 
145
186
  export type WorkerJobPayload = {