@speckle/objectloader2 2.24.2 → 2.25.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commonjs/index.js +6 -7
- package/dist/esm/index.js +3 -3
- package/eslint.config.mjs +3 -1
- package/package.json +2 -2
- package/src/helpers/__snapshots__/cachePump.spec.ts.snap +31 -0
- package/src/helpers/__snapshots__/cacheReader.spec.ts.snap +8 -0
- package/src/helpers/aggregateQueue.ts +20 -0
- package/src/helpers/batchedPool.ts +5 -9
- package/src/helpers/batchingQueue.ts +21 -13
- package/src/helpers/cachePump.disposal.spec.ts +49 -0
- package/src/helpers/cachePump.spec.ts +103 -0
- package/src/helpers/cachePump.ts +99 -0
- package/src/helpers/cacheReader.spec.ts +35 -0
- package/src/helpers/cacheReader.ts +64 -0
- package/src/helpers/defermentManager.disposal.spec.ts +28 -0
- package/src/helpers/defermentManager.spec.ts +25 -1
- package/src/helpers/defermentManager.ts +128 -12
- package/src/helpers/deferredBase.ts +44 -6
- package/src/helpers/keyedQueue.ts +45 -0
- package/src/helpers/memoryPump.ts +40 -0
- package/src/helpers/pump.ts +8 -0
- package/src/index.ts +3 -4
- package/src/operations/__snapshots__/objectLoader2.spec.ts.snap +16 -16
- package/src/operations/{__snapshots__ → databases/__snapshots__}/indexedDatabase.spec.ts.snap +0 -21
- package/src/operations/{indexedDatabase.spec.ts → databases/indexedDatabase.spec.ts} +2 -28
- package/src/operations/databases/indexedDatabase.ts +150 -0
- package/src/operations/databases/memoryDatabase.ts +43 -0
- package/src/operations/{__snapshots__ → downloaders/__snapshots__}/serverDownloader.spec.ts.snap +34 -0
- package/src/operations/{memoryDownloader.ts → downloaders/memoryDownloader.ts} +15 -14
- package/src/operations/{serverDownloader.spec.ts → downloaders/serverDownloader.spec.ts} +68 -43
- package/src/operations/{serverDownloader.ts → downloaders/serverDownloader.ts} +92 -38
- package/src/operations/interfaces.ts +11 -12
- package/src/operations/objectLoader2.spec.ts +76 -144
- package/src/operations/objectLoader2.ts +57 -79
- package/src/operations/objectLoader2Factory.ts +56 -0
- package/src/operations/options.ts +18 -37
- package/src/operations/traverser.spec.ts +1 -1
- package/src/operations/traverser.ts +1 -1
- package/src/test/e2e.spec.ts +4 -4
- package/src/types/types.ts +11 -0
- package/src/operations/indexedDatabase.ts +0 -167
- package/src/operations/memoryDatabase.ts +0 -42
package/src/operations/{__snapshots__ → downloaders/__snapshots__}/serverDownloader.spec.ts.snap
RENAMED
@@ -10,6 +10,7 @@ exports[`downloader > add extra header 1`] = `
     "speckle_type": "type",
   },
   "baseId": "id",
+  "size": 0,
 }
 `;
 
@@ -21,6 +22,36 @@ exports[`downloader > download batch of one 1`] = `
       "speckle_type": "type",
     },
     "baseId": "id",
+    "size": 33,
+  },
+]
+`;
+
+exports[`downloader > download batch of three 1`] = `
+[
+  {
+    "base": {
+      "id": "id1",
+      "speckle_type": "type",
+    },
+    "baseId": "id1",
+    "size": 34,
+  },
+  {
+    "base": {
+      "id": "id2",
+      "speckle_type": "type",
+    },
+    "baseId": "id2",
+    "size": 34,
+  },
+  {
+    "base": {
+      "id": "id3",
+      "speckle_type": "type",
+    },
+    "baseId": "id3",
+    "size": 34,
   },
 ]
 `;
@@ -33,6 +64,7 @@ exports[`downloader > download batch of two 1`] = `
       "speckle_type": "type",
     },
     "baseId": "id1",
+    "size": 34,
   },
   {
     "base": {
@@ -40,6 +72,7 @@ exports[`downloader > download batch of two 1`] = `
      "speckle_type": "type",
     },
     "baseId": "id2",
+    "size": 34,
   },
 ]
 `;
@@ -54,5 +87,6 @@ exports[`downloader > download single exists 1`] = `
     "speckle_type": "type",
   },
   "baseId": "id",
+  "size": 0,
 }
 `;
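Note that every snapshot above gains a "size" field, which the downloader now records per item. For orientation, a minimal sketch of the Item shape these snapshots serialize — inferred from the snapshots and the downloader changes further down; the real definition lives in package/src/types/types.ts, which this diff only summarizes as +11 lines:

// Sketch only: assumed shape, not the package's exact type declarations.
interface Base {
  id: string
  speckle_type: string
  __closure?: Record<string, number>
}

interface Item {
  baseId: string
  base: Base
  size?: number // byte length of the serialized base; 0 when fetched as a single root object
}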
package/src/operations/{memoryDownloader.ts → downloaders/memoryDownloader.ts}
RENAMED
@@ -1,24 +1,25 @@
-import …
-import { Base, Item } from '…
-import { Downloader } from '…
+import Queue from '../../helpers/queue.js'
+import { Base, Item } from '../../types/types.js'
+import { Downloader } from '../interfaces.js'
 
 export class MemoryDownloader implements Downloader {
-  #items: …
+  #items: Map<string, Base>
   #rootId: string
-  #results?: …
+  #results?: Queue<Item>
 
-  constructor(
-    rootId: string,
-    items: Record<string, Base>,
-    results?: AsyncGeneratorQueue<Item>
-  ) {
+  constructor(rootId: string, items: Map<string, Base>) {
     this.#rootId = rootId
     this.#items = items
-    this.#results = results
   }
-  initializePool(…
+  initializePool(params: {
+    results: Queue<Item>
+    total: number
+    maxDownloadBatchWait?: number
+  }): void {
+    this.#results = params.results
+  }
   downloadSingle(): Promise<Item> {
-    const root = this.#items…
+    const root = this.#items.get(this.#rootId)
     if (root) {
       return Promise.resolve({ baseId: this.#rootId, base: root })
     }
@@ -28,7 +29,7 @@ export class MemoryDownloader implements Downloader {
     return Promise.resolve()
   }
   add(id: string): void {
-    const base = this.#items…
+    const base = this.#items.get(id)
    if (base) {
       this.#results?.add({ baseId: id, base })
       return
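In short, MemoryDownloader now takes a Map of bases instead of a Record and receives its results queue through initializePool rather than the constructor. A hedged usage sketch of that flow — import paths and the bare-bones results queue below are assumptions for illustration, not the package's API:

// Sketch only: illustrates the new construction and initializePool flow.
import { MemoryDownloader } from './operations/downloaders/memoryDownloader.js' // assumed path
import { Base, Item } from './types/types.js' // assumed path

const collected: Item[] = []
// Stand-in for Queue<Item>; it only implements the add() call the downloader makes,
// so a cast may be needed against the real Queue type.
const results = {
  add(item: Item): void {
    collected.push(item)
  }
}

const bases = new Map<string, Base>([['root', { id: 'root', speckle_type: 'Base' }]])

const downloader = new MemoryDownloader('root', bases)
downloader.initializePool({ results, total: bases.size })
downloader.add('root') // looks the base up in the Map and pushes { baseId, base } onto results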
package/src/operations/{serverDownloader.spec.ts → downloaders/serverDownloader.spec.ts}
RENAMED
@@ -1,42 +1,33 @@
 import { describe, expect, test } from 'vitest'
 import createFetchMock from 'vitest-fetch-mock'
 import { vi } from 'vitest'
-import …
-import { Item } from '../types/types.js'
-import { Cache } from './interfaces.js'
+import { Item } from '../../types/types.js'
 import ServerDownloader from './serverDownloader.js'
+import { MemoryPump } from '../../helpers/memoryPump.js'
 
 describe('downloader', () => {
   test('download batch of one', async () => {
     const fetchMocker = createFetchMock(vi)
     const i: Item = { baseId: 'id', base: { id: 'id', speckle_type: 'type' } }
     fetchMocker.mockResponseOnce('id\t' + JSON.stringify(i.base) + '\n')
-    const …
-    const db = {
-      async add(): Promise<void> {
-        return Promise.resolve()
-      }
-    } as unknown as Cache
+    const pump = new MemoryPump()
     const downloader = new ServerDownloader({
-      database: db,
-      results,
       serverUrl: 'http://speckle.test',
       streamId: 'streamId',
       objectId: 'objectId',
       token: 'token',
-
       fetch: fetchMocker
     })
-    downloader.initializePool({ total: 1, maxDownloadBatchWait: 200 })
+    downloader.initializePool({ results: pump, total: 1, maxDownloadBatchWait: 200 })
     downloader.add('id')
     await downloader.disposeAsync()
-    results.dispose()
     const r = []
-    for await (const x of …
+    for await (const x of pump.gather([i.baseId])) {
       r.push(x)
     }
 
     expect(r).toMatchSnapshot()
+    await downloader.disposeAsync()
   })
 
   test('download batch of two', async () => {
@@ -46,15 +37,9 @@ describe('downloader', () => {
     fetchMocker.mockResponseOnce(
       'id1\t' + JSON.stringify(i1.base) + '\nid2\t' + JSON.stringify(i2.base) + '\n'
     )
-
-    const …
-      async add(): Promise<void> {
-        return Promise.resolve()
-      }
-    } as unknown as Cache
+
+    const pump = new MemoryPump()
     const downloader = new ServerDownloader({
-      database: db,
-      results,
       serverUrl: 'http://speckle.test',
       streamId: 'streamId',
       objectId: 'objectId',
@@ -62,16 +47,55 @@ describe('downloader', () => {
 
       fetch: fetchMocker
     })
-    downloader.initializePool({ total: 2, maxDownloadBatchWait: 200 })
-    downloader.add('…
+    downloader.initializePool({ results: pump, total: 2, maxDownloadBatchWait: 200 })
+    downloader.add('id1')
+    downloader.add('id2')
+    await downloader.disposeAsync()
+    const r = []
+    for await (const x of pump.gather([i1.baseId, i2.baseId])) {
+      r.push(x)
+    }
+
+    expect(r).toMatchSnapshot()
+    await downloader.disposeAsync()
+  })
+
+  test('download batch of three', async () => {
+    const fetchMocker = createFetchMock(vi)
+    const i1: Item = { baseId: 'id1', base: { id: 'id1', speckle_type: 'type' } }
+    const i2: Item = { baseId: 'id2', base: { id: 'id2', speckle_type: 'type' } }
+    const i3: Item = { baseId: 'id3', base: { id: 'id3', speckle_type: 'type' } }
+    fetchMocker.mockResponseOnce(
+      'id1\t' +
+        JSON.stringify(i1.base) +
+        '\nid2\t' +
+        JSON.stringify(i2.base) +
+        '\nid3\t' +
+        JSON.stringify(i3.base) +
+        '\n'
+    )
+
+    const pump = new MemoryPump()
+    const downloader = new ServerDownloader({
+      serverUrl: 'http://speckle.test',
+      streamId: 'streamId',
+      objectId: 'objectId',
+      token: 'token',
+
+      fetch: fetchMocker
+    })
+    downloader.initializePool({ results: pump, total: 3, maxDownloadBatchWait: 200 })
+    downloader.add('id1')
+    downloader.add('id2')
+    downloader.add('id3')
     await downloader.disposeAsync()
-    results.dispose()
     const r = []
-    for await (const x of …
+    for await (const x of pump.gather([i1.baseId, i2.baseId, i3.baseId])) {
      r.push(x)
     }
 
     expect(r).toMatchSnapshot()
+    await downloader.disposeAsync()
   })
 
   test('download single exists', async () => {
@@ -81,15 +105,7 @@ describe('downloader', () => {
       base: { id: 'id', speckle_type: 'type', __closure: { childIds: 1 } }
     }
     fetchMocker.mockResponseOnce(JSON.stringify(i.base))
-    const results = new AsyncGeneratorQueue()
-    const db = {
-      async add(): Promise<void> {
-        return Promise.resolve()
-      }
-    } as unknown as Cache
     const downloader = new ServerDownloader({
-      database: db,
-      results,
       serverUrl: 'http://speckle.test',
       streamId: 'streamId',
       objectId: i.baseId,
@@ -99,6 +115,7 @@ describe('downloader', () => {
     })
     const x = await downloader.downloadSingle()
     expect(x).toMatchSnapshot()
+    await downloader.disposeAsync()
   })
 
   test('add extra header', async () => {
@@ -111,17 +128,9 @@ describe('downloader', () => {
       (req) => req.headers.get('x-test') === 'asdf',
       JSON.stringify(i.base)
     )
-    const results = new AsyncGeneratorQueue()
-    const db = {
-      async add(): Promise<void> {
-        return Promise.resolve()
-      }
-    } as unknown as Cache
     const headers = new Headers()
     headers.set('x-test', 'asdf')
     const downloader = new ServerDownloader({
-      database: db,
-      results,
       serverUrl: 'http://speckle.test',
       headers,
       streamId: 'streamId',
@@ -132,5 +141,21 @@ describe('downloader', () => {
     })
     const x = await downloader.downloadSingle()
     expect(x).toMatchSnapshot()
+    await downloader.disposeAsync()
+  })
+
+  test('can dispose used', async () => {
+    const fetchMocker = createFetchMock(vi)
+    const headers = new Headers()
+    const downloader = new ServerDownloader({
+      serverUrl: 'http://speckle.test',
+      headers,
+      streamId: 'streamId',
+      objectId: 'objectId',
+      token: 'token',
+
+      fetch: fetchMocker
+    })
+    await downloader.disposeAsync()
   })
 })
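The rewritten tests replace the old AsyncGeneratorQueue plus mocked Cache with a single MemoryPump that both receives the downloaded items and replays them. MemoryPump itself is not shown in this diff (it is added in package/src/helpers/memoryPump.ts, +40 lines); a hedged sketch of the behaviour the tests above rely on:

// Sketch only: approximates what the tests require of MemoryPump; the shipped
// implementation may differ in details. Item is assumed to come from the package's types.
class MemoryPumpSketch {
  #items = new Map<string, Item>()

  // Called by the downloader for every parsed item.
  add(item: Item): void {
    this.#items.set(item.baseId, item)
  }

  // Replays the requested ids in order, matching the tests' `for await` loops.
  async *gather(ids: string[]): AsyncGenerator<Item> {
    for (const id of ids) {
      const item = this.#items.get(id)
      if (item) yield item
    }
  }
}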
package/src/operations/{serverDownloader.ts → downloaders/serverDownloader.ts}
RENAMED
@@ -1,22 +1,33 @@
-import BatchedPool from '…
-import Queue from '…
-import { ObjectLoaderRuntimeError } from '…
-import { Fetcher, isBase, Item } from '…
-import { Downloader } from '…
-
+import BatchedPool from '../../helpers/batchedPool.js'
+import Queue from '../../helpers/queue.js'
+import { ObjectLoaderRuntimeError } from '../../types/errors.js'
+import { Fetcher, isBase, Item, take } from '../../types/types.js'
+import { Downloader } from '../interfaces.js'
+
+export interface ServerDownloaderOptions {
+  serverUrl: string
+  streamId: string
+  objectId: string
+  token?: string
+  headers?: Headers
+  fetch?: Fetcher
+}
 
 export default class ServerDownloader implements Downloader {
   #requestUrlRootObj: string
   #requestUrlChildren: string
   #headers: HeadersInit
-  #options: …
+  #options: ServerDownloaderOptions
   #fetch: Fetcher
+  #results?: Queue<Item>
 
   #downloadQueue?: BatchedPool<string>
+  #decoder = new TextDecoder()
 
-  constructor(options: …
+  constructor(options: ServerDownloaderOptions) {
     this.#options = options
-    this.#fetch = …
+    this.#fetch =
+      options.fetch ?? ((...args): Promise<Response> => globalThis.fetch(...args))
 
     this.#headers = {}
     if (options.headers) {
@@ -45,17 +56,21 @@ export default class ServerDownloader implements Downloader {
     return [10000, 30000, 10000, 1000]
   }
 
-  initializePool(params: {
-    …
+  initializePool(params: {
+    results: Queue<Item>
+    total: number
+    maxDownloadBatchWait?: number
+  }): void {
+    const { results, total } = params
+    this.#results = results
     this.#downloadQueue = new BatchedPool<string>({
       concurrencyAndSizes: this.#getDownloadCountAndSizes(total),
       maxWaitTime: params.maxDownloadBatchWait,
-      processFunction: (batch: string[]) =>
+      processFunction: (batch: string[]): Promise<void> =>
         this.downloadBatch({
           batch,
           url: this.#requestUrlChildren,
-          headers: this.#headers
-          results: this.#options.results
+          headers: this.#headers
         })
     })
   }
@@ -73,7 +88,6 @@ export default class ServerDownloader implements Downloader {
 
   async disposeAsync(): Promise<void> {
     await this.#downloadQueue?.disposeAsync()
-    await this.#getPool().disposeAsync()
   }
 
   #processJson(baseId: string, unparsedBase: string): Item {
@@ -94,9 +108,9 @@ export default class ServerDownloader implements Downloader {
     batch: string[]
     url: string
     headers: HeadersInit
-    results: Queue<Item>
   }): Promise<void> {
-    const { batch, url, headers…
+    const { batch, url, headers } = params
+    const keys = new Set<string>(batch)
     const response = await this.#fetch(url, {
       method: 'POST',
       headers: { ...headers, 'Content-Type': 'application/json' },
@@ -109,35 +123,74 @@ export default class ServerDownloader implements Downloader {
     }
 
     const reader = response.body.getReader()
-    …
-    let buffer = '' // Temporary buffer to store incoming chunks
+    let leftover = new Uint8Array(0)
 
     let count = 0
     while (true) {
       const { done, value } = await reader.read()
       if (done) break
-      …
-      …
-      …
-      …
-      …
-      while (boundary !== -1) {
-        const jsonString = buffer.slice(0, boundary)
-        buffer = buffer.slice(boundary + 1)
-        boundary = buffer.indexOf('\n')
-        if (jsonString) {
-          const pieces = jsonString.split('\t')
-          const [id, unparsedObj] = pieces
-          const item = this.#processJson(id, unparsedObj)
-          await this.#options.database.add(item)
-          results.add(item)
-          count++
-          if (count % 1000 === 0) {
-            await new Promise((resolve) => setTimeout(resolve, 100)) //allow other stuff to happen
-          }
+
+      leftover = await this.processArray(leftover, value, keys, async () => {
+        count++
+        if (count % 1000 === 0) {
+          await new Promise((resolve) => setTimeout(resolve, 100)) //allow other stuff to happen
         }
+      })
+    }
+    if (keys.size > 0) {
+      throw new Error(
+        'Items requested were not downloaded: ' + take(keys.values(), 10).join(',')
+      )
+    }
+  }
+
+  async processArray(
+    leftover: Uint8Array,
+    value: Uint8Array,
+    keys: Set<string>,
+    callback: () => Promise<void>
+  ): Promise<Uint8Array> {
+    //this concat will allocate a new array
+    const combined = this.concatUint8Arrays(leftover, value)
+    let start = 0
+
+    //subarray doesn't allocate
+    for (let i = 0; i < combined.length; i++) {
+      if (combined[i] === 0x0a) {
+        const line = combined.subarray(start, i) // line without \n
+        //strings are allocated here
+        const item = this.processLine(line)
+        this.#results?.add(item)
+        start = i + 1
+        await callback()
+        keys.delete(item.baseId)
      }
     }
+    return combined.subarray(start) // carry over remainder
+  }
+
+  processLine(line: Uint8Array): Item {
+    for (let i = 0; i < line.length; i++) {
+      if (line[i] === 0x09) {
+        //this is a tab
+        const baseId = this.#decoder.decode(line.subarray(0, i))
+        const json = line.subarray(i + 1)
+        const base = this.#decoder.decode(json)
+        const item = this.#processJson(baseId, base)
+        item.size = json.length
+        return item
+      }
+    }
+    throw new ObjectLoaderRuntimeError(
+      'Invalid line format: ' + this.#decoder.decode(line)
+    )
+  }
+
+  concatUint8Arrays(a: Uint8Array, b: Uint8Array): Uint8Array {
+    const c = new Uint8Array(a.length + b.length)
+    c.set(a, 0)
+    c.set(b, a.length)
+    return c
   }
 
   async downloadSingle(): Promise<Item> {
@@ -147,6 +200,7 @@ export default class ServerDownloader implements Downloader {
     this.#validateResponse(response)
     const responseText = await response.text()
     const item = this.#processJson(this.#options.objectId, responseText)
+    item.size = 0
     return item
   }
 
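The parsing change above is the core of this release for ServerDownloader: instead of decoding each chunk to a string and splitting on '\n', it scans the raw bytes, splits records on 0x0a and each record on 0x09, and only decodes the id and JSON slices. The wire format is one object per line, "<id>\t<json>\n". A standalone sketch of that framing, independent of the class — the helper name and return shape below are illustrative only, not the package's API:

// Sketch: parse a streamed chunk of "<id>\t<json>\n" records at the byte level,
// returning the parsed records plus the unterminated remainder to carry into the next chunk.
const decoder = new TextDecoder()

function parseChunk(
  leftover: Uint8Array,
  chunk: Uint8Array
): { records: { baseId: string; json: string }[]; leftover: Uint8Array } {
  // Concatenate the carried-over bytes with the new chunk (allocates once per chunk).
  const combined = new Uint8Array(leftover.length + chunk.length)
  combined.set(leftover, 0)
  combined.set(chunk, leftover.length)

  const records: { baseId: string; json: string }[] = []
  let start = 0
  for (let i = 0; i < combined.length; i++) {
    if (combined[i] !== 0x0a) continue // 0x0a = '\n'
    const line = combined.subarray(start, i) // subarray: no copy
    const tab = line.indexOf(0x09) // 0x09 = '\t'
    if (tab !== -1) {
      records.push({
        baseId: decoder.decode(line.subarray(0, tab)),
        json: decoder.decode(line.subarray(tab + 1))
      })
    }
    start = i + 1
  }
  return { records, leftover: combined.subarray(start) }
}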
package/src/operations/interfaces.ts
@@ -1,20 +1,19 @@
 import Queue from '../helpers/queue.js'
 import { Item } from '../types/types.js'
 
-export interface …
-  …
-  …
-  …
-  …
-  …
-  …
-  …
-  add(item: Item): Promise<void>
+export interface Downloader extends Queue<string> {
+  initializePool(params: {
+    results: Queue<Item>
+    total: number
+    maxDownloadBatchWait?: number
+  }): void
+  downloadSingle(): Promise<Item>
   disposeAsync(): Promise<void>
 }
 
-export interface …
-  …
-  …
+export interface Database {
+  getAll(keys: string[]): Promise<(Item | undefined)[]>
+  getItem(params: { id: string }): Promise<Item | undefined>
+  cacheSaveBatch(params: { batch: Item[] }): Promise<void>
   disposeAsync(): Promise<void>
 }