teraslice 2.10.0 → 2.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/src/interfaces.js +12 -0
- package/dist/src/lib/cluster/cluster_master.js +246 -0
- package/dist/src/lib/cluster/node_master.js +355 -0
- package/dist/src/lib/cluster/services/api.js +663 -0
- package/dist/src/lib/cluster/services/assets.js +226 -0
- package/dist/src/lib/cluster/services/cluster/backends/kubernetes/index.js +192 -0
- package/dist/src/lib/cluster/services/cluster/backends/kubernetes/k8s.js +481 -0
- package/dist/src/lib/cluster/services/cluster/backends/kubernetes/k8sResource.js +414 -0
- package/dist/src/lib/cluster/services/cluster/backends/kubernetes/k8sState.js +59 -0
- package/dist/src/lib/cluster/services/cluster/backends/kubernetes/utils.js +43 -0
- package/dist/src/lib/cluster/services/cluster/backends/kubernetesV2/index.js +192 -0
- package/dist/src/lib/cluster/services/cluster/backends/kubernetesV2/interfaces.js +2 -0
- package/dist/src/lib/cluster/services/cluster/backends/kubernetesV2/k8s.js +423 -0
- package/dist/src/lib/cluster/services/cluster/backends/kubernetesV2/k8sDeploymentResource.js +60 -0
- package/dist/src/lib/cluster/services/cluster/backends/kubernetesV2/k8sJobResource.js +55 -0
- package/dist/src/lib/cluster/services/cluster/backends/kubernetesV2/k8sResource.js +359 -0
- package/dist/src/lib/cluster/services/cluster/backends/kubernetesV2/k8sServiceResource.js +37 -0
- package/dist/src/lib/cluster/services/cluster/backends/kubernetesV2/k8sState.js +60 -0
- package/dist/src/lib/cluster/services/cluster/backends/kubernetesV2/utils.js +170 -0
- package/dist/src/lib/cluster/services/cluster/backends/native/dispatch.js +13 -0
- package/dist/src/lib/cluster/services/cluster/backends/native/index.js +526 -0
- package/dist/src/lib/cluster/services/cluster/backends/native/messaging.js +547 -0
- package/dist/src/lib/cluster/services/cluster/backends/state-utils.js +26 -0
- package/dist/src/lib/cluster/services/cluster/index.js +17 -0
- package/dist/src/lib/cluster/services/execution.js +435 -0
- package/dist/src/lib/cluster/services/index.js +6 -0
- package/dist/src/lib/cluster/services/interfaces.js +2 -0
- package/dist/src/lib/cluster/services/jobs.js +454 -0
- package/dist/src/lib/config/default-sysconfig.js +26 -0
- package/dist/src/lib/config/index.js +22 -0
- package/dist/src/lib/config/schemas/system.js +360 -0
- package/dist/src/lib/storage/analytics.js +86 -0
- package/dist/src/lib/storage/assets.js +401 -0
- package/dist/src/lib/storage/backends/elasticsearch_store.js +494 -0
- package/dist/src/lib/storage/backends/mappings/analytics.js +50 -0
- package/dist/src/lib/storage/backends/mappings/asset.js +41 -0
- package/dist/src/lib/storage/backends/mappings/ex.js +62 -0
- package/dist/src/lib/storage/backends/mappings/job.js +38 -0
- package/dist/src/lib/storage/backends/mappings/state.js +38 -0
- package/dist/src/lib/storage/backends/s3_store.js +237 -0
- package/dist/src/lib/storage/execution.js +300 -0
- package/dist/src/lib/storage/index.js +7 -0
- package/dist/src/lib/storage/jobs.js +81 -0
- package/dist/src/lib/storage/state.js +255 -0
- package/dist/src/lib/utils/api_utils.js +157 -0
- package/dist/src/lib/utils/asset_utils.js +94 -0
- package/dist/src/lib/utils/date_utils.js +52 -0
- package/dist/src/lib/utils/encoding_utils.js +27 -0
- package/dist/src/lib/utils/events.js +4 -0
- package/dist/src/lib/utils/file_utils.js +124 -0
- package/dist/src/lib/utils/id_utils.js +15 -0
- package/dist/src/lib/utils/port_utils.js +32 -0
- package/dist/src/lib/workers/assets/index.js +3 -0
- package/dist/src/lib/workers/assets/loader-executable.js +40 -0
- package/dist/src/lib/workers/assets/loader.js +73 -0
- package/dist/src/lib/workers/assets/spawn.js +55 -0
- package/dist/src/lib/workers/context/execution-context.js +12 -0
- package/dist/src/lib/workers/context/terafoundation-context.js +8 -0
- package/dist/src/lib/workers/execution-controller/execution-analytics.js +188 -0
- package/dist/src/lib/workers/execution-controller/index.js +1024 -0
- package/dist/src/lib/workers/execution-controller/recovery.js +151 -0
- package/dist/src/lib/workers/execution-controller/scheduler.js +390 -0
- package/dist/src/lib/workers/execution-controller/slice-analytics.js +96 -0
- package/dist/src/lib/workers/helpers/job.js +80 -0
- package/dist/src/lib/workers/helpers/op-analytics.js +22 -0
- package/dist/src/lib/workers/helpers/terafoundation.js +34 -0
- package/dist/src/lib/workers/helpers/worker-shutdown.js +169 -0
- package/dist/src/lib/workers/metrics/index.js +108 -0
- package/dist/src/lib/workers/worker/index.js +378 -0
- package/dist/src/lib/workers/worker/slice.js +122 -0
- package/dist/test/config/schemas/system_schema-spec.js +37 -0
- package/dist/test/lib/cluster/services/cluster/backends/kubernetes/k8s-spec.js +316 -0
- package/dist/test/lib/cluster/services/cluster/backends/kubernetes/k8sResource-spec.js +795 -0
- package/dist/test/lib/cluster/services/cluster/backends/kubernetes/k8sState-multicluster-spec.js +67 -0
- package/dist/test/lib/cluster/services/cluster/backends/kubernetes/k8sState-spec.js +84 -0
- package/dist/test/lib/cluster/services/cluster/backends/kubernetes/utils-spec.js +132 -0
- package/dist/test/lib/cluster/services/cluster/backends/kubernetes/v2/k8s-v2-spec.js +455 -0
- package/dist/test/lib/cluster/services/cluster/backends/kubernetes/v2/k8sResource-v2-spec.js +818 -0
- package/dist/test/lib/cluster/services/cluster/backends/kubernetes/v2/k8sState-multicluster-v2-spec.js +67 -0
- package/dist/test/lib/cluster/services/cluster/backends/kubernetes/v2/k8sState-v2-spec.js +84 -0
- package/dist/test/lib/cluster/services/cluster/backends/kubernetes/v2/utils-v2-spec.js +320 -0
- package/dist/test/lib/cluster/services/cluster/backends/state-utils-spec.js +37 -0
- package/dist/test/node_master-spec.js +188 -0
- package/dist/test/services/api-spec.js +80 -0
- package/dist/test/services/assets-spec.js +158 -0
- package/dist/test/services/messaging-spec.js +440 -0
- package/dist/test/storage/assets_storage-spec.js +95 -0
- package/dist/test/storage/s3_store-spec.js +138 -0
- package/dist/test/test.config.js +8 -0
- package/dist/test/test.setup.js +6 -0
- package/dist/test/utils/api_utils-spec.js +86 -0
- package/dist/test/utils/asset_utils-spec.js +141 -0
- package/dist/test/utils/elastic_utils-spec.js +25 -0
- package/dist/test/workers/execution-controller/execution-controller-spec.js +371 -0
- package/dist/test/workers/execution-controller/execution-special-test-cases-spec.js +520 -0
- package/dist/test/workers/execution-controller/execution-test-cases-spec.js +338 -0
- package/dist/test/workers/execution-controller/recovery-spec.js +160 -0
- package/dist/test/workers/execution-controller/scheduler-spec.js +249 -0
- package/dist/test/workers/execution-controller/slice-analytics-spec.js +121 -0
- package/dist/test/workers/fixtures/ops/example-op/processor.js +20 -0
- package/dist/test/workers/fixtures/ops/example-op/schema.js +19 -0
- package/dist/test/workers/fixtures/ops/example-reader/fetcher.js +20 -0
- package/dist/test/workers/fixtures/ops/example-reader/schema.js +41 -0
- package/dist/test/workers/fixtures/ops/example-reader/slicer.js +37 -0
- package/dist/test/workers/fixtures/ops/new-op/processor.js +29 -0
- package/dist/test/workers/fixtures/ops/new-op/schema.js +18 -0
- package/dist/test/workers/fixtures/ops/new-reader/fetcher.js +19 -0
- package/dist/test/workers/fixtures/ops/new-reader/schema.js +23 -0
- package/dist/test/workers/fixtures/ops/new-reader/slicer.js +13 -0
- package/dist/test/workers/helpers/configs.js +130 -0
- package/dist/test/workers/helpers/execution-controller-helper.js +49 -0
- package/dist/test/workers/helpers/index.js +5 -0
- package/dist/test/workers/helpers/test-context.js +210 -0
- package/dist/test/workers/helpers/zip-directory.js +25 -0
- package/dist/test/workers/worker/slice-spec.js +333 -0
- package/dist/test/workers/worker/worker-spec.js +356 -0
- package/package.json +94 -93
- package/service.js +0 -0
|
@@ -0,0 +1,520 @@
|
|
|
1
|
+
// Special-case integration tests for the ExecutionController:
// recovery (with/without cleanup types), autorecovery, and early shutdown.
// Each entry in `testCases` is a [message, config] pair consumed by
// `describe.each` below; the config drives the TestContext setup and the
// expected slice counts asserted afterwards.
import { v4 as uuidv4 } from 'uuid';
import { pDelay, times, random } from '@terascope/utils';
import { RecoveryCleanupType } from '@terascope/job-components';
import { ExecutionController as ExController } from '@terascope/teraslice-messaging';
import { TestContext } from '../helpers/index.js';
import { makeShutdownEarlyFn, getTestCases } from '../helpers/execution-controller-helper.js';
import { ExecutionController } from '../../../src/lib/workers/execution-controller/index.js';
import { findPort } from '../../../src/lib/utils/port_utils.js';
import { newId } from '../../../src/lib/utils/id_utils.js';
// The messaging client used to simulate workers connecting to the controller.
const ExecutionControllerClient = ExController.Client;
process.env.BLUEBIRD_LONG_STACK_TRACES = '1';
describe('ExecutionController Special Tests', () => {
    // [ message, config ]
    const testCases = [
        [
            // No cleanup type: both recovery slices (state "start") are re-run,
            // then the slicer produces two more -> 4 processed, 4 completed.
            'when recovering a slicer no cleanup type',
            {
                slicerResults: [
                    { example: 'slice-recovery-after' },
                    { example: 'slice-recovery-after' },
                    null
                ],
                isRecovery: true,
                recoverySlices: [
                    {
                        state: 'start',
                        slice: {
                            slice_id: uuidv4(),
                            request: {
                                example: 'slice-recovery'
                            },
                            slicer_id: 0,
                            slicer_order: 0,
                            _created: new Date().toISOString()
                        }
                    },
                    {
                        state: 'start',
                        slice: {
                            slice_id: uuidv4(),
                            request: {
                                example: 'slice-recovery'
                            },
                            slicer_id: 0,
                            slicer_order: 1,
                            _created: new Date().toISOString()
                        }
                    }
                ],
                processedSliceCount: 4,
                incompleteSliceCount: 0,
                completedSliceCount: 4,
                analytics: true
            }
        ],
        [
            // Recovery with an empty recovery set: only the slicer's own two
            // slices are processed.
            'when recovering with no slices to recover',
            {
                slicerResults: [
                    { example: 'slice-recovery-no-slices-after' },
                    { example: 'slice-recovery-no-slices-after' },
                    null
                ],
                isRecovery: true,
                recoverySlices: [],
                incompleteSliceCount: 0,
                completedSliceCount: 2,
                processedSliceCount: 2,
                analytics: true
            }
        ],
        [
            // cleanupType "errors": only the slice in state "error" is re-run;
            // the "start" slice stays incomplete and "completed" is untouched.
            'when recovering a slicer with a cleanup type of errors',
            {
                slicerResults: [
                    { example: 'slice-recovery-error-after' },
                    null
                ],
                isRecovery: true,
                cleanupType: RecoveryCleanupType.errors,
                recoverySlices: [
                    {
                        state: 'completed',
                        slice: {
                            slice_id: uuidv4(),
                            request: {
                                example: 'slice-recovery-error-completed'
                            },
                            slicer_id: 0,
                            slicer_order: 0,
                            _created: new Date().toISOString()
                        }
                    },
                    {
                        state: 'start',
                        slice: {
                            slice_id: uuidv4(),
                            request: {
                                example: 'slice-recovery-error-start'
                            },
                            slicer_id: 0,
                            slicer_order: 1,
                            _created: new Date().toISOString()
                        }
                    },
                    {
                        state: 'error',
                        slice: {
                            slice_id: uuidv4(),
                            request: {
                                example: 'slice-recovery-error'
                            },
                            slicer_id: 0,
                            slicer_order: 2,
                            _created: new Date().toISOString()
                        }
                    }
                ],
                incompleteSliceCount: 1,
                completedSliceCount: 2,
                processedSliceCount: 1,
                analytics: false
            }
        ],
        [
            // cleanupType "all": every non-completed recovery slice is re-run
            // regardless of its state ("error" and "start" here).
            'when recovering a slicer with a cleanup type of all',
            {
                slicerResults: [
                    { example: 'slice-recovery-all-after' },
                    null
                ],
                isRecovery: true,
                cleanupType: RecoveryCleanupType.all,
                recoverySlices: [
                    {
                        state: 'error',
                        slice: {
                            slice_id: uuidv4(),
                            request: {
                                example: 'slice-recovery-all'
                            },
                            slicer_id: 0,
                            slicer_order: 0,
                            _created: new Date().toISOString()
                        }
                    },
                    {
                        state: 'start',
                        slice: {
                            slice_id: uuidv4(),
                            request: {
                                example: 'slice-recovery-all'
                            },
                            slicer_id: 0,
                            slicer_order: 1,
                            _created: new Date().toISOString()
                        }
                    }
                ],
                processedSliceCount: 2,
                incompleteSliceCount: 0,
                completedSliceCount: 2,
                analytics: true
            }
        ],
        [
            // cleanupType "pending": the two "pending" slices are re-run; the
            // "start" slice stays incomplete, "completed" is untouched.
            'when recovering a slicer with a cleanup type of pending',
            {
                slicerResults: [
                    { example: 'slice-recovery-pending-after' },
                    null
                ],
                isRecovery: true,
                cleanupType: RecoveryCleanupType.pending,
                recoverySlices: [
                    {
                        state: 'completed',
                        slice: {
                            slice_id: uuidv4(),
                            request: {
                                example: 'slice-recovery-pending-completed'
                            },
                            slicer_id: 0,
                            slicer_order: 0,
                            _created: new Date().toISOString()
                        }
                    },
                    {
                        state: 'start',
                        slice: {
                            slice_id: uuidv4(),
                            request: {
                                example: 'slice-recovery-pending-start'
                            },
                            slicer_id: 0,
                            slicer_order: 1,
                            _created: new Date().toISOString()
                        }
                    },
                    {
                        state: 'pending',
                        slice: {
                            slice_id: uuidv4(),
                            request: {
                                example: 'slice-recovery-pending'
                            },
                            slicer_id: 0,
                            slicer_order: 2,
                            _created: new Date().toISOString()
                        }
                    },
                    {
                        state: 'pending',
                        slice: {
                            slice_id: uuidv4(),
                            request: {
                                example: 'slice-recovery-pending'
                            },
                            slicer_id: 0,
                            slicer_order: 3,
                            _created: new Date().toISOString()
                        }
                    }
                ],
                incompleteSliceCount: 1,
                completedSliceCount: 3,
                processedSliceCount: 2,
                analytics: false
            }
        ],
        [
            // autorecover: true — recovery is triggered automatically; pending
            // slices are re-run and the slicer then emits three more slices.
            'when autorecovering a slicer with a cleanup type of pending',
            {
                slicerResults: [
                    { example: 'slice-autorecover-pending-after-1' },
                    { example: 'slice-autorecover-pending-after-2' },
                    { example: 'slice-autorecover-pending-after-3' },
                    null
                ],
                autorecover: true,
                isRecovery: true,
                recoverySlices: [
                    {
                        state: 'completed',
                        slice: {
                            slice_id: uuidv4(),
                            request: {
                                example: 'slice-autorecover-pending-completed'
                            },
                            slicer_id: 0,
                            slicer_order: 0,
                            _created: new Date().toISOString()
                        }
                    },
                    {
                        state: 'start',
                        slice: {
                            slice_id: uuidv4(),
                            request: {
                                example: 'slice-autorecover-pending-start'
                            },
                            slicer_id: 0,
                            slicer_order: 1,
                            _created: new Date().toISOString()
                        }
                    },
                    {
                        state: 'pending',
                        slice: {
                            slice_id: uuidv4(),
                            request: {
                                example: 'slice-autorecover-pending'
                            },
                            slicer_id: 0,
                            slicer_order: 2,
                            _created: new Date().toISOString()
                        }
                    },
                    {
                        state: 'pending',
                        slice: {
                            slice_id: uuidv4(),
                            request: {
                                example: 'slice-autorecover-pending'
                            },
                            slicer_id: 0,
                            slicer_order: 3,
                            _created: new Date().toISOString()
                        }
                    }
                ],
                incompleteSliceCount: 1,
                completedSliceCount: 6,
                processedSliceCount: 5,
                analytics: false
            }
        ],
        [
            // Persistent-lifecycle execution killed early (shutdownEarly: true);
            // the slicer never returns null so the run only ends via shutdown.
            'when processing slices and the execution gets shutdown early',
            {
                slicerResults: [
                    { example: 'slice-shutdown-early' },
                    { example: 'slice-shutdown-early' },
                    { example: 'slice-shutdown-early' },
                    { example: 'slice-shutdown-early' }
                ],
                lifecycle: 'persistent',
                shutdownTimeout: 2000,
                shutdownEarly: true,
                incompleteSliceCount: 1,
                completedSliceCount: 1,
                processedSliceCount: 1,
                analytics: false
            }
        ]
    ];
    // for testing add a "only" property to the test cases you want
    // or add a skip property to the test cases you don't want
    describe.each(getTestCases(testCases))('%s', (m, options) => {
        // Unpack the per-case config with defaults; `m` is the case message
        // (already consumed by the '%s' title) and is unused in the body.
        const { slicerResults, slicerQueueLength, incompleteSliceCount = 0, completedSliceCount = 0, processedSliceCount = 0, lifecycle = 'once', reconnect = false, analytics = false, workers = 1, lastStatus, shutdownTimeout = 4000, shutdownEarly = false, cleanupType, isRecovery = false, autorecover = false, recoverySlices = [] } = options;
        let exController;
        let testContext;
        let slices;        // slices dispatched to the fake workers below
        let exStore;
        let stateStore;
        let shutdownEarlyFn;
        let executionRecord; // execution record fetched once the run finishes
        beforeAll(async () => {
            slices = [];
            const port = await findPort();
            testContext = new TestContext({
                assignment: 'execution_controller',
                slicerPort: port,
                slicerQueueLength,
                slicerResults,
                shutdownTimeout,
                // reconnect cases need a longer window for the forced reconnect
                timeout: reconnect ? 5000 : 3000,
                lifecycle,
                workers,
                autorecover,
                analytics
            });
            // needs to be in this order
            await testContext.initialize(true, {
                isRecovery,
                cleanupType,
                lastStatus,
                recoverySlices
            });
            await testContext.addClusterMaster();
            const { clusterMaster, exId } = testContext;
            stateStore = await testContext.addStateStore();
            exStore = await testContext.addExStore();
            if (shutdownEarly) {
                // @ts-expect-error TODO fix this
                // Force a tiny queue so the controller is mid-work when shut down.
                testContext.executionContext.slicer().maxQueueLength = () => 1;
            }
            exController = new ExecutionController(testContext.context, testContext.executionContext);
            const { network_latency_buffer: networkLatencyBuffer, action_timeout: actionTimeout } = testContext.context.sysconfig.teraslice;
            testContext.attachCleanup(() => exController.shutdown());
            const opCount = testContext.executionContext.config.operations.length;
            await exController.initialize();
            const socketOptions = reconnect
                ? {
                    reconnection: true,
                    reconnectionAttempts: 10,
                    reconnectionDelay: 500,
                    reconnectionDelayMax: 500
                }
                : {
                    reconnection: false
                };
            // Ensure the forced reconnect only happens once across workers.
            let firedReconnect = false;
            shutdownEarlyFn = makeShutdownEarlyFn({
                enabled: shutdownEarly,
                exController
            });
            const workerClients = [];
            // Once the execution is reported finished, tear down every fake worker.
            clusterMaster.onExecutionFinished(() => {
                workerClients.forEach((workerClient) => {
                    workerClient.shutdown();
                });
            });
            // Simulate one worker: connect a messaging client to the controller
            // and keep pulling/completing slices until the execution is done.
            async function startWorker() {
                const workerId = newId('worker');
                const workerClient = new ExecutionControllerClient({
                    executionControllerUrl: `http://localhost:${port}`,
                    workerId,
                    networkLatencyBuffer,
                    workerDisconnectTimeout: 5000,
                    actionTimeout,
                    connectTimeout: 1000,
                    socketOptions
                });
                workerClients.push(workerClient);
                testContext.attachCleanup(() => workerClient.shutdown());
                await workerClient.start();
                // For reconnect cases, force one reconnect cycle and wait for the
                // controller to see the client ready again (no-op otherwise).
                async function waitForReconnect() {
                    if (!reconnect)
                        return;
                    if (firedReconnect)
                        return;
                    firedReconnect = true;
                    await Promise.all([
                        workerClient.forceReconnect(),
                        exController.server.waitForClientReady(workerId)
                    ]);
                }
                const isDone = () => exController.isExecutionDone;
                // Pull one slice, mark it completed, report back, then recurse
                // until the controller reports the execution is done.
                async function processWork() {
                    if (isDone())
                        return;
                    const slice = await workerClient.waitForSlice(isDone);
                    if (!slice)
                        return;
                    slices.push(slice);
                    const msg = { slice };
                    if (analytics) {
                        // Fabricate per-op analytics so the analytics path is exercised.
                        msg.analytics = {
                            time: times(opCount, () => random(0, 2000)),
                            size: times(opCount, () => random(0, 100)),
                            memory: times(opCount, () => random(0, 10000))
                        };
                    }
                    // add a natural delay for completing a slice
                    await pDelay(100);
                    await stateStore.updateState(slice, 'completed');
                    async function completeSlice() {
                        await pDelay(0);
                        await workerClient.sendSliceComplete(msg);
                        // Triggers the early shutdown when enabled (no-op otherwise).
                        await shutdownEarlyFn.shutdown();
                    }
                    await Promise.all([waitForReconnect(), completeSlice()]);
                    await processWork();
                }
                await processWork();
            }
            function startWorkers() {
                return Promise.all(times(workers, startWorker));
            }
            // Fire an analytics request mid-run to exercise that code path.
            const requestAnalytics = setTimeout(async () => {
                try {
                    await clusterMaster.sendExecutionAnalyticsRequest(exId);
                }
                catch (err) {
                    // it shouldn't matter
                }
            }, 100);
            testContext.attachCleanup(() => clearTimeout(requestAnalytics));
            // Run the controller and the fake workers to completion together.
            await Promise.all([shutdownEarlyFn.wait(), startWorkers(), exController.run()]);
            clearTimeout(requestAnalytics);
            executionRecord = await exStore.get(exId);
        });
        afterAll(() => testContext.cleanup());
        it('should have the correct complete slices', async () => {
            const { exId } = testContext.executionContext;
            // Recovered executions must also count slices stored under the
            // execution they recovered from.
            const recoverFrom = testContext.executionContext.config.recovered_execution;
            const exIds = recoverFrom ? [exId, recoverFrom] : [exId];
            expect(await stateStore.count(`ex_id:("${exIds.join('" OR "')}") AND state:completed`, 0)).toEqual(completedSliceCount);
        });
        it('should have the correct incomplete slices', async () => {
            const { exId } = testContext.executionContext;
            const recoverFrom = testContext.executionContext.config.recovered_execution;
            const exIds = recoverFrom ? [exId, recoverFrom] : [exId];
            expect(await stateStore.count(`ex_id:("${exIds.join('" OR "')}") AND NOT state:completed`, 0)).toEqual(incompleteSliceCount);
        });
        if (isRecovery) {
            it('should recover the correct slices', async () => {
                const { exId } = testContext.executionContext;
                const recoverFrom = testContext.executionContext.config.recovered_execution;
                expect(recoverFrom).toBeString();
                expect(recoverFrom).not.toEqual(exId);
                if (cleanupType) {
                    const actualCleanupType = testContext
                        .executionContext
                        .config
                        .recovered_slice_type;
                    expect(actualCleanupType).toEqual(cleanupType);
                }
            });
        }
        it('should process the right number of slices', async () => {
            if (shutdownEarly) {
                // Early shutdown is timing-dependent; only a lower bound holds.
                expect(slices.length).toBeGreaterThanOrEqual(processedSliceCount);
            }
            else {
                expect(slices).toBeArrayOfSize(processedSliceCount);
            }
        });
        it('should have the correct execution status', () => {
            const { exId } = testContext.executionContext;
            expect(executionRecord).toBeObject();
            expect(executionRecord).toHaveProperty('_slicer_stats.processed');
            expect(executionRecord).toHaveProperty('_slicer_stats.queued');
            expect(executionRecord).toHaveProperty('_slicer_stats.slicers');
            if (shutdownEarly) {
                expect(executionRecord).toHaveProperty('_failureReason', `execution ${exId} received shutdown before the slicer could complete, setting status to "terminated"`);
                expect(executionRecord._slicer_stats.failed).toEqual(0);
                expect(executionRecord).toMatchObject({
                    _has_errors: true,
                    _status: 'terminated'
                });
            }
            else {
                expect(executionRecord).toMatchObject({
                    _has_errors: false,
                    _status: 'completed'
                });
                if (slicerQueueLength !== 'QUEUE_MINIMUM_SIZE') {
                    expect(executionRecord._slicer_stats.processed).toEqual(processedSliceCount);
                }
            }
            expect(executionRecord._slicer_stats.workers_joined).toBeGreaterThanOrEqual(1);
            if (reconnect && slicerQueueLength !== 'QUEUE_MINIMUM_SIZE') {
                expect(executionRecord._slicer_stats.workers_reconnected).toBeGreaterThan(0);
            }
        });
    });
});
//# sourceMappingURL=execution-special-test-cases-spec.js.map
|