@human-protocol/sdk 1.0.2 → 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/constants.d.ts +46 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/constants.js +203 -0
- package/dist/decorators.d.ts +2 -0
- package/dist/decorators.d.ts.map +1 -0
- package/dist/decorators.js +17 -0
- package/dist/enums.d.ts +17 -0
- package/dist/enums.d.ts.map +1 -0
- package/dist/enums.js +20 -0
- package/dist/error.d.ts +196 -0
- package/dist/error.d.ts.map +1 -0
- package/dist/error.js +229 -0
- package/dist/escrow.d.ts +176 -0
- package/dist/escrow.d.ts.map +1 -0
- package/dist/escrow.js +590 -0
- package/dist/index.d.ts +10 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +33 -0
- package/dist/init.d.ts +13 -0
- package/dist/init.d.ts.map +1 -0
- package/dist/init.js +35 -0
- package/dist/interfaces.d.ts +44 -0
- package/dist/interfaces.d.ts.map +1 -0
- package/dist/interfaces.js +2 -0
- package/dist/kvstore.d.ts +40 -0
- package/dist/kvstore.d.ts.map +1 -0
- package/dist/kvstore.js +106 -0
- package/dist/queries.d.ts +4 -0
- package/dist/queries.d.ts.map +1 -0
- package/dist/queries.js +22 -0
- package/dist/staking.d.ts +121 -0
- package/dist/staking.d.ts.map +1 -0
- package/dist/staking.js +381 -0
- package/dist/storage.d.ts +48 -0
- package/dist/storage.d.ts.map +1 -0
- package/dist/storage.js +164 -0
- package/dist/types.d.ts +123 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +35 -0
- package/dist/utils.d.ts +32 -0
- package/dist/utils.d.ts.map +1 -0
- package/dist/utils.js +99 -0
- package/package.json +4 -7
- package/src/constants.ts +221 -4
- package/src/decorators.ts +21 -0
- package/src/enums.ts +16 -0
- package/src/error.ts +295 -18
- package/src/escrow.ts +754 -0
- package/src/index.ts +14 -1
- package/src/init.ts +45 -0
- package/src/interfaces.ts +50 -0
- package/src/kvstore.ts +93 -0
- package/src/queries.ts +18 -0
- package/src/staking.ts +421 -0
- package/src/storage.ts +159 -131
- package/src/types.ts +36 -586
- package/src/utils.ts +80 -142
- package/test/escrow.test.ts +1339 -0
- package/test/init.test.ts +88 -0
- package/test/kvstore.test.ts +208 -0
- package/test/staking.test.ts +640 -0
- package/test/storage.test.ts +422 -0
- package/test/utils/constants.ts +38 -1
- package/example/simple-existing-job.ts +0 -86
- package/example/simple-new-job-public.ts +0 -74
- package/example/simple-new-job.ts +0 -72
- package/src/job.ts +0 -977
- package/src/logger.ts +0 -29
- package/test/job.test.ts +0 -716
- package/test/utils/manifest.ts +0 -33
package/test/storage.test.ts
ADDED
@@ -0,0 +1,422 @@
+import { describe, test, expect, vi, beforeAll } from 'vitest';
+import axios from 'axios';
+import crypto from 'crypto';
+import {
+  DEFAULT_ENDPOINT,
+  DEFAULT_PORT,
+  DEFAULT_PUBLIC_BUCKET,
+  DEFAULT_REGION,
+  DEFAULT_USE_SSL,
+  HttpStatus,
+  StorageCredentials,
+  StorageParams,
+} from '../src';
+import {
+  ErrorInvalidUrl,
+  ErrorStorageFileNotFound,
+  ErrorStorageFileNotUploaded,
+} from '../src/error';
+import StorageClient from '../src/storage';
+import {
+  FAKE_URL,
+  STORAGE_FAKE_BUCKET,
+  STORAGE_TEST_ACCESS_KEY,
+  STORAGE_TEST_FILE_VALUE,
+  STORAGE_TEST_FILE_VALUE_2,
+  STORAGE_TEST_SECRET_KEY,
+} from './utils/constants';
+
+// Create a Minio.Client mock for the tests
+vi.mock('minio', () => {
+  // Define a constructor for the Minio.Client mock
+  class Client {
+    getObject = vi.fn().mockImplementation(() => {
+      const read = () => {
+        return JSON.stringify({ key: STORAGE_TEST_FILE_VALUE });
+      };
+      return Promise.resolve({ read });
+    }); // getObject mock
+    putObject = vi.fn(); // putObject mock
+    bucketExists = vi.fn().mockImplementation((bucketName) => {
+      // Add conditional logic here based on the test scenario
+      if (bucketName === STORAGE_FAKE_BUCKET) {
+        return Promise.resolve(false); // Return false for fake scenario
+      } else {
+        return Promise.resolve(true); // Return true for other scenarios
+      }
+    });
+  }
+
+  // Return Minio.Client mock
+  return { Client };
+});
+
+vi.mock('axios');
+
+describe('Storage tests', () => {
+  describe('Client initialization', () => {
+    test('should set correct credentials', async () => {
+      const storageCredentials: StorageCredentials = {
+        accessKey: STORAGE_TEST_ACCESS_KEY,
+        secretKey: STORAGE_TEST_SECRET_KEY,
+      };
+
+      expect(storageCredentials.accessKey).toEqual(STORAGE_TEST_ACCESS_KEY);
+      expect(storageCredentials.secretKey).toEqual(STORAGE_TEST_SECRET_KEY);
+    });
+
+    test('should set correct params', async () => {
+      const storageParams: StorageParams = {
+        endPoint: DEFAULT_ENDPOINT,
+        port: DEFAULT_PORT,
+        useSSL: DEFAULT_USE_SSL,
+        region: DEFAULT_REGION,
+      };
+
+      expect(storageParams.endPoint).toEqual(DEFAULT_ENDPOINT);
+      expect(storageParams.port).toEqual(DEFAULT_PORT);
+      expect(storageParams.useSSL).toEqual(false);
+      expect(storageParams.region).toEqual(DEFAULT_REGION);
+    });
+
+    test('should init client with empty credentials', async () => {
+      const storageCredentials: StorageCredentials = {
+        accessKey: '',
+        secretKey: '',
+      };
+
+      const storageParams: StorageParams = {
+        endPoint: DEFAULT_ENDPOINT,
+        port: DEFAULT_PORT,
+        useSSL: DEFAULT_USE_SSL,
+      };
+
+      const storageClient = new StorageClient(
+        storageCredentials,
+        storageParams
+      );
+
+      expect(storageClient).toBeInstanceOf(StorageClient);
+    });
+
+    test('should not init client with an error', async () => {
+      // TODO: Adapt it for particular test case
+      /* vi.mock('../src/storage', () => {
+        const StorageClient = vi.fn().mockImplementation(() => {
+          throw ErrorStorageClientNotInitialized;
+        });
+
+        return {
+          default: StorageClient
+        }
+      }) */
+      // expect(() => new StorageClient(storageCredentials, storageParams)).toThrow(ErrorStorageClientNotInitialized);
+    });
+  });
+
+  describe('Client anonymous access', () => {
+    let storageClient: StorageClient;
+
+    beforeAll(async () => {
+      const storageCredentials: StorageCredentials = {
+        accessKey: '',
+        secretKey: '',
+      };
+
+      const storageParams: StorageParams = {
+        endPoint: DEFAULT_ENDPOINT,
+        port: DEFAULT_PORT,
+        useSSL: DEFAULT_USE_SSL,
+      };
+
+      storageClient = new StorageClient(storageCredentials, storageParams);
+    });
+
+    test('should return the bucket exists', async () => {
+      const isExists = await storageClient.bucketExists(DEFAULT_PUBLIC_BUCKET);
+      expect(isExists).toEqual(true);
+    });
+
+    test('should return the bucket does not exist', async () => {
+      const isExists = await storageClient.bucketExists(STORAGE_FAKE_BUCKET);
+      expect(isExists).toEqual(false);
+    });
+
+    test('should upload the file with success', async () => {
+      const file = { key: STORAGE_TEST_FILE_VALUE };
+
+      const uploadedResults = await storageClient.uploadFiles(
+        [file],
+        DEFAULT_PUBLIC_BUCKET
+      );
+
+      const hash = crypto
+        .createHash('sha1')
+        .update(JSON.stringify(file))
+        .digest('hex');
+      const key = hash;
+
+      expect(storageClient['client'].putObject).toHaveBeenCalledWith(
+        DEFAULT_PUBLIC_BUCKET,
+        key,
+        JSON.stringify(file),
+        {
+          'Content-Type': 'application/json',
+        }
+      );
+      expect(uploadedResults[0].key).toEqual(key);
+      expect(uploadedResults[0].hash).toEqual(hash);
+    });
+
+    test('should not upload the file with an error', async () => {
+      const file = { key: STORAGE_TEST_FILE_VALUE };
+      vi.spyOn(storageClient, 'uploadFiles').mockImplementation(() => {
+        throw ErrorStorageFileNotUploaded;
+      });
+      expect(() =>
+        storageClient.uploadFiles([file], DEFAULT_PUBLIC_BUCKET)
+      ).toThrow(ErrorStorageFileNotUploaded);
+    });
+
+    test('should download the files with success', async () => {
+      const file = { key: STORAGE_TEST_FILE_VALUE };
+
+      const hash = crypto
+        .createHash('sha1')
+        .update(JSON.stringify(file))
+        .digest('hex');
+      const key = hash;
+
+      const downloadedResults = await storageClient.downloadFiles(
+        [key],
+        DEFAULT_PUBLIC_BUCKET
+      );
+
+      expect(storageClient['client'].getObject).toHaveBeenCalledWith(
+        DEFAULT_PUBLIC_BUCKET,
+        key
+      );
+      expect(downloadedResults[0].key).toEqual(key);
+      expect(downloadedResults[0].content).toEqual(file);
+    });
+
+    test('should not download the files with an error', async () => {
+      vi.spyOn(storageClient, 'downloadFiles').mockImplementation(() => {
+        throw ErrorStorageFileNotFound;
+      });
+      expect(() =>
+        storageClient.downloadFiles(
+          [STORAGE_TEST_FILE_VALUE],
+          DEFAULT_PUBLIC_BUCKET
+        )
+      ).toThrow(ErrorStorageFileNotFound);
+    });
+
+    test('should fail URL validation', async () => {
+      expect(StorageClient.downloadFileFromUrl(FAKE_URL)).rejects.toThrow(
+        ErrorInvalidUrl
+      );
+    });
+
+    test('should download the file from URL with success', async () => {
+      const file = { key: STORAGE_TEST_FILE_VALUE };
+
+      vi.spyOn(axios, 'get').mockImplementation(() =>
+        Promise.resolve({ data: file, status: HttpStatus.OK })
+      );
+
+      const hash = crypto
+        .createHash('sha1')
+        .update(JSON.stringify(file))
+        .digest('hex');
+      const url = `http://${DEFAULT_PUBLIC_BUCKET}/${hash}.json`;
+
+      const result = await StorageClient.downloadFileFromUrl(url);
+      expect(result).toEqual(file);
+    });
+
+    test('should not download the file from URL with an error', async () => {
+      const file = { key: STORAGE_TEST_FILE_VALUE };
+
+      const hash = crypto
+        .createHash('sha1')
+        .update(JSON.stringify(file))
+        .digest('hex');
+      const url = `http://${DEFAULT_PUBLIC_BUCKET}/${hash}.json`;
+
+      vi.spyOn(StorageClient, 'downloadFileFromUrl').mockImplementation(() => {
+        throw ErrorStorageFileNotFound;
+      });
+      expect(() => StorageClient.downloadFileFromUrl(url)).toThrow(
+        ErrorStorageFileNotFound
+      );
+    });
+
+    test('should return a list of objects with success', async () => {
+      const file1 = { key: STORAGE_TEST_FILE_VALUE };
+      const hash1 = crypto
+        .createHash('sha1')
+        .update(JSON.stringify(file1))
+        .digest('hex');
+      const key1 = hash1;
+
+      const file2 = { key: STORAGE_TEST_FILE_VALUE_2 };
+      const hash2 = crypto
+        .createHash('sha1')
+        .update(JSON.stringify(file2))
+        .digest('hex');
+      const key2 = hash2;
+
+      vi.spyOn(storageClient, 'listObjects').mockImplementation(() =>
+        Promise.resolve([key1, key2])
+      );
+
+      const results = await storageClient.listObjects(DEFAULT_PUBLIC_BUCKET);
+
+      expect(results[0]).toEqual(key1);
+      expect(results[1]).toEqual(key2);
+    });
+
+    test('should not return a list of objects with an error', async () => {
+      vi.spyOn(storageClient, 'listObjects').mockImplementation(() => {
+        throw new Error();
+      });
+      expect(() => storageClient.listObjects(DEFAULT_PUBLIC_BUCKET)).toThrow(
+        new Error()
+      );
+    });
+  });
+
+  describe('Client with credentials', () => {
+    let storageClient: StorageClient;
+
+    beforeAll(async () => {
+      const storageCredentials: StorageCredentials = {
+        accessKey: STORAGE_TEST_ACCESS_KEY,
+        secretKey: STORAGE_TEST_SECRET_KEY,
+      };
+
+      const storageParams: StorageParams = {
+        endPoint: DEFAULT_ENDPOINT,
+        port: DEFAULT_PORT,
+        useSSL: DEFAULT_USE_SSL,
+      };
+
+      storageClient = new StorageClient(storageCredentials, storageParams);
+    });
+
+    test('should return the bucket exists', async () => {
+      const isExists = await storageClient.bucketExists(DEFAULT_PUBLIC_BUCKET);
+      expect(isExists).toEqual(true);
+    });
+
+    test('should return the bucket does not exist', async () => {
+      const isExists = await storageClient.bucketExists(STORAGE_FAKE_BUCKET);
+      expect(isExists).toEqual(false);
+    });
+
+    test('should upload the file with success', async () => {
+      const file = { key: STORAGE_TEST_FILE_VALUE };
+
+      const uploadedResults = await storageClient.uploadFiles(
+        [file],
+        DEFAULT_PUBLIC_BUCKET
+      );
+
+      const hash = crypto
+        .createHash('sha1')
+        .update(JSON.stringify(file))
+        .digest('hex');
+      const key = hash;
+
+      expect(storageClient['client'].putObject).toHaveBeenCalledWith(
+        DEFAULT_PUBLIC_BUCKET,
+        key,
+        JSON.stringify(file),
+        {
+          'Content-Type': 'application/json',
+        }
+      );
+      expect(uploadedResults[0].key).toEqual(key);
+      expect(uploadedResults[0].hash).toEqual(hash);
+    });
+
+    test('should not upload the file with an error', async () => {
+      const file = { key: STORAGE_TEST_FILE_VALUE };
+      vi.spyOn(storageClient, 'uploadFiles').mockImplementation(() => {
+        throw ErrorStorageFileNotUploaded;
+      });
+      expect(() =>
+        storageClient.uploadFiles([file], DEFAULT_PUBLIC_BUCKET)
+      ).toThrow(ErrorStorageFileNotUploaded);
+    });
+
+    test('should download the file with success', async () => {
+      const file = { key: STORAGE_TEST_FILE_VALUE };
+
+      const hash = crypto
+        .createHash('sha1')
+        .update(JSON.stringify(file))
+        .digest('hex');
+      const key = hash;
+
+      const downloadedResults = await storageClient.downloadFiles(
+        [key],
+        DEFAULT_PUBLIC_BUCKET
+      );
+
+      expect(storageClient['client'].getObject).toHaveBeenCalledWith(
+        DEFAULT_PUBLIC_BUCKET,
+        key
+      );
+      expect(downloadedResults[0].key).toEqual(key);
+      expect(downloadedResults[0].content).toEqual(file);
+    });
+
+    test('should not download the file with an error', async () => {
+      vi.spyOn(storageClient, 'downloadFiles').mockImplementation(() => {
+        throw ErrorStorageFileNotFound;
+      });
+      expect(() =>
+        storageClient.downloadFiles(
+          [STORAGE_TEST_FILE_VALUE],
+          DEFAULT_PUBLIC_BUCKET
+        )
+      ).toThrow(ErrorStorageFileNotFound);
+    });
+
+    test('should return a list of objects with success', async () => {
+      const file1 = { key: STORAGE_TEST_FILE_VALUE };
+      const hash1 = crypto
+        .createHash('sha1')
+        .update(JSON.stringify(file1))
+        .digest('hex');
+      const key1 = hash1;
+
+      const file2 = { key: STORAGE_TEST_FILE_VALUE_2 };
+      const hash2 = crypto
+        .createHash('sha1')
+        .update(JSON.stringify(file2))
+        .digest('hex');
+      const key2 = hash2;
+
+      vi.spyOn(storageClient, 'listObjects').mockImplementation(() =>
+        Promise.resolve([key1, key2])
+      );
+
+      const results = await storageClient.listObjects(DEFAULT_PUBLIC_BUCKET);
+
+      expect(results[0]).toEqual(key1);
+      expect(results[1]).toEqual(key2);
+    });
+
+    test('should not return a list of objects with an error', async () => {
+      vi.spyOn(storageClient, 'listObjects').mockImplementation(() => {
+        throw new Error();
+      });
+      expect(() => storageClient.listObjects(DEFAULT_PUBLIC_BUCKET)).toThrow(
+        new Error()
+      );
+    });
+  });
+});
package/test/utils/constants.ts
CHANGED
@@ -1,8 +1,10 @@
+import { ChainId } from '../../src/enums';
+
 export const DEFAULT_HMTOKEN_ADDR =
   '0x5FbDB2315678afecb367f032d93F642f64180aa3';
 
 export const DEFAULT_STAKING_ADDR =
-  '
+  '0x9fE46736679d2D9a65F0992F2272dE9f3c7fa6e0';
 
 export const DEFAULT_GAS_PAYER_ADDR =
   '0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266';
@@ -28,3 +30,38 @@ export const WORKER3_ADDR = '0x976EA74026E726554dB657fA54763abd0C3a0aa9';
 
 export const NOT_TRUSTED_OPERATOR_PRIVKEY =
   '5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365b';
+
+export const STORAGE_TEST_ACCESS_KEY = 'my-access-key';
+export const STORAGE_TEST_SECRET_KEY = 'my-secret-key';
+export const STORAGE_FAKE_BUCKET = 'fake-bucket';
+export const STORAGE_TEST_FILE_VALUE = 'value';
+export const STORAGE_TEST_FILE_VALUE_2 = 'another value';
+export const FAKE_URL = 'fakeUrl';
+export const FAKE_NETWORK_NAME = 'fake_network';
+export const VALID_URL = 'https://www.humanprotocol.org';
+
+/**
+ * @constant Default network parameters
+ */
+export const FAKE_NETWORK = {
+  chainId: ChainId.LOCALHOST,
+  title: FAKE_NETWORK_NAME,
+  scanUrl: '',
+  factoryAddress: '0x0000000000000000000000000000000000000000',
+  hmtAddress: '0x0000000000000000000000000000000000000000',
+  stakingAddress: '0x0000000000000000000000000000000000000000',
+  kvstoreAddress: '0x0000000000000000000000000000000000000000',
+  subgraphUrl: 'http://fake.url',
+  oldSubgraphUrl: 'http://fake.url',
+  oldFactoryAddress: 'http://fake.url',
+};
+
+export const FAKE_TRANSACTION_HASH =
+  '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef';
+export const FAKE_TRANSACTION_CONFIRMATIONS = 1;
+export const FAKE_BLOCK_NUMBER = '1234';
+export const FAKE_ADDRESS = '0x1234567890abcdef';
+export const FAKE_AMOUNT = 100;
+export const FAKE_NEGATIVE_AMOUNT = -100;
+export const FAKE_HASH =
+  '1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef';
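One detail worth noting about the constants added here: STORAGE_TEST_FILE_VALUE is the payload the storage tests serialize, and the keys and hashes those tests compare against are simply the sha1 hex digest of that JSON. A standalone sketch of the derivation (the digest value is computed at runtime, not taken from this diff):

import crypto from 'crypto';

// Mirrors the key derivation used throughout test/storage.test.ts above
const STORAGE_TEST_FILE_VALUE = 'value';
const file = { key: STORAGE_TEST_FILE_VALUE };

const key = crypto
  .createHash('sha1')
  .update(JSON.stringify(file))
  .digest('hex');

console.log(key); // 40-character hex string used as both the object key and its content hash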
package/example/simple-existing-job.ts
DELETED
@@ -1,86 +0,0 @@
-/* eslint-disable no-console */
-import { Job } from '../src';
-import {
-  DEFAULT_GAS_PAYER_PRIVKEY,
-  DEFAULT_HMTOKEN_ADDR,
-  REPUTATION_ORACLE_PRIVKEY,
-  WORKER1_ADDR,
-  WORKER2_ADDR,
-} from '../test/utils/constants';
-import { manifest } from '../test/utils/manifest';
-import * as dotenv from 'dotenv';
-
-dotenv.config();
-
-const main = async () => {
-  // Create job object
-  const newJob = new Job({
-    gasPayer: DEFAULT_GAS_PAYER_PRIVKEY,
-    reputationOracle: REPUTATION_ORACLE_PRIVKEY,
-    manifest: manifest,
-    hmTokenAddr: DEFAULT_HMTOKEN_ADDR,
-    logLevel: 'debug',
-  });
-
-  // Initialize new job object
-  await newJob.initialize();
-
-  // Launch the job
-  await newJob.launch();
-
-  // Access the existing job
-  const job = new Job({
-    gasPayer: DEFAULT_GAS_PAYER_PRIVKEY,
-    reputationOracle: REPUTATION_ORACLE_PRIVKEY,
-    manifest: manifest,
-    hmTokenAddr: DEFAULT_HMTOKEN_ADDR,
-    factoryAddr: newJob.contractData?.factoryAddr,
-    escrowAddr: newJob.contractData?.escrowAddr,
-    storageAccessKeyId: process.env.AWS_ACCESS_KEY_ID,
-    storageSecretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
-    storageEndpoint: process.env.AWS_ENDPOINT,
-    storageBucket: process.env.AWS_BUCKET,
-    storagePublicBucket: process.env.AWS_PUBLIC_BUCKET,
-    logLevel: 'debug',
-  });
-
-  // Initialize the job object
-  await job.initialize();
-
-  // Setup the job
-  await job.setup();
-
-  console.log(
-    `Status: ${await job.status()}, Balance: ${(
-      await job.balance()
-    )?.toString()}`
-  );
-
-  // Bulk payout workers
-  await job.bulkPayout(
-    [
-      {
-        address: WORKER1_ADDR,
-        amount: 70,
-      },
-      {
-        address: WORKER2_ADDR,
-        amount: 30,
-      },
-    ],
-    {
-      result: 'result',
-    }
-  );
-
-  // Complete the job
-  await job.complete();
-
-  console.log(
-    `Status: ${await job.status()}, Balance: ${(
-      await job.balance()
-    )?.toString()}`
-  );
-};
-
-main();
package/example/simple-new-job-public.ts
DELETED
@@ -1,74 +0,0 @@
-/* eslint-disable no-console */
-import { Job } from '../src';
-import {
-  DEFAULT_GAS_PAYER_PRIVKEY,
-  DEFAULT_HMTOKEN_ADDR,
-  REPUTATION_ORACLE_PRIVKEY,
-  WORKER1_ADDR,
-  WORKER2_ADDR,
-} from '../test/utils/constants';
-import { manifest } from '../test/utils/manifest';
-import * as dotenv from 'dotenv';
-
-dotenv.config();
-
-const main = async () => {
-  // Create job object
-  const job = new Job({
-    gasPayer: DEFAULT_GAS_PAYER_PRIVKEY,
-    reputationOracle: REPUTATION_ORACLE_PRIVKEY,
-    manifest: manifest,
-    hmTokenAddr: DEFAULT_HMTOKEN_ADDR,
-    storageAccessKeyId: process.env.AWS_ACCESS_KEY_ID,
-    storageSecretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
-    storageEndpoint: process.env.AWS_ENDPOINT,
-    storageBucket: process.env.AWS_BUCKET,
-    storagePublicBucket: process.env.AWS_PUBLIC_BUCKET,
-    logLevel: 'debug',
-  });
-
-  // Initialize new job
-  await job.initialize();
-
-  // Launch the job
-  await job.launch();
-
-  // Setup the job
-  await job.setup();
-
-  console.log(
-    `Status: ${await job.status()}, Balance: ${(
-      await job.balance()
-    )?.toString()}`
-  );
-
-  // Bulk payout workers
-  await job.bulkPayout(
-    [
-      {
-        address: WORKER1_ADDR,
-        amount: 70,
-      },
-      {
-        address: WORKER2_ADDR,
-        amount: 30,
-      },
-    ],
-    {
-      result: 'result',
-    },
-    false,
-    true
-  );
-
-  // Complete the job
-  await job.complete();
-
-  console.log(
-    `Status: ${await job.status()}, Balance: ${(
-      await job.balance()
-    )?.toString()}`
-  );
-};
-
-main();
package/example/simple-new-job.ts
DELETED
@@ -1,72 +0,0 @@
-/* eslint-disable no-console */
-import { Job } from '../src';
-import {
-  DEFAULT_GAS_PAYER_PRIVKEY,
-  DEFAULT_HMTOKEN_ADDR,
-  REPUTATION_ORACLE_PRIVKEY,
-  WORKER1_ADDR,
-  WORKER2_ADDR,
-} from '../test/utils/constants';
-import { manifest } from '../test/utils/manifest';
-import * as dotenv from 'dotenv';
-
-dotenv.config();
-
-const main = async () => {
-  // Create job object
-  const job = new Job({
-    gasPayer: DEFAULT_GAS_PAYER_PRIVKEY,
-    reputationOracle: REPUTATION_ORACLE_PRIVKEY,
-    manifest: manifest,
-    hmTokenAddr: DEFAULT_HMTOKEN_ADDR,
-    storageAccessKeyId: process.env.AWS_ACCESS_KEY_ID,
-    storageSecretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
-    storageEndpoint: process.env.AWS_ENDPOINT,
-    storageBucket: process.env.AWS_BUCKET,
-    storagePublicBucket: process.env.AWS_PUBLIC_BUCKET,
-    logLevel: 'debug',
-  });
-
-  // Initialize new job
-  await job.initialize();
-
-  // Launch the job
-  await job.launch();
-
-  // Setup the job
-  await job.setup();
-
-  console.log(
-    `Status: ${await job.status()}, Balance: ${(
-      await job.balance()
-    )?.toString()}`
-  );
-
-  // Bulk payout workers
-  await job.bulkPayout(
-    [
-      {
-        address: WORKER1_ADDR,
-        amount: 70,
-      },
-      {
-        address: WORKER2_ADDR,
-        amount: 30,
-      },
-    ],
-    {
-      result: 'result',
-    }
-  );
-
-  // Complete the job
-  await job.complete();
-
-  console.log(
-    `Status: ${await job.status()}, Balance: ${(
-      await job.balance()
-    )?.toString()}`
-  );
-};
-
-main();