zkcloudworker 0.8.2 → 0.9.1

Files changed (37)
  1. package/lib/ts/src/cloud/config.d.ts +1 -0
  2. package/lib/ts/src/cloud/config.js +1 -0
  3. package/lib/{web/src/cloud/utils/hash.d.ts → ts/src/cloud/utils/base64.d.ts} +0 -1
  4. package/lib/ts/src/cloud/utils/{hash.js → base64.js} +1 -11
  5. package/lib/ts/src/cloud/utils/index.d.ts +1 -2
  6. package/lib/ts/src/cloud/utils/index.js +1 -2
  7. package/lib/ts/src/cloud/worker/job.d.ts +26 -26
  8. package/lib/ts/src/mina/api/api.js +4 -1
  9. package/lib/ts/src/mina/local/local.d.ts +4 -1
  10. package/lib/ts/src/mina/local/local.js +44 -28
  11. package/lib/ts/src/mina/utils/base64.js +5 -5
  12. package/lib/ts/tsconfig.tsbuildinfo +1 -1
  13. package/lib/web/src/cloud/config.d.ts +1 -0
  14. package/lib/web/src/cloud/config.js +1 -0
  15. package/lib/web/src/cloud/config.js.map +1 -1
  16. package/lib/{ts/src/cloud/utils/hash.d.ts → web/src/cloud/utils/base64.d.ts} +0 -1
  17. package/lib/web/src/cloud/utils/{hash.js → base64.js} +1 -7
  18. package/lib/web/src/cloud/utils/base64.js.map +1 -0
  19. package/lib/web/src/cloud/utils/index.d.ts +1 -2
  20. package/lib/web/src/cloud/utils/index.js +1 -2
  21. package/lib/web/src/cloud/utils/index.js.map +1 -1
  22. package/lib/web/src/cloud/worker/job.d.ts +26 -26
  23. package/lib/web/src/mina/api/api.js +4 -1
  24. package/lib/web/src/mina/api/api.js.map +1 -1
  25. package/lib/web/src/mina/local/local.d.ts +4 -1
  26. package/lib/web/src/mina/local/local.js +43 -27
  27. package/lib/web/src/mina/local/local.js.map +1 -1
  28. package/lib/web/src/mina/utils/base64.js +1 -1
  29. package/lib/web/src/mina/utils/base64.js.map +1 -1
  30. package/lib/web/tsconfig.web.tsbuildinfo +1 -1
  31. package/package.json +4 -4
  32. package/lib/ts/src/cloud/utils/files.d.ts +0 -11
  33. package/lib/ts/src/cloud/utils/files.js +0 -74
  34. package/lib/web/src/cloud/utils/files.d.ts +0 -11
  35. package/lib/web/src/cloud/utils/files.js +0 -65
  36. package/lib/web/src/cloud/utils/files.js.map +0 -1
  37. package/lib/web/src/cloud/utils/hash.js.map +0 -1
@@ -2,5 +2,6 @@ declare const config: {
      MINAFEE: string;
      ZKCLOUDWORKER_AUTH: string;
      ZKCLOUDWORKER_API: string;
+     ZKCLOUDWORKER_NATS: string;
  };
  export default config;
@@ -4,5 +4,6 @@ const config = {
      MINAFEE: "200000000",
      ZKCLOUDWORKER_AUTH: "M6t4jtbBAFFXhLERHQWyEB9JA9xi4cWqmYduaCXtbrFjb7yaY7TyaXDunKDJNiUTBEcyUomNXJgC",
      ZKCLOUDWORKER_API: "https://api.zkcloudworker.com/v1/",
+     ZKCLOUDWORKER_NATS: "http://cloud.zkcloudworker.com:4222",
  };
  exports.default = config;
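0.9.1 adds a NATS endpoint to the shipped config. A minimal TypeScript sketch of reading it alongside the existing entries; the deep import path follows the file list above, whether the config is also re-exported from the package root is an assumption:

    import config from "zkcloudworker/lib/ts/src/cloud/config";

    // ZKCLOUDWORKER_NATS is new in 0.9.1 and points at the cloud NATS server
    console.log(config.ZKCLOUDWORKER_API);  // "https://api.zkcloudworker.com/v1/"
    console.log(config.ZKCLOUDWORKER_NATS); // "http://cloud.zkcloudworker.com:4222"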
@@ -1,4 +1,3 @@
- export declare function stringHash(jsonString: string): string;
  export declare function bigintToBase56(value: bigint): string;
  export declare function bigintFromBase56(str: string): bigint;
  export declare function bigintToBase64(value: bigint): string;
@@ -1,16 +1,6 @@
  "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
-     return (mod && mod.__esModule) ? mod : { "default": mod };
- };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.toBase = exports.fromBase = exports.bigintFromBase64 = exports.bigintToBase64 = exports.bigintFromBase56 = exports.bigintToBase56 = exports.stringHash = void 0;
- const crypto_1 = __importDefault(require("crypto"));
- function stringHash(jsonString) {
-     if (typeof jsonString !== "string")
-         throw new Error("stringHash: input must be a string");
-     return bigintToBase56(BigInt("0x" + crypto_1.default.createHash("sha256").update(jsonString).digest("hex")));
- }
- exports.stringHash = stringHash;
+ exports.toBase = exports.fromBase = exports.bigintFromBase64 = exports.bigintToBase64 = exports.bigintFromBase56 = exports.bigintToBase56 = void 0;
  // URL friendly base64 encoding
  const TABLE = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_";
  function bigintToBase56(value) {
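stringHash is removed from the cloud utils in 0.9.1 (the hash module was renamed to base64 and the hashing helper dropped). If downstream code still depends on it, here is a hedged drop-in sketch mirroring the deleted implementation shown above (sha256 digest rendered through bigintToBase56); that bigintToBase56 is re-exported from the package root is an assumption:

    import { createHash } from "crypto";
    import { bigintToBase56 } from "zkcloudworker"; // assumed re-export of cloud/utils/base64

    // Same behavior as the removed helper: sha256 of the string, encoded in base56
    function stringHash(jsonString: string): string {
      if (typeof jsonString !== "string")
        throw new Error("stringHash: input must be a string");
      return bigintToBase56(
        BigInt("0x" + createHash("sha256").update(jsonString).digest("hex"))
      );
    }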
@@ -1,4 +1,3 @@
- export * from "./files";
  export * from "./graphql";
  export * from "./utils";
- export * from "./hash";
+ export * from "./base64";
@@ -14,7 +14,6 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
      for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- __exportStar(require("./files"), exports);
  __exportStar(require("./graphql"), exports);
  __exportStar(require("./utils"), exports);
- __exportStar(require("./hash"), exports);
+ __exportStar(require("./base64"), exports);
@@ -18,40 +18,37 @@ export interface LogStream {
      logStreamName: string;
      awsRequestId: string;
  }
+ /**
+  * @param logStreams the log streams of the job
+  * @param logs the logs of the job
+  * @param isFullLog whether the logs are full
+  */
+ export interface JobLogs {
+     logStreams?: LogStream[];
+     logs?: string[];
+     isFullLog?: boolean;
+ }
  /**
   * JobData is the data structure for a job, keeping track of the job status, result, logs, and metadata
   * @param id the id of the user
   * @param jobId the id of the job
   * @param taskId the id of the task
+  *
   * @param developer the developer of the repo executing the job
   * @param repo the repo executing the job
+  *
   * @param task the task to execute
   * @param userId the id of the user
   * @param args the arguments for the job
   * @param metadata the metadata for the job
   * @param chain the blockchain to execute the job on
-  * @param webhook the webhook to call after the job finishes
-  * @param cloudhook the cloudhook to call after the job finishes
-  * @param cloudIteration the recursive call number, must be less than 5
-  * @param previousJob the previous job data, provided in case of the cloudhook
-  *
   * @param filename the filename where transactions data is stored
   * @param txNumber the number of transactions
   * @param timeCreated the time the job was created
-  * @param timeCreatedString the time the job was created as a string
   * @param timeStarted the time the job was started
   * @param timeFinished the time the job was finished
   * @param timeFailed the time the job failed
   * @param timeUsed the time the job result was used
-  * @param billedDuration the duration the job was billed for
-  * @param feeMINA the fee in MINA
-  * @param feeUSD the fee in USD
-  * @param jobStatus the status of the job
-  * @param maxAttempts the maximum number of attempts
-  * @param result the result of the job
-  * @param logStreams the log streams of the job
-  * @param logs the logs of the job
-  * @param isFullLog whether the logs are full
   */
  export interface JobData {
      id: string;
@@ -64,25 +61,28 @@ export interface JobData {
      args?: string;
      metadata?: string;
      chain: blockchain;
-     webhook?: string;
-     cloudhook?: string;
-     cloudIteration?: number;
-     previousJob?: JobData;
      filename?: string;
      txNumber: number;
      timeCreated: number;
-     timeCreatedString: string;
      timeStarted?: number;
      timeFinished?: number;
      timeFailed?: number;
      timeUsed?: number;
+     jobStatus: JobStatus;
      billedDuration?: number;
-     feeMINA?: number;
-     feeUSD?: number;
+     result?: string;
+ }
+ /**
+  * JobData is the data structure for a job, keeping track of the job status, result, logs, and metadata
+  * @param jobId the id of the job
+  * @param eventTime the time the event occurred
+  * @param jobStatus the status of the job
+  * @param billedDuration the duration the job was billed for
+  * @param result the result of the job
+  */
+ export interface JobEvent {
+     jobId: string;
+     eventTime: number;
      jobStatus: JobStatus;
-     maxAttempts: number;
      result?: string;
-     logStreams?: LogStream[];
-     logs?: string[];
-     isFullLog?: boolean;
  }
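The new JobEvent interface carries only the status transition (jobId, eventTime, jobStatus, optional billedDuration and result); logs now live in the separate JobLogs interface, and webhook/cloudhook, fee, and log fields are gone from JobData. A small sketch of consuming a JobEvent; that it is re-exported from the package root is an assumption:

    import { JobEvent } from "zkcloudworker"; // assumed re-export of cloud/worker/job

    // Render a one-line summary of a status-change event
    function describeJobEvent(event: JobEvent): string {
      const time = new Date(event.eventTime).toISOString();
      return event.jobStatus === "finished"
        ? `job ${event.jobId} finished at ${time}: ${event.result ?? "<no result>"}`
        : `job ${event.jobId} is ${event.jobStatus} at ${time}`;
    }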
@@ -381,7 +381,10 @@ class zkCloudWorkerClient {
              localWorker: this.localWorker,
          });
          if (data.mode === "sync")
-             return { success: true, data: local_1.LocalStorage.jobs[jobId].result };
+             return {
+                 success: true,
+                 data: local_1.LocalStorage.jobEvents[jobId].result,
+             };
          else
              return {
                  success: true,
@@ -1,6 +1,6 @@
  /// <reference types="node" />
  import { Cloud, zkCloudWorker } from "../../cloud";
- import { JobData } from "../../cloud";
+ import { JobData, JobEvent } from "../../cloud";
  import { TaskData } from "../../cloud";
  import { blockchain } from "../../cloud";
  import { CloudTransaction, DeployerKeyPair } from "../../cloud";
@@ -234,6 +234,9 @@ export declare class LocalStorage {
      static jobs: {
          [key: string]: JobData;
      };
+     static jobEvents: {
+         [key: string]: JobEvent;
+     };
      static data: {
          [key: string]: string;
      };
@@ -3,8 +3,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.LocalStorage = exports.LocalCloud = void 0;
  const cloud_1 = require("../../cloud");
  const cloud_2 = require("../../cloud");
- const cloud_3 = require("../../cloud");
- const cloud_4 = require("../../cloud");
  /**
   * LocalCloud is a cloud that runs on the local machine for testing and development
   * It uses LocalStorage to store jobs, tasks, transactions, and data
@@ -94,7 +92,8 @@ class LocalCloud extends cloud_1.Cloud {
       * @param value the value to save
       */
      async saveFile(filename, value) {
-         await (0, cloud_3.saveBinaryFile)({ data: value, filename });
+         throw new Error("Method not implemented.");
+         //await saveBinaryFile({ data: value, filename });
      }
      /**
       * Loads the file
@@ -102,8 +101,9 @@
       * @returns the file data
       */
      async loadFile(filename) {
-         const data = await (0, cloud_3.loadBinaryFile)(filename);
-         return data;
+         throw new Error("Method not implemented.");
+         //const data = await loadBinaryFile(filename);
+         //return data;
      }
      /**
       * Loads the environment
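With the file helpers removed (files #32-#35 in the list above), LocalCloud.saveFile and LocalCloud.loadFile now throw instead of touching disk. A hedged sketch of guarding a worker that previously round-tripped files through the local cloud; the Buffer return type is an assumption:

    // cloud is the LocalCloud instance handed to the worker, typed structurally here
    async function tryLoadFile(
      cloud: { loadFile(filename: string): Promise<Buffer | undefined> },
      filename: string
    ): Promise<Buffer | undefined> {
      try {
        return await cloud.loadFile(filename);
      } catch (e) {
        // In 0.9.1 LocalCloud throws "Method not implemented." here
        console.warn(`loadFile unavailable in LocalCloud: ${(e as Error).message}`);
        return undefined;
      }
    }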
@@ -117,8 +117,10 @@
       * @returns generated unique id
       */
      static generateId(tx = undefined) {
-         const data = tx ?? JSON.stringify({ time: Date.now(), data: (0, cloud_2.makeString)(32) });
-         return (0, cloud_4.stringHash)(data);
+         //const data =
+         //  tx ?? JSON.stringify({ time: Date.now(), data: makeString(32) });
+         //return stringHash(data);
+         return Date.now() + "." + (0, cloud_2.makeString)(32);
      }
      /**
       * Send transactions to the local cloud
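generateId no longer hashes the transaction: ids are now time-based, so two identical transactions no longer map to the same id as they did in 0.8.2. A brief sketch of the new shape; the root re-export of LocalCloud and the exact makeString alphabet are assumptions:

    import { LocalCloud } from "zkcloudworker"; // assumed re-export of mina/local/local

    // 0.9.1 ids look like `${Date.now()}.${makeString(32)}`,
    // e.g. "1718031415123.<32 random characters>" — not deterministic,
    // so callers that deduplicated on the id should not rely on that anymore.
    const id = LocalCloud.generateId();
    console.log(id);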
@@ -195,10 +197,8 @@
              metadata,
              txNumber: command === "recursiveProof" ? transactions.length : 1,
              timeCreated,
-             timeCreatedString: new Date(timeCreated).toISOString(),
              timeStarted: timeCreated,
-             jobStatus: "started",
-             maxAttempts: 0,
+             chain,
          };
          const cloud = new LocalCloud({
              job,
@@ -218,15 +218,22 @@
              : undefined;
          const timeFinished = Date.now();
          if (result !== undefined) {
-             job.jobStatus = "finished";
+             LocalStorage.jobEvents[jobId] = {
+                 jobId,
+                 jobStatus: "finished",
+                 eventTime: timeFinished,
+                 result,
+             };
              job.timeFinished = timeFinished;
-             job.result = result;
          }
          else {
-             job.jobStatus = "failed";
+             LocalStorage.jobEvents[jobId] = {
+                 jobId,
+                 jobStatus: "failed",
+                 eventTime: timeFinished,
+             };
              job.timeFailed = timeFinished;
          }
-         job.maxAttempts = 1;
          job.billedDuration = timeFinished - timeCreated;
          LocalStorage.jobs[jobId] = job;
          return jobId;
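For local tests this is the key behavioral change: the per-job outcome now lives in LocalStorage.jobEvents keyed by jobId, while job.result and job.jobStatus are no longer written to LocalStorage.jobs. A hedged sketch of reading a job's outcome after a local run; importing LocalStorage from the package root is an assumption:

    import { LocalStorage } from "zkcloudworker"; // assumed re-export of mina/local/local

    function localJobResult(jobId: string): string | undefined {
      // New in 0.9.1; in 0.8.2 this was LocalStorage.jobs[jobId].result
      const event = LocalStorage.jobEvents[jobId];
      if (event === undefined) return undefined;
      if (event.jobStatus === "failed") throw new Error(`job ${jobId} failed`);
      return event.result;
    }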
@@ -362,10 +369,6 @@
              metadata: data.metadata,
              txNumber: 1,
              timeCreated: timeCreated,
-             timeCreatedString: new Date(timeCreated).toISOString(),
-             timeStarted: Date.now(),
-             jobStatus: "started",
-             maxAttempts: 0,
          };
          const cloud = new LocalCloud({
              job,
@@ -374,16 +377,25 @@
          });
          const worker = await localWorker(cloud);
          const result = await worker.task();
-         job.timeFinished = Date.now();
-         job.maxAttempts = 1;
-         job.billedDuration = job.timeFinished - timeCreated;
+         const timeFinished = Date.now();
          if (result !== undefined) {
-             job.jobStatus = "finished";
-             job.result = result;
+             LocalStorage.jobEvents[jobId] = {
+                 jobId,
+                 jobStatus: "finished",
+                 eventTime: timeFinished,
+                 result,
+             };
+             job.timeFinished = timeFinished;
          }
          else {
-             job.jobStatus = "failed";
+             LocalStorage.jobEvents[jobId] = {
+                 jobId,
+                 jobStatus: "failed",
+                 eventTime: timeFinished,
+             };
+             job.timeFailed = timeFinished;
          }
+         job.billedDuration = timeFinished - timeCreated;
          LocalStorage.jobs[jobId] = job;
      }
      let count = 0;
@@ -442,6 +454,7 @@ class LocalStorage {
       * @param name the name to save the data
       */
      static async saveData(name) {
+         throw new Error("Method not implemented.");
          const data = {
              jobs: LocalStorage.jobs,
              data: LocalStorage.data,
@@ -449,25 +462,28 @@
              tasks: LocalStorage.tasks,
          };
          const filename = name + ".cloud";
-         await (0, cloud_3.saveFile)({ data, filename });
+         //await saveFile({ data, filename });
      }
      /**
       * Loads the data
       * @param name the name to load the data
       */
      static async loadData(name) {
+         throw new Error("Method not implemented.");
          const filename = name + ".cloud";
-         const data = await (0, cloud_3.loadFile)(filename);
-         if (data === undefined)
-             return;
+         /*
+         const data = await loadFile(filename);
+         if (data === undefined) return;
          LocalStorage.jobs = data.jobs;
          LocalStorage.data = data.data;
          LocalStorage.transactions = data.transactions;
          LocalStorage.tasks = data.tasks;
+         */
      }
  }
  exports.LocalStorage = LocalStorage;
  LocalStorage.jobs = {};
+ LocalStorage.jobEvents = {};
  LocalStorage.data = {};
  LocalStorage.transactions = {};
  LocalStorage.tasks = {};
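LocalStorage.saveData and LocalStorage.loadData likewise throw "Method not implemented." in 0.9.1, so test harnesses that persisted local state between runs need to tolerate the failure or skip the call. A minimal hedged sketch:

    import { LocalStorage } from "zkcloudworker"; // assumed root re-export, as above

    async function persistLocalState(name: string): Promise<void> {
      try {
        await LocalStorage.saveData(name);
      } catch (e) {
        // 0.9.1 throws before reaching the (now commented-out) saveFile call
        console.warn("LocalStorage persistence is disabled:", (e as Error).message);
      }
    }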
@@ -2,11 +2,11 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.fieldFromBase64 = exports.fieldToBase64 = exports.fieldFromBase56 = exports.fieldToBase56 = void 0;
  const o1js_1 = require("o1js");
- const hash_1 = require("../../cloud/utils/hash");
+ const base64_1 = require("../../cloud/utils/base64");
  // URL friendly base64 encoding
  const TABLE = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_";
  function fieldToBase56(field) {
-     const digits = (0, hash_1.toBase)(field.toBigInt(), 56n);
+     const digits = (0, base64_1.toBase)(field.toBigInt(), 56n);
      //console.log("digits:", digits);
      const str = digits.map((x) => TABLE[Number(x)]).join("");
      //console.log("str:", str);
@@ -15,12 +15,12 @@ function fieldToBase56(field) {
  exports.fieldToBase56 = fieldToBase56;
  function fieldFromBase56(str) {
      const base56Digits = str.split("").map((x) => BigInt(TABLE.indexOf(x)));
-     const x = (0, hash_1.fromBase)(base56Digits, 56n);
+     const x = (0, base64_1.fromBase)(base56Digits, 56n);
      return (0, o1js_1.Field)(x);
  }
  exports.fieldFromBase56 = fieldFromBase56;
  function fieldToBase64(field) {
-     const digits = (0, hash_1.toBase)(field.toBigInt(), 64n);
+     const digits = (0, base64_1.toBase)(field.toBigInt(), 64n);
      //console.log("digits:", digits);
      const str = digits.map((x) => TABLE[Number(x)]).join("");
      //console.log("str:", str);
@@ -29,7 +29,7 @@
  exports.fieldToBase64 = fieldToBase64;
  function fieldFromBase64(str) {
      const base64Digits = str.split("").map((x) => BigInt(TABLE.indexOf(x)));
-     const x = (0, hash_1.fromBase)(base64Digits, 64n);
+     const x = (0, base64_1.fromBase)(base64Digits, 64n);
      return (0, o1js_1.Field)(x);
  }
  exports.fieldFromBase64 = fieldFromBase64;
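The field encoders themselves are unchanged apart from the internal import path (hash → base64). A round-trip sketch of the touched helpers; that they are exported from the package root is an assumption:

    import { Field } from "o1js";
    import { fieldToBase64, fieldFromBase64 } from "zkcloudworker"; // assumed root re-export

    const f = Field(123456789n);
    const encoded = fieldToBase64(f);         // URL-friendly base64 string built from TABLE above
    fieldFromBase64(encoded).assertEquals(f); // decodes back to the same Field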