@coherentglobal/spark-execute-sdk 0.3.13 → 0.3.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@coherentglobal/spark-execute-sdk",
-   "version": "0.3.13",
+   "version": "0.3.14",
    "description": "",
    "main": "src/node.js",
    "browser": "dist/browser.js",
@@ -29,10 +29,10 @@
      "http-status-codes": "^2.2.0",
      "joi": "^17.6.0",
      "jstify": "^0.14.0",
-     "pino": "^8.5.0",
-     "prettier": "^2.7.1"
+     "pino": "^8.5.0"
    },
    "devDependencies": {
+     "prettier": "^2.7.1",
      "@babel/cli": "^7.18.10",
      "@babel/core": "^7.19.0",
      "@babel/plugin-transform-runtime": "^7.18.10",
@@ -1,50 +0,0 @@
- name: Build and Publish Action
-
- on:
-   push:
-     branches:
-       - dev
-       - v*.*.*
-
- jobs:
-   build:
-     name: Build & Publish
-     environment: PUBLIC
-     runs-on: ubuntu-latest
-     steps:
-       - name: Checkout
-         uses: actions/checkout@v3
-
-       - name: Cache node modules
-         id: cache-npm
-         uses: actions/cache@v3
-         env:
-           cache-name: cache-node-modules
-         with:
-           # npm cache files are stored in `~/.npm` on Linux/macOS
-           path: ~/.npm
-           key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }}
-           restore-keys: |
-             ${{ runner.os }}-build-${{ env.cache-name }}-
-             ${{ runner.os }}-build-
-             ${{ runner.os }}-
-
-       - if: ${{ steps.cache-npm.outputs.cache-hit != 'true' }}
-         name: List the state of node modules
-         continue-on-error: true
-         run: npm list
-
-       - name: Setup Node
-         uses: actions/setup-node@v3
-         with:
-           node-version: 16
-           registry-url: 'https://registry.npmjs.org'
-           always-auth: true
-
-       - name: Install dependencies and build 🔧
-         run: npm install && npm run bundle:full
-
-       - name: Publish package on NPM 📦
-         run: npm publish --access public
-         env:
-           NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
@@ -1,118 +0,0 @@
- const { StatusCodes } = require("http-status-codes");
- const { logger } = require("@coherentglobal/node-sdk");
- // const config = require("../config"); env vars
- const {
-   handleDirectRequest,
-   modelServiceValidation,
-   getLatestVersionFromServiceMapTable,
- } = require("../logic/runnerLogic");
- const { getModelViaFolder } = require("../services/entityStore");
- const tenant = "coherent";
-
- const CALL_TYPE = {
-   0: "SparkService",
-   1: "ExternalApi",
- };
-
- const ERROR_CODES = {
-   A1: {
-     code: "a1",
-     message: "ERROR:[Main item] missing in Xcall [NAME] table.",
-   },
- };
-
- const XcallResBuilder = {
-   Json: function (callType, name, status, errorCode, responseTime, data) {
-     return JSON.stringify({
-       metadata: {
-         calltype: callType,
-         name: name,
-         status: status,
-         error_code: errorCode,
-         response_time: responseTime,
-       },
-       data: data || {},
-     });
-   },
-   XML: function (callType, name, status, errorCode, responseTime, data) {
-     const trimmedData = data.replace(/\<\?xml.+\?\>/gm, "");
-
-     const response = `
-       <data calltype="${callType}" name="${name}" status="${status}" error_code="${errorCode}" response_time = "${responseTime}">
-         ${trimmedData}
-       </data>
-     `;
-
-     return response;
-   },
- };
-
- module.exports = {
-   sparkService: async (requestData, context) => {
-     const modelLocation = config().modelLocation;
-     const headers = context.headers;
-     const folderName = requestData.folder_name;
-     const serviceName = requestData.service_name;
-
-     logger.info({
-       eventType: "SparkService",
-       msg: `WASMSERVER:DISPATCH_TYPE.SPARK_SERVICE ${tenant}`,
-     });
-     logger.info({ eventType: "RequestData", msg: requestData });
-     const requestBodyFromEngine = JSON.parse(requestData.request_body);
-     const requestMetaFromEngine = requestBodyFromEngine.request_meta;
-     const findAvailableLatestModelFromLocal =
-       getLatestVersionFromServiceMapTable(
-         context.servicemap,
-         headers.tenant,
-         decodeURI(folderName),
-         decodeURI(serviceName)
-       );
-     requestMetaFromEngine.version_id = findAvailableLatestModelFromLocal;
-     const serviceMap = modelServiceValidation(
-       context.servicemap,
-       headers.tenant,
-       requestMetaFromEngine
-     );
-     // const modelVersion = modelLocation
-     // ? // ? await resolveOffline(requestMetaFromEngine, headers.tenant, headers, serviceMap)
-     // findAvailableLatestModelFromLocal
-     // : await getModelViaFolder(folderName, serviceName, headers);
-
-     const modelVersion = await getModelViaFolder(
-       folderName,
-       serviceName,
-       headers
-     );
-     const params = {
-       tenant: headers.tenant,
-     };
-     const body = {
-       ...requestBodyFromEngine,
-       request_meta: {
-         ...requestBodyFromEngine.request_meta,
-         version_id: modelVersion,
-         call_purpose: "Spark - API Tester",
-         source_system: "SPARK",
-         correlation_id: "",
-         requested_output: null,
-         service_category: "",
-       },
-     };
-     const payload = {
-       body,
-       headers,
-       params,
-     };
-     const results = await handleDirectRequest(payload);
-     const response = XcallResBuilder.Json(
-       CALL_TYPE[0],
-       serviceName,
-       StatusCodes.ACCEPTED,
-       "",
-       0,
-       results
-     );
-     return response;
-   },
- };
@@ -1,118 +0,0 @@
- const { StatusCodes } = require("http-status-codes");
- const { logger } = require("@coherentglobal/node-sdk");
- // const config = require("../config"); env vars
- const {
-   handleDirectRequest,
-   modelServiceValidation,
-   getLatestVersionFromServiceMapTable,
- } = require("../logic/runnerLogic");
- const { getModelViaFolder } = require("../services/entityStore");
- const tenant = "test";
-
- const CALL_TYPE = {
-   0: "SparkService",
-   1: "ExternalApi",
- };
-
- const ERROR_CODES = {
-   A1: {
-     code: "a1",
-     message: "ERROR:[Main item] missing in Xcall [NAME] table.",
-   },
- };
-
- const XcallResBuilder = {
-   Json: function (callType, name, status, errorCode, responseTime, data) {
-     return JSON.stringify({
-       metadata: {
-         calltype: callType,
-         name: name,
-         status: status,
-         error_code: errorCode,
-         response_time: responseTime,
-       },
-       data: data || {},
-     });
-   },
-   XML: function (callType, name, status, errorCode, responseTime, data) {
-     const trimmedData = data.replace(/\<\?xml.+\?\>/gm, "");
-
-     const response = `
-       <data calltype="${callType}" name="${name}" status="${status}" error_code="${errorCode}" response_time = "${responseTime}">
-         ${trimmedData}
-       </data>
-     `;
-
-     return response;
-   },
- };
-
- module.exports = {
-   sparkService: async (requestData, context) => {
-     const modelLocation = config().modelLocation;
-     const headers = context.headers;
-     const folderName = requestData.folder_name;
-     const serviceName = requestData.service_name;
-
-     logger.info({
-       eventType: "SparkService",
-       msg: `WASMSERVER:DISPATCH_TYPE.SPARK_SERVICE ${tenant}`,
-     });
-     logger.info({ eventType: "RequestData", msg: requestData });
-     const requestBodyFromEngine = JSON.parse(requestData.request_body);
-     const requestMetaFromEngine = requestBodyFromEngine.request_meta;
-     const findAvailableLatestModelFromLocal =
-       getLatestVersionFromServiceMapTable(
-         context.servicemap,
-         headers.tenant,
-         decodeURI(folderName),
-         decodeURI(serviceName)
-       );
-     requestMetaFromEngine.version_id = findAvailableLatestModelFromLocal;
-     const serviceMap = modelServiceValidation(
-       context.servicemap,
-       headers.tenant,
-       requestMetaFromEngine
-     );
-     // const modelVersion = modelLocation
-     // ? // ? await resolveOffline(requestMetaFromEngine, headers.tenant, headers, serviceMap)
-     // findAvailableLatestModelFromLocal
-     // : await getModelViaFolder(folderName, serviceName, headers);
-
-     const modelVersion = await getModelViaFolder(
-       folderName,
-       serviceName,
-       headers
-     );
-     const params = {
-       tenant: headers.tenant,
-     };
-     const body = {
-       ...requestBodyFromEngine,
-       request_meta: {
-         ...requestBodyFromEngine.request_meta,
-         version_id: modelVersion,
-         call_purpose: "Spark - API Tester",
-         source_system: "SPARK",
-         correlation_id: "",
-         requested_output: null,
-         service_category: "",
-       },
-     };
-     const payload = {
-       body,
-       headers,
-       params,
-     };
-     const results = await handleDirectRequest(payload);
-     const response = XcallResBuilder.Json(
-       CALL_TYPE[0],
-       serviceName,
-       StatusCodes.ACCEPTED,
-       "",
-       0,
-       results
-     );
-     return response;
-   },
- };
@@ -1,124 +0,0 @@
- const { expect } = require("chai");
- const validate = require("../src/validate.js");
-
- describe("config", () => {
-   it("invalidate empty config", () => {
-     const fixture = {};
-     const checkIfValid = validate(fixture);
-     const errorName = checkIfValid.error.name;
-     const errorType = checkIfValid.error.details[0].type;
-     expect(errorName).to.equal("ValidationError");
-     expect(errorType).to.equal("any.required");
-   });
-   it('empty url not allowed', () => {
-     const fixture = {
-       sparkEndpoint: {
-         url: "",
-         tenant: "",
-         authType: "",
-         syntheticKey: "",
-         bearerToken: ""
-       },
-       nodeGenModels: []
-     }
-     const checkIfValid = validate(fixture)
-     const errorName = checkIfValid.error.name;
-     const errorType = checkIfValid.error.details[0].type;
-     const errorLabel = checkIfValid.error.details[0].context.label;
-     // console.log('ASD: ', JSON.stringify(checkIfValid))
-     expect(errorName).to.equal("ValidationError");
-     expect(errorType).to.equal("string.empty");
-     expect(errorLabel).to.equal("sparkEndpoint.url");
-   })
-   it('invalid url', () => {
-     const fixture = {
-       sparkEndpoint: {
-         url: "http:",
-         tenant: "",
-         authType: "",
-         syntheticKey: "",
-         bearerToken: ""
-       },
-       nodeGenModels: []
-     }
-     const checkIfValid = validate(fixture)
-     const errorName = checkIfValid.error.name;
-     const errorType = checkIfValid.error.details[0].type;
-     // console.log('ASD: ', JSON.stringify(checkIfValid))
-     expect(errorName).to.equal("ValidationError");
-     expect(errorType).to.equal("string.uriCustomScheme");
-   })
-   it('tenant not allowed to be emtpy', () => {
-     const fixture = {
-       sparkEndpoint: {
-         url: "https://spark.dev.coherent.global",
-         tenant: "",
-         authType: "",
-         syntheticKey: "",
-         bearerToken: ""
-       },
-       nodeGenModels: []
-     }
-     const checkIfValid = validate(fixture)
-     const errorName = checkIfValid.error.name;
-     const errorType = checkIfValid.error.details[0].type;
-     const errorLabel = checkIfValid.error.details[0].context.label;
-     expect(errorName).to.equal("ValidationError");
-     expect(errorType).to.equal("string.empty");
-     expect(errorLabel).to.equal("sparkEndpoint.tenant");
-   })
-   it('authType not allowed to be emtpy', () => {
-     const fixture = {
-       sparkEndpoint: {
-         url: "https://spark.dev.coherent.global",
-         tenant: "coherent",
-         authType: "",
-         syntheticKey: "",
-         bearerToken: ""
-       },
-       nodeGenModels: []
-     }
-     const checkIfValid = validate(fixture)
-     const errorName = checkIfValid.error.name;
-     const errorType = checkIfValid.error.details[0].type;
-     const errorLabel = checkIfValid.error.details[0].context.label;
-     expect(errorName).to.equal("ValidationError");
-     expect(errorType).to.equal("any.only");
-     expect(errorLabel).to.equal("sparkEndpoint.authType");
-   })
-   it('authType must use valid strings', () => {
-     const fixture = {
-       sparkEndpoint: {
-         url: "https://spark.dev.coherent.global",
-         tenant: "coherent",
-         authType: "publics",
-         syntheticKey: "",
-         bearerToken: ""
-       },
-       nodeGenModels: []
-     }
-     const checkIfValid = validate(fixture)
-     const errorName = checkIfValid.error.name;
-     const errorType = checkIfValid.error.details[0].type;
-     const errorLabel = checkIfValid.error.details[0].context.label;
-     expect(errorName).to.equal("ValidationError");
-     expect(errorType).to.equal("any.only");
-     expect(errorLabel).to.equal("sparkEndpoint.authType");
-   })
-   it('valid config', () => {
-     const fixture = {
-       sparkEndpoint: {
-         url: "https://spark.dev.coherent.global",
-         tenant: "coherent",
-         authType: "public",
-         syntheticKey: "",
-         bearerToken: ""
-       },
-       nodeGenModels: []
-     }
-     const checkIfValid = validate(fixture)
-     // console.log('ASD: ', JSON.stringify(checkIfValid))
-     const errorName = checkIfValid.error;
-     expect(errorName).to.equal(undefined);
-   })
- });
@@ -1,102 +0,0 @@
- {
-   "sparkEndpoint": {
-     "url": "https://spark.dev.coherent.global",
-     "tenant": "test",
-     "authType": "bearerToken",
- "bearerToken": "eyJhbGciOiJSUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICJ5Z09WRzJNVmdjMXN6ajB1eUJaNm8tTUZqV2dPNUI2RXBrQzZ6MWNBTTFRIn0.eyJleHAiOjE2NjE5MjI3NzYsImlhdCI6MTY2MTkxNTU3NiwiYXV0aF90aW1lIjoxNjYxOTE1NTc1LCJqdGkiOiIyM2RiYjk0ZC0xMzI4LTRjNmItOGIxYy1lM2I4NGU0ODc3ZjQiLCJpc3MiOiJodHRwczovL2tleWNsb2FrLmRldi5jb2hlcmVudC5nbG9iYWwvYXV0aC9yZWFsbXMvY29oZXJlbnQiLCJhdWQiOiJwcm9kdWN0LWZhY3RvcnkiLCJzdWIiOiI3MGU1ZmQ3MC1kNGUwLTQ3NWYtYmVkZS0xYzBmY2Y2OTE1NTEiLCJ0eXAiOiJCZWFyZXIiLCJhenAiOiJwcm9kdWN0LWZhY3RvcnkiLCJzZXNzaW9uX3N0YXRlIjoiMDRlMDVjNzgtODU1My00NTRiLTg3MWEtNGU2YTE1NmJjMzM1IiwiYWNyIjoiMSIsImFsbG93ZWQtb3JpZ2lucyI6WyJodHRwczovL3NwYXJrLXVzZXItbWFuYWdlci5kZXYuY29oZXJlbnQuZ2xvYmFsKiIsImh0dHA6Ly9sb2NhbGhvc3Q6NjI3ODAiLCJodHRwczovL3NhLnN0YWdpbmcuY29oZXJlbnQuZ2xvYmFsKiIsImh0dHBzOi8vc3BhcmsuZGV2LmNvaGVyZW50Lmdsb2JhbC8qIiwiaHR0cHM6Ly9wcm9kdWN0ZmFjdG9yeS5kZXYuY29oZXJlbnQuY29tLmhrIiwiaHR0cHM6Ly9wZmEuZGV2LmNvaGVyZW50LmNvbS5oayIsImh0dHBzOi8vZXBvcy1lcy5kZXYuY29oZXJlbnQuY29tLmhrIiwiaHR0cHM6Ly9lcG9zLmRldi5jb2hlcmVudC5jb20uaGsiLCJodHRwOi8vbG9jYWxob3N0OjgwMjIiLCJodHRwczovL2NvaGVyZW50Y2FwaXRhbC1teS5zaGFyZXBvaW50LmNvbS5tY2FzLm1zIiwiaHR0cHM6Ly9zYS5kZXYuY29oZXJlbnQuZ2xvYmFsKiIsImh0dHBzOi8vc2EuZGV2LmNvaGVyZW50Lmdsb2JhbCIsImh0dHBzOi8vZXhwbGFpbmVyLmRldm9wcy55ZWxsb3cuY29oZXJlbnQuZ2xvYmFsIiwiaHR0cHM6Ly9sb2NhbGhvc3Q6MzAwMCIsImh0dHBzOi8vdXNlci1tYW5hZ2VyLmRldi55ZWxsb3cuY29oZXJlbnQuZ2xvYmFsLyoiLCJodHRwczovL3NhLnN0YWdpbmcuY29oZXJlbnQuZ2xvYmFsIiwiaHR0cHM6Ly91c2VyLW1hbmFnZXIuZGV2LnllbGxvdy5jb2hlcmVudC5nbG9iYWwiLCJodHRwczovL3NwYXJrLmRldi5jb2hlcmVudC5nbG9iYWwiLCJodHRwczovL3Byb2R1Y3RmYWN0b3J5LmFpYS51YXQuY29oZXJlbnQuY29tLmhrIiwiaHR0cHM6Ly9zcGFyay1yZXZhbXAuZGV2LmNvaGVyZW50Lmdsb2JhbC8qIiwiaHR0cHM6Ly9zcGFya3MuZGV2LmNvaGVyZW50LmNvbS5oayIsImh0dHBzOi8vbG9jYWxob3N0OjgwMjIiLCJodHRwczovL3NwYXJrLXVzZXItbWFuYWdlci5kZXYuY29oZXJlbnQuZ2xvYmFsIiwiaHR0cDovL2xvY2FsaG9zdDozMDAxIiwiaHR0cDovL2xvY2FsaG9zdDozMDAwIiwiaHR0cHM6Ly9jb2hlcmVudGNhcGl0YWwtbXkuc2hhcmVwb2ludC5jb20ubWNhcy5tcy8iLCJodHRwczovLzhlNjEtMTE1LTExMC05Ni0xNjYubmdyb2suaW8iLCJodHRwczovL2NsaWVudC1jb2hlci1wZi0zMDk2LXQyZ3hvc3B0OC5oZXJva3VhcHAuY29tL2h0dHBzOi8vZXhwbGFpbmVyLmRldm9wcy55ZWxsb3cuY29oZXJlbnQuZ2xvYmFsLyoiLCJodHRwczovL2V4cGxhaW5lci5kZXZvcHMueWVsbG93LmNvaGVyZW50Lmdsb2JhbC8qIiwiaHR0cHM6Ly9sb2NhbGhvc3Q6NTA1NSIsImh0dHBzOi8vZ29vZ2xlLmNvbSIsImh0dHBzOi8vc3BhcmstcmV2YW1wLmRldi5jb2hlcmVudC5nbG9iYWwiLCJodHRwczovL3NwYXJrLXJldmFtcC5kZXYuY29oZXJlbnQuZ2xvYmFsLyJdLCJyZWFsbV9hY2Nlc3MiOnsicm9sZXMiOlsib2ZmbGluZV9hY2Nlc3MiXX0sInJlc291cmNlX2FjY2VzcyI6eyJwcm9kdWN0LWZhY3RvcnkiOnsicm9sZXMiOlsibWFuYWdlLWFjY291bnQiLCJ2aWV3LXByb2ZpbGUiXX19LCJzY29wZSI6Im9wZW5pZCBwcm9maWxlIG9mZmxpbmVfYWNjZXNzIiwic2lkIjoiMDRlMDVjNzgtODU1My00NTRiLTg3MWEtNGU2YTE1NmJjMzM1IiwibmFtZSI6IkplZWt5IE1vamljYSIsImdyb3VwcyI6WyJ1c2VyOmNodWJiLmFnZW5jeWxlYWRlciIsInVzZXI6Y29oZXJlbnQuZm9ybXMiLCJ1c2VyOnBmIl0sInJlYWxtIjoiY29oZXJlbnQiLCJwcmVmZXJyZWRfdXNlcm5hbWUiOiJqZWVreXZpbmNlbnQubW9qaWNhQGNvaGVyZW50Lmdsb2JhbCIsImdpdmVuX25hbWUiOiJKZWVreSIsImZhbWlseV9uYW1lIjoiTW9qaWNhIn0.TRgKvOfOoAVr3h3-CK_CA74xIvh-5sV8MBmjNjrMamVzqSmgyALyuOCL9Q5tJ-XaYD3wvXBApi8-GSE7mK_Ub-2rQ969ZrXDgEY2pn88Qne0madm2kanT1cpRh_4icXi6ob4_EPYPwPzOZgpS3wUqnD0MWUL1BYfW0Q4JukudwNu1tA7tVJ4Cv8bD_yC_K48TnCcR5yctYRhRn0Sl2P0H94cHE6X9W2IX37niokoIZ6ssGkdxyecqoQsB_9i7Dsr-brB-DAWgdlTu1c9-ISDKBsueRrN3rwHgHyLTm6rnsK6BDqK9vPp9uT_wHnRMCcDKGgaxMEM0TnLCtgYCw6yOw"
-   },
-   "nodeGenModels": [
-     {
-       "versionId": "dummy-model-uuid",
-       "binary": "examples/nodejs/test_models/BlackScholes.zip",
-       "metaData": {
-         "id": "b48371d6-1aa1-458a-91a9-fd9083ff2ebf",
-         "url": "/docstore/api/v1/documents/versions/tokens/ItHPu2LY%2F626xeFbg%2BlSsjuCTsCtI8frdgIGsHNtt0V2se1DbRkjPWuI1sqS8zRogJZcT451UoxhZZnhagNZ2amgST7hQVVkmG9yEUy5YV4plECN3pdGAJznIqsJPFT7cXGMA8Szj1Nj1itB%2BO9B59u5a1xP%2FhvYbG9QCZtsorYV4Xs%2F3mcPkAITa18idqcSCTOkZVKA%2F7oN0Rs5ZTYvFCIzaPOOQkHG3bvN%2Br0BNKw%3D/BlackScholes.c1engine",
-         "EngineInformation": {
-           "FileSize": 51221,
-           "Author": "francisc.camillo@coherent.global",
-           "ProductName": "Offline Demo",
-           "Revision": "1.0.0",
-           "Description": null,
-           "FileMD5Hash": "F6C5BC1F3A79381A507800BA88D15799",
-           "UniversalFileHash": null,
-           "ReleaseDate": "2022-06-09T03:48:56.3Z",
-           "ServiceName": "BlackScholes",
-           "NoOfInstance": 0,
-           "UploaderEmail": "francisc.camillo@coherent.global",
-           "DefaultEngineType": "Type3",
-           "OriginalFileName": "BlackScholes.xlsx",
-           "SizeOfUploadedFile": 51221,
-           "ReleaseNote": null,
-           "IsTypeScriptFile": false,
-           "IsJsSheetEngineGenerated": false,
-           "EngineUpgradeType": "minor",
-           "PublicAPI": true,
-           "FileGuid": "572153ab-ad3e-4889-9eaf-8b2ec822a764",
-           "ServiceGuid": "8e15cdcd-732c-415d-9963-4ac1b5491e47",
-           "ServiceVersionGuid": "af345405-1d31-4ed0-9a41-a4c816d51411",
-           "BaseUrl": "https://excel.dev.coherent.global",
-           "Tenant": "pru_cn",
-           "AllowToStoreHistory": true,
-           "CalcMode": "AUTO",
-           "ForceInputsWriteBeforeCalcModeChanges": true,
-           "Provenance": null,
-           "VersionLabel": "BlackScholes",
-           "ExplainerType": "",
-           "IsXParameter": false,
-           "XParameterSetVersionId": ""
-         },
-         "XInputTable": [
-           {
-             "Input Name": "ExercisePrice",
-             "Description": null,
-             "Address": "D5"
-           },
-           {
-             "Input Name": "RisklessRate",
-             "Description": null,
-             "Address": "D6"
-           },
-           { "Input Name": "StdDev", "Description": null, "Address": "D7" },
-           { "Input Name": "StockPrice", "Description": null, "Address": "D4" },
-           { "Input Name": "TimeToExpiry", "Description": null, "Address": "D8" }
-         ],
-         "XOutputTable": [
-           { "Output Name": "Delta", "Description": null, "Address": "G4" },
-           { "Output Name": "Gamma", "Description": null, "Address": "G5" },
-           { "Output Name": "Rho", "Description": null, "Address": "G8" },
-           { "Output Name": "Theta", "Description": null, "Address": "G6" },
-           { "Output Name": "Vega", "Description": null, "Address": "G7" }
-         ],
-         "FormsService": null,
-         "StaticService": null,
-         "VersionId": "8e15cdcd-732c-415d-9963-4ac1b5491e47",
-         "created": "2022-06-09T03:49:02.144541Z",
-         "effectiveStartDate": "2009-12-31T16:00:00Z",
-         "effectiveEndDate": "2119-12-31T16:00:00Z",
-         "path": "/products/Snowflake Demo/productfactory/engines/BlackScholes/BlackScholes.c1engine",
-         "privacyLevel": "Private",
-         "dateTime1": "2009-12-31T16:00:00Z",
-         "dateTime2": "2119-12-31T16:00:00Z",
-         "status1": "type3",
-         "status2": "BlackScholes.xlsx",
-         "email1": null,
-         "email2": null,
-         "amount1": 51221.0,
-         "amount2": 39072.0,
-         "amount3": null,
-         "type": "EngineDocument",
-         "policyNumber": "francisc.camillo@coherent.global",
-         "name1": "BlackScholes",
-         "name2": "BlackScholes.c3engine",
-         "otherString1": "1.0.0",
-         "otherString2": "EngineFile",
-         "referenceId": "Snowflake Demo~BlackScholes.c3engine",
-         "disposition": null,
-         "productName": "Snowflake Demo",
-         "content": null,
-         "latestVersionId": "8e15cdcd-732c-415d-9963-4ac1b5491e47"
-       }
-     }
-   ]
- }
@@ -1,21 +0,0 @@
- {
-   "request_data": {
-     "inputs": {
-       "ExercisePrice": 80,
-       "RisklessRate": 0.1,
-       "StdDev": 0.4,
-       "StockPrice": 75,
-       "TimeToExpiry": 0.5
-     }
-   },
-   "request_meta": {
-     "version_id": "dummy-model-uuid",
-     "transaction_date": "2022-06-09T02:45:43.482Z",
-     "call_purpose": "Spark - API Tester",
-     "source_system": "SPARK",
-     "correlation_id": "",
-     "requested_output": "",
-     "service_category": "",
-     "compiler_type": "NodeGen"
-   }
- }
@@ -1,20 +0,0 @@
- const Spark = require("../src/node");
- const dummyConfig = require("./mock-data/dummy-config.json");
- const should = require("should");
- const validMockData = require("./mock-data/valid.json");
-
- describe("Spark Execute", async function () {
-   it("should execute model", async function () {
-     const model = "dummy-model-uuid";
-     const spark = new Spark(dummyConfig);
-
-     const res = await spark.execute(validMockData);
-
-     // const data = JSON.parse(res);
-     const data =
-       typeof res === "object" ? JSON.parse(JSON.stringify(res)) : res;
-
-     data.response_data.should.exist;
-     data.response_data.outputs.should.exist;
-   });
- });