@raytio/decrypt-helper 6.0.0 → 6.1.1

This diff shows the changes between publicly released versions of the package as they appear in the supported public registries. It is provided for informational purposes only.
@@ -13,20 +13,21 @@ exports.authedFetch = void 0;
  function authedFetch(apiToken, url, options, retrying = false) {
  return __awaiter(this, void 0, void 0, function* () {
  console.log(`[API] ${retrying ? "Retry" : "Start"} ${url}`);
- const startTime = +new Date();
- const req = yield fetch(url, Object.assign(Object.assign({}, options), { headers: { Authorization: `Bearer ${apiToken}` } }));
- const apiResp = yield req.json();
- if (apiResp.message) {
- if (!retrying && req.status === 504) {
- console.log(`[API] Error ${req.status} (will retry) ${url}`);
+ const startTime = Date.now();
+ const response = yield fetch(url, Object.assign(Object.assign({}, options), { headers: { Authorization: `Bearer ${apiToken}` } }));
+ const apiResponse = yield response.json();
+ const error = apiResponse.message || apiResponse.error;
+ if (error) {
+ if (!retrying && response.status === 504) {
+ console.log(`[API] Error ${response.status} (will retry) ${url}`);
  return authedFetch(apiToken, url, options, true);
  }
- console.log(`[API] Error ${req.status} (no retry) ${url}`);
- throw new Error(`Failed due to API Error from ${url}: "${apiResp.message}"`);
+ console.log(`[API] Error ${response.status} (no retry) ${url}`);
+ throw new Error(`Failed due to API Error from ${url}: "${error}"`);
  }
- const totalTime = ((+new Date() - startTime) / 1000).toFixed(1);
+ const totalTime = ((Date.now() - startTime) / 1000).toFixed(1);
  console.log(`[API] Finish${retrying ? " after retry" : ""} (${totalTime}s) ${url}`);
- return apiResp;
+ return apiResponse;
  });
  }
  exports.authedFetch = authedFetch;
@@ -1,4 +1,6 @@
  import type { Instance, NId, ProfileObject } from "@raytio/types";
  import { ApplicationEncryptorLike } from "../types";
  import { EnvConfig } from "./fetchEnvConfig";
- export declare function getFiles(profileObjects: ProfileObject[], instance: Instance, apiToken: string, envConfig: EnvConfig, applicationDecryptor: ApplicationEncryptorLike): Promise<Record<NId, [dataUrl: string, fileExtension: string]>>;
+ type FileObject = Record<NId, [dataUrl: string, fileExtension: string]>;
+ export declare function getFiles(profileObjects: ProfileObject[], instance: Instance, apiToken: string, envConfig: EnvConfig, applicationDecryptor: ApplicationEncryptorLike): Promise<FileObject>;
+ export {};
@@ -14,12 +14,13 @@ const core_1 = require("@raytio/core");
  const mime_types_1 = require("mime-types");
  const file_1 = require("../helpers/file");
  const authedFetch_1 = require("./authedFetch");
+ const videoToImage_1 = require("./videoToImage");
  const TEMP_OBJ_PREFIX = "urn:temp_object:";
  const getFileExtn = (b64) => { var _a; return (0, mime_types_1.extension)(((_a = b64.split(":")[1]) === null || _a === void 0 ? void 0 : _a.split(";base64,")[0]) || "text/plain") || "txt"; };
- const decryptFile = (encryptedData, encryptedObj, applicationDecryptor, wdek) => __awaiter(void 0, void 0, void 0, function* () {
- const clonedEncryptedObj = JSON.parse(JSON.stringify(encryptedObj));
- clonedEncryptedObj.encrypted_data.data = encryptedData;
- const decrypted = yield applicationDecryptor.decrypt(clonedEncryptedObj, wdek);
+ const decryptFile = (encryptedData, encryptedObject, applicationDecryptor, wdek) => __awaiter(void 0, void 0, void 0, function* () {
+ const clonedEncryptedObject = JSON.parse(JSON.stringify(encryptedObject));
+ clonedEncryptedObject.encrypted_data.data = encryptedData;
+ const decrypted = yield applicationDecryptor.decrypt(clonedEncryptedObject, wdek);
  return decrypted;
  });
  const cleanApiResponse = (responseBody) => {
@@ -35,6 +36,7 @@ const cleanApiResponse = (responseBody) => {
  return responseBody;
  };
  function getFiles(profileObjects, instance, apiToken, envConfig, applicationDecryptor) {
+ var _a;
  return __awaiter(this, void 0, void 0, function* () {
  // [nId: NId, fieldName: string, file: RaytFile][]
  const urnOrEncryptedList = profileObjects.flatMap((PO) => {
@@ -50,29 +52,41 @@ function getFiles(profileObjects, instance, apiToken, envConfig, applicationDecr
  });
  });
  const filesBase64 = yield Promise.all(urnOrEncryptedList.map(([PONId, fileNId, fieldName, urnOrEncrypted]) => __awaiter(this, void 0, void 0, function* () {
- var _a, _b;
+ var _b, _c;
  // handle urn:temp_object:
  if (typeof urnOrEncrypted === "string" &&
  urnOrEncrypted.startsWith(TEMP_OBJ_PREFIX)) {
  const url = Buffer.from(urnOrEncrypted.slice(TEMP_OBJ_PREFIX.length), "base64").toString("binary");
- const res = yield fetch(url);
- const b64 = new TextDecoder().decode(yield res.arrayBuffer());
- const type = res.headers.get("content-type") || "text/plain";
- const dataUrl = `data:${type};base64,${b64}`;
+ const response = yield fetch(url);
+ const base64 = Buffer.from(yield response.arrayBuffer()).toString("base64");
+ const type = response.headers.get("content-type") || "text/plain";
+ const dataUrl = `data:${type};base64,${base64}`;
  return [fileNId, [dataUrl, getFileExtn(dataUrl)]];
  }
- // TODO: should realB64 be renamed to "realDataUrl"?
- const realB64 = yield (0, authedFetch_1.authedFetch)(apiToken, `${envConfig.api_url}/share/v2/access_application/instance/${instance.i_id}/profile_object/${fileNId}/content`).then(cleanApiResponse);
+ const realDataUrl = yield (0, authedFetch_1.authedFetch)(apiToken, `${envConfig.api_url}/share/v2/access_application/instance/${instance.i_id}/profile_object/${fileNId}/content`).then(cleanApiResponse);
  if ((0, core_1.isEncrypted)(urnOrEncrypted)) {
- const wdek = (_b = (_a = instance.keys[PONId]) === null || _a === void 0 ? void 0 : _a[fieldName]) === null || _b === void 0 ? void 0 : _b.data;
+ const wdek = (_c = (_b = instance.keys[PONId]) === null || _b === void 0 ? void 0 : _b[fieldName]) === null || _c === void 0 ? void 0 : _c.data;
  if (!wdek)
  return [fileNId, undefined];
- const decryptedDataUrl = yield decryptFile(realB64, urnOrEncrypted, applicationDecryptor, wdek);
+ const decryptedDataUrl = yield decryptFile(realDataUrl, urnOrEncrypted, applicationDecryptor, wdek);
  return [fileNId, [decryptedDataUrl, getFileExtn(decryptedDataUrl)]];
  }
- return [fileNId, [realB64, getFileExtn(realB64)]];
+ return [fileNId, [realDataUrl, getFileExtn(realDataUrl)]];
  })));
- return Object.fromEntries(filesBase64.filter((file) => { var _a; return !!file[1] && ((_a = file[1][0]) === null || _a === void 0 ? void 0 : _a.includes(",")); }));
+ const allDataUrls = Object.fromEntries(filesBase64.filter((file) => { var _a; return !!file[1] && ((_a = file[1][0]) === null || _a === void 0 ? void 0 : _a.includes(",")); }));
+ // for all videos, also store a static frame from the video, since we can't embedded a video into the PDF
+ for (const nId in allDataUrls) {
+ const [dataUrl, fileExtn] = allDataUrls[nId];
+ const isVideo = (_a = mime_types_1.types[fileExtn]) === null || _a === void 0 ? void 0 : _a.startsWith("video/");
+ if (isVideo) {
+ const imageDataUrl = yield (0, videoToImage_1.videoToImage)(apiToken, envConfig, dataUrl);
+ allDataUrls[`${nId}_videoFrame`] = [
+ imageDataUrl,
+ getFileExtn(imageDataUrl),
+ ];
+ }
+ }
+ return allDataUrls;
  });
  }
  exports.getFiles = getFiles;
@@ -16,22 +16,22 @@ const core_1 = require("@raytio/core");
  function signInWithPasswordMigration(username, password) {
  return __awaiter(this, void 0, void 0, function* () {
  try {
- const userObj = yield auth_1.Auth.signIn(username, password);
- return userObj;
+ const userObject = yield auth_1.Auth.signIn(username, password);
+ return userObject;
  }
  catch (_a) {
  // if the login fails, try again with their hashed password.
  // if it's successful the second time, we quietly change their password.
  const hashedPassword = yield (0, core_1.hashPassword)(password);
- const userObj = yield auth_1.Auth.signIn(username, hashedPassword);
+ const userObject = yield auth_1.Auth.signIn(username, hashedPassword);
  // the login was successful. So we need to migrate their account.
  // No changes to the maxcryptor, purely to cognito.
  // we can only migrate their password if there are no login challenges
- if (!userObj.challengeName) {
+ if (!userObject.challengeName) {
  console.log("Migrating credentials...");
- yield auth_1.Auth.changePassword(userObj, hashedPassword, password);
+ yield auth_1.Auth.changePassword(userObject, hashedPassword, password);
  }
- return userObj;
+ return userObject;
  }
  });
  }
@@ -43,8 +43,8 @@ function signIn(CONFIG, envConfig) {
  userPoolId: envConfig.cognito_user_pool_id,
  userPoolWebClientId: envConfig.cognito_web_client_id,
  });
- const userObj = yield signInWithPasswordMigration(CONFIG.RAYTIO_USERNAME, CONFIG.RAYTIO_PASSWORD);
- if (userObj.challengeName === "SOFTWARE_TOKEN_MFA") {
+ const userObject = yield signInWithPasswordMigration(CONFIG.RAYTIO_USERNAME, CONFIG.RAYTIO_PASSWORD);
+ if (userObject.challengeName === "SOFTWARE_TOKEN_MFA") {
  throw new Error(`The configured account (${CONFIG.RAYTIO_USERNAME}) has two factor authentication enabled. You must disable 2FA or use a different account`);
  }
  const user = yield auth_1.Auth.currentAuthenticatedUser();
@@ -0,0 +1,5 @@
+ import { EnvConfig } from "./fetchEnvConfig";
+ export declare function uploadToObjectStore(apiToken: string, envConfig: EnvConfig, dataUrl: string, expiryDate?: Date): Promise<{
+ objectStoreId: string;
+ retrievalUrl: string;
+ }>;
@@ -0,0 +1,42 @@
+ "use strict";
+ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.uploadToObjectStore = void 0;
+ const authedFetch_1 = require("./authedFetch");
+ //
+ // this file is mostly copy-pasted from the client repo. If you make
+ // a change here, consider making the same change in the client.
+ //
+ function uploadToObjectStore(apiToken, envConfig, dataUrl, expiryDate) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const mimeType = dataUrl.split(";")[0].split(":")[1];
+ const base64 = dataUrl.split(",")[1];
+ // eslint-disable-next-line unicorn/prefer-code-point -- deliberate, this will only ever be ASCII
+ const arrayBuffer = Uint8Array.from(atob(base64), (c) => c.charCodeAt(0));
+ // this is a weird API that returns a double stringified string
+ const temporaryUrl = yield (0, authedFetch_1.authedFetch)(apiToken, `${envConfig.api_url}/org/v1/object/url${expiryDate ? `?expires=${+expiryDate}` : ""}`);
+ const { status, statusText } = yield fetch(temporaryUrl, {
+ method: "PUT",
+ body: arrayBuffer,
+ headers: { "Content-Type": mimeType },
+ });
+ if (status !== 200) {
+ throw new Error(`Status ${status} from object store: ${statusText}`);
+ }
+ // the v4 API doesn't return the ID
+ const { pathname, origin } = new URL(temporaryUrl);
+ return {
+ objectStoreId: pathname.slice(1),
+ retrievalUrl: origin + pathname,
+ };
+ });
+ }
+ exports.uploadToObjectStore = uploadToObjectStore;
@@ -0,0 +1,3 @@
+ import { EnvConfig } from "./fetchEnvConfig";
+ /** Use the extractor API to get the middle frame from a video */
+ export declare function videoToImage(apiToken: string, envConfig: EnvConfig, videoDataUrl: string): Promise<string>;
@@ -0,0 +1,30 @@
+ "use strict";
+ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.videoToImage = void 0;
+ const authedFetch_1 = require("./authedFetch");
+ const uploadToObjectStore_1 = require("./uploadToObjectStore");
+ /** Use the extractor API to get the middle frame from a video */
+ function videoToImage(apiToken, envConfig, videoDataUrl) {
+ return __awaiter(this, void 0, void 0, function* () {
+ // the new API can't handle big dataUrls, so we need to store the data in
+ // the object store first...
+ const in10Mins = new Date();
+ in10Mins.setMinutes(in10Mins.getMinutes() + 10);
+ const { retrievalUrl: videoUrl } = yield (0, uploadToObjectStore_1.uploadToObjectStore)(apiToken, envConfig, videoDataUrl, in10Mins);
+ const extractedImageBase64 = yield (0, authedFetch_1.authedFetch)(apiToken, `${envConfig.api_url}/face-detect/v1/image-extract-single`,
+ // note: inspite of being called "video_urn", the new API actually expects a url, not a urn...
+ { method: "POST", body: JSON.stringify({ video_urn: videoUrl }) });
+ // annoyingly the new API doesn't return the data URL prefix, so we have to hardcode it here
+ return `data:image/jpeg;base64,${extractedImageBase64}`;
+ });
+ }
+ exports.videoToImage = videoToImage;
@@ -29,36 +29,37 @@ const byPriority = (schema) => ([a], [b]) => {
  };
  function formatOutput(profileObjects, allSchemas, realVers, apiToken, envConfig) {
  return __awaiter(this, void 0, void 0, function* () {
- const PODetails = profileObjects.reduce((acPromise, PO) => __awaiter(this, void 0, void 0, function* () {
+ const PODetails = profileObjects.reduce((accumulatorPromiseOuter, PO) => __awaiter(this, void 0, void 0, function* () {
  var _a;
- const ac = yield acPromise;
+ const accumulatorOuter = yield accumulatorPromiseOuter;
  const schemaName = (0, core_1.findSchemaLabel)(PO.labels);
  const schema = allSchemas.find((x) => x.name === schemaName);
  if (!schema)
  throw new Error(`${schema} is missing!`);
- const verDetails = (0, core_1.getPOVerification)({ PO, schema, realVers });
- const realProps = (0, ramda_1.omit)(constants_1.FIELDS_TO_REMOVE, PO.properties);
+ const versionDetails = (0, core_1.getPOVerification)({ PO, schema, realVers });
+ const realProperties = (0, ramda_1.omit)(constants_1.FIELDS_TO_REMOVE, PO.properties);
  (0, types_2.assertSafeProperty)(schemaName);
- const existing = ac[schemaName] || [];
- const poProperties = Object.entries(realProps).sort(byPriority(schema));
- const reducedProperties = yield poProperties.reduce((acc, [key, value]) => __awaiter(this, void 0, void 0, function* () {
+ const existing = accumulatorOuter[schemaName] || [];
+ const poProperties = Object.entries(realProperties).sort(byPriority(schema));
+ const reducedProperties = yield poProperties.reduce((accumulatorPromiseInner, [key, value]) => __awaiter(this, void 0, void 0, function* () {
  var _b, _c;
- const accP = yield acc;
+ const accumulatorInner = yield accumulatorPromiseInner;
  (0, types_2.assertSafeProperty)(key);
  const prettyValue = typeof value === "string"
  ? yield (0, lookup_1.maybeGetLookupValue)(schema, key, value, apiToken)
  : undefined;
- const obj = {
+ const POInfo = {
  title: ((_c = (_b = schema.properties) === null || _b === void 0 ? void 0 : _b[key]) === null || _c === void 0 ? void 0 : _c.title) || key,
  value,
- verification: verDetails.fieldVerifications[key] || types_1.FieldVerification.NotVerified,
+ verification: versionDetails.fieldVerifications[key] ||
+ types_1.FieldVerification.NotVerified,
  };
  if (prettyValue)
- obj.formatted_value = prettyValue;
- return Object.assign(Object.assign({}, accP), { [key]: obj });
+ POInfo.formatted_value = prettyValue;
+ return Object.assign(Object.assign({}, accumulatorInner), { [key]: POInfo });
  }), Promise.resolve({}));
  const thisPO = {
- $verification_details: yield (0, api_1.resolveVerificationDetails)(verDetails.details.verifiers, envConfig),
+ $verification_details: yield (0, api_1.resolveVerificationDetails)(versionDetails.details.verifiers, envConfig),
  $shouldBeVerifiedFields: (_a = schema.verified_fields) === null || _a === void 0 ? void 0 : _a.map((x) => typeof x === "string" ? x : x.field),
  $nId: PO.n_id,
  $schemaName: schemaName,
@@ -66,25 +67,25 @@ function formatOutput(profileObjects, allSchemas, realVers, apiToken, envConfig)
  $properties: reducedProperties,
  $badges: {
  verified: {
- code: verDetails.status,
- statusText: constants_1.PO_VER_TEXT_MAP[verDetails.status],
+ code: versionDetails.status,
+ statusText: constants_1.PO_VER_TEXT_MAP[versionDetails.status],
  },
  safeHarbour: schemaName === constants_1.SCHEMA.PERSON
- ? (({ isSafe }) => ({
- code: isSafe,
- statusText: isSafe
- ? (0, locales_1.$$)("POVerificationBadge.safe-harbour-yes")
- : (0, locales_1.$$)("POVerificationBadge.safe-harbour-no"),
- }))(yield (0, core_1.calcSafeHarbourScore)({
+ ? yield (0, core_1.calcSafeHarbourScore)({
  person: PO,
  getSchema: (name) => __awaiter(this, void 0, void 0, function* () { return allSchemas.find((x) => x.name === name); }),
  profileObjects,
  realVers,
+ }).then(({ isSafe }) => ({
+ code: isSafe,
+ statusText: isSafe
+ ? (0, locales_1.$$)("POVerificationBadge.safe-harbour-yes")
+ : (0, locales_1.$$)("POVerificationBadge.safe-harbour-no"),
  }))
  : undefined,
  },
  };
- return Object.assign(Object.assign({}, ac), { [schemaName]: [...existing, thisPO] });
+ return Object.assign(Object.assign({}, accumulatorOuter), { [schemaName]: [...existing, thisPO] });
  }), Promise.resolve({}));
  return PODetails;
  });
@@ -1,2 +1,2 @@
- export declare const flattenObj: (obj: Record<string, any>) => Record<string, any>;
+ export declare const flattenObject: (rootObject: Record<string, unknown>) => Record<string, never>;
  export declare function deepJsonToCsv(json: Record<string, unknown>): string;
@@ -1,24 +1,24 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.deepJsonToCsv = exports.flattenObj = void 0;
+ exports.deepJsonToCsv = exports.flattenObject = void 0;
  const ramda_1 = require("ramda");
  // copied from raytio-client
- const flattenObj = (obj) => {
- const go = (obj_) => (0, ramda_1.chain)(([k, v]) => {
+ const flattenObject = (rootObject) => {
+ const go = (object) => (0, ramda_1.chain)(([k, v]) => {
  if ((0, ramda_1.type)(v) === "Object" || (0, ramda_1.type)(v) === "Array") {
  return (0, ramda_1.map)(([k_, v_]) => [`${k}.${k_}`, v_], go(v));
  }
  return [[k, v]];
- }, (0, ramda_1.toPairs)(obj_));
- return (0, ramda_1.fromPairs)(go(obj));
+ }, (0, ramda_1.toPairs)(object));
+ return (0, ramda_1.fromPairs)(go(rootObject));
  };
- exports.flattenObj = flattenObj;
+ exports.flattenObject = flattenObject;
+ const toCsvRow = (row) => row
+ .map((field) => typeof field === "string" ? `"${field.replaceAll('"', '""')}"` : field)
+ .join(",");
  function deepJsonToCsv(json) {
- const flatJson = (0, exports.flattenObj)(json);
+ const flatJson = (0, exports.flattenObject)(json);
  const [headerRow, values] = (0, ramda_1.transpose)(Object.entries(flatJson));
- const toCsvRow = (row) => row
- .map((field) => typeof field === "string" ? `"${field.replace(/"/g, '""')}"` : field)
- .join(",");
  return `${toCsvRow(headerRow)}\n${toCsvRow(values)}`;
  }
  exports.deepJsonToCsv = deepJsonToCsv;
@@ -13,15 +13,15 @@ exports.setupMaxcryptor = void 0;
  const maxcryptor_1 = require("@raytio/maxcryptor");
  const ramda_1 = require("ramda");
  const constants_1 = require("../constants");
- const isMaxcryptorAttr = (name) => name in constants_1.ATTRIBUTE_MAP;
+ const isMaxcryptorAttribute = (name) => name in constants_1.ATTRIBUTE_MAP;
  function setupMaxcryptor(CONFIG, cognitoAttributes) {
  return __awaiter(this, void 0, void 0, function* () {
- const userDoc = cognitoAttributes.reduce((acc, { Name, Value }) => {
- if (!isMaxcryptorAttr(Name))
- return acc;
- return (0, ramda_1.assocPath)(constants_1.ATTRIBUTE_MAP[Name], JSON.parse(Value), acc);
+ const userDocument = cognitoAttributes.reduce((accumulator, { Name, Value }) => {
+ if (!isMaxcryptorAttribute(Name))
+ return accumulator;
+ return (0, ramda_1.assocPath)(constants_1.ATTRIBUTE_MAP[Name], JSON.parse(Value), accumulator);
  }, {});
- const { encryptor: maxcryptor } = yield (0, maxcryptor_1.encryptorFromExistingUser)(userDoc, CONFIG.RAYTIO_PASSWORD);
+ const { encryptor: maxcryptor } = yield (0, maxcryptor_1.encryptorFromExistingUser)(userDocument, CONFIG.RAYTIO_PASSWORD);
  return maxcryptor;
  });
  }
@@ -3,9 +3,10 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.splitPOAndVers = void 0;
  const core_1 = require("@raytio/core");
  const constants_1 = require("../constants");
- const splitPOAndVers = (list) => list.reduce((ac, PO) => {
- const isVer = (0, core_1.findSchemaLabel)(PO.labels) === constants_1.SCHEMA.VERIFICATION;
- ac[+isVer].push(PO);
- return ac;
+ const splitPOAndVers = (list) => list.reduce((accumulator, PO) => {
+ const isVerification = (0, core_1.findSchemaLabel)(PO.labels) === constants_1.SCHEMA.VERIFICATION;
+ // move to the left or right array depending on whther +isVerification is 0 or 1
+ accumulator[+isVerification].push(PO);
+ return accumulator;
  }, [[], []]);
  exports.splitPOAndVers = splitPOAndVers;
@@ -10,13 +10,13 @@ const isValidLocale = (lang) => lang in locales;
  exports.isValidLocale = isValidLocale;
  const $$ = (key, variables) => {
  const strings = locales[global.lang || "en"];
- const replaceWithVariable = (_, varName) => {
- const value = variables === null || variables === void 0 ? void 0 : variables[varName];
- if (typeof value === "undefined") {
- throw new Error(`[i18n] variable '${varName}' not defined`);
+ const replaceWithVariable = (_, variableName) => {
+ const value = variables === null || variables === void 0 ? void 0 : variables[variableName];
+ if (value === undefined) {
+ throw new TypeError(`[i18n] variable '${variableName}' not defined`);
  }
  return `${value}`;
  };
- return strings[key].replace(/{([^}]+)}/g, replaceWithVariable);
+ return strings[key].replaceAll(/{([^}]+)}/g, replaceWithVariable);
  };
  exports.$$ = $$;
@@ -10,7 +10,8 @@ const helpers_1 = require("../../helpers");
  const Images = ({ nIds, files, }) => {
  return (jsx_pdf_1.default.createElement(jsx_pdf_1.default.Fragment, null, nIds.map((nId) => {
  var _a;
- const file = (_a = files[nId]) === null || _a === void 0 ? void 0 : _a[0];
+ // for videos, prefer the static frame over the original video
+ const file = (_a = (files[`${nId}_videoFrame`] || files[nId])) === null || _a === void 0 ? void 0 : _a[0];
  if (file)
  (0, helpers_1.assertSafeProperty)(file);
  return file ? (jsx_pdf_1.default.createElement("image", { src: file, width: 300 })) : (jsx_pdf_1.default.createElement("text", null, (0, locales_1.$$)("Images.file-not-found")));
@@ -4,8 +4,8 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
  };
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.Report = void 0;
+ const node_crypto_1 = require("node:crypto");
  const jsx_pdf_1 = __importDefault(require("jsx-pdf"));
- const crypto_1 = require("crypto");
  const core_1 = require("@raytio/core");
  const locales_1 = require("../../locales");
  const POVerificationBadge_1 = require("./POVerificationBadge");
@@ -21,7 +21,7 @@ const constants_2 = require("../../constants");
  const version_1 = require("../../public-methods/version");
  const Report = ({ data, files, config, aId, clientUrl, envConfig, }) => {
  // The PDF is read only; there's no reason why anyone would ever need to unlock it.
- const randomToken = (0, crypto_1.randomBytes)(32).toString("base64");
+ const randomToken = (0, node_crypto_1.randomBytes)(32).toString("base64");
  const schemas = Object.values(data.profile_objects).sort(
  // sort it so that the person schema comes first
  (a, b) => {
@@ -1,17 +1,17 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.formatDate = exports.verifyColour = exports.loadAsset = exports.asset = void 0;
+ const node_fs_1 = require("node:fs");
+ const node_path_1 = require("node:path");
  const types_1 = require("@raytio/types");
- const fs_1 = require("fs");
- const path_1 = require("path");
  const asset = (name) => {
- if (name.match(/[^A-Za-z\-_.]/) || name.includes("..")) {
+ if (/[^.A-Z_a-z-]/.test(name) || name.includes("..")) {
  throw new Error("Invalid asset name");
  }
- return (0, path_1.join)(__dirname, "../../../assets/", name);
+ return (0, node_path_1.join)(__dirname, "../../../assets/", name);
  };
  exports.asset = asset;
- const loadAsset = (name) => (0, fs_1.readFileSync)((0, exports.asset)(name), { encoding: "utf-8" });
+ const loadAsset = (name) => (0, node_fs_1.readFileSync)((0, exports.asset)(name), { encoding: "utf8" });
  exports.loadAsset = loadAsset;
  const verifyColour = (x) => {
  if (x === types_1.FieldVerification.Verified)
@@ -13,10 +13,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
  };
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.generatePDF = exports.generatePdfJson = void 0;
+ const node_fs_1 = require("node:fs");
+ const node_path_1 = require("node:path");
  const pdfmake_1 = __importDefault(require("pdfmake"));
  const jsx_pdf_1 = __importDefault(require("jsx-pdf"));
- const fs_1 = require("fs");
- const path_1 = require("path");
  const locales_1 = require("../locales");
  const Report_1 = require("../pdf/components/Report");
  const style_1 = require("../pdf/style");
@@ -24,15 +24,16 @@ function generatePdfJson(data, DATE_FORMAT, TIMEZONE) {
  return jsx_pdf_1.default.renderPdf(jsx_pdf_1.default.createElement(Report_1.Report, { data: data.json, files: data.files, aId: data.a_id, clientUrl: data.client_url, envConfig: data.envConfig, config: { DATE_FORMAT, TIMEZONE } }));
  }
  exports.generatePdfJson = generatePdfJson;
- const generatePDF = () => (data) => __awaiter(void 0, void 0, void 0, function* () {
+ const generatePDF = () =>
+ // eslint-disable-next-line unicorn/consistent-function-scoping -- deliberately to future proof the SDK for options
+ (data) => __awaiter(void 0, void 0, void 0, function* () {
  console.log("Generating PDF Report...");
- const { DATE_FORMAT = "en-nz", TIMEZONE = "Pacific/Auckland" } = process.env;
+ const { DATE_FORMAT = "en-nz", TIMEZONE = "Pacific/Auckland", PDF_LANGUAGE: customLang, } = process.env;
  if (data.envConfig.logo_url) {
  // there is a white labelling URL, so fetch it and write it to disk, overriding the default logo
  const arrayBuf = yield fetch(data.envConfig.logo_url).then((r) => r.arrayBuffer());
- yield fs_1.promises.writeFile((0, path_1.join)(__dirname, "../../assets/custom-logo.png"), Buffer.from(arrayBuf));
+ yield node_fs_1.promises.writeFile((0, node_path_1.join)(__dirname, "../../assets/custom-logo.png"), Buffer.from(arrayBuf));
  }
- const customLang = process.env.PDF_LANGUAGE;
  if (customLang) {
  if ((0, locales_1.isValidLocale)(customLang)) {
  global.lang = customLang;
@@ -43,17 +44,17 @@ const generatePDF = () => (data) => __awaiter(void 0, void 0, void 0, function*
  }
  return new Promise((resolve) => {
  const printer = new pdfmake_1.default(style_1.fonts);
- const pdfDoc = printer.createPdfKitDocument(generatePdfJson(data, DATE_FORMAT, TIMEZONE));
+ const pdfDocument = printer.createPdfKitDocument(generatePdfJson(data, DATE_FORMAT, TIMEZONE));
  const chunks = [];
- pdfDoc.on("data", (chunk) => chunks.push(chunk));
- pdfDoc.on("end", () => {
+ pdfDocument.on("data", (chunk) => chunks.push(chunk));
+ pdfDocument.on("end", () => {
  const final = Buffer.concat(chunks);
  const dataUrl = `data:application/pdf;base64,${final.toString("base64")}`;
  resolve(Object.assign(Object.assign({}, data), { files: Object.assign(Object.assign({}, data.files), {
  // do not change name, will be a breaking change
  [data.json.i_id]: [dataUrl, "pdf"] }) }));
  });
- pdfDoc.end();
+ pdfDocument.end();
  });
  });
  exports.generatePDF = generatePDF;
@@ -9,11 +9,11 @@ const constants_1 = require("../constants");
  * any are missing.
  */
  function getAndValidateConfig() {
- const CONFIG = constants_1.ENV_VARIABLES.reduce((acc, name) => {
+ const CONFIG = constants_1.ENV_VARIABLES.reduce((accumulator, name) => {
  const value = process.env[name];
  if (!value)
  throw new Error(`${name} is not configured`);
- return Object.assign(Object.assign({}, acc), { [name]: value });
+ return Object.assign(Object.assign({}, accumulator), { [name]: value });
  }, {});
  return CONFIG;
  }
@@ -100,14 +100,14 @@ function processSubmission({ applicationId, instanceId, verbose, config, _suplie
  // final outputs
  const csv = (0, helpers_1.deepJsonToCsv)(Object.assign(Object.assign({}, instanceDataToPassOn), (0, ramda_1.mapObjIndexed)((POList) => POList.map((flatPO) => {
  // this is a bit weird, but it prevents an even bigger breaking change for the csv format
- const obj = Object.entries(flatPO.$properties).flatMap(([fieldName, field]) => [
+ const object = Object.entries(flatPO.$properties).flatMap(([fieldName, field]) => [
  [fieldName, field.value],
  [
  `${fieldName}.verification`,
  constants_1.FIELD_VER_TEXT_MAP[field.verification],
  ],
  ]);
- return Object.assign(Object.assign({}, flatPO), Object.fromEntries(obj));
+ return Object.assign(Object.assign({}, flatPO), Object.fromEntries(object));
  }), PODetails)));
  log("Success!");
  return {
@@ -119,9 +119,9 @@ function processSubmission({ applicationId, instanceId, verbose, config, _suplie
  envConfig,
  };
  }
- catch (err) {
- log("Processing submission failed:", err);
- throw err; // pass on error
+ catch (error) {
+ log("Processing submission failed:", error);
+ throw error; // pass on error
  }
  });
  }
@@ -21,16 +21,18 @@ function upload(s3, { bucketName, fileName, fileContent }) {
  Bucket: bucketName,
  Key: fileName,
  Body: fileContent,
- }, (err, data) => {
- if (err)
- return reject(err);
+ }, (error, data) => {
+ if (error)
+ return reject(error);
  console.log(`Uploaded ${data.Location}`);
  return resolve(data);
  });
  });
  });
  }
- const saveToS3Bucket = () => (input) => __awaiter(void 0, void 0, void 0, function* () {
+ const saveToS3Bucket = () =>
+ // eslint-disable-next-line unicorn/consistent-function-scoping -- deliberately to future proof the SDK for options
+ (input) => __awaiter(void 0, void 0, void 0, function* () {
  console.log("Uploading data to S3...");
  const bucketName = process.env.S3_BUCKET;
  if (!bucketName)
@@ -1,9 +1,9 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.version = exports.packageDotJson = void 0;
- const fs_1 = require("fs");
- const path_1 = require("path");
- exports.packageDotJson = JSON.parse((0, fs_1.readFileSync)((0, path_1.join)(__dirname, "../../package.json"), "utf8"));
+ const node_fs_1 = require("node:fs");
+ const node_path_1 = require("node:path");
+ exports.packageDotJson = JSON.parse((0, node_fs_1.readFileSync)((0, node_path_1.join)(__dirname, "../../package.json"), "utf8"));
  exports.version = Object.entries({
  LANG: process.env.PDF_LANGUAGE || "default",
  N: process.version.slice(1),
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@raytio/decrypt-helper",
- "version": "6.0.0",
+ "version": "6.1.1",
  "author": "Raytio",
  "description": "A helper to decrypt data shared by Raytio users",
  "main": "dist",
@@ -34,24 +34,24 @@
  },
  "devDependencies": {
  "@raytio/react-intl-manager": "^6.3.1",
- "@types/jest": "^29.5.1",
+ "@types/jest": "^29.5.2",
  "@types/jest-image-snapshot": "^6.1.0",
  "@types/jsx-pdf": "^2.2.2",
  "@types/mime-types": "^2.1.1",
- "@types/node": "^18.15.12",
+ "@types/node": "^18.16.18",
  "@types/pdfmake": "^0.2.2",
- "@types/ramda": "^0.29.0",
+ "@types/ramda": "^0.29.2",
  "babel-preset-react-app": "^10.0.1",
- "dotenv": "^16.0.3",
- "eslint": "^8.38.0",
- "eslint-config-kyle": "^11.7.0",
+ "dotenv": "^16.3.1",
+ "eslint": "^8.43.0",
+ "eslint-config-kyle": "^11.14.2",
  "jest": "^29.5.0",
  "jest-image-snapshot": "^6.1.0",
  "jest-junit": "^16.0.0",
  "pdf-to-img": "^2.1.1",
  "ts-jest": "^29.1.0",
  "ts-node": "^10.9.1",
- "typescript": "^5.0.4"
+ "typescript": "^5.1.3"
  },
  "eslintConfig": {
  "extends": "kyle",
@@ -61,7 +61,16 @@
  "react/react-in-jsx-scope": 0,
  "react/style-prop-object": 0,
  "react/no-unknown-property": 0,
- "@typescript-eslint/no-non-null-assertion": 0,
+ "unicorn/prevent-abbreviations": [
+ 1,
+ {
+ "replacements": {
+ "i": false,
+ "env": false,
+ "props": false
+ }
+ }
+ ],
  "@typescript-eslint/no-unused-vars": [
  2,
  {