@edirect/tokenization 0.0.5 → 0.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +12 -0
- package/dist/index.d.mts +7 -0
- package/dist/index.d.ts +7 -0
- package/dist/index.js +56 -8
- package/dist/index.mjs +56 -8
- package/package.json +1 -1
package/README.md
CHANGED
|
@@ -67,6 +67,18 @@ const detokenizedData = await tokenization.detokenize(auth, tenant, config, payl
|
|
|
67
67
|
console.log("Detokenized Payload:", detokenizedData);
|
|
68
68
|
```
|
|
69
69
|
|
|
70
|
+
### Parsing the Token
|
|
71
|
+
|
|
72
|
+
The tokenized payload is a string that contains the tokenized values. To parse the tokenized payload, you can use the `parseToken` method.
|
|
73
|
+
|
|
74
|
+
```javascript
|
|
75
|
+
const token = Tokenization.parseToken("token:tenant1:str:asdf1234");
|
|
76
|
+
console.log(`Is Token: ${token.isToken}`);
|
|
77
|
+
console.log(`Tenant: ${token.tenant}`);
|
|
78
|
+
console.log(`Type: ${token.type}`);
|
|
79
|
+
console.log(`Hash: ${token.hash}`);
|
|
80
|
+
```
|
|
81
|
+
|
|
70
82
|
## API
|
|
71
83
|
|
|
72
84
|
### `Tokenization`
|
package/dist/index.d.mts
CHANGED
|
@@ -7,6 +7,12 @@ interface ITokenizationApp {
|
|
|
7
7
|
tokenize(auth: string, tenant: string, config: string, payload: TokenPayload): Promise<TokenPayload>;
|
|
8
8
|
detokenize(auth: string, tenant: string, config: string, payload: TokenPayload): Promise<TokenPayload>;
|
|
9
9
|
}
|
|
10
|
+
type Token = {
|
|
11
|
+
isToken: boolean;
|
|
12
|
+
tenant: string;
|
|
13
|
+
type: string;
|
|
14
|
+
hash: string;
|
|
15
|
+
};
|
|
10
16
|
|
|
11
17
|
/**
|
|
12
18
|
* The TokenizationClient class.
|
|
@@ -32,6 +38,7 @@ declare class Tokenization {
|
|
|
32
38
|
* @returns The detokenized payload.
|
|
33
39
|
*/
|
|
34
40
|
detokenize(auth: string, tenant: string, config: string, payload: TokenPayload): Promise<TokenPayload>;
|
|
41
|
+
static parseToken(token: string): Token;
|
|
35
42
|
}
|
|
36
43
|
|
|
37
44
|
export { Tokenization };
|
package/dist/index.d.ts
CHANGED
|
@@ -7,6 +7,12 @@ interface ITokenizationApp {
|
|
|
7
7
|
tokenize(auth: string, tenant: string, config: string, payload: TokenPayload): Promise<TokenPayload>;
|
|
8
8
|
detokenize(auth: string, tenant: string, config: string, payload: TokenPayload): Promise<TokenPayload>;
|
|
9
9
|
}
|
|
10
|
+
type Token = {
|
|
11
|
+
isToken: boolean;
|
|
12
|
+
tenant: string;
|
|
13
|
+
type: string;
|
|
14
|
+
hash: string;
|
|
15
|
+
};
|
|
10
16
|
|
|
11
17
|
/**
|
|
12
18
|
* The TokenizationClient class.
|
|
@@ -32,6 +38,7 @@ declare class Tokenization {
|
|
|
32
38
|
* @returns The detokenized payload.
|
|
33
39
|
*/
|
|
34
40
|
detokenize(auth: string, tenant: string, config: string, payload: TokenPayload): Promise<TokenPayload>;
|
|
41
|
+
static parseToken(token: string): Token;
|
|
35
42
|
}
|
|
36
43
|
|
|
37
44
|
export { Tokenization };
|
package/dist/index.js
CHANGED
|
@@ -40,6 +40,18 @@ var traverse = (obj, callback, path = []) => {
|
|
|
40
40
|
};
|
|
41
41
|
|
|
42
42
|
// src/core/utils/object.ts
|
|
43
|
+
var get = (obj, path) => {
|
|
44
|
+
let currentObj = obj;
|
|
45
|
+
for (let i = 0; i < path.length; i++) {
|
|
46
|
+
const key = path[i];
|
|
47
|
+
if (currentObj[key] !== void 0) {
|
|
48
|
+
currentObj = currentObj[key];
|
|
49
|
+
} else {
|
|
50
|
+
return void 0;
|
|
51
|
+
}
|
|
52
|
+
}
|
|
53
|
+
return currentObj;
|
|
54
|
+
};
|
|
43
55
|
var set = (obj, path, value) => {
|
|
44
56
|
let currentObj = obj;
|
|
45
57
|
for (let i = 0; i < path.length; i++) {
|
|
@@ -136,8 +148,14 @@ var TokenizationApp = class {
|
|
|
136
148
|
processedPayload.notFound
|
|
137
149
|
);
|
|
138
150
|
for (const key in tokenPayload) {
|
|
139
|
-
|
|
140
|
-
|
|
151
|
+
const path = key.split(".");
|
|
152
|
+
const token = tokenPayload[key];
|
|
153
|
+
const original = get(respPayload, path);
|
|
154
|
+
set(respPayload, path, token);
|
|
155
|
+
if (typeof token === "string") {
|
|
156
|
+
this.cache.set(token, original);
|
|
157
|
+
this.cache.set(original.toString(), token);
|
|
158
|
+
}
|
|
141
159
|
}
|
|
142
160
|
}
|
|
143
161
|
if (Object.keys(processedPayload.found).length > 0) {
|
|
@@ -162,8 +180,14 @@ var TokenizationApp = class {
|
|
|
162
180
|
processedPayload.notFound
|
|
163
181
|
);
|
|
164
182
|
for (const key in tokenPayload) {
|
|
165
|
-
|
|
166
|
-
|
|
183
|
+
const path = key.split(".");
|
|
184
|
+
const token = get(respPayload, path);
|
|
185
|
+
const original = tokenPayload[key];
|
|
186
|
+
if (typeof original === "string") {
|
|
187
|
+
this.cache.set(token, original);
|
|
188
|
+
this.cache.set(original.toString(), token);
|
|
189
|
+
}
|
|
190
|
+
set(respPayload, key.split("."), original);
|
|
167
191
|
}
|
|
168
192
|
}
|
|
169
193
|
if (Object.keys(processedPayload.found).length > 0) {
|
|
@@ -186,12 +210,11 @@ var TokenizationApp = class {
|
|
|
186
210
|
};
|
|
187
211
|
traverse(payload, (value, path) => {
|
|
188
212
|
if (this.evaluator.shouldTokenizeField(config, path)) {
|
|
189
|
-
const
|
|
190
|
-
const fromCache = this.cache.get(pathName);
|
|
213
|
+
const fromCache = this.cache.get(value.toString());
|
|
191
214
|
if (fromCache) {
|
|
192
|
-
req.found[
|
|
215
|
+
req.found[path.join(".")] = fromCache;
|
|
193
216
|
} else {
|
|
194
|
-
req.notFound[
|
|
217
|
+
req.notFound[path.join(".")] = value;
|
|
195
218
|
}
|
|
196
219
|
}
|
|
197
220
|
});
|
|
@@ -374,6 +397,31 @@ var Tokenization = class {
|
|
|
374
397
|
detokenize(auth, tenant, config, payload) {
|
|
375
398
|
return this.app.detokenize(auth, tenant, config, payload);
|
|
376
399
|
}
|
|
400
|
+
static parseToken(token) {
|
|
401
|
+
const parts = token.split(":");
|
|
402
|
+
if (parts.length !== 4) {
|
|
403
|
+
return {
|
|
404
|
+
isToken: false,
|
|
405
|
+
tenant: "",
|
|
406
|
+
type: "",
|
|
407
|
+
hash: ""
|
|
408
|
+
};
|
|
409
|
+
}
|
|
410
|
+
if (parts[0] !== "token") {
|
|
411
|
+
return {
|
|
412
|
+
isToken: false,
|
|
413
|
+
tenant: "",
|
|
414
|
+
type: "",
|
|
415
|
+
hash: ""
|
|
416
|
+
};
|
|
417
|
+
}
|
|
418
|
+
return {
|
|
419
|
+
isToken: true,
|
|
420
|
+
tenant: parts[1],
|
|
421
|
+
type: parts[2],
|
|
422
|
+
hash: parts[3]
|
|
423
|
+
};
|
|
424
|
+
}
|
|
377
425
|
};
|
|
378
426
|
// Annotate the CommonJS export names for ESM import in node:
|
|
379
427
|
0 && (module.exports = {
|
package/dist/index.mjs
CHANGED
|
@@ -14,6 +14,18 @@ var traverse = (obj, callback, path = []) => {
|
|
|
14
14
|
};
|
|
15
15
|
|
|
16
16
|
// src/core/utils/object.ts
|
|
17
|
+
var get = (obj, path) => {
|
|
18
|
+
let currentObj = obj;
|
|
19
|
+
for (let i = 0; i < path.length; i++) {
|
|
20
|
+
const key = path[i];
|
|
21
|
+
if (currentObj[key] !== void 0) {
|
|
22
|
+
currentObj = currentObj[key];
|
|
23
|
+
} else {
|
|
24
|
+
return void 0;
|
|
25
|
+
}
|
|
26
|
+
}
|
|
27
|
+
return currentObj;
|
|
28
|
+
};
|
|
17
29
|
var set = (obj, path, value) => {
|
|
18
30
|
let currentObj = obj;
|
|
19
31
|
for (let i = 0; i < path.length; i++) {
|
|
@@ -110,8 +122,14 @@ var TokenizationApp = class {
|
|
|
110
122
|
processedPayload.notFound
|
|
111
123
|
);
|
|
112
124
|
for (const key in tokenPayload) {
|
|
113
|
-
|
|
114
|
-
|
|
125
|
+
const path = key.split(".");
|
|
126
|
+
const token = tokenPayload[key];
|
|
127
|
+
const original = get(respPayload, path);
|
|
128
|
+
set(respPayload, path, token);
|
|
129
|
+
if (typeof token === "string") {
|
|
130
|
+
this.cache.set(token, original);
|
|
131
|
+
this.cache.set(original.toString(), token);
|
|
132
|
+
}
|
|
115
133
|
}
|
|
116
134
|
}
|
|
117
135
|
if (Object.keys(processedPayload.found).length > 0) {
|
|
@@ -136,8 +154,14 @@ var TokenizationApp = class {
|
|
|
136
154
|
processedPayload.notFound
|
|
137
155
|
);
|
|
138
156
|
for (const key in tokenPayload) {
|
|
139
|
-
|
|
140
|
-
|
|
157
|
+
const path = key.split(".");
|
|
158
|
+
const token = get(respPayload, path);
|
|
159
|
+
const original = tokenPayload[key];
|
|
160
|
+
if (typeof original === "string") {
|
|
161
|
+
this.cache.set(token, original);
|
|
162
|
+
this.cache.set(original.toString(), token);
|
|
163
|
+
}
|
|
164
|
+
set(respPayload, key.split("."), original);
|
|
141
165
|
}
|
|
142
166
|
}
|
|
143
167
|
if (Object.keys(processedPayload.found).length > 0) {
|
|
@@ -160,12 +184,11 @@ var TokenizationApp = class {
|
|
|
160
184
|
};
|
|
161
185
|
traverse(payload, (value, path) => {
|
|
162
186
|
if (this.evaluator.shouldTokenizeField(config, path)) {
|
|
163
|
-
const
|
|
164
|
-
const fromCache = this.cache.get(pathName);
|
|
187
|
+
const fromCache = this.cache.get(value.toString());
|
|
165
188
|
if (fromCache) {
|
|
166
|
-
req.found[
|
|
189
|
+
req.found[path.join(".")] = fromCache;
|
|
167
190
|
} else {
|
|
168
|
-
req.notFound[
|
|
191
|
+
req.notFound[path.join(".")] = value;
|
|
169
192
|
}
|
|
170
193
|
}
|
|
171
194
|
});
|
|
@@ -348,6 +371,31 @@ var Tokenization = class {
|
|
|
348
371
|
detokenize(auth, tenant, config, payload) {
|
|
349
372
|
return this.app.detokenize(auth, tenant, config, payload);
|
|
350
373
|
}
|
|
374
|
+
static parseToken(token) {
|
|
375
|
+
const parts = token.split(":");
|
|
376
|
+
if (parts.length !== 4) {
|
|
377
|
+
return {
|
|
378
|
+
isToken: false,
|
|
379
|
+
tenant: "",
|
|
380
|
+
type: "",
|
|
381
|
+
hash: ""
|
|
382
|
+
};
|
|
383
|
+
}
|
|
384
|
+
if (parts[0] !== "token") {
|
|
385
|
+
return {
|
|
386
|
+
isToken: false,
|
|
387
|
+
tenant: "",
|
|
388
|
+
type: "",
|
|
389
|
+
hash: ""
|
|
390
|
+
};
|
|
391
|
+
}
|
|
392
|
+
return {
|
|
393
|
+
isToken: true,
|
|
394
|
+
tenant: parts[1],
|
|
395
|
+
type: parts[2],
|
|
396
|
+
hash: parts[3]
|
|
397
|
+
};
|
|
398
|
+
}
|
|
351
399
|
};
|
|
352
400
|
export {
|
|
353
401
|
Tokenization
|