@edirect/tokenization 0.0.4 → 0.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +6 -0
- package/dist/index.js +97 -21
- package/dist/index.mjs +97 -21
- package/package.json +4 -1
package/README.md
CHANGED
|
@@ -10,6 +10,12 @@ To install the necessary dependencies, run:
|
|
|
10
10
|
npm install --save @edirect/tokenization
|
|
11
11
|
```
|
|
12
12
|
|
|
13
|
+
## Important Note
|
|
14
|
+
|
|
15
|
+
This client **caches** the **configurations** for `5 minutes` and the **tokens** for `1 minute`.
|
|
16
|
+
|
|
17
|
+
This is to avoid making unnecessary requests to the tokenization service. If you want to change any configuration on the tokenization service, you'll need to wait for the cache to expire.
|
|
18
|
+
|
|
13
19
|
## Usage
|
|
14
20
|
|
|
15
21
|
The `Tokenization` class provides methods to tokenize and detokenize payloads. Below is an example of how to use the class.
|
package/dist/index.js
CHANGED
|
@@ -40,6 +40,18 @@ var traverse = (obj, callback, path = []) => {
|
|
|
40
40
|
};
|
|
41
41
|
|
|
42
42
|
// src/core/utils/object.ts
|
|
43
|
+
var get = (obj, path) => {
|
|
44
|
+
let currentObj = obj;
|
|
45
|
+
for (let i = 0; i < path.length; i++) {
|
|
46
|
+
const key = path[i];
|
|
47
|
+
if (currentObj[key] !== void 0) {
|
|
48
|
+
currentObj = currentObj[key];
|
|
49
|
+
} else {
|
|
50
|
+
return void 0;
|
|
51
|
+
}
|
|
52
|
+
}
|
|
53
|
+
return currentObj;
|
|
54
|
+
};
|
|
43
55
|
var set = (obj, path, value) => {
|
|
44
56
|
let currentObj = obj;
|
|
45
57
|
for (let i = 0; i < path.length; i++) {
|
|
@@ -102,7 +114,9 @@ var TokenEvaluator = class {
|
|
|
102
114
|
};
|
|
103
115
|
|
|
104
116
|
// src/core/app.ts
|
|
117
|
+
var import_lru_cache = require("lru-cache");
|
|
105
118
|
var TokenizationApp = class {
|
|
119
|
+
cache;
|
|
106
120
|
tokenizationService;
|
|
107
121
|
configurationService;
|
|
108
122
|
evaluator;
|
|
@@ -110,22 +124,44 @@ var TokenizationApp = class {
|
|
|
110
124
|
this.tokenizationService = tokenizationService;
|
|
111
125
|
this.configurationService = configurationService;
|
|
112
126
|
this.evaluator = new TokenEvaluator(evaluator);
|
|
127
|
+
this.cache = new import_lru_cache.LRUCache({
|
|
128
|
+
max: 5e3,
|
|
129
|
+
ttl: 1e3 * 60 * 1,
|
|
130
|
+
// 1 minute
|
|
131
|
+
allowStale: false,
|
|
132
|
+
updateAgeOnGet: false,
|
|
133
|
+
updateAgeOnHas: false
|
|
134
|
+
});
|
|
113
135
|
}
|
|
114
136
|
async tokenize(auth, tenant, configKey, payload) {
|
|
115
137
|
const config = await this.configurationService.get(auth, tenant, configKey);
|
|
116
138
|
if (!config) return payload;
|
|
117
139
|
if ((!config.fields || config.fields.length === 0) && config.defaultClassification === 0)
|
|
118
140
|
return payload;
|
|
119
|
-
const processedPayload = await this.processRequest(config, payload);
|
|
120
|
-
const tokenPayload = await this.tokenizationService.tokenize(
|
|
121
|
-
auth,
|
|
122
|
-
tenant,
|
|
123
|
-
configKey,
|
|
124
|
-
processedPayload
|
|
125
|
-
);
|
|
126
141
|
const respPayload = payload;
|
|
127
|
-
|
|
128
|
-
|
|
142
|
+
const processedPayload = await this.processRequest(config, payload);
|
|
143
|
+
if (Object.keys(processedPayload.notFound).length > 0) {
|
|
144
|
+
const tokenPayload = await this.tokenizationService.tokenize(
|
|
145
|
+
auth,
|
|
146
|
+
tenant,
|
|
147
|
+
configKey,
|
|
148
|
+
processedPayload.notFound
|
|
149
|
+
);
|
|
150
|
+
for (const key in tokenPayload) {
|
|
151
|
+
const path = key.split(".");
|
|
152
|
+
const token = tokenPayload[key];
|
|
153
|
+
const original = get(respPayload, path);
|
|
154
|
+
set(respPayload, path, token);
|
|
155
|
+
if (typeof token === "string") {
|
|
156
|
+
this.cache.set(token, original);
|
|
157
|
+
this.cache.set(original.toString(), token);
|
|
158
|
+
}
|
|
159
|
+
}
|
|
160
|
+
}
|
|
161
|
+
if (Object.keys(processedPayload.found).length > 0) {
|
|
162
|
+
for (const key in processedPayload.found) {
|
|
163
|
+
set(respPayload, key.split("."), processedPayload.found[key]);
|
|
164
|
+
}
|
|
129
165
|
}
|
|
130
166
|
return respPayload;
|
|
131
167
|
}
|
|
@@ -134,27 +170,52 @@ var TokenizationApp = class {
|
|
|
134
170
|
if (!config) return payload;
|
|
135
171
|
if ((!config.fields || config.fields.length === 0) && config.defaultClassification === 0)
|
|
136
172
|
return payload;
|
|
137
|
-
const processedPayload = this.processRequest(config, payload);
|
|
138
|
-
const tokenPayload = await this.tokenizationService.detokenize(
|
|
139
|
-
auth,
|
|
140
|
-
tenant,
|
|
141
|
-
configKey,
|
|
142
|
-
processedPayload
|
|
143
|
-
);
|
|
144
173
|
const respPayload = payload;
|
|
145
|
-
|
|
146
|
-
|
|
174
|
+
const processedPayload = this.processRequest(config, payload);
|
|
175
|
+
if (Object.keys(processedPayload.notFound).length > 0) {
|
|
176
|
+
const tokenPayload = await this.tokenizationService.detokenize(
|
|
177
|
+
auth,
|
|
178
|
+
tenant,
|
|
179
|
+
configKey,
|
|
180
|
+
processedPayload.notFound
|
|
181
|
+
);
|
|
182
|
+
for (const key in tokenPayload) {
|
|
183
|
+
const path = key.split(".");
|
|
184
|
+
const token = get(respPayload, path);
|
|
185
|
+
const original = tokenPayload[key];
|
|
186
|
+
if (typeof original === "string") {
|
|
187
|
+
this.cache.set(token, original);
|
|
188
|
+
this.cache.set(original.toString(), token);
|
|
189
|
+
}
|
|
190
|
+
set(respPayload, key.split("."), original);
|
|
191
|
+
}
|
|
192
|
+
}
|
|
193
|
+
if (Object.keys(processedPayload.found).length > 0) {
|
|
194
|
+
for (const key in processedPayload.found) {
|
|
195
|
+
set(respPayload, key.split("."), processedPayload.found[key]);
|
|
196
|
+
}
|
|
147
197
|
}
|
|
148
198
|
return respPayload;
|
|
149
199
|
}
|
|
150
200
|
processRequest(config, payload) {
|
|
151
201
|
if (config.fields.length === 0) {
|
|
152
|
-
return
|
|
202
|
+
return {
|
|
203
|
+
found: payload,
|
|
204
|
+
notFound: {}
|
|
205
|
+
};
|
|
153
206
|
}
|
|
154
|
-
const req = {
|
|
207
|
+
const req = {
|
|
208
|
+
found: {},
|
|
209
|
+
notFound: {}
|
|
210
|
+
};
|
|
155
211
|
traverse(payload, (value, path) => {
|
|
156
212
|
if (this.evaluator.shouldTokenizeField(config, path)) {
|
|
157
|
-
|
|
213
|
+
const fromCache = this.cache.get(value.toString());
|
|
214
|
+
if (fromCache) {
|
|
215
|
+
req.found[path.join(".")] = fromCache;
|
|
216
|
+
} else {
|
|
217
|
+
req.notFound[path.join(".")] = value;
|
|
218
|
+
}
|
|
158
219
|
}
|
|
159
220
|
});
|
|
160
221
|
return req;
|
|
@@ -162,14 +223,28 @@ var TokenizationApp = class {
|
|
|
162
223
|
};
|
|
163
224
|
|
|
164
225
|
// src/core/services/configuration.ts
|
|
226
|
+
var import_lru_cache2 = require("lru-cache");
|
|
165
227
|
var ConfigurationService = class {
|
|
166
228
|
constructor(baseUrl) {
|
|
167
229
|
this.baseUrl = baseUrl;
|
|
168
230
|
if (!this.baseUrl) {
|
|
169
231
|
throw new Error("Configuration Service BaseUrl is required");
|
|
170
232
|
}
|
|
233
|
+
this.cache = new import_lru_cache2.LRUCache({
|
|
234
|
+
max: 100,
|
|
235
|
+
ttl: 1e3 * 60 * 5,
|
|
236
|
+
// 5 minutes
|
|
237
|
+
allowStale: false,
|
|
238
|
+
updateAgeOnGet: false,
|
|
239
|
+
updateAgeOnHas: false
|
|
240
|
+
});
|
|
171
241
|
}
|
|
242
|
+
cache;
|
|
172
243
|
async get(auth, tenant, config) {
|
|
244
|
+
const cached = this.cache.get(`${tenant}/${config}`);
|
|
245
|
+
if (cached) {
|
|
246
|
+
return cached;
|
|
247
|
+
}
|
|
173
248
|
const resp = await fetch(
|
|
174
249
|
`${this.baseUrl}/api/v1/${tenant}/${config}/config`,
|
|
175
250
|
{
|
|
@@ -186,6 +261,7 @@ var ConfigurationService = class {
|
|
|
186
261
|
throw new Error("Failed to get configuration");
|
|
187
262
|
}
|
|
188
263
|
const data = await resp.json();
|
|
264
|
+
this.cache.set(`${tenant}/${config}`, data);
|
|
189
265
|
return data;
|
|
190
266
|
}
|
|
191
267
|
async create(auth, tenant, config) {
|
package/dist/index.mjs
CHANGED
|
@@ -14,6 +14,18 @@ var traverse = (obj, callback, path = []) => {
|
|
|
14
14
|
};
|
|
15
15
|
|
|
16
16
|
// src/core/utils/object.ts
|
|
17
|
+
var get = (obj, path) => {
|
|
18
|
+
let currentObj = obj;
|
|
19
|
+
for (let i = 0; i < path.length; i++) {
|
|
20
|
+
const key = path[i];
|
|
21
|
+
if (currentObj[key] !== void 0) {
|
|
22
|
+
currentObj = currentObj[key];
|
|
23
|
+
} else {
|
|
24
|
+
return void 0;
|
|
25
|
+
}
|
|
26
|
+
}
|
|
27
|
+
return currentObj;
|
|
28
|
+
};
|
|
17
29
|
var set = (obj, path, value) => {
|
|
18
30
|
let currentObj = obj;
|
|
19
31
|
for (let i = 0; i < path.length; i++) {
|
|
@@ -76,7 +88,9 @@ var TokenEvaluator = class {
|
|
|
76
88
|
};
|
|
77
89
|
|
|
78
90
|
// src/core/app.ts
|
|
91
|
+
import { LRUCache } from "lru-cache";
|
|
79
92
|
var TokenizationApp = class {
|
|
93
|
+
cache;
|
|
80
94
|
tokenizationService;
|
|
81
95
|
configurationService;
|
|
82
96
|
evaluator;
|
|
@@ -84,22 +98,44 @@ var TokenizationApp = class {
|
|
|
84
98
|
this.tokenizationService = tokenizationService;
|
|
85
99
|
this.configurationService = configurationService;
|
|
86
100
|
this.evaluator = new TokenEvaluator(evaluator);
|
|
101
|
+
this.cache = new LRUCache({
|
|
102
|
+
max: 5e3,
|
|
103
|
+
ttl: 1e3 * 60 * 1,
|
|
104
|
+
// 1 minute
|
|
105
|
+
allowStale: false,
|
|
106
|
+
updateAgeOnGet: false,
|
|
107
|
+
updateAgeOnHas: false
|
|
108
|
+
});
|
|
87
109
|
}
|
|
88
110
|
async tokenize(auth, tenant, configKey, payload) {
|
|
89
111
|
const config = await this.configurationService.get(auth, tenant, configKey);
|
|
90
112
|
if (!config) return payload;
|
|
91
113
|
if ((!config.fields || config.fields.length === 0) && config.defaultClassification === 0)
|
|
92
114
|
return payload;
|
|
93
|
-
const processedPayload = await this.processRequest(config, payload);
|
|
94
|
-
const tokenPayload = await this.tokenizationService.tokenize(
|
|
95
|
-
auth,
|
|
96
|
-
tenant,
|
|
97
|
-
configKey,
|
|
98
|
-
processedPayload
|
|
99
|
-
);
|
|
100
115
|
const respPayload = payload;
|
|
101
|
-
|
|
102
|
-
|
|
116
|
+
const processedPayload = await this.processRequest(config, payload);
|
|
117
|
+
if (Object.keys(processedPayload.notFound).length > 0) {
|
|
118
|
+
const tokenPayload = await this.tokenizationService.tokenize(
|
|
119
|
+
auth,
|
|
120
|
+
tenant,
|
|
121
|
+
configKey,
|
|
122
|
+
processedPayload.notFound
|
|
123
|
+
);
|
|
124
|
+
for (const key in tokenPayload) {
|
|
125
|
+
const path = key.split(".");
|
|
126
|
+
const token = tokenPayload[key];
|
|
127
|
+
const original = get(respPayload, path);
|
|
128
|
+
set(respPayload, path, token);
|
|
129
|
+
if (typeof token === "string") {
|
|
130
|
+
this.cache.set(token, original);
|
|
131
|
+
this.cache.set(original.toString(), token);
|
|
132
|
+
}
|
|
133
|
+
}
|
|
134
|
+
}
|
|
135
|
+
if (Object.keys(processedPayload.found).length > 0) {
|
|
136
|
+
for (const key in processedPayload.found) {
|
|
137
|
+
set(respPayload, key.split("."), processedPayload.found[key]);
|
|
138
|
+
}
|
|
103
139
|
}
|
|
104
140
|
return respPayload;
|
|
105
141
|
}
|
|
@@ -108,27 +144,52 @@ var TokenizationApp = class {
|
|
|
108
144
|
if (!config) return payload;
|
|
109
145
|
if ((!config.fields || config.fields.length === 0) && config.defaultClassification === 0)
|
|
110
146
|
return payload;
|
|
111
|
-
const processedPayload = this.processRequest(config, payload);
|
|
112
|
-
const tokenPayload = await this.tokenizationService.detokenize(
|
|
113
|
-
auth,
|
|
114
|
-
tenant,
|
|
115
|
-
configKey,
|
|
116
|
-
processedPayload
|
|
117
|
-
);
|
|
118
147
|
const respPayload = payload;
|
|
119
|
-
|
|
120
|
-
|
|
148
|
+
const processedPayload = this.processRequest(config, payload);
|
|
149
|
+
if (Object.keys(processedPayload.notFound).length > 0) {
|
|
150
|
+
const tokenPayload = await this.tokenizationService.detokenize(
|
|
151
|
+
auth,
|
|
152
|
+
tenant,
|
|
153
|
+
configKey,
|
|
154
|
+
processedPayload.notFound
|
|
155
|
+
);
|
|
156
|
+
for (const key in tokenPayload) {
|
|
157
|
+
const path = key.split(".");
|
|
158
|
+
const token = get(respPayload, path);
|
|
159
|
+
const original = tokenPayload[key];
|
|
160
|
+
if (typeof original === "string") {
|
|
161
|
+
this.cache.set(token, original);
|
|
162
|
+
this.cache.set(original.toString(), token);
|
|
163
|
+
}
|
|
164
|
+
set(respPayload, key.split("."), original);
|
|
165
|
+
}
|
|
166
|
+
}
|
|
167
|
+
if (Object.keys(processedPayload.found).length > 0) {
|
|
168
|
+
for (const key in processedPayload.found) {
|
|
169
|
+
set(respPayload, key.split("."), processedPayload.found[key]);
|
|
170
|
+
}
|
|
121
171
|
}
|
|
122
172
|
return respPayload;
|
|
123
173
|
}
|
|
124
174
|
processRequest(config, payload) {
|
|
125
175
|
if (config.fields.length === 0) {
|
|
126
|
-
return
|
|
176
|
+
return {
|
|
177
|
+
found: payload,
|
|
178
|
+
notFound: {}
|
|
179
|
+
};
|
|
127
180
|
}
|
|
128
|
-
const req = {
|
|
181
|
+
const req = {
|
|
182
|
+
found: {},
|
|
183
|
+
notFound: {}
|
|
184
|
+
};
|
|
129
185
|
traverse(payload, (value, path) => {
|
|
130
186
|
if (this.evaluator.shouldTokenizeField(config, path)) {
|
|
131
|
-
|
|
187
|
+
const fromCache = this.cache.get(value.toString());
|
|
188
|
+
if (fromCache) {
|
|
189
|
+
req.found[path.join(".")] = fromCache;
|
|
190
|
+
} else {
|
|
191
|
+
req.notFound[path.join(".")] = value;
|
|
192
|
+
}
|
|
132
193
|
}
|
|
133
194
|
});
|
|
134
195
|
return req;
|
|
@@ -136,14 +197,28 @@ var TokenizationApp = class {
|
|
|
136
197
|
};
|
|
137
198
|
|
|
138
199
|
// src/core/services/configuration.ts
|
|
200
|
+
import { LRUCache as LRUCache2 } from "lru-cache";
|
|
139
201
|
var ConfigurationService = class {
|
|
140
202
|
constructor(baseUrl) {
|
|
141
203
|
this.baseUrl = baseUrl;
|
|
142
204
|
if (!this.baseUrl) {
|
|
143
205
|
throw new Error("Configuration Service BaseUrl is required");
|
|
144
206
|
}
|
|
207
|
+
this.cache = new LRUCache2({
|
|
208
|
+
max: 100,
|
|
209
|
+
ttl: 1e3 * 60 * 5,
|
|
210
|
+
// 5 minutes
|
|
211
|
+
allowStale: false,
|
|
212
|
+
updateAgeOnGet: false,
|
|
213
|
+
updateAgeOnHas: false
|
|
214
|
+
});
|
|
145
215
|
}
|
|
216
|
+
cache;
|
|
146
217
|
async get(auth, tenant, config) {
|
|
218
|
+
const cached = this.cache.get(`${tenant}/${config}`);
|
|
219
|
+
if (cached) {
|
|
220
|
+
return cached;
|
|
221
|
+
}
|
|
147
222
|
const resp = await fetch(
|
|
148
223
|
`${this.baseUrl}/api/v1/${tenant}/${config}/config`,
|
|
149
224
|
{
|
|
@@ -160,6 +235,7 @@ var ConfigurationService = class {
|
|
|
160
235
|
throw new Error("Failed to get configuration");
|
|
161
236
|
}
|
|
162
237
|
const data = await resp.json();
|
|
238
|
+
this.cache.set(`${tenant}/${config}`, data);
|
|
163
239
|
return data;
|
|
164
240
|
}
|
|
165
241
|
async create(auth, tenant, config) {
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@edirect/tokenization",
|
|
3
|
-
"version": "0.0.4",
|
|
3
|
+
"version": "0.0.6",
|
|
4
4
|
"description": "Javascript library for tokenization service",
|
|
5
5
|
"main": "dist/index.js",
|
|
6
6
|
"module": "dist/index.mjs",
|
|
@@ -32,5 +32,8 @@
|
|
|
32
32
|
"tsup": "^8.3.5",
|
|
33
33
|
"typescript": "^5.7.2",
|
|
34
34
|
"vitest": "^2.1.8"
|
|
35
|
+
},
|
|
36
|
+
"dependencies": {
|
|
37
|
+
"lru-cache": "^11.0.2"
|
|
35
38
|
}
|
|
36
39
|
}
|