@positronic/core 0.0.1 → 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CLAUDE.md +141 -0
- package/dist/src/adapters/types.js +1 -16
- package/dist/src/clients/types.js +4 -1
- package/dist/src/dsl/brain-runner.js +487 -0
- package/dist/src/dsl/brain-runner.test.js +733 -0
- package/dist/src/dsl/brain.js +1128 -0
- package/dist/src/dsl/brain.test.js +4225 -0
- package/dist/src/dsl/constants.js +6 -6
- package/dist/src/dsl/json-patch.js +37 -9
- package/dist/src/index.js +11 -10
- package/dist/src/resources/resources.js +371 -0
- package/dist/src/test-utils.js +474 -0
- package/dist/src/testing.js +3 -0
- package/dist/types/adapters/types.d.ts +3 -8
- package/dist/types/adapters/types.d.ts.map +1 -1
- package/dist/types/clients/types.d.ts +46 -6
- package/dist/types/clients/types.d.ts.map +1 -1
- package/dist/types/dsl/brain-runner.d.ts +24 -0
- package/dist/types/dsl/brain-runner.d.ts.map +1 -0
- package/dist/types/dsl/brain.d.ts +136 -0
- package/dist/types/dsl/brain.d.ts.map +1 -0
- package/dist/types/dsl/constants.d.ts +5 -5
- package/dist/types/dsl/constants.d.ts.map +1 -1
- package/dist/types/dsl/json-patch.d.ts +2 -1
- package/dist/types/dsl/json-patch.d.ts.map +1 -1
- package/dist/types/index.d.ts +13 -11
- package/dist/types/index.d.ts.map +1 -1
- package/dist/types/resources/resource-loader.d.ts +6 -0
- package/dist/types/resources/resource-loader.d.ts.map +1 -0
- package/dist/types/resources/resources.d.ts +23 -0
- package/dist/types/resources/resources.d.ts.map +1 -0
- package/dist/types/test-utils.d.ts +94 -0
- package/dist/types/test-utils.d.ts.map +1 -0
- package/dist/types/testing.d.ts +2 -0
- package/dist/types/testing.d.ts.map +1 -0
- package/docs/core-testing-guide.md +289 -0
- package/package.json +26 -7
- package/src/adapters/types.ts +3 -22
- package/src/clients/types.ts +50 -10
- package/src/dsl/brain-runner.test.ts +384 -0
- package/src/dsl/brain-runner.ts +111 -0
- package/src/dsl/brain.test.ts +1981 -0
- package/src/dsl/brain.ts +740 -0
- package/src/dsl/constants.ts +6 -6
- package/src/dsl/json-patch.ts +24 -9
- package/src/dsl/types.ts +1 -1
- package/src/index.ts +30 -16
- package/src/resources/resource-loader.ts +8 -0
- package/src/resources/resources.ts +267 -0
- package/src/test-utils.ts +254 -0
- package/test/resources.test.ts +248 -0
- package/tsconfig.json +2 -2
- package/.swcrc +0 -31
- package/dist/src/dsl/extensions.js +0 -19
- package/dist/src/dsl/workflow-runner.js +0 -93
- package/dist/src/dsl/workflow.js +0 -308
- package/dist/src/file-stores/local-file-store.js +0 -12
- package/dist/src/utils/temp-files.js +0 -27
- package/dist/types/dsl/extensions.d.ts +0 -18
- package/dist/types/dsl/extensions.d.ts.map +0 -1
- package/dist/types/dsl/workflow-runner.d.ts +0 -28
- package/dist/types/dsl/workflow-runner.d.ts.map +0 -1
- package/dist/types/dsl/workflow.d.ts +0 -118
- package/dist/types/dsl/workflow.d.ts.map +0 -1
- package/dist/types/file-stores/local-file-store.d.ts +0 -7
- package/dist/types/file-stores/local-file-store.d.ts.map +0 -1
- package/dist/types/file-stores/types.d.ts +0 -4
- package/dist/types/file-stores/types.d.ts.map +0 -1
- package/dist/types/utils/temp-files.d.ts +0 -12
- package/dist/types/utils/temp-files.d.ts.map +0 -1
- package/src/dsl/extensions.ts +0 -58
- package/src/dsl/workflow-runner.test.ts +0 -203
- package/src/dsl/workflow-runner.ts +0 -146
- package/src/dsl/workflow.test.ts +0 -1435
- package/src/dsl/workflow.ts +0 -554
- package/src/file-stores/local-file-store.ts +0 -11
- package/src/file-stores/types.ts +0 -3
- package/src/utils/temp-files.ts +0 -46
- /package/dist/src/{file-stores/types.js → resources/resource-loader.js} +0 -0
package/test/resources.test.ts ADDED

@@ -0,0 +1,248 @@
+import { createResources, type Manifest } from '../src/resources/resources.js';
+import { ResourceLoader } from '../src/resources/resource-loader.js';
+
+// Mock loader for testing
+class MockLoader implements ResourceLoader {
+  private mockData: Record<string, string | Buffer> = {
+    'example.md': 'Example content',
+    'my file with spaces.txt': 'Content with spaces in filename',
+    'data/2024-report.pdf': Buffer.from('Mock PDF content'),
+    'docs/readme.md': 'Documentation content',
+    'special-chars!@#.txt': 'Special characters content',
+  };
+
+  addMockData(path: string, content: string | Buffer): void {
+    this.mockData[path] = content;
+  }
+
+  async load(key: string, type: 'text'): Promise<string>;
+  async load(key: string, type: 'binary'): Promise<Buffer>;
+  async load(key: string, type: 'text' | 'binary'): Promise<string | Buffer> {
+    const data = this.mockData[key];
+    if (!data) {
+      throw new Error(`Resource not found: ${key}`);
+    }
+
+    if (type === 'text' && Buffer.isBuffer(data)) {
+      return data.toString();
+    }
+    if (type === 'binary' && typeof data === 'string') {
+      return Buffer.from(data);
+    }
+
+    return data;
+  }
+}
+
+describe('Resources API', () => {
+  let resources: any;
+
+  beforeEach(() => {
+    const manifest: Manifest = {
+      'example.md': {
+        type: 'text',
+        path: 'example.md',
+        key: 'example.md',
+      },
+      'my file with spaces.txt': {
+        type: 'text',
+        path: 'my file with spaces.txt',
+        key: 'my file with spaces.txt',
+      },
+      data: {
+        '2024-report.pdf': {
+          type: 'binary',
+          path: 'data/2024-report.pdf',
+          key: 'data/2024-report.pdf',
+        },
+      },
+      docs: {
+        'readme.md': {
+          type: 'text',
+          path: 'docs/readme.md',
+          key: 'docs/readme.md',
+        },
+      },
+      'special-chars!@#.txt': {
+        type: 'text',
+        path: 'special-chars!@#.txt',
+        key: 'special-chars!@#.txt',
+      },
+    };
+
+    const loader = new MockLoader();
+    resources = createResources(loader, manifest);
+  });
+
+  describe('Proxy API (for JS-identifier compatible names)', () => {
+    it('should load simple resource via proxy', async () => {
+      const content = await resources.example.loadText();
+      expect(content).toBe('Example content');
+    });
+
+    it('should load nested resource via proxy', async () => {
+      const content = await resources.docs.readme.loadText();
+      expect(content).toBe('Documentation content');
+    });
+
+    it('should load binary resource via proxy', async () => {
+      const buffer = await resources.data['2024-report.pdf'].loadBinary();
+      expect(buffer.toString()).toBe('Mock PDF content');
+    });
+  });
+
+  describe('Method API (for any filename)', () => {
+    it('should load resource with spaces in name', async () => {
+      const content = await resources.loadText('my file with spaces.txt');
+      expect(content).toBe('Content with spaces in filename');
+    });
+
+    it('should load nested resource by path', async () => {
+      const content = await resources.loadText('docs/readme.md');
+      expect(content).toBe('Documentation content');
+    });
+
+    it('should load binary resource by path', async () => {
+      const buffer = await resources.loadBinary('data/2024-report.pdf');
+      expect(buffer.toString()).toBe('Mock PDF content');
+    });
+
+    it('should load resource with special characters', async () => {
+      const content = await resources.loadText('special-chars!@#.txt');
+      expect(content).toBe('Special characters content');
+    });
+
+    it('should throw error for non-existent resource', async () => {
+      await expect(resources.loadText('non-existent.txt')).rejects.toThrow(
+        'Resource not found: non-existent.txt'
+      );
+    });
+
+    it('should throw error when using wrong type method', async () => {
+      await expect(resources.loadText('data/2024-report.pdf')).rejects.toThrow(
+        'Resource "data/2024-report.pdf" is of type "binary", but was accessed with loadText()'
+      );
+
+      await expect(resources.loadBinary('example.md')).rejects.toThrow(
+        'Resource "example.md" is of type "text", but was accessed with loadBinary()'
+      );
+    });
+  });
+
+  describe('Mixed usage', () => {
+    it('should support both APIs in the same brain', async () => {
+      // Use proxy API for clean names
+      const example = await resources.example.loadText();
+
+      // Use method API for names with spaces
+      const withSpaces = await resources.loadText('my file with spaces.txt');
+
+      // Both should work
+      expect(example).toBe('Example content');
+      expect(withSpaces).toBe('Content with spaces in filename');
+    });
+  });
+
+  describe('Ambiguous resource names', () => {
+    let ambiguousResources: any;
+
+    beforeEach(() => {
+      const ambiguousManifest: Manifest = {
+        'example.md': {
+          type: 'text',
+          path: 'example.md',
+          key: 'example.md',
+        },
+        'example.txt': {
+          type: 'text',
+          path: 'example.txt',
+          key: 'example.txt',
+        },
+        'report.pdf': {
+          type: 'binary',
+          path: 'report.pdf',
+          key: 'report.pdf',
+        },
+        'report.docx': {
+          type: 'binary',
+          path: 'report.docx',
+          key: 'report.docx',
+        },
+        nested: {
+          'config.json': {
+            type: 'text',
+            path: 'nested/config.json',
+            key: 'nested/config.json',
+          },
+          'config.yaml': {
+            type: 'text',
+            path: 'nested/config.yaml',
+            key: 'nested/config.yaml',
+          },
+        },
+      };
+
+      const mockLoader = new MockLoader();
+      // Add the ambiguous files to mock data
+      mockLoader.addMockData('example.md', 'Example markdown content');
+      mockLoader.addMockData('example.txt', 'Example text content');
+      mockLoader.addMockData('report.pdf', Buffer.from('PDF content'));
+      mockLoader.addMockData('report.docx', Buffer.from('DOCX content'));
+      mockLoader.addMockData('nested/config.json', '{"config": "json"}');
+      mockLoader.addMockData('nested/config.yaml', 'config: yaml');
+
+      ambiguousResources = createResources(mockLoader, ambiguousManifest);
+    });
+
+    it('should throw error when accessing ambiguous resource via proxy', () => {
+      expect(() => ambiguousResources.example).toThrow(
+        "Ambiguous resource name 'example': found example.md, example.txt. " +
+          "Please use resources.loadText('example.md') or resources.loadBinary('example.txt') instead."
+      );
+    });
+
+    it('should throw error for ambiguous binary resources', () => {
+      expect(() => ambiguousResources.report).toThrow(
+        "Ambiguous resource name 'report': found report.pdf, report.docx"
+      );
+    });
+
+    it('should throw error for ambiguous nested resources', () => {
+      expect(() => ambiguousResources.nested.config).toThrow(
+        "Ambiguous resource name 'config': found config.json, config.yaml"
+      );
+    });
+
+    it('should allow direct access with full filename', async () => {
+      const markdownContent = await ambiguousResources['example.md'].loadText();
+      const textContent = await ambiguousResources['example.txt'].loadText();
+
+      expect(markdownContent).toBe('Example markdown content');
+      expect(textContent).toBe('Example text content');
+    });
+
+    it('should work with method API for ambiguous resources', async () => {
+      const markdownContent = await ambiguousResources.loadText('example.md');
+      const textContent = await ambiguousResources.loadText('example.txt');
+
+      expect(markdownContent).toBe('Example markdown content');
+      expect(textContent).toBe('Example text content');
+    });
+
+    it('should handle ambiguous paths in loadText/loadBinary methods', async () => {
+      // Should work with full path
+      const jsonContent = await ambiguousResources.loadText(
+        'nested/config.json'
+      );
+      expect(jsonContent).toBe('{"config": "json"}');
+
+      // Should throw error for ambiguous path without extension
+      await expect(
+        ambiguousResources.loadText('nested/config')
+      ).rejects.toThrow(
+        "Ambiguous resource path 'nested/config': found config.json, config.yaml. " +
+          'Please specify the full filename with extension.'
+      );
+    });
+  });
+});
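
For orientation, here is a minimal consumer-side sketch of the Resources API that the test file above exercises. It is not part of the diff: it assumes createResources and Manifest are re-exported from the package root (in this diff they live under src/resources/), mirrors the test's untyped `resources` handle, and substitutes a hypothetical filesystem-backed loader and `resources/` directory for the MockLoader used in the tests.

// sketch.ts — hedged usage example, not from the published package
import { readFile } from 'node:fs/promises';
import { createResources, type Manifest } from '@positronic/core';

class FsLoader {
  // Mirrors the load(key, type) overloads implemented by MockLoader above;
  // reading from a local "resources/" directory is an assumption for this sketch.
  async load(key: string, type: 'text'): Promise<string>;
  async load(key: string, type: 'binary'): Promise<Buffer>;
  async load(key: string, type: 'text' | 'binary'): Promise<string | Buffer> {
    const raw = await readFile(`resources/${key}`);
    return type === 'text' ? raw.toString('utf8') : raw;
  }
}

const manifest: Manifest = {
  'example.md': { type: 'text', path: 'example.md', key: 'example.md' },
  docs: {
    'readme.md': { type: 'text', path: 'docs/readme.md', key: 'docs/readme.md' },
  },
};

async function main() {
  // The test above also types the returned handle as `any`.
  const resources: any = createResources(new FsLoader(), manifest);

  // Proxy API for identifier-friendly names ...
  const example = await resources.example.loadText();
  // ... and method API for nested paths or names with spaces.
  const readme = await resources.loadText('docs/readme.md');

  console.log(example.length, readme.length);
}

main().catch(console.error);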

package/tsconfig.json CHANGED

package/.swcrc DELETED

@@ -1,31 +0,0 @@
-{
-  "jsc": {
-    "parser": {
-      "syntax": "typescript"
-    },
-    "target": "es2022",
-    "experimental": {
-      "plugins": [
-        [
-          "@swc/plugin-transform-imports",
-          {
-            "^(\\.{1,2}\\/.*?)$": {
-              "skipDefaultConversion": true,
-              "transform": "{{matches.[1]}}.js"
-            }
-          }
-        ]
-      ]
-    }
-  },
-  "module": {
-    "type": "es6"
-  },
-  "exclude": [
-    ".*\\.test\\.ts$",
-    "node_modules",
-    "dist",
-    "coverage",
-    "jest.config.*"
-  ]
-}

package/dist/src/dsl/extensions.js DELETED

@@ -1,19 +0,0 @@
-import { Workflow } from "./workflow.js";
-export function createExtension(key, extension) {
-    return {
-        install () {
-            Object.defineProperty(Workflow.prototype, key, {
-                get () {
-                    const boundMethods = {};
-                    for (const [methodKey, fn] of Object.entries(extension)){
-                        boundMethods[methodKey] = fn.bind(this);
-                    }
-                    return boundMethods;
-                }
-            });
-        },
-        augment () {
-            return {};
-        }
-    };
-}

package/dist/src/dsl/workflow-runner.js DELETED

@@ -1,93 +0,0 @@
-import { WORKFLOW_EVENTS } from "./constants.js";
-import { applyPatches } from "./json-patch.js";
-export class WorkflowRunner {
-    options;
-    constructor(options){
-        this.options = options;
-    }
-    async run(workflow, { initialState = {}, options, initialCompletedSteps, workflowRunId, endAfter } = {}) {
-        const { adapters, logger: { log }, verbose, fileStore, client } = this.options;
-        let currentState = initialState ?? {};
-        let stepNumber = 1;
-        // Apply any patches from completed steps
-        // to the initial state so that the workflow
-        // starts with a state that reflects all of the completed steps.
-        // Need to do this when a workflow is restarted with completed steps.
-        initialCompletedSteps?.forEach((step)=>{
-            if (step.patch) {
-                currentState = applyPatches(currentState, [
-                    step.patch
-                ]);
-                stepNumber++;
-            }
-        });
-        const workflowRun = workflowRunId && initialCompletedSteps ? workflow.run({
-            initialState,
-            initialCompletedSteps,
-            workflowRunId,
-            options,
-            client,
-            fileStore
-        }) : workflow.run({
-            initialState,
-            options,
-            client,
-            fileStore
-        });
-        for await (const event of workflowRun){
-            // Dispatch event to all adapters
-            await Promise.all(adapters.map((adapter)=>adapter.dispatch(event)));
-            // Update current state when steps complete
-            if (event.type === WORKFLOW_EVENTS.STEP_COMPLETE) {
-                if (event.patch) {
-                    currentState = applyPatches(currentState, [
-                        event.patch
-                    ]);
-                }
-                // Check if we should stop after this step
-                if (endAfter && stepNumber >= endAfter) {
-                    // Log final state if verbose
-                    if (verbose) {
-                        log(`\nWorkflow stopped after step ${endAfter} as requested: \n\n ${JSON.stringify(this.truncateDeep(structuredClone(currentState)), null, 2)}`);
-                    }
-                    return;
-                }
-                stepNumber++;
-            }
-            // Log final state on workflow completion/error if verbose
-            if ((event.type === WORKFLOW_EVENTS.COMPLETE || event.type === WORKFLOW_EVENTS.ERROR) && verbose) {
-                log(`\nWorkflow completed: \n\n ${JSON.stringify(this.truncateDeep(structuredClone(currentState)), null, 2)}`);
-            }
-        }
-    }
-    truncateDeep(obj, maxLength = 100) {
-        if (obj === null || obj === undefined) return obj;
-        if (typeof obj === 'string') {
-            return obj.length > maxLength ? obj.slice(0, maxLength) + '...' : obj;
-        }
-        if (Array.isArray(obj)) {
-            if (obj.length === 0) return obj;
-            let truncatedArray = [];
-            let currentLength = 2; // Account for [] brackets
-            for(let i = 0; i < obj.length; i++){
-                const processedItem = this.truncateDeep(obj[i], maxLength);
-                const itemStr = JSON.stringify(processedItem);
-                if (currentLength + itemStr.length + (i > 0 ? 1 : 0) > maxLength) {
-                    truncatedArray.push(`... (${obj.length})`);
-                    break;
-                }
-                truncatedArray.push(processedItem);
-                currentLength += itemStr.length + (i > 0 ? 1 : 0); // Add 1 for comma
-            }
-            return truncatedArray;
-        }
-        if (typeof obj === 'object') {
-            const truncated = {};
-            for (const [key, value] of Object.entries(obj)){
-                truncated[key] = this.truncateDeep(value, maxLength);
-            }
-            return truncated;
-        }
-        return obj;
-    }
-}