@contrail/util 1.3.0 → 1.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/dynamodb-util/dynamodb-util.methods.d.ts +26 -0
- package/lib/dynamodb-util/dynamodb-util.methods.js +192 -0
- package/lib/dynamodb-util/dynamodb-util.types.d.ts +20 -0
- package/lib/dynamodb-util/dynamodb-util.types.js +2 -0
- package/lib/dynamodb-util/index.d.ts +2 -0
- package/lib/dynamodb-util/index.js +18 -0
- package/lib/file-util/file-util.d.ts +7 -0
- package/lib/file-util/file-util.js +46 -0
- package/lib/object-util/cloneDeep/cloneDeep.js +7 -0
- package/lib/object-util/cloneDeep/cloneDeepArray.d.ts +1 -0
- package/lib/object-util/cloneDeep/cloneDeepArray.js +26 -0
- package/lib/object-util/cloneDeepPreserveDates/cloneDeepPreserveDates.js +7 -0
- package/package.json +1 -1
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import { DynamoDBItem, FlatDynamoItem } from './dynamodb-util.types';
|
|
2
|
+
export declare type Environment = 'dev' | 'prod';
|
|
3
|
+
export declare function getAll(scanOperations: AsyncGenerator<{
|
|
4
|
+
[key: string]: any;
|
|
5
|
+
}[]>): Promise<{
|
|
6
|
+
[key: string]: any;
|
|
7
|
+
}[]>;
|
|
8
|
+
interface CacheOptions {
|
|
9
|
+
useCache: boolean;
|
|
10
|
+
overwriteCache: boolean;
|
|
11
|
+
cacheName: string;
|
|
12
|
+
}
|
|
13
|
+
export declare function getAllWithCaching<T>(getAllFunc: () => Promise<T[]>, options: CacheOptions): Promise<T[]>;
|
|
14
|
+
declare type DyanmoQuery = {
|
|
15
|
+
TableName: string;
|
|
16
|
+
ExpressionAttributeValues: {
|
|
17
|
+
[key: string]: any;
|
|
18
|
+
};
|
|
19
|
+
FilterExpression: string;
|
|
20
|
+
};
|
|
21
|
+
export declare function scanDynamo(query: DyanmoQuery): AsyncGenerator<{
|
|
22
|
+
[key: string]: any;
|
|
23
|
+
}[]>;
|
|
24
|
+
export declare function pushData<T>(data: DynamoDBItem<T>[], tableName: string): Promise<void>;
|
|
25
|
+
export declare function flattenDynamoItem<T>(item: DynamoDBItem<T>): FlatDynamoItem<T>;
|
|
26
|
+
export {};
|
|
@@ -0,0 +1,192 @@
|
|
|
1
|
+
"use strict";
// --- TypeScript compiler emit helpers (inlined tslib equivalents). ---
// These are machine-generated; do not hand-edit.
//
// __awaiter: drives a generator-based coroutine, resolving a Promise with the
// generator's final value; each yielded value is adopted into a Promise and awaited.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// __asyncValues: wraps a sync or async iterable so it can be consumed with
// `for await` semantics (used by the transpiled getAll below).
var __asyncValues = (this && this.__asyncValues) || function (o) {
    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
    var m = o[Symbol.asyncIterator], i;
    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
// __await: marker wrapper distinguishing `await` from `yield` inside transpiled
// async generators.
var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }
// __asyncDelegator: implements `yield*` delegation inside async generators.
var __asyncDelegator = (this && this.__asyncDelegator) || function (o) {
    var i, p;
    return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i;
    function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; }
};
// __asyncGenerator: runs a plain generator as an async generator, queueing
// concurrent next/throw/return requests so they settle in order.
var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {
    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
    var g = generator.apply(thisArg, _arguments || []), i, q = [];
    return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
    function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
    function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
    function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
    function fulfill(value) { resume("next", value); }
    function reject(value) { resume("throw", value); }
    function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
};
// __rest: object-rest destructuring (`const { a, ...rest } = s`): copies own
// enumerable string and symbol properties of `s` not listed in `e`.
var __rest = (this && this.__rest) || function (s, e) {
    var t = {};
    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
        t[p] = s[p];
    if (s != null && typeof Object.getOwnPropertySymbols === "function")
        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
            if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
                t[p[i]] = s[p[i]];
        }
    return t;
};
|
|
46
|
+
Object.defineProperty(exports, "__esModule", { value: true });
// Pre-declared export slots (standard tsc CommonJS emit pattern).
exports.flattenDynamoItem = exports.pushData = exports.scanDynamo = exports.getAllWithCaching = exports.getAll = void 0;
const aws = require('aws-sdk');
const ddt = require('dynamodb-data-types');
const fs_1 = require("fs");
const file_util_1 = require("../file-util/file-util");
// Shared DocumentClient, pinned to us-east-1; callers cannot override the region.
const dynamo = new aws.DynamoDB.DocumentClient({
    region: 'us-east-1',
});
// Local directory where getAllWithCaching stores its JSON result files.
const CACHE_DIR = 'dynamodb_cache';
// dynamodb-data-types option affecting how attr.wrap encodes arrays;
// NOTE(review): presumably keeps arrays as DynamoDB lists rather than sets — confirm.
ddt.preserveArrays();
// Wrapper used by pushData to convert plain objects to AttributeValue format.
const attr = ddt.AttributeValue;
|
|
58
|
+
/**
 * Builds a thunk that, when invoked, scans the `<environment>-apps` table
 * and resolves with every "app" item found.
 */
function getAllAppsFromDynamo(environment) {
    return async () => getAll(scanForApps(environment));
}
|
|
63
|
+
/**
 * Drains every page produced by `scanOperations` into one flat array.
 * @param scanOperations async generator yielding arrays of items (scan pages)
 * @returns all items from all pages, in page order
 */
async function getAll(scanOperations) {
    const collected = [];
    for await (const page of scanOperations) {
        collected.push(...page);
    }
    return collected;
}
exports.getAll = getAll;
|
|
85
|
+
/**
 * Fetches every app record for `environment`, consulting the local JSON
 * cache according to `options`. Both cache flags default to false when
 * not provided.
 */
const getAllApps = async (environment, options) => {
    return getAllWithCaching(getAllAppsFromDynamo(environment), {
        useCache: options.useCache ?? false,
        overwriteCache: options.overwriteCache ?? false,
        cacheName: `${environment}-apps`,
    });
};
|
|
93
|
+
/**
 * Fetch-through cache: when `useCache` is set and the cache file for
 * `cacheName` exists, returns its contents; otherwise invokes `getAllFunc`
 * and, when `overwriteCache` is set, persists the fresh results for next time.
 * Progress is logged at each step.
 */
async function getAllWithCaching(getAllFunc, options) {
    const { useCache, overwriteCache, cacheName } = options;
    if (useCache) {
        console.log('Attempt to use cache:', cacheName);
        const cacheHit = await (0, file_util_1.doesFileExist)(getCacheName(cacheName));
        if (cacheHit) {
            return readCachedResults(cacheName);
        }
        console.log('Cache not found:', cacheName);
    }
    console.log('Fetching from DynamoDB:', cacheName);
    const data = await getAllFunc();
    if (overwriteCache) {
        console.log('Caching results:', cacheName);
        await cacheResultsInJSON(cacheName, data);
    }
    return data;
}
exports.getAllWithCaching = getAllWithCaching;
|
|
115
|
+
// Writes `data` as pretty-printed JSON into the cache directory, creating the
// directory on demand.
const cacheResultsInJSON = async (cacheName, data) => {
    await (0, file_util_1.createDirectoryIfNotExists)(CACHE_DIR);
    await fs_1.promises.writeFile(getCacheName(cacheName), JSON.stringify(data, null, 2), 'utf8');
};
// Loads and parses a previously cached JSON result set.
const readCachedResults = async (cacheName) => {
    const raw = await fs_1.promises.readFile(getCacheName(cacheName), 'utf8');
    return JSON.parse(raw);
};
// Maps a cache name to its on-disk JSON path.
const getCacheName = (cacheName) => `${CACHE_DIR}/${cacheName}.json`;
|
|
124
|
+
/**
 * Async generator yielding pages of "app" records from the
 * `<environment>-apps` table — items whose PK and SK both begin with "app:".
 */
async function* scanForApps(environment) {
    const query = {
        TableName: `${environment}-apps`,
        ExpressionAttributeValues: {
            ':pk': 'app:',
            ':sk': 'app:',
        },
        FilterExpression: 'begins_with(PK, :pk) AND begins_with(SK, :sk)',
    };
    yield* scanDynamo(query);
}
|
|
137
|
+
/**
 * Pages through a DynamoDB scan, yielding each page's `Items` array and
 * following `LastEvaluatedKey` until the table is exhausted.
 * The first request sends `ExclusiveStartKey: null` (start of table),
 * matching the original behavior.
 */
async function* scanDynamo(query) {
    const params = { ...query, ExclusiveStartKey: null };
    let lastEvaluatedKey = null;
    do {
        const response = await dynamo.scan(params).promise();
        yield response.Items;
        lastEvaluatedKey = response.LastEvaluatedKey;
        params.ExclusiveStartKey = lastEvaluatedKey;
    } while (lastEvaluatedKey);
}
exports.scanDynamo = scanDynamo;
|
|
150
|
+
/**
 * Batch-writes `data` into `tableName` in chunks of 25 (the BatchWriteItem
 * limit), pausing 1s between batches as a crude throttle.
 *
 * Fix: the batch write is now awaited via `.promise()` instead of being
 * fired with a callback and never awaited — previously the returned promise
 * could resolve while writes were still in flight, masked only by the fixed
 * delay. Errors are still logged and swallowed to preserve the original
 * no-throw contract.
 *
 * @param data items to write; each is wrapped to AttributeValue format via attr.wrap
 * @param tableName destination DynamoDB table
 */
async function pushData(data, tableName) {
    const ddb = new aws.DynamoDB({ apiVersion: '2012-08-10', region: 'us-east-1' });
    let itemsToPush = [];
    for (let index = 0; index < data.length; index++) {
        const element = data[index];
        itemsToPush.push({
            PutRequest: {
                Item: attr.wrap(element),
            },
        });
        // Flush on a full batch of 25 or at the end of the input.
        if (itemsToPush.length === 25 || index === data.length - 1) {
            const params = {
                RequestItems: {},
            };
            params.RequestItems[tableName] = itemsToPush;
            try {
                const result = await ddb.batchWriteItem(params).promise();
                console.log('Success', result);
                // NOTE(review): result.UnprocessedItems is not retried — confirm acceptable.
            }
            catch (err) {
                console.log('Error', err);
            }
            await delay(1000);
            console.log('pushed', index + 1, 'of', data.length);
            itemsToPush = [];
        }
    }
}
exports.pushData = pushData;
|
|
183
|
+
/** Resolves after `ms` milliseconds; used to throttle batch writes. */
function delay(ms) {
    return new Promise((resolve) => setTimeout(resolve, ms));
}
|
|
188
|
+
/**
 * Lifts an item's nested `properties` object onto the top level, returning a
 * flat record. Keys in `properties` win over same-named top-level keys.
 */
function flattenDynamoItem(item) {
    const { properties, ...rest } = item;
    return { ...rest, ...properties };
}
exports.flattenDynamoItem = flattenDynamoItem;
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
/**
 * A raw single-table-design DynamoDB record: partition/sort keys, optional
 * alternate key pairs (presumably GSI keys — confirm against table schema),
 * and the domain payload nested under `properties`.
 */
export declare type DynamoDBItem<T> = {
    PK: string;
    SK: string;
    ALT_PK1?: string;
    ALT_SK1?: string;
    ALT_PK2?: string;
    ALT_SK2?: string;
    ALT_PK3?: string;
    ALT_SK3?: string;
    properties: BaseProperties & T;
};
/** Audit fields carried by every stored record's `properties`. */
declare type BaseProperties = {
    id: string;
    createdById: string;
    createdOn: string;
    updatedById: string;
    updatedOn: string;
};
/** A DynamoDBItem with its `properties` merged onto the top level (see flattenDynamoItem). */
export declare type FlatDynamoItem<T> = Omit<DynamoDBItem<T>, 'properties'> & BaseProperties & T;
export {};
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"use strict";
// TypeScript emit helper: defines a live re-export binding (getter) on the
// target module, falling back to plain assignment on engines without
// Object.create.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// TypeScript emit helper for `export * from ...` — re-exports everything
// except `default` and names already present on `exports`.
var __exportStar = (this && this.__exportStar) || function(m, exports) {
    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
// Barrel module: expose the dynamodb-util types and methods as one import.
__exportStar(require("./dynamodb-util.types"), exports);
__exportStar(require("./dynamodb-util.methods"), exports);
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
export declare function doesFileExist(filePath: any): Promise<boolean>;
|
|
2
|
+
export declare const createDirectoryIfNotExists: (directoryPath: string) => Promise<void>;
|
|
3
|
+
export declare const deleteDirectoryAndContentsIfExists: (directoryPath: string) => Promise<void>;
|
|
4
|
+
export declare const copyFile: (options: {
|
|
5
|
+
sourcePath: string;
|
|
6
|
+
destinationPath: string;
|
|
7
|
+
}) => Promise<void>;
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
"use strict";
// TypeScript emit helper: generator-based async/await runner (tslib __awaiter).
// Machine-generated; do not hand-edit.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
// Pre-declared export slots (standard tsc CommonJS emit pattern).
exports.copyFile = exports.deleteDirectoryAndContentsIfExists = exports.createDirectoryIfNotExists = exports.doesFileExist = void 0;
const fs_1 = require("fs");
|
|
14
|
+
/**
 * Checks whether a path exists and is accessible to this process.
 * @returns true when fs.access succeeds, false on any error (missing file,
 *          permission denied, etc.).
 */
async function doesFileExist(filePath) {
    try {
        await fs_1.promises.access(filePath);
        return true;
    }
    catch (error) {
        return false;
    }
}
exports.doesFileExist = doesFileExist;
|
|
26
|
+
/**
 * Creates `directoryPath` (including parents) if it does not exist.
 * Errors other than EEXIST are logged and swallowed; the promise never rejects.
 */
const createDirectoryIfNotExists = async (directoryPath) => {
    await fs_1.promises.mkdir(directoryPath, { recursive: true }).catch((err) => {
        if (err.code !== 'EEXIST') {
            console.error(err);
        }
    });
};
exports.createDirectoryIfNotExists = createDirectoryIfNotExists;
/**
 * Recursively deletes `directoryPath` and its contents if present.
 * Fix: fs.promises.rmdir with the `recursive` option is deprecated
 * (DEP0147); fs.promises.rm has the same semantics here — it still rejects
 * with ENOENT for a missing path, which the existing catch ignores. Other
 * errors are logged and swallowed; the promise never rejects.
 */
const deleteDirectoryAndContentsIfExists = async (directoryPath) => {
    await fs_1.promises.rm(directoryPath, { recursive: true }).catch((err) => {
        if (err.code !== 'ENOENT') {
            console.error(err);
        }
    });
};
exports.deleteDirectoryAndContentsIfExists = deleteDirectoryAndContentsIfExists;
/** Copies a file from `sourcePath` to `destinationPath`. */
const copyFile = async (options) => {
    const { sourcePath, destinationPath } = options;
    await fs_1.promises.copyFile(sourcePath, destinationPath);
};
exports.copyFile = copyFile;
|
|
@@ -1,8 +1,15 @@
|
|
|
1
1
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.cloneDeep = cloneDeep;
// Chunked array cloning — presumably to avoid stringifying very large arrays
// in a single JSON.stringify call (see cloneDeepArray) — confirm.
const cloneDeepArray_1 = require("./cloneDeepArray");
|
|
4
5
|
/**
 * Deep-clones `obj` via a JSON round-trip; null and undefined both map to
 * null. Arrays are cloned chunk-by-chunk through cloneDeepArray. Note that
 * JSON round-tripping drops functions/undefined and turns Dates into
 * strings — cloneDeepPreserveDates is the date-safe variant.
 */
function cloneDeep(obj) {
    if (obj == null)
        return null;
    return Array.isArray(obj)
        ? (0, cloneDeepArray_1.cloneDeepArray)(obj, jsonClone)
        : jsonClone(obj);
}
// JSON round-trip clone of a single value or array chunk.
function jsonClone(obj) {
    return JSON.parse(JSON.stringify(obj));
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
/**
 * Clones a large array by applying `cloneFn` to size-bounded chunks and
 * concatenating the results; small arrays are cloned in a single call.
 */
export declare function cloneDeepArray<T, R>(arr: T[], cloneFn: (chunk: T[]) => R[]): R[];
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
"use strict";
// Chunked cloning helper shared by cloneDeep and cloneDeepPreserveDates.
Object.defineProperty(exports, "__esModule", { value: true });
exports.cloneDeepArray = cloneDeepArray;
|
|
4
|
+
// Target upper bound (estimated bytes) for the data handed to one cloneFn call.
const OPTIMAL_CHUNK_SIZE_BYTES = 10 * 1024 * 1024;
// Arrays at or below this length are always cloned in a single call.
const MIN_ARRAY_LENGTH_BEFORE_CHUNKING = 1024;
/**
 * Applies `cloneFn` to `arr`, splitting large arrays into chunks whose
 * estimated serialized size stays near OPTIMAL_CHUNK_SIZE_BYTES. The
 * per-element size is estimated from the JSON length of the first element
 * (x2 for two-byte characters), so chunk sizing is a heuristic, not a
 * guarantee. Null/empty input yields an empty array.
 */
function cloneDeepArray(arr, cloneFn) {
    if (!arr || arr.length === 0)
        return [];
    if (arr.length <= MIN_ARRAY_LENGTH_BEFORE_CHUNKING)
        return cloneFn(arr);
    // JSON.stringify can return undefined (e.g. for undefined); fall back to 2.
    const sampleSize = (JSON.stringify(arr[0])?.length ?? 2) * 2;
    const elementsPerChunk = Math.max(1, Math.floor(OPTIMAL_CHUNK_SIZE_BYTES / sampleSize));
    if (elementsPerChunk >= arr.length)
        return cloneFn(arr);
    const result = [];
    for (let start = 0; start < arr.length; start += elementsPerChunk) {
        const cloned = cloneFn(arr.slice(start, start + elementsPerChunk));
        for (const item of cloned) {
            result.push(item);
        }
    }
    return result;
}
|
|
@@ -4,9 +4,16 @@ exports.cloneDeepPreserveDates = cloneDeepPreserveDates;
|
|
|
4
4
|
exports.tagDates = tagDates;
|
|
5
5
|
exports.reviveTaggedDates = reviveTaggedDates;
|
|
6
6
|
exports.isTaggedDate = isTaggedDate;
|
|
7
|
+
const cloneDeepArray_1 = require("../cloneDeep/cloneDeepArray");
|
|
7
8
|
/**
 * Deep-clones `obj` while keeping Date instances as Dates: values are tagged
 * before stringification and revived on parse (see jsonClonePreserveDates).
 * null and undefined both map to null; arrays are cloned chunk-by-chunk.
 */
function cloneDeepPreserveDates(obj) {
    if (obj == null)
        return null;
    return Array.isArray(obj)
        ? (0, cloneDeepArray_1.cloneDeepArray)(obj, jsonClonePreserveDates)
        : jsonClonePreserveDates(obj);
}
|
|
16
|
+
function jsonClonePreserveDates(obj) {
|
|
10
17
|
const tagged = tagDates(obj);
|
|
11
18
|
const json = JSON.stringify(tagged);
|
|
12
19
|
return JSON.parse(json, reviveTaggedDates);
|