@rebasepro/schema-inference 0.0.1-canary.09e5ec5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,114 @@
1
+ Business Source License 1.1
2
+
3
+ Parameters
4
+
5
+ Licensor: Rebase S.L.
6
+ Licensed Work: Rebase CMS packages:
7
+ cli
8
+ collection_editor
9
+ collection_editor_firebase
10
+ data_enhancement
11
+ data_export
13
+ editor
14
+ rebase_cloud
15
+ schema_inference
16
+ user_management
17
+
18
+ The Licensed Work is (c) 2024 Rebase S.L
19
+ Additional Use Grant: You may make use of the Licensed Work, provided that
20
+ you may not use the Licensed Work for a CMS Data Enhancement
21
+ Service.
22
+
23
+ A “CMS package” is a commercial offering that
24
+ allows third parties (other than your employees and
25
+ contractors) to access the functionality of the
26
+ Licensed Work by using software to extend the base features of
27
+ content management system controlled by such third parties.
28
+
29
+ Change Date: Four years from the date the Licensed Work is published.
30
+
31
+ Change License: MIT
32
+
33
+ For information about alternative licensing arrangements for the Software,
34
+ please visit: https://rebase.pro
35
+
36
+ Notice
37
+
38
+ The Business Source License (this document, or the “License”) is not an Open
39
+ Source license. However, the Licensed Work will eventually be made available
40
+ under an Open Source License, as stated in this License.
41
+
42
+ License text copyright (c) 2017 MariaDB Corporation Ab, All Rights Reserved.
43
+ “Business Source License” is a trademark of MariaDB Corporation Ab.
44
+
45
+ -----------------------------------------------------------------------------
46
+
47
+ Business Source License 1.1
48
+
49
+ Terms
50
+
51
+ The Licensor hereby grants you the right to copy, modify, create derivative
52
+ works, redistribute, and make non-production use of the Licensed Work. The
53
+ Licensor may make an Additional Use Grant, above, permitting limited
54
+ production use.
55
+
56
+ Effective on the Change Date, or the fourth anniversary of the first publicly
57
+ available distribution of a specific version of the Licensed Work under this
58
+ License, whichever comes first, the Licensor hereby grants you rights under
59
+ the terms of the Change License, and the rights granted in the paragraph
60
+ above terminate.
61
+
62
+ If your use of the Licensed Work does not comply with the requirements
63
+ currently in effect as described in this License, you must purchase a
64
+ commercial license from the Licensor, its affiliated entities, or authorized
65
+ resellers, or you must refrain from using the Licensed Work.
66
+
67
+ All copies of the original and modified Licensed Work, and derivative works
68
+ of the Licensed Work, are subject to this License. This License applies
69
+ separately for each version of the Licensed Work and the Change Date may vary
70
+ for each version of the Licensed Work released by Licensor.
71
+
72
+ You must conspicuously display this License on each original or modified copy
73
+ of the Licensed Work. If you receive the Licensed Work in original or
74
+ modified form from a third party, the terms and conditions set forth in this
75
+ License apply to your use of that work.
76
+
77
+ Any use of the Licensed Work in violation of this License will automatically
78
+ terminate your rights under this License for the current and all other
79
+ versions of the Licensed Work.
80
+
81
+ This License does not grant you any right in any trademark or logo of
82
+ Licensor or its affiliates (provided that you may use a trademark or logo of
83
+ Licensor as expressly required by this License).
84
+
85
+ TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON
86
+ AN “AS IS” BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS,
87
+ EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF
88
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND
89
+ TITLE.
90
+
91
+ MariaDB hereby grants you permission to use this License’s text to license
92
+ your works, and to refer to it using the trademark “Business Source License”,
93
+ as long as you comply with the Covenants of Licensor below.
94
+
95
+ Covenants of Licensor
96
+
97
+ In consideration of the right to use this License’s text and the “Business
98
+ Source License” name and trademark, Licensor covenants to MariaDB, and to all
99
+ other recipients of the licensed work to be provided by Licensor:
100
+
101
+ 1. To specify as the Change License the GPL Version 2.0 or any later version,
102
+ or a license that is compatible with GPL Version 2.0 or a later version,
103
+ where “compatible” means that software provided under the Change License can
104
+ be included in a program with software provided under GPL Version 2.0 or a
105
+ later version. Licensor may specify additional Change Licenses without
106
+ limitation.
107
+
108
+ 2. To either: (a) specify an additional grant of rights to use that does not
109
+ impose any additional restriction on the right granted in this License, as
110
+ the Additional Use Grant; or (b) insert the text “None”.
111
+
112
+ 3. To specify a Change Date.
113
+
114
+ 4. Not to modify this License in any other way.
package/README.md ADDED
@@ -0,0 +1 @@
1
+ # schema_inference
@@ -0,0 +1,3 @@
1
import { InferencePropertyBuilderProps } from "../types";
import { Property } from "@rebasepro/types";
/**
 * Builds a "reference" property, inferring the target collection path from
 * the sampled values (falls back to a "!!!FIX_ME!!!" sentinel path).
 */
export declare function buildReferenceProperty({ name, totalDocsCount, valuesResult }: InferencePropertyBuilderProps): Property;
@@ -0,0 +1,3 @@
1
import { Property } from "@rebasepro/types";
import { InferencePropertyBuilderProps } from "../types";
/**
 * Builds a "string" property, using value heuristics to detect URLs, emails,
 * enum-like fields and storage (file) references.
 */
export declare function buildStringProperty({ name, totalDocsCount, valuesResult }: InferencePropertyBuilderProps): Property;
@@ -0,0 +1,3 @@
1
import { PropertyValidationSchema } from "@rebasepro/types";
import { InferencePropertyBuilderProps } from "../types";
/**
 * Returns `{ required: true }` when a value was present in every sampled
 * document, otherwise `undefined`.
 */
export declare function buildValidation({ totalDocsCount, valuesResult }: InferencePropertyBuilderProps): PropertyValidationSchema | undefined;
@@ -0,0 +1,6 @@
1
import { DataType, Properties, Property } from "@rebasepro/types";
/** Maps a raw runtime value to a CMS DataType. */
export type InferenceTypeBuilder = (value: any) => DataType;
/** Infers a full Properties schema from an array of sampled entity data. */
export declare function buildEntityPropertiesFromData(data: object[], getType: InferenceTypeBuilder): Promise<Properties>;
/** Refines an existing Property using sampled values for that single field. */
export declare function buildPropertyFromData(data: any[], property: Property, getType: InferenceTypeBuilder): Property;
/** Orders property keys: priority keys, then title/name-like, then image-like, then the rest. */
export declare function buildPropertiesOrder(properties: Properties, propertiesOrder?: string[], priorityKeys?: string[]): string[];
/** Default InferenceTypeBuilder based on JS `typeof`/`Array.isArray`. */
export declare function inferTypeFromValue(value: any): DataType;
@@ -0,0 +1,3 @@
1
// Package barrel: re-exports the schema inference builders and helpers.
export * from "./collection_builder";
export * from "./util";
export * from "./strings";
@@ -0,0 +1,523 @@
1
+ import { unslugify, mergeDeep, prettifyIdentifier } from "@rebasepro/utils";
2
+ import { isObject, isPlainObject, mergeDeep as mergeDeep2, prettifyIdentifier as prettifyIdentifier2, unslugify as unslugify2 } from "@rebasepro/utils";
3
function parseReferenceString(value) {
    // Parses a reference string of the form "collection/doc" or
    // "database:::collection/doc". Returns { path, database } where `path`
    // is everything before the final "/", or null when the string cannot
    // be a document reference.
    if (!value) return null;
    let database;
    let fullPath = value;
    if (value.includes(":::")) {
        const parts = value.split(":::");
        const dbName = parts[0];
        fullPath = parts[1];
        // "(default)" is the implicit database and is not recorded.
        if (dbName && dbName !== "(default)") {
            database = dbName;
        }
    }
    if (!fullPath || !fullPath.includes("/")) {
        return null;
    }
    const lastSlash = fullPath.lastIndexOf("/");
    return {
        path: fullPath.substring(0, lastSlash),
        database
    };
}
23
function looksLikeReference(value) {
    // A reference candidate is a string that parseReferenceString accepts.
    return typeof value === "string" && parseReferenceString(value) !== null;
}
27
function findCommonInitialStringInPath(valuesCount) {
    // Looks for a collection path prefix shared by the sampled values
    // (plain strings or objects carrying a `slug`). Returns the prefix when
    // more than two thirds of the values start with it, else undefined.
    if (!valuesCount) return undefined;
    const getPath = (value) => {
        let pathString;
        if (typeof value === "string") {
            pathString = value;
        } else if (value.slug) {
            pathString = value.slug;
        } else {
            console.warn("findCommonInitialStringInPath: value is not a string or document with path", value);
            return undefined;
        }
        if (!pathString) return undefined;
        // Strip an optional "database:::" prefix.
        if (pathString.includes(":::")) {
            pathString = pathString.split(":::")[1];
        }
        return pathString;
    };
    const candidates = valuesCount.values.map((v) => getPath(v)).filter((v) => !!v);
    const withSlash = candidates.find((s) => s.includes("/"));
    if (!withSlash) return undefined;
    const prefix = withSlash.substring(0, withSlash.lastIndexOf("/"));
    const matchingCount = valuesCount.values.filter((value) => {
        const path = getPath(value);
        return !!path && path.startsWith(prefix);
    }).length;
    // Two-thirds majority vote over all sampled values.
    const isCommon = matchingCount > (valuesCount.values.length / 3) * 2;
    return isCommon ? prefix : undefined;
}
58
function removeInitialAndTrailingSlashes(s) {
    // Strip one leading and one trailing slash, if present.
    const withoutTrailing = removeTrailingSlash(s);
    return removeInitialSlash(withoutTrailing);
}
61
function removeInitialSlash(s) {
    // Drop a single leading "/" when present; otherwise return unchanged.
    return s.startsWith("/") ? s.slice(1) : s;
}
66
function removeTrailingSlash(s) {
    // Drop a single trailing "/" when present; otherwise return unchanged.
    return s.endsWith("/") ? s.slice(0, -1) : s;
}
71
function extractEnumFromValues(values) {
    // Converts string values into { id, label } enum entries (label is the
    // un-slugified id), sorted alphabetically by label. Non-strings are
    // ignored; non-array input yields an empty list.
    if (!Array.isArray(values)) {
        return [];
    }
    const entries = [];
    for (const value of values) {
        if (typeof value === "string") {
            entries.push({
                id: value,
                label: unslugify(value)
            });
        }
    }
    entries.sort((a, b) => a.label.localeCompare(b.label));
    return entries;
}
87
function resolveEnumValues(input) {
    // Normalizes an enum config: arrays pass through; id->label objects are
    // expanded into { id, label } entries (non-string values kept as-is);
    // anything else yields undefined.
    if (Array.isArray(input)) {
        return input;
    }
    if (typeof input === "object" && input !== null) {
        return Object.entries(input).map(([id, value]) => {
            if (typeof value === "string") {
                return { id, label: value };
            }
            return value;
        });
    }
    return undefined;
}
99
// File extensions used to guess the media type of string values when
// deciding whether a field is a storage (file) reference.
const IMAGE_EXTENSIONS = [".jpg", ".jpeg", ".png", ".webp", ".gif", ".avif"];
const AUDIO_EXTENSIONS = [".mp3", ".ogg", ".opus", ".aac"];
const VIDEO_EXTENSIONS = [".avi", ".mp4"];
// Pragmatic email pattern (HTML5-style); not a full RFC 5322 validator.
const emailRegEx = /^[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/;
103
function buildStringProperty({
    name,
    totalDocsCount,
    valuesResult
}) {
    // Infers the config of a "string" property from sampled values:
    // url / email / read-only id heuristics, enum detection for
    // low-cardinality fields, and storage (file) detection by extension.
    let stringProperty = {
        name: name ?? "",
        type: "string"
    };
    if (valuesResult) {
        const totalEntriesCount = valuesResult.values.length;
        const totalValues = Array.from(valuesResult.valuesCount.keys()).length;
        const config = {};
        // All heuristics use a two-thirds majority over the document count.
        const majority = (totalDocsCount / 3) * 2;
        const probablyAURL = valuesResult.values.filter((value) => typeof value === "string" && value.toString().startsWith("http")).length > majority;
        if (probablyAURL) {
            config.url = true;
        }
        const probablyAnEmail = valuesResult.values.filter((value) => typeof value === "string" && emailRegEx.test(value)).length > majority;
        if (probablyAnEmail) {
            config.email = true;
        }
        // 28-char space-free strings are treated as auth user ids and made
        // read-only. NOTE(review): heuristic — confirm against real data.
        const probablyUserIds = valuesResult.values.filter((value) => typeof value === "string" && value.length === 28 && !value.includes(" ")).length > majority;
        if (probablyUserIds)
            config.readOnly = true;
        // FIX: the original condition repeated `!probablyAURL` twice.
        const plainString = !probablyAnEmail && !probablyAURL && !probablyUserIds;
        // Low cardinality (fewer than a third as many distinct values as
        // entries) suggests an enum.
        if (plainString && totalValues < totalEntriesCount / 3) {
            const enumValues = extractEnumFromValues(Array.from(valuesResult.valuesCount.keys()));
            if (enumValues.length > 1)
                config.enum = enumValues;
        }
        if (plainString && !config.enum) {
            const fileType = probableFileType(valuesResult, totalDocsCount);
            if (fileType) {
                config.storage = {
                    acceptedFiles: fileType,
                    storagePath: findCommonInitialStringInPath(valuesResult) ?? "/"
                };
            }
        }
        if (Object.keys(config).length > 0)
            stringProperty = {
                ...stringProperty,
                ...config
            };
    }
    return stringProperty;
}
149
function probableFileType(valuesCount, totalDocsCount) {
    // Guesses accepted MIME wildcards ("image/*", "audio/*", "video/*")
    // when at least two thirds of the documents hold strings ending in a
    // known media extension; returns false otherwise.
    const endsWithAny = (value, extensions) =>
        extensions.some((extension) => value.toString().endsWith(extension));
    let imageCount = 0;
    let audioCount = 0;
    let videoCount = 0;
    for (const value of valuesCount.values) {
        if (typeof value !== "string") continue;
        if (endsWithAny(value, IMAGE_EXTENSIONS)) imageCount++;
        else if (endsWithAny(value, AUDIO_EXTENSIONS)) audioCount++;
        else if (endsWithAny(value, VIDEO_EXTENSIONS)) videoCount++;
    }
    const totalMediaCount = imageCount + audioCount + videoCount;
    if (totalMediaCount > (totalDocsCount * 2) / 3) {
        const fileTypes = [];
        if (imageCount > 0) fileTypes.push("image/*");
        if (audioCount > 0) fileTypes.push("audio/*");
        if (videoCount > 0) fileTypes.push("video/*");
        return fileTypes.length > 0 ? fileTypes : false;
    }
    return false;
}
172
function buildValidation({
    totalDocsCount,
    valuesResult
}) {
    // A field present in every sampled document is considered required.
    if (valuesResult && valuesResult.values.length === totalDocsCount) {
        return { required: true };
    }
    return undefined;
}
185
function buildReferenceProperty({
    name,
    totalDocsCount,
    valuesResult
}) {
    // Builds a "reference" property. The target path is inferred from the
    // sampled values; "!!!FIX_ME!!!" is a sentinel the user must replace
    // when no common path could be found.
    return {
        name: name ?? "",
        type: "reference",
        path: findCommonInitialStringInPath(valuesResult) ?? "!!!FIX_ME!!!"
    };
}
197
async function buildEntityPropertiesFromData(data, getType) {
    // Scans every sampled entry, tallying value types and concrete values
    // per key (keys starting with "_" are treated as internal and skipped),
    // then derives a Properties schema from those counts.
    const typesCount = {};
    const valuesCount = {};
    if (data) {
        data.forEach((entry) => {
            if (entry) {
                Object.entries(entry).forEach(([key, value]) => {
                    if (key.startsWith("_")) return;
                    increaseMapTypeCount(typesCount, key, value, getType);
                    increaseValuesCount(valuesCount, key, value, getType);
                });
            }
        });
    }
    // FIX: guard the length access — the loop above already tolerates a
    // nullish `data`, but `data.length` used to throw on it.
    return buildPropertiesFromCount(data ? data.length : 0, typesCount, valuesCount);
}
213
function buildPropertyFromData(data, property, getType) {
    // Refines an existing Property using sampled values for that field.
    // If the property already declares an enum, newly observed values are
    // merged in front of it; otherwise a property is inferred from the
    // counts and deep-merged with the provided one (provided values win).
    const typesCount = {};
    const valuesCount = {};
    if (data) {
        data.forEach((entry) => {
            increaseTypeCount(property.type, typesCount, entry, getType);
            increaseValuesCount(valuesCount, "inferred_prop", entry, getType);
        });
    }
    const enumValues = "enum" in property ? resolveEnumValues(property["enum"]) : undefined;
    if (enumValues) {
        // FIX: with no sampled data the values record was never created and
        // the original dereferenced undefined here.
        const record = valuesCount["inferred_prop"];
        const newEnumValues = record
            ? extractEnumFromValues(Array.from(record.valuesCount.keys()))
            : [];
        return {
            ...property,
            enum: [...newEnumValues, ...enumValues]
        };
    }
    const generatedProperty = buildPropertyFromCount(
        "inferred_prop",
        data ? data.length : 0,
        property.type,
        typesCount,
        valuesCount["inferred_prop"]
    );
    return mergeDeep(generatedProperty, property);
}
239
function buildPropertiesOrder(properties, propertiesOrder, priorityKeys) {
    // Orders property keys for display: explicit priority keys first, then
    // exact "title"/"name", then keys containing title/name, then
    // image/picture keys, then everything else alphabetically.
    const lowerCasePriorityKeys = (priorityKeys ?? []).map((key) => key.toLowerCase());
    // Relative weight of a key; higher sorts earlier.
    function propOrder(s) {
        const k = s.toLowerCase();
        if (lowerCasePriorityKeys.includes(k)) return 4;
        if (k === "title" || k === "name") return 3;
        if (k.includes("title") || k.includes("name")) return 2;
        if (k.includes("image") || k.includes("picture")) return 1;
        return 0;
    }
    // FIX: copy before sorting — the original mutated the caller's
    // `propertiesOrder` array in place.
    const keys = [...(propertiesOrder ?? Object.keys(properties))];
    keys.sort(); // alphabetical baseline
    // Stable sort by weight keeps the alphabetical order within each tier.
    keys.sort((a, b) => propOrder(b) - propOrder(a));
    return keys;
}
256
// Tallies one occurrence of `type` for `fieldValue` into `typesCount`.
// Scalar types are simple integer counters; "map" recurses per key into a
// nested record; "array" is counted under its single most probable element
// type (with map elements recursing per key as well).
function increaseTypeCount(type, typesCount, fieldValue, getType) {
    if (type === "map") {
        if (fieldValue) {
            // Map counts are nested records: key -> types-count.
            let mapTypesCount = typesCount[type];
            if (!mapTypesCount) {
                mapTypesCount = {};
                typesCount[type] = mapTypesCount;
            }
            Object.entries(fieldValue).forEach(([key, value]) => {
                increaseMapTypeCount(mapTypesCount, key, value, getType);
            });
        }
    } else if (type === "array") {
        let arrayTypesCount = typesCount[type];
        if (!arrayTypesCount) {
            arrayTypesCount = {};
            typesCount[type] = arrayTypesCount;
        }
        if (fieldValue && Array.isArray(fieldValue) && fieldValue.length > 0) {
            // The whole array is represented by its dominant element type.
            const arrayType = getMostProbableTypeInArray(fieldValue, getType);
            if (arrayType === "map") {
                let mapTypesCount = arrayTypesCount[arrayType];
                if (!mapTypesCount) {
                    mapTypesCount = {};
                }
                fieldValue.forEach((value) => {
                    // Only plain-object elements contribute keys.
                    if (value && typeof value === "object" && !Array.isArray(value)) {
                        Object.entries(value).forEach(
                            ([key, v]) => increaseMapTypeCount(mapTypesCount, key, v, getType)
                        );
                    }
                });
                arrayTypesCount[arrayType] = mapTypesCount;
            } else {
                // Scalar element type: bump the counter.
                if (!arrayTypesCount[arrayType]) arrayTypesCount[arrayType] = 1;
                else arrayTypesCount[arrayType] = Number(arrayTypesCount[arrayType]) + 1;
            }
        }
    } else {
        // Scalar field type: bump the counter.
        if (!typesCount[type]) typesCount[type] = 1;
        else typesCount[type] = Number(typesCount[type]) + 1;
    }
}
299
function increaseMapTypeCount(typesCountRecord, key, fieldValue, getType) {
    // Tallies the inferred type of `fieldValue` under `key`; internal keys
    // (leading underscore) and nullish values are skipped, but the key's
    // record is still created so the key itself is remembered.
    if (key.startsWith("_")) return;
    let typesCount = typesCountRecord[key];
    if (!typesCount) {
        typesCount = {};
        typesCountRecord[key] = typesCount;
    }
    if (fieldValue !== null && fieldValue !== undefined) {
        increaseTypeCount(getType(fieldValue), typesCount, fieldValue, getType);
    }
}
311
// Accumulates the concrete values seen under `key` into `typeValuesRecord`.
// Each key holds { values: [], valuesCount: Map } — the raw value list plus
// per-value occurrence counts. Map values recurse into a nested record at
// `.map`; array elements are flattened into the parent key's record.
function increaseValuesCount(typeValuesRecord, key, fieldValue, getType) {
    if (key.startsWith("_")) return; // internal/meta keys are skipped
    const type = getType(fieldValue);
    let valuesRecord = typeValuesRecord[key];
    if (!valuesRecord) {
        valuesRecord = {
            values: [],
            valuesCount: /* @__PURE__ */ new Map()
        };
        typeValuesRecord[key] = valuesRecord;
    }
    if (type === "map") {
        // Child values are collected in a nested record at `valuesRecord.map`.
        let mapValuesRecord = valuesRecord.map;
        if (!mapValuesRecord) {
            mapValuesRecord = {};
            valuesRecord.map = mapValuesRecord;
        }
        if (fieldValue)
            Object.entries(fieldValue).forEach(
                ([subKey, value]) => increaseValuesCount(mapValuesRecord, subKey, value, getType)
            );
    } else if (type === "array") {
        // Array elements are recorded individually under the same key.
        if (Array.isArray(fieldValue)) {
            fieldValue.forEach((value) => {
                valuesRecord.values.push(value);
                valuesRecord.valuesCount.set(value, (valuesRecord.valuesCount.get(value) ?? 0) + 1);
            });
        }
    } else {
        // Scalars: only non-nullish values are recorded.
        if (fieldValue !== null && fieldValue !== void 0) {
            valuesRecord.values.push(fieldValue);
            valuesRecord.valuesCount.set(fieldValue, (valuesRecord.valuesCount.get(fieldValue) ?? 0) + 1);
        }
    }
}
346
function getHighestTypesCount(typesCount) {
    // Returns the largest per-type tally, recursing into nested map/array
    // sub-counts.
    let highest = 0;
    for (const [type, count] of Object.entries(typesCount)) {
        let current;
        if (type === "map") {
            current = getHighestRecordCount(count);
        } else if (type === "array") {
            current = getHighestTypesCount(count);
        } else {
            current = Number(count);
        }
        if (current > highest) {
            highest = current;
        }
    }
    return highest;
}
363
function getHighestRecordCount(record) {
    // Highest tally across all keys of a per-key types-count record.
    let max = 0;
    for (const typesCount of Object.values(record)) {
        max = Math.max(max, getHighestTypesCount(typesCount));
    }
    return max;
}
366
function getMostProbableType(typesCount) {
    // Picks the type with the highest tally; "string" is the default when
    // no counts are present. Ties keep the first entry encountered.
    let bestCount = -1;
    let bestType = "string";
    for (const [type, count] of Object.entries(typesCount)) {
        let current;
        if (type === "map") {
            current = getHighestRecordCount(count);
        } else if (type === "array") {
            current = getHighestTypesCount(count);
        } else {
            current = Number(count);
        }
        if (current > bestCount) {
            bestCount = current;
            bestType = type;
        }
    }
    return bestType;
}
385
function buildPropertyFromCount(key, totalDocsCount, mostProbableType, typesCount, valuesResult) {
    // Builds a single Property from tallied type/value counts. Maps either
    // become free-form key/value maps (high key variability) or get nested
    // sub-properties; arrays recurse into their element type; leaf types
    // delegate to the string/reference builders.
    let title;
    if (key) {
        title = prettifyIdentifier(key);
    }
    let result = undefined;
    if (mostProbableType === "map") {
        if (checkTypesCountHighVariability(typesCount)) {
            // FIX: this keyValue result was previously clobbered by the
            // unconditional assignment below, making the branch dead code.
            result = {
                type: "map",
                name: title ?? key ?? "",
                keyValue: true,
                properties: {}
            };
        } else {
            const properties = buildPropertiesFromCount(
                totalDocsCount,
                typesCount.map,
                // FIX: map sub-values are stored under `.map` by
                // increaseValuesCount; `.mapValues` was always undefined.
                valuesResult ? valuesResult.map : undefined
            );
            result = {
                type: "map",
                name: title ?? key ?? "",
                properties
            };
        }
    } else if (mostProbableType === "array") {
        const arrayTypesCount = typesCount.array;
        const arrayMostProbableType = getMostProbableType(arrayTypesCount);
        const of = buildPropertyFromCount(
            key,
            totalDocsCount,
            arrayMostProbableType,
            arrayTypesCount,
            valuesResult
        );
        result = {
            type: "array",
            name: title ?? key ?? "",
            of
        };
    }
    if (!result) {
        const propertyProps = {
            name: key,
            totalDocsCount,
            valuesResult
        };
        if (mostProbableType === "string") {
            result = buildStringProperty(propertyProps);
        } else if (mostProbableType === "reference") {
            result = buildReferenceProperty(propertyProps);
        } else {
            result = {
                type: mostProbableType
            };
        }
        if (title) {
            result.name = title;
        }
        const validation = buildValidation(propertyProps);
        if (validation) {
            result.validation = validation;
        }
    }
    return result;
}
452
function buildPropertiesFromCount(totalDocsCount, typesCountRecord, valuesCountRecord) {
    // Derives one Property per key from the accumulated type/value counts.
    const result = {};
    for (const [key, typesCount] of Object.entries(typesCountRecord)) {
        result[key] = buildPropertyFromCount(
            key,
            totalDocsCount,
            getMostProbableType(typesCount),
            typesCount,
            valuesCountRecord ? valuesCountRecord[key] : undefined
        );
    }
    return result;
}
466
function countMaxDocumentsUnder(typesCount) {
    // Largest numeric tally found anywhere in a (possibly nested) count tree.
    let max = 0;
    for (const value of Object.values(typesCount)) {
        const current = typeof value === "object"
            ? countMaxDocumentsUnder(value)
            : Number(value);
        max = Math.max(max, current);
    }
    return max;
}
477
function getMostProbableTypeInArray(array, getType) {
    // Tallies each element's inferred type, then picks the dominant one.
    const typesCount = {};
    for (const value of array) {
        increaseTypeCount(getType(value), typesCount, value, getType);
    }
    return getMostProbableType(typesCount);
}
484
function checkTypesCountHighVariability(typesCount) {
    // A map is "highly variable" when more than half of its keys appear in
    // fewer than a third of the documents — suggesting free-form keys
    // rather than a fixed schema. An empty map yields false (NaN > 0.5).
    const maxCount = countMaxDocumentsUnder(typesCount);
    const mapEntries = Object.entries(typesCount.map ?? {});
    let sparseKeys = 0;
    for (const [, value] of mapEntries) {
        if (countMaxDocumentsUnder(value) < maxCount / 3) {
            sparseKeys++;
        }
    }
    return sparseKeys / mapEntries.length > 0.5;
}
495
function inferTypeFromValue(value) {
    // Maps a JS runtime value to a DataType name; nullish and unrecognized
    // values fall back to "string".
    if (value === null || value === undefined) return "string";
    if (Array.isArray(value)) return "array";
    switch (typeof value) {
        case "string":
            return "string";
        case "number":
            return "number";
        case "boolean":
            return "boolean";
        case "object":
            return "map";
        default:
            return "string";
    }
}
504
// Public API of the schema-inference bundle. The re-exported helpers
// (isObject, isPlainObject, mergeDeep, prettifyIdentifier, unslugify) are
// forwarded from @rebasepro/utils for caller convenience.
export {
  buildEntityPropertiesFromData,
  buildPropertiesOrder,
  buildPropertyFromData,
  extractEnumFromValues,
  findCommonInitialStringInPath,
  inferTypeFromValue,
  isObject,
  isPlainObject,
  looksLikeReference,
  mergeDeep2 as mergeDeep,
  parseReferenceString,
  prettifyIdentifier2 as prettifyIdentifier,
  removeInitialAndTrailingSlashes,
  removeInitialSlash,
  removeTrailingSlash,
  resolveEnumValues,
  unslugify2 as unslugify
};
//# sourceMappingURL=index.es.js.map