@royalschedule/maps 4.0.16 → 4.0.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -23,15 +23,23 @@ function toPositiveInteger(val) {
  if (num <= 0) throw new Error(`toInteger: te result ${num} is not positive`);
  return num;
  }
+ /**
+ * Converts a string/number to a trimmed string
+ */
  function toString(x) {
- if (x == void 0) return void 0;
- if (typeof x == "string" && !x.length) return void 0;
- if (isNumber(x)) return x.toString();
- return x.trim();
+ return isNumber(x) ? x.toString() : x.trim();
+ }
+ /**
+ * Converts a possible nullish string/number to a trimmed string or undefined
+ */
+ function toStringIfExists(x) {
+ if (x == null) return void 0;
+ return toString(x);
  }
  function extractLocations(course, locationsNameMap, warnings) {
+ if (course.locations == null) return;
  const locationsStr = toString(course.locations);
- if (!locationsStr) return void 0;
+ if (!locationsStr) return;
  const locationReferences = [];
  locationsStr.split("+").flatMap((x, i) => x.split(",").map((x$1) => [i, x$1.trim()])).forEach(([i, x]) => {
  const key = x.toLowerCase();
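The central change in this release is visible in the hunk above: the old toString, which quietly turned undefined and empty strings into undefined, is split into a strict toString (non-nullish string/number in, trimmed string out) and a new toStringIfExists that carries the nullish handling, while the extract* helpers now bail out before converting when the field is missing. A minimal standalone sketch of the new contract (example values are illustrative, not from the package):

```ts
import { isNumber } from 'lodash-es';

// Mirrors the 4.0.18 helpers shown in the hunk above.
function toString(x: string | number): string {
  return isNumber(x) ? x.toString() : x.trim();
}
function toStringIfExists(x: string | number | null | undefined): string | undefined {
  if (x == null) return undefined;
  return toString(x);
}

console.log(toString(42));                // "42"
console.log(toString('  4B  '));          // "4B"
console.log(toStringIfExists(undefined)); // undefined
console.log(toStringIfExists('  4B  ')); // "4B"
// One visible difference: an empty string now comes back as "" rather than undefined,
// so call sites keep their falsy checks (e.g. `if (!locationsStr) return;`) to cover it.
```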
@@ -58,8 +66,9 @@ function extractLocations(course, locationsNameMap, warnings) {
  return locationReferences;
  }
  function extractGroups(course, groupsNameMap, warnings) {
+ if (course.groups == null) return;
  const groupsStr = toString(course.groups);
- if (!groupsStr) return void 0;
+ if (!groupsStr) return;
  const groupReferences = [];
  groupsStr.split(",").map((x) => x.trim()).forEach((x) => {
  const key = x.toLowerCase();
@@ -83,6 +92,7 @@ function extractGroups(course, groupsNameMap, warnings) {
  return groupReferences;
  }
  function extractTeachers(course, teachersNameMap, warnings) {
+ if (course.teachers == null) return;
  const teachersStr = toString(course.teachers);
  if (!teachersStr) return void 0;
  const teacherReferences = [];
@@ -108,6 +118,7 @@ function extractTeachers(course, teachersNameMap, warnings) {
  return teacherReferences;
  }
  function extractSyllabus(course, syllabusesCodeMap, syllabusesNameMap, warnings) {
+ if (course.syllabus == null) return void 0;
  const syllabusValue = toString(course.syllabus);
  if (!syllabusValue) return void 0;
  const syllabus = syllabusesCodeMap.get(syllabusValue) || syllabusesNameMap.get(syllabusValue.toLowerCase());
@@ -122,12 +133,12 @@ function extractSyllabus(course, syllabusesCodeMap, syllabusesNameMap, warnings)
  }
  function _schedules(source) {
  const warnings = [];
- source.settings = source.settings?.filter((x) => Object.values(x).some((y) => y.toString().trim().length > 0));
- source.locations = source.locations?.filter((x) => Object.values(x).some((y) => y.toString().trim().length > 0));
- source.teachers = source.teachers?.filter((x) => Object.values(x).some((y) => y.toString().trim().length > 0));
- source.groups = source.groups?.filter((x) => Object.values(x).some((y) => y.toString().trim().length > 0));
- source.persons = source.persons?.filter((x) => Object.values(x).some((y) => y.toString().trim().length > 0));
- source.courses = source.courses?.filter((x) => Object.values(x).some((y) => y.toString().trim().length > 0));
+ source.settings = source.settings?.filter((x) => Object.values(x).some((y) => toString(y).length > 0));
+ source.locations = source.locations?.filter((x) => Object.values(x).some((y) => toString(y).length > 0));
+ source.teachers = source.teachers?.filter((x) => Object.values(x).some((y) => toString(y).length > 0));
+ source.groups = source.groups?.filter((x) => Object.values(x).some((y) => toString(y).length > 0));
+ source.persons = source.persons?.filter((x) => Object.values(x).some((y) => toString(y).length > 0));
+ source.courses = source.courses?.filter((x) => Object.values(x).some((y) => toString(y).length > 0));
  const settings = {
  discretization: 5,
  numDays: 5,
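The pre-filtering at the top of _schedules, which drops sheet rows whose every cell is blank, now routes each cell through the shared toString helper instead of calling y.toString().trim() inline. A small illustration of what the predicate keeps and drops (hypothetical rows, repeating the toString sketch so it runs on its own):

```ts
import { isNumber } from 'lodash-es';

// Same normalization as the 4.0.18 filter: numbers are stringified, strings are trimmed.
const toString = (x: string | number): string => (isNumber(x) ? x.toString() : x.trim());

type Row = Record<string, string | number>;
const hasContent = (row: Row) => Object.values(row).some((y) => toString(y).length > 0);

const rows: Row[] = [
  { name: '4B', is_class: '1' }, // kept
  { name: '   ', is_class: '' }, // dropped: every cell trims to ""
];
console.log(rows.filter(hasContent).map((r) => r.name)); // ["4B"]
```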
@@ -158,13 +169,13 @@ function _schedules(source) {
  const syllabusesCodeMap = /* @__PURE__ */ new Map();
  if (!source.syllabuses) {}
  source.syllabuses?.forEach((x) => {
- const ids = toString(x.foreign_ID);
- const subjectCode = toString(x.subject_code);
- const subjectName = toString(x.subject_name);
- const courseCode = toString(x.course_code);
- const courseName = toString(x.course_name);
- const schoolType = toString(x.school_type);
- const id = x.ID?.toString() ?? generateId(syllabusesIdMap);
+ const ids = toStringIfExists(x.foreign_ID);
+ const subjectCode = toStringIfExists(x.subject_code);
+ const subjectName = toStringIfExists(x.subject_name);
+ const courseCode = toStringIfExists(x.course_code);
+ const courseName = toStringIfExists(x.course_name);
+ const schoolType = toStringIfExists(x.school_type);
+ const id = toStringIfExists(x.ID) ?? generateId(syllabusesIdMap);
  if (syllabusesIdMap.has(id)) {
  warnings.push({
  code: "ignoring_duplicate_syllabus",
@@ -226,9 +237,9 @@ function _schedules(source) {
  }].forEach(({ name, data, idMap, nameMap }) => {
  if (!data) warnings.push({ code: `missing_${name.plural}_sheet` });
  data?.forEach((x) => {
- const ids = toString(x.foreign_ID);
- const displayName = toString(x.name);
- const id = x.ID?.toString() ?? generateId(idMap);
+ const ids = toStringIfExists(x.foreign_ID);
+ const displayName = toStringIfExists(x.name);
+ const id = toStringIfExists(x.ID) ?? generateId(idMap);
  if (idMap.has(id)) {
  warnings.push({
  code: `ignoring_duplicate_${name.singular}`,
@@ -250,9 +261,9 @@ function _schedules(source) {
  const groupsNameMap = /* @__PURE__ */ new Map();
  if (!source.groups) warnings.push({ code: "missing_groups_sheet" });
  source.groups?.forEach((x) => {
- const ids = toString(x.foreign_ID);
- const displayName = toString(x.name);
- const id = x.ID?.toString() ?? generateId(groupsIdMap);
+ const ids = toStringIfExists(x.foreign_ID);
+ const displayName = toStringIfExists(x.name);
+ const id = toStringIfExists(x.ID) ?? generateId(groupsIdMap);
  if (groupsIdMap.has(id)) {
  warnings.push({
  code: "ignoring_duplicate_group",
@@ -260,7 +271,7 @@ function _schedules(source) {
  });
  return;
  }
- const species = toString(x.is_class) == "1" ? "class" : void 0;
+ const species = toStringIfExists(x.is_class) == "1" ? "class" : void 0;
  const y = {
  id,
  ...ids && { ids },
@@ -275,11 +286,11 @@ function _schedules(source) {
  const personsSsnMap = /* @__PURE__ */ new Map();
  if (!source.persons) warnings.push({ code: "missing_persons_sheet" });
  source.persons?.forEach((x) => {
- const ids = toString(x.foreign_ID);
- const firstName = toString(x.first_name);
- const lastName = toString(x.last_name);
- const SSN = toString(x.SSN);
- const id = x.ID?.toString() ?? generateId(personsIdMap);
+ const ids = toStringIfExists(x.foreign_ID);
+ const firstName = toStringIfExists(x.first_name);
+ const lastName = toStringIfExists(x.last_name);
+ const SSN = toStringIfExists(x.SSN);
+ const id = toStringIfExists(x.ID) ?? generateId(personsIdMap);
  if (personsIdMap.has(id)) {
  warnings.push({
  code: "ignoring_duplicate_person",
@@ -323,7 +334,9 @@ function _schedules(source) {
  });
  groupsNameMap.forEach((x) => {
  let duplicateReported = false;
- x.members = makeChainable(x.__raw.members).chain((x$1) => toString(x$1 ?? "").split(",").map((x$2) => x$2.trim()).filter((x$2) => x$2 != null)).chain((x$1) => countBy(x$1)).chain((y) => Object.entries(y).map(([ssn, count]) => {
+ if (!x.__raw.members) return;
+ x.members = makeChainable(x.__raw.members).chain((x$1) => toString(x$1).split(",").map((x$2) => x$2.trim()).filter((x$2) => x$2 != null)).chain((x$1) => countBy(x$1)).chain((y) => Object.entries(y).map(([ssn, count]) => {
+ if (!ssn) return;
  if (!duplicateReported && count > 1) {
  warnings.push({
  code: "duplicate_group_member",
@@ -357,11 +370,11 @@ function _schedules(source) {
  const overlapGroupsKeyMap = /* @__PURE__ */ new Map();
  if (!source.courses) warnings.push({ code: "missing_courses_sheet" });
  source.courses?.forEach((x) => {
- if (toString(x.merge_with)) return;
- const ids = toString(x.foreign_ID);
- const displayName = toString(x.name)?.trim();
- const subject = toString(x.subject);
- const courseId = x.ID?.toString() ?? generateId(coursesIdMap);
+ if (toStringIfExists(x.merge_with)) return;
+ const ids = toStringIfExists(x.foreign_ID);
+ const displayName = toStringIfExists(x.name);
+ const subject = toStringIfExists(x.subject);
+ const courseId = toStringIfExists(x.ID) ?? generateId(coursesIdMap);
  if (coursesIdMap.has(courseId)) {
  warnings.push({
  code: "ignoring_duplicate_course",
@@ -454,7 +467,7 @@ function _schedules(source) {
  coursesIdMap.set(courseId, course);
  });
  source.courses?.forEach((x) => {
- const mergeWith = toString(x.merge_with);
+ const mergeWith = toStringIfExists(x.merge_with);
  if (!mergeWith) return;
  if (!coursesIdMap.has(mergeWith)) {
  warnings.push({
@@ -463,7 +476,7 @@ function _schedules(source) {
  });
  return;
  }
- const displayName = toString(x.name)?.trim();
+ const displayName = toStringIfExists(x.name);
  const locations = extractLocations(x, locationsNameMap, warnings)?.map((x$1) => ({
  groupIndex: x$1.groupIndex,
  locations: [x$1.to]
@@ -1 +1 @@
- {"version":3,"file":"index.js","sources":["../../../../src/Excel/v2/from/index.ts"], … }
+ {"version":3,"file":"index.js","sources":["../../../../src/Excel/v2/from/index.ts"], … }
  (single-line source map regenerated for this release; the embedded names, sourcesContent, and mappings are updated to match the index.js changes above and are omitted here)
,QACA,iBACA,UACA;AACA,KAAI,OAAO,YAAY,KAAM;CAC7B,MAAM,cAAc,SAAS,OAAO,SAAS;AAC7C,KAAI,CAAC,YAAa,QAAO;CAEzB,MAAMC,oBAAsC,EAAE;AAC9C,aAAY,MAAM,IAAI,CAAC,KAAI,MAAK,EAAE,MAAM,CAAC,CACtC,SAAQ,MAAK;EACZ,MAAM,MAAM,EAAE,aAAa;EAC3B,MAAM,MAAM,gBAAgB,IAAI,IAAI;AACpC,MAAI,CAAC,KAAK;AACR,YAAS,KAAK;IAAE,MAAM;IAA6B,SAAS;IAAG,CAAC;AAChE;;AAGF,MAAI,kBAAkB,MAAK,MAAK,EAAE,MAAM,IAAI,GAAG,EAAE;AAC/C,YAAS,KAAK;IAAE,MAAM;IAAqC,SAAS;IAAG,CAAC;AACxE;;AAGF,oBAAkB,KAAK,EAAE,IAAI,IAAI,IAAI,CAAC;GACtC;AAEJ,QAAO;;AAGT,SAAS,gBACP,QACA,mBACA,mBACA,UACoB;AACpB,KAAI,OAAO,YAAY,KAAM,QAAO;CACpC,MAAM,gBAAgB,SAAS,OAAO,SAAS;AAC/C,KAAI,CAAC,cAAe,QAAO;CAE3B,MAAM,WAAW,kBAAkB,IAAI,cAAc,IAAI,kBAAkB,IAAI,cAAc,aAAa,CAAC;AAE3G,KAAI,CAAC,UAAU;AACb,WAAS,KAAK;GAAE,MAAM;GAA8B,SAAS;GAAe,CAAC;AAC7E;;AAGF,QAAO,SAAS;;AAIlB,SAAS,WACP,QACK;CAEL,MAAMC,WAA2D,EAAE;AAGnE,QAAO,WAAY,OAAO,UAAW,QAAO,MAAK,OAAO,OAAO,EAAE,CAAC,MAAK,MAAK,SAAS,EAAE,CAAC,SAAS,EAAE,CAAC;AACpG,QAAO,YAAY,OAAO,WAAW,QAAO,MAAK,OAAO,OAAO,EAAE,CAAC,MAAK,MAAK,SAAS,EAAE,CAAC,SAAS,EAAE,CAAC;AACpG,QAAO,WAAY,OAAO,UAAW,QAAO,MAAK,OAAO,OAAO,EAAE,CAAC,MAAK,MAAK,SAAS,EAAE,CAAC,SAAS,EAAE,CAAC;AACpG,QAAO,SAAY,OAAO,QAAW,QAAO,MAAK,OAAO,OAAO,EAAE,CAAC,MAAK,MAAK,SAAS,EAAE,CAAC,SAAS,EAAE,CAAC;AACpG,QAAO,UAAY,OAAO,SAAW,QAAO,MAAK,OAAO,OAAO,EAAE,CAAC,MAAK,MAAK,SAAS,EAAE,CAAC,SAAS,EAAE,CAAC;AACpG,QAAO,UAAY,OAAO,SAAW,QAAO,MAAK,OAAO,OAAO,EAAE,CAAC,MAAK,MAAK,SAAS,EAAE,CAAC,SAAS,EAAE,CAAC;CAMpG,MAAMC,WAAuB;EAC3B,gBAAgB;EAChB,SAAgB;EAChB,UAAgB;EAChB,QAAgB;EACjB;AACD,KAAI,CAAC,OAAO,SACV,UAAS,KAAK,EAAE,MAAM,0BAA0B,CAAC;AAEnD,KAAI,OAAO,WAAW,IAAI,gBAAgB;EACxC,MAAM,UAAU,OAAO,WAAW,IAAI;AACtC,MAAI;AACF,YAAS,UAAU,kBAAkB,QAAQ;AAG7C,OAAI,SAAS,YAAY,KAAK,SAAS,YAAY,GAAG;AACpD,aAAS,KAAK;KAAE,MAAM;KAA0B,SAAS;KAAS,CAAC;AACnE,aAAS,UAAU;;UAEf;AACN,YAAS,KAAK;IAAE,MAAM;IAA0B,SAAS;IAAS,CAAC;;;CAOvE,MAAM,kCAAkB,IAAI,KAAyB;CACrD,MAAM,oCAAoB,IAAI,KAAyB;CACvD,MAAM,oCAAoB,IAAI,KAAyB;AACvD,KAAI,CAAC,OAAO,YAAY;AAGxB,QAAO,YAAY,SAAQ,MAAK;EAC9B,MAAM,MAAc,iBAAiB,EAAE,WAAa;EACpD,MAAM,cAAc,iBAAiB,EAAE,aAAa;EACpD,MAAM,cAAc,iBAAiB,EAAE,aAAa;EACpD,MAAM,aAAc,iBAAiB,EAAE,YAAa;EACpD,MAAM,aAAc,iBAAiB,EAAE,YAAa;EACpD,MAAM,aAAc,iBAAiB,EAAE,YAAa;EAEpD,MAAM,KAAK,iBAAiB,EAAE,GAAG,IAAI,WAAW,gBAAgB;AAChE,MAAI,gBAAgB,IAAI,GAAG,EAAE;AAC3B,YAAS,KAAK;IAAE,MAAM;IAA+B,SAAS,GAAG,YAAY,IAAI,WAAW;IAAI,CAAC;AACjG;;AAGF,MAAI,CAAC,aAAa;AAChB,YAAS,KAAK;IAAE,MAAM;IAAwB,SAAS;IAAG,CAAC;AAC3D;;AAGF,MAAI,CAAC,YAAY;AACf,YAAS,KAAK;IAAE,MAAM;IAAuB,SAAS;IAAG,CAAC;AAC1D;;EAIF,MAAMC,IAAgB;GACpB;GACA,GAAG,OAAmB,EAAE,KAAiB;GACzC,GAAG,eAAmB,EAAE,aAAiB;GACzC;GACA,GAAG,cAAmB,EAAE,YAAiB;GACzC,GAAG,cAAmB,EAAE,YAAiB;GACzC;GACA,UAAU;GACV,OAAU;GACX;AACD,kBAAgB,IAAI,IAAI,EAAE;EAC1B,MAAM,OAAO,cAAc;EAC3B,MAAM,OAAO,cAAc;AAC3B,MAAI,KAAM,mBAAkB,IAAI,MAAM,EAAE;AACxC,MAAI,KAAM,mBAAkB,IAAI,KAAK,aAAa,EAAE,EAAE;GACtD;CAMF,MAAM,iCAAmB,IAAI,KAAyB;CACtD,MAAM,mCAAmB,IAAI,KAAyB;CACtD,MAAM,gCAAmB,IAAI,KAAwB;CACrD,MAAM,kCAAmB,IAAI,KAAwB;AACrD,EACE;EACE,MAAS;GAAE,UAAU;GAAY,QAAQ;GAAa;EACtD,MAAS,OAAO;EAChB,OAAS;EACT,SAAS;EACV,EACD;EACE,MAAS;GAAE,UAAU;GAAW,QAAQ;GAAY;EACpD,MAAS,OAAO;EAChB,OAAS;EACT,SAAS;EACV,CACF,CAAC,SAAS,EAAE,MAAM,MAAM,OAAO,cAAc;AAC5C,MAAI,CAAC,KACH,UAAS,KAAK,EAAE,MAAM,WAAW,KAAK,OAAO,SAAS,CAAC;AAEzD,QAAM,SAAQ,MAAK;GACjB,MAAM,MAAc,iBAAiB,EAAE,WAAW;GAClD,MAAM,cAAc,iBAAiB,EAAE,KAAW;GAElD,MAAM,KAAK,iBAAiB,EAAE,GAAG,IAAI,WAAW,MAAM;AACtD,OAAI,MAAM,IAAI,GAAG,EAAE;AACjB,aAAS,KAAK;KAAE,MAAM,sBAAsB,KAAK;KAAY,SAAS;KAAa,CAAC;AACpF;;GAGF,MAAM,IAAI;IACR;IACA,GAAG,OAAe,EAAE,KAAK;IACzB,GAAG,eAAe,EAAE,aAAa;IACjC,OAAO;IACR;AACD,SAAM,IAAI,IAAI,EAAE;AAChB,OAAI,YAAa,SAAQ,IAAI,YAAY,aAAa,EAAE,EAAE;IAC1D;GACF;CAMF,MAAM,8B
AAgB,IAAI,KAAsB;CAChD,MAAM,gCAAgB,IAAI,KAAsB;AAChD,KAAI,CAAC,OAAO,OACV,UAAS,KAAK,EAAE,MAAM,wBAAwB,CAAC;AAEjD,QAAO,QAAQ,SAAQ,MAAK;EAC1B,MAAM,MAAc,iBAAiB,EAAE,WAAW;EAClD,MAAM,cAAc,iBAAiB,EAAE,KAAW;EAElD,MAAM,KAAK,iBAAiB,EAAE,GAAG,IAAI,WAAW,YAAY;AAC5D,MAAI,YAAY,IAAI,GAAG,EAAE;AACvB,YAAS,KAAK;IAAE,MAAM;IAA4B,SAAS;IAAa,CAAC;AACzE;;EAIF,MAAM,UAAU,iBAAiB,EAAE,SAAS,IAAI,MAAM,UAAmB;EAEzE,MAAM,IAAI;GACR;GACA,GAAG,OAAe,EAAE,KAAK;GACzB,GAAG,eAAe,EAAE,aAAa;GACjC,GAAG,WAAe,EAAE,SAAS;GAC7B,OAAO;GACR;AACD,cAAY,IAAI,IAAI,EAAE;AACtB,MAAI,YAAa,eAAc,IAAI,YAAY,aAAa,EAAE,EAAE;GAChE;CAMF,MAAM,+BAAgB,IAAI,KAAuB;CACjD,MAAM,gCAAgB,IAAI,KAAuB;AACjD,KAAI,CAAC,OAAO,QACV,UAAS,KAAK,EAAE,MAAM,yBAAyB,CAAC;AAElD,QAAO,SAAS,SAAQ,MAAK;EAC3B,MAAM,MAAY,iBAAiB,EAAE,WAAW;EAChD,MAAM,YAAY,iBAAiB,EAAE,WAAW;EAChD,MAAM,WAAY,iBAAiB,EAAE,UAAW;EAChD,MAAM,MAAY,iBAAiB,EAAE,IAAW;EAEhD,MAAM,KAAK,iBAAiB,EAAE,GAAG,IAAI,WAAW,aAAa;AAC7D,MAAI,aAAa,IAAI,GAAG,EAAE;AACxB,YAAS,KAAK;IAAE,MAAM;IAA6B,SAAS,GAAG,UAAU,GAAG;IAAY,CAAC;AACzF;;EAGF,IAAIC,QAA4B;AAChC,MAAI,EAAE,OAAO;GACX,MAAM,MAAM,SAAS,EAAE,MAAM,CAAC,aAAa;GAC3C,MAAM,MAAM,cAAc,IAAI,IAAI;AAClC,OAAI,CAAC,IACH,UAAS,KAAK;IAAE,MAAM;IAA2B,SAAS,EAAE;IAAO,CAAC;YAC3D,IAAI,YAAY,QACzB,UAAS,KAAK;IAAE,MAAM;IAA6B,SAAS,EAAE;IAAO,CAAC;OAEtE,SAAQ,IAAI;;EAIhB,IAAIC;AACJ,MAAI,EAAE,IACJ,KAAS,SAAS,EAAE,IAAI,IAAI,EAAE,IAAI,aAAa,IAAI,MAAS,OAAM;WACzD,SAAS,EAAE,IAAI,IAAI,EAAE,IAAI,aAAa,IAAI,QAAS,OAAM;MAEhE,UAAS,KAAK;GAAE,MAAM;GAAe,SAAS,EAAE;GAAK,CAAC;EAI1D,MAAMC,SAAmB;GACvB;GACA,GAAG,OAAa,EAAE,KAAK;GACvB,GAAG,aAAa,EAAE,WAAW;GAC7B,GAAG,YAAa,EAAE,UAAU;GAC5B,GAAG,OAAa,EAAE,KAAK;GACvB,GAAG,SAAa,EAAE,OAAO;GACzB,GAAG,OAAa,EAAE,KAAK,EAAE,OAAO,KAAK,EAAE;GACvC,OAAO;GACR;AACD,eAAa,IAAI,IAAI,OAAO;AAC5B,MAAI,IAAK,eAAc,IAAI,KAAK,OAAO;GACvC;AAKF,eAAc,SAAQ,MAAK;EACzB,IAAI,oBAAoB;AAGxB,MAAI,CAAC,EAAE,MAAM,QAAS;AAGtB,IAAE,UAAU,cAAc,EAAE,MAAM,QAAQ,CACvC,OAAM,QAAK,SAASR,IAAE,CACpB,MAAM,IAAI,CACV,KAAI,QAAKA,IAAE,MAAM,CAAC,CAClB,QAAO,QAAKA,OAAK,KAAK,CACxB,CACA,OAAM,QAAK,QAAQA,IAAE,CAAC,CACtB,OAAM,MAAK,OAAO,QAAQ,EAAE,CAC1B,KAAK,CAAC,KAAK,WAAW;AAErB,OAAI,CAAC,IAAK;AAGV,OAAI,CAAC,qBAAqB,QAAQ,GAAG;AACnC,aAAS,KAAK;KAAE,MAAM;KAA0B,SAAS,EAAE;KAAa,CAAC;AACzE,wBAAoB;;GAItB,MAAM,SAAS,cAAc,IAAI,IAAI;AACrC,OAAI,CAAC,QAAQ;AACX,aAAS,KAAK;KAAE,MAAM;KAA4B,SAAS;KAAK,CAAC;AACjE;;AAKF,OAAI,EAAE,WAAW,SACf;QAAI,CAAC,OAAO,MACV,QAAO,QAAQ,EAAE;aAEV,OAAO,SAAS,EAAE,IAAI;AAC7B,cAAS,KAAK;MAAE,MAAM;MAA8B,SAAS;MAAK,CAAC;AACnE;;;AAIJ,UAAO,OAAO;IACd,CACD,QAAO,QAAKA,OAAK,KAAK,CACxB,CACA;GACH;CAMF,MAAM,+BAAsB,IAAI,KAA6B;CAC7D,MAAM,8BAAsB,IAAI,KAA6B;CAC7D,MAAM,sCAAsB,IAAI,KAA6B;AAC7D,KAAI,CAAC,OAAO,QACV,UAAS,KAAK,EAAE,MAAM,yBAAyB,CAAC;AAElD,QAAO,SAAS,SAAQ,MAAK;AAG3B,MADkB,iBAAiB,EAAE,WAAW,CACjC;EAEf,MAAM,MAAc,iBAAiB,EAAE,WAAW;EAClD,MAAM,cAAc,iBAAiB,EAAE,KAAW;EAClD,MAAM,UAAc,iBAAiB,EAAE,QAAW;EAElD,MAAM,WAAW,iBAAiB,EAAE,GAAG,IAAI,WAAW,aAAa;AACnE,MAAI,aAAa,IAAI,SAAS,EAAE;AAC9B,YAAS,KAAK;IAAE,MAAM;IAA6B,SAAS,eAAe,UAAU,KAAK,QAAQ,KAAK;IAAK,CAAC;AAC7G;;EAIF,IAAIS,qBAAyC;AAC7C,MAAI,EAAE,YACJ,KAAI;AACF,wBAAqB,kBAAkB,EAAE,YAAY;UAC/C;AACN,YAAS,KAAK;IAAE,MAAM;IAAuB,SAAS,EAAE;IAAa,CAAC;;EAK1E,IAAIC,iBAAqC;AACzC,MAAI,EAAE,iBACJ,KAAI;AACF,oBAAiB,sBAAsB,EAAE,iBAAiB;UACpD;AACN,YAAS,KAAK;IAAE,MAAM;IAA4B,SAAS,EAAE;IAAkB,CAAC;;AAKpF,MAAI,kBAAkB,QAAQ,sBAAsB,KAClD,UAAS,KAAK;GAAE,MAAM;GAA8B,SAAS,GAAG,eAAe,eAAe,mBAAmB;GAAO,CAAC;EAE3H,MAAM,kBACJ,qBACI,GAAG,mBAAmB,QACxB,iBACI,GAAG,eAAe,aACpB;AAIN,MAAI,EAAE,iBAiBJ,EAhBmB,OAAO,EAAE,qBAAqB,WAC7C,EAAE,iBAAiB,MAAM,IAAI,CAAC,KAAI,QAAKV,IAAE,MAAM,CAAC,GAChD,CAAC,EAAE,iBAAiB,EAIrB,KAAI,MAAK;AACR,OAAI;AACF,WAAO,sBAAsB,EAAE;WACzB;AACN,aAAS,KAAK;KAAE,MAAM;KAA4B,S
AAS,EAAE;KAAkB,CAAC;AAChF;;IAEF,CACD,QAAQ,QAAmBA,QAAM,OAAU,CAEpC,SAAQ,QAAK;GACrB,MAAM,KAAK,WAAW,YAAY;GAClC,MAAMW,QAAiB;IAAE;IAAI,QAAQ;IAAU,mBAAmBX;IAAG;AACrE,eAAY,IAAI,IAAI,MAAM;IAC1B;AAIJ,MAAI,EAAE,iBAAiB,SAAS,EAAE,cAAc,EAAE;GAChD,MAAM,MAAM,SAAS,EAAE,cAAc,CAAC,aAAa;GAEnD,MAAM,KAAK,oBAAoB,IAAI,IAAI;AACvC,OAAI,GAAI,IAAG,WAAW,KAAK;IAAE,SAAS;IAAW,IAAI;IAAU,CAAC;QAC3D;IACH,MAAM,KAAY,WAAW,oBAAoB;IACjD,MAAM,YAAY,CAAC;KAAE,SAAS;KAAoB,IAAI;KAAU,CAAC;AACjE,wBAAoB,IAAI,KAAK;KAAE;KAAI;KAAW,CAAC;;;EAInD,MAAM,YAAc,iBAAiB,GAAG,kBAAkB,SAAS,EAAE,KAAI,SAAM;GAAE,YAAYA,IAAE;GAAY,WAAW,CAACA,IAAE,GAAG;GAAc,EAAE;EAC5I,MAAM,SAAc,cAAiB,GAAG,eAAkB,SAAS;EACnE,MAAM,WAAc,gBAAiB,GAAG,iBAAkB,SAAS;EACnE,MAAM,WAAc,gBAAgB,GAAG,mBAAmB,mBAAmB,SAAS;EAEtF,MAAMY,SAAmB;GACvB,IAAO;GACP,GAAG,OAAsB,EAAE,KAAoB;GAC/C,GAAG,eAAsB,EAAE,aAAoB;GAC/C,GAAG,WAAsB,EAAE,SAAoB;GACxB,GAAG,sBAAsB,EAAE,oBAAoB;GAC/C,GAAG,kBAAsB,EAAE,gBAAoB;GACtE,GAAG,mBAAsB,EAAE,iBAAoB;GAC/C,GAAG,UAAsB,EAAE,QAAoB;GAC/C,GAAG,YAAsB,EAAE,UAAoB;GAC/C,GAAG,aAAsB,EAAE,WAAoB;GAC/C,GAAG,YAAsB,EAAE,UAAoB;GAC/C,OAAO;GACR;AACD,eAAa,IAAI,UAAU,OAAO;GAClC;AAMF,QAAO,SAAS,SAAQ,MAAK;EAE3B,MAAM,YAAY,iBAAiB,EAAE,WAAW;AAChD,MAAI,CAAC,UAAW;AAGhB,MAAI,CAAC,aAAa,IAAI,UAAU,EAAE;AAChC,YAAS,KAAK;IAAE,MAAM;IAA4B,SAAS;IAAW,CAAC;AACvE;;EAGF,MAAM,cAAc,iBAAiB,EAAE,KAAK;EAC5C,MAAM,YAAc,iBAAiB,GAAG,kBAAkB,SAAS,EAAE,KAAI,SAAM;GAAE,YAAYZ,IAAE;GAAY,WAAW,CAACA,IAAE,GAAG;GAAc,EAAE;EAC5I,MAAM,SAAc,cAAiB,GAAG,eAAkB,SAAS;EACnE,MAAM,WAAc,gBAAiB,GAAG,iBAAkB,SAAS;AAGnE,MAAI,EAAE,iBAiBJ,EAhBmB,OAAO,EAAE,qBAAqB,WAC7C,EAAE,iBAAiB,MAAM,IAAI,CAAC,KAAI,QAAKA,IAAE,MAAM,CAAC,GAChD,CAAC,EAAE,iBAAiB,EAIrB,KAAI,MAAK;AACR,OAAI;AACF,WAAO,sBAAsB,EAAE;WACzB;AACN,aAAS,KAAK;KAAE,MAAM;KAA4B,SAAS,EAAE;KAAkB,CAAC;AAChF;;IAEF,CACD,QAAQ,QAAmBA,QAAM,OAAU,CAEpC,SAAQ,sBAAqB;GACrC,MAAM,KAAK,WAAW,YAAY;GAClC,MAAMW,QAAiB;IACrB;IACA,QAAmB;IACA;IACnB,GAAG,eAAgB,EAAE,aAAa;IAClC,GAAG,UAAgB,EAAE,QAAa;IAClC,GAAG,YAAgB,EAAE,UAAa;IAClC,GAAG,aAAgB,EAAE,WAAa;IAClC,OAAmB;IACpB;AACD,eAAY,IAAI,IAAI,MAAM;IAC1B;GAEJ;AAMF,QAAO;EACL;EACA,WAAe,CAAC,GAAG,eAAoB,QAAQ,CAAC;EAChD,UAAe,CAAC,GAAG,cAAoB,QAAQ,CAAC;EAChD,SAAe,CAAC,GAAG,aAAoB,QAAQ,CAAC;EAChD,QAAe,CAAC,GAAG,YAAoB,QAAQ,CAAC;EAChD,SAAe,CAAC,GAAG,aAAoB,QAAQ,CAAC;EAChD,QAAe,CAAC,GAAG,YAAoB,QAAQ,CAAC;EAChD,eAAe,CAAC,GAAG,oBAAoB,QAAQ,CAAC;EAChD,YAAe,CAAC,GAAG,gBAAoB,QAAQ,CAAC;EAChD,MAAe,EAEb,GAAG,SAAS,UAAU,EAAY,UAAU,EAC7C;EACF;;AAIH,mBAAe,EACb,WAAW,YACZ"}
@@ -37,7 +37,7 @@ function fromCollections(courses, settings, options, periodsMap) {
37
37
  period: getPeriodIndex(course.period, periodsMap, options),
38
38
  _period: course.period ? getVertexId(course.period, options) : void 0
39
39
  };
40
- if (options.meta) doc.meta = omitBy({
40
+ if (options.includeEntityMeta) doc.meta = omitBy({
41
41
  color: course.color,
42
42
  ids: course.ids,
43
43
  name: course.displayName
@@ -1 +1 @@
1
- {"version":3,"file":"collections.js","names":["doc: Types.collection & { overlapGroupId?: string }","x"],"sources":["../../../../src/RS/to/input/collections.ts"],"sourcesContent":["import { groupBy, omit, omitBy, values } from 'lodash-es';\nimport { attachLockedTimes } from './util/attach-locked-times';\nimport { parseEvents } from './events';\nimport { parseMinimumBreakLength } from './util/parse-minimum-break-length';\nimport { parseGroupReferences } from './util/parse-group-references';\nimport type { ConnectedTypes } from '../../make-connected';\nimport { getVertexId } from '../../../core/util';\nimport { getPeriodIndex, idOf } from './util/util';\nimport { parseLocationReferences } from './util/parse-location-references';\nimport { parseDays } from './util/parse-days';\nimport { parseIntervals } from './util/parse-intervals';\nimport type { Types } from '../../types';\nimport { makeChainable } from '../../../common/make-chainable';\nimport { getDefaultInterval } from './intervals';\n\nexport function fromCollections (\n courses: ConnectedTypes.course[],\n settings: ConnectedTypes.divisionSettings,\n options: Types.parsedToOptions,\n periodsMap: Map<string | undefined, number>,\n): (Types.collection[] | Types.collection)[] {\n const defaultInterval = getDefaultInterval(settings);\n\n const collections = courses\n .map(course => {\n const id = getVertexId(course, options);\n const intervals = course.intervals ?? defaultInterval;\n\n const doc: Types.collection & { overlapGroupId?: string } = {\n id,\n\n weight: course.weight,\n density: course.density,\n maxEventLengthVariance: course.eventDurationVariance,\n potentialCenter: course.centerOfAttraction ? parseFloat(course.centerOfAttraction.replace(':', '.')) : undefined,\n distributionKey: id,\n\n events: parseEvents (course.events, settings, options, periodsMap),\n dependencies: parseLocationReferences (course.locations, options),\n groups: parseGroupReferences ({ type: 'course', item: course }, options),\n intervals: options.oldFormat\n ? parseIntervals(intervals, undefined, settings)\n : idOf.intervalPairReference(intervals, undefined, options),\n days: parseDays (course.days, settings),\n minBreakLength: parseMinimumBreakLength (course.minBreakLength),\n lockedTimes: attachLockedTimes (course.lockedTimes, options),\n period: getPeriodIndex (course.period, periodsMap, options),\n _period: course.period ? getVertexId(course.period, options) : undefined,\n };\n\n if (options.meta) {\n doc.meta = omitBy({\n color: course.color,\n ids: course.ids,\n name: course.displayName,\n }, x => x == null);\n }\n\n // temporarily attach overlap group\n doc.overlapGroupId = course.overlapGroup\n ? 
getVertexId(course.overlapGroup, options)\n : undefined;\n\n return omitBy(doc, x => x == null) as Types.collection & { overlapGroupId?: string };\n });\n\n const overlapping = makeChainable(collections)\n .chain(\n x => x.filter(x => x.overlapGroupId != null),\n x => groupBy(x, x => x.overlapGroupId),\n x => values(x)\n .map(xs => xs.map(x => omit(x, 'overlapGroupId') as Types.collection))\n )\n .value;\n\n const plain = collections\n .filter(x => x.overlapGroupId == null)\n .map(x => {\n delete x.overlapGroupId; // remove overlapGroupId from individual collections\n return x as Types.collection;\n });\n\n return overlapping.concat(plain);\n};"],"mappings":";;;;;;;;;;;;;;AAeA,SAAgB,gBACd,SACA,UACA,SACA,YAC2C;CAC3C,MAAM,kBAAkB,mBAAmB,SAAS;CAEpD,MAAM,cAAc,QACjB,KAAI,WAAU;EACb,MAAM,KAAY,YAAY,QAAQ,QAAQ;EAC9C,MAAM,YAAY,OAAO,aAAa;EAEtC,MAAMA,MAAsD;GAC1D;GAEA,QAAwB,OAAO;GAC/B,SAAwB,OAAO;GAC/B,wBAAwB,OAAO;GAC/B,iBAAwB,OAAO,qBAAqB,WAAW,OAAO,mBAAmB,QAAQ,KAAK,IAAI,CAAC,GAAG;GAC9G,iBAAwB;GAExB,QAAc,YAA2B,OAAO,QAAQ,UAAU,SAAS,WAAW;GACtF,cAAc,wBAA2B,OAAO,WAAW,QAAQ;GACnE,QAAc,qBAA2B;IAAE,MAAM;IAAU,MAAM;IAAQ,EAAE,QAAQ;GACnF,WAAc,QAAQ,YAClB,eAAe,WAAW,QAAW,SAAS,GAC9C,KAAK,sBAAsB,WAAW,QAAW,QAAQ;GAC7D,MAAgB,UAA2B,OAAO,MAAM,SAAS;GACjE,gBAAgB,wBAA2B,OAAO,eAAe;GACjE,aAAgB,kBAA2B,OAAO,aAAa,QAAQ;GACvE,QAAgB,eAA2B,OAAO,QAAQ,YAAY,QAAQ;GAC9E,SAAgB,OAAO,SAAS,YAAY,OAAO,QAAQ,QAAQ,GAAG;GACvE;AAED,MAAI,QAAQ,KACV,KAAI,OAAO,OAAO;GAChB,OAAO,OAAO;GACd,KAAO,OAAO;GACd,MAAO,OAAO;GACf,GAAE,MAAK,KAAK,KAAK;AAIpB,MAAI,iBAAiB,OAAO,eACxB,YAAY,OAAO,cAAc,QAAQ,GACzC;AAEJ,SAAO,OAAO,MAAK,MAAK,KAAK,KAAK;GAClC;CAEJ,MAAM,cAAc,cAAc,YAAY,CAC3C,OACC,MAAK,EAAE,QAAO,QAAKC,IAAE,kBAAkB,KAAK,GAC5C,MAAK,QAAQ,IAAG,QAAKA,IAAE,eAAe,GACtC,MAAK,OAAO,EAAE,CACX,KAAI,OAAM,GAAG,KAAI,QAAK,KAAKA,KAAG,iBAAiB,CAAqB,CAAC,CACzE,CACA;CAEH,MAAM,QAAQ,YACX,QAAO,MAAK,EAAE,kBAAkB,KAAK,CACrC,KAAI,MAAK;AACR,SAAO,EAAE;AACT,SAAO;GACP;AAEJ,QAAO,YAAY,OAAO,MAAM"}
1
+ {"version":3,"file":"collections.js","names":["doc: Types.collection & { overlapGroupId?: string }","x"],"sources":["../../../../src/RS/to/input/collections.ts"],"sourcesContent":["import { groupBy, omit, omitBy, values } from 'lodash-es';\nimport { attachLockedTimes } from './util/attach-locked-times';\nimport { parseEvents } from './events';\nimport { parseMinimumBreakLength } from './util/parse-minimum-break-length';\nimport { parseGroupReferences } from './util/parse-group-references';\nimport type { ConnectedTypes } from '../../make-connected';\nimport { getVertexId } from '../../../core/util';\nimport { getPeriodIndex, idOf } from './util/util';\nimport { parseLocationReferences } from './util/parse-location-references';\nimport { parseDays } from './util/parse-days';\nimport { parseIntervals } from './util/parse-intervals';\nimport type { Types } from '../../types';\nimport { makeChainable } from '../../../common/make-chainable';\nimport { getDefaultInterval } from './intervals';\n\nexport function fromCollections (\n courses: ConnectedTypes.course[],\n settings: ConnectedTypes.divisionSettings,\n options: Types.parsedToOptions,\n periodsMap: Map<string | undefined, number>,\n): (Types.collection[] | Types.collection)[] {\n const defaultInterval = getDefaultInterval(settings);\n\n const collections = courses\n .map(course => {\n const id = getVertexId(course, options);\n const intervals = course.intervals ?? defaultInterval;\n\n const doc: Types.collection & { overlapGroupId?: string } = {\n id,\n\n weight: course.weight,\n density: course.density,\n maxEventLengthVariance: course.eventDurationVariance,\n potentialCenter: course.centerOfAttraction ? parseFloat(course.centerOfAttraction.replace(':', '.')) : undefined,\n distributionKey: id,\n\n events: parseEvents (course.events, settings, options, periodsMap),\n dependencies: parseLocationReferences (course.locations, options),\n groups: parseGroupReferences ({ type: 'course', item: course }, options),\n intervals: options.oldFormat\n ? parseIntervals(intervals, undefined, settings)\n : idOf.intervalPairReference(intervals, undefined, options),\n days: parseDays (course.days, settings),\n minBreakLength: parseMinimumBreakLength (course.minBreakLength),\n lockedTimes: attachLockedTimes (course.lockedTimes, options),\n period: getPeriodIndex (course.period, periodsMap, options),\n _period: course.period ? getVertexId(course.period, options) : undefined,\n };\n\n if (options.includeEntityMeta) {\n doc.meta = omitBy({\n color: course.color,\n ids: course.ids,\n name: course.displayName,\n }, x => x == null);\n }\n\n // temporarily attach overlap group\n doc.overlapGroupId = course.overlapGroup\n ? 
getVertexId(course.overlapGroup, options)\n : undefined;\n\n return omitBy(doc, x => x == null) as Types.collection & { overlapGroupId?: string };\n });\n\n const overlapping = makeChainable(collections)\n .chain(\n x => x.filter(x => x.overlapGroupId != null),\n x => groupBy(x, x => x.overlapGroupId),\n x => values(x)\n .map(xs => xs.map(x => omit(x, 'overlapGroupId') as Types.collection))\n )\n .value;\n\n const plain = collections\n .filter(x => x.overlapGroupId == null)\n .map(x => {\n delete x.overlapGroupId; // remove overlapGroupId from individual collections\n return x as Types.collection;\n });\n\n return overlapping.concat(plain);\n};"],"mappings":";;;;;;;;;;;;;;AAeA,SAAgB,gBACd,SACA,UACA,SACA,YAC2C;CAC3C,MAAM,kBAAkB,mBAAmB,SAAS;CAEpD,MAAM,cAAc,QACjB,KAAI,WAAU;EACb,MAAM,KAAY,YAAY,QAAQ,QAAQ;EAC9C,MAAM,YAAY,OAAO,aAAa;EAEtC,MAAMA,MAAsD;GAC1D;GAEA,QAAwB,OAAO;GAC/B,SAAwB,OAAO;GAC/B,wBAAwB,OAAO;GAC/B,iBAAwB,OAAO,qBAAqB,WAAW,OAAO,mBAAmB,QAAQ,KAAK,IAAI,CAAC,GAAG;GAC9G,iBAAwB;GAExB,QAAc,YAA2B,OAAO,QAAQ,UAAU,SAAS,WAAW;GACtF,cAAc,wBAA2B,OAAO,WAAW,QAAQ;GACnE,QAAc,qBAA2B;IAAE,MAAM;IAAU,MAAM;IAAQ,EAAE,QAAQ;GACnF,WAAc,QAAQ,YAClB,eAAe,WAAW,QAAW,SAAS,GAC9C,KAAK,sBAAsB,WAAW,QAAW,QAAQ;GAC7D,MAAgB,UAA2B,OAAO,MAAM,SAAS;GACjE,gBAAgB,wBAA2B,OAAO,eAAe;GACjE,aAAgB,kBAA2B,OAAO,aAAa,QAAQ;GACvE,QAAgB,eAA2B,OAAO,QAAQ,YAAY,QAAQ;GAC9E,SAAgB,OAAO,SAAS,YAAY,OAAO,QAAQ,QAAQ,GAAG;GACvE;AAED,MAAI,QAAQ,kBACV,KAAI,OAAO,OAAO;GAChB,OAAO,OAAO;GACd,KAAO,OAAO;GACd,MAAO,OAAO;GACf,GAAE,MAAK,KAAK,KAAK;AAIpB,MAAI,iBAAiB,OAAO,eACxB,YAAY,OAAO,cAAc,QAAQ,GACzC;AAEJ,SAAO,OAAO,MAAK,MAAK,KAAK,KAAK;GAClC;CAEJ,MAAM,cAAc,cAAc,YAAY,CAC3C,OACC,MAAK,EAAE,QAAO,QAAKC,IAAE,kBAAkB,KAAK,GAC5C,MAAK,QAAQ,IAAG,QAAKA,IAAE,eAAe,GACtC,MAAK,OAAO,EAAE,CACX,KAAI,OAAM,GAAG,KAAI,QAAK,KAAKA,KAAG,iBAAiB,CAAqB,CAAC,CACzE,CACA;CAEH,MAAM,QAAQ,YACX,QAAO,MAAK,EAAE,kBAAkB,KAAK,CACrC,KAAI,MAAK;AACR,SAAO,EAAE;AACT,SAAO;GACP;AAEJ,QAAO,YAAY,OAAO,MAAM"}
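For illustration, a minimal caller sketch of the renamed flag, assuming a consumer invokes the exported fromCollections(courses, settings, options, periodsMap) shown in the hunk above; the surrounding values are hypothetical placeholders and not part of this package:

  // previously gated by options.meta; as of this version the flag is includeEntityMeta
  const docs = fromCollections(courses, settings, { ...options, includeEntityMeta: true }, periodsMap);
  // each resulting collection doc then carries meta = { color, ids, name } with null values omitted

The same rename appears in the fromLocations, parseEvents, fromGroups, fromPersonsToIndividuals, fromGroupToIndividualsSet and fromTeachers hunks below.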
@@ -17,7 +17,7 @@ function fromLocations(locations, settings, options) {
17
17
  days: parseDays(location.days, settings),
18
18
  lockedTimes: attachLockedTimes(location.lockedTimes, options)
19
19
  };
20
- if (options.meta) doc.meta = omitBy({
20
+ if (options.includeEntityMeta) doc.meta = omitBy({
21
21
  ids: location.ids,
22
22
  name: location.displayName
23
23
  }, (x) => x == null);
@@ -1 +1 @@
1
- {"version":3,"file":"dependencies.js","names":["doc: Types.dependency"],"sources":["../../../../src/RS/to/input/dependencies.ts"],"sourcesContent":["import { omitBy } from 'lodash-es';\nimport { attachLockedTimes } from './util/attach-locked-times';\nimport type { ConnectedTypes } from '../../make-connected';\nimport { getVertexId } from '../../../core/util';\nimport { parseDays } from './util/parse-days';\nimport type { Types } from '../../types';\n\nexport function fromLocations (\n locations: ConnectedTypes.location[],\n settings: ConnectedTypes.divisionSettings,\n options: Types.parsedToOptions\n) {\n return locations\n .filter(x => {\n const id = getVertexId(x, options);\n\n // filter location references based on partial schedule options\n const includedLocations = options.partialScheduleOptions?.includedLocations;\n if (includedLocations && !includedLocations.has(id)) return false;\n return true;\n })\n .map(location => {\n const id = getVertexId(location, options);\n\n const doc: Types.dependency = {\n id,\n minBreakLength: location.minBreakLength,\n days: parseDays (location.days, settings),\n lockedTimes: attachLockedTimes(location.lockedTimes, options)\n };\n\n if (options.meta) {\n doc.meta = omitBy({\n ids: location.ids,\n name: location.displayName\n }, x => x == null);\n }\n\n return omitBy(doc, x => x == null) as Types.dependency;\n });\n}"],"mappings":";;;;;;AAOA,SAAgB,cACd,WACA,UACA,SACA;AACA,QAAO,UACJ,QAAO,MAAK;EACX,MAAM,KAAK,YAAY,GAAG,QAAQ;EAGlC,MAAM,oBAAoB,QAAQ,wBAAwB;AAC1D,MAAI,qBAAqB,CAAC,kBAAkB,IAAI,GAAG,CAAE,QAAO;AAC5D,SAAO;GACP,CACD,KAAI,aAAY;EAGf,MAAMA,MAAwB;GAC5B,IAHS,YAAY,UAAU,QAAQ;GAIvC,gBAAgB,SAAS;GACzB,MAAgB,UAA0B,SAAS,MAAM,SAAS;GAClE,aAAgB,kBAAkB,SAAS,aAAa,QAAQ;GACjE;AAED,MAAI,QAAQ,KACV,KAAI,OAAO,OAAO;GAChB,KAAM,SAAS;GACf,MAAM,SAAS;GAChB,GAAE,MAAK,KAAK,KAAK;AAGpB,SAAO,OAAO,MAAK,MAAK,KAAK,KAAK;GAClC"}
1
+ {"version":3,"file":"dependencies.js","names":["doc: Types.dependency"],"sources":["../../../../src/RS/to/input/dependencies.ts"],"sourcesContent":["import { omitBy } from 'lodash-es';\nimport { attachLockedTimes } from './util/attach-locked-times';\nimport type { ConnectedTypes } from '../../make-connected';\nimport { getVertexId } from '../../../core/util';\nimport { parseDays } from './util/parse-days';\nimport type { Types } from '../../types';\n\nexport function fromLocations (\n locations: ConnectedTypes.location[],\n settings: ConnectedTypes.divisionSettings,\n options: Types.parsedToOptions\n) {\n return locations\n .filter(x => {\n const id = getVertexId(x, options);\n\n // filter location references based on partial schedule options\n const includedLocations = options.partialScheduleOptions?.includedLocations;\n if (includedLocations && !includedLocations.has(id)) return false;\n return true;\n })\n .map(location => {\n const id = getVertexId(location, options);\n\n const doc: Types.dependency = {\n id,\n minBreakLength: location.minBreakLength,\n days: parseDays (location.days, settings),\n lockedTimes: attachLockedTimes(location.lockedTimes, options)\n };\n\n if (options.includeEntityMeta) {\n doc.meta = omitBy({\n ids: location.ids,\n name: location.displayName\n }, x => x == null);\n }\n\n return omitBy(doc, x => x == null) as Types.dependency;\n });\n}"],"mappings":";;;;;;AAOA,SAAgB,cACd,WACA,UACA,SACA;AACA,QAAO,UACJ,QAAO,MAAK;EACX,MAAM,KAAK,YAAY,GAAG,QAAQ;EAGlC,MAAM,oBAAoB,QAAQ,wBAAwB;AAC1D,MAAI,qBAAqB,CAAC,kBAAkB,IAAI,GAAG,CAAE,QAAO;AAC5D,SAAO;GACP,CACD,KAAI,aAAY;EAGf,MAAMA,MAAwB;GAC5B,IAHS,YAAY,UAAU,QAAQ;GAIvC,gBAAgB,SAAS;GACzB,MAAgB,UAA0B,SAAS,MAAM,SAAS;GAClE,aAAgB,kBAAkB,SAAS,aAAa,QAAQ;GACjE;AAED,MAAI,QAAQ,kBACV,KAAI,OAAO,OAAO;GAChB,KAAM,SAAS;GACf,MAAM,SAAS;GAChB,GAAE,MAAK,KAAK,KAAK;AAGpB,SAAO,OAAO,MAAK,MAAK,KAAK,KAAK;GAClC"}
@@ -47,7 +47,7 @@ function parseEvents(events, settings, options, periodsMap) {
47
47
  minBreakLength: parseMinimumBreakLength(event.minBreakLength)
48
48
  };
49
49
  if (event.fixedStart && event.start) Object.assign(doc, toDayAndStart(event.start));
50
- if (options.meta) doc.meta = omitBy({
50
+ if (options.includeEntityMeta) doc.meta = omitBy({
51
51
  name: event.displayName,
52
52
  ids: event.ids,
53
53
  parked: event.parked,
@@ -1 +1 @@
1
- {"version":3,"file":"events.js","names":["doc: Types.event","id"],"sources":["../../../../src/RS/to/input/events.ts"],"sourcesContent":["import moment from 'moment';\nimport { omitBy } from 'lodash-es';\nimport { parseMinimumBreakLength } from './util/parse-minimum-break-length';\nimport { attachLockedTimes } from './util/attach-locked-times';\nimport type { Types } from '../../types';\nimport { parseGroupReferences } from './util/parse-group-references';\nimport { parseLocationReferences, parseSelectedLocations } from './util/parse-location-references';\nimport type { ConnectedTypes } from '../../make-connected';\nimport { getDayIndex, getVertexId } from '../../../core/util';\nimport { getPeriodIndex, idOf } from './util/util';\nimport { parseDays } from './util/parse-days';\nimport type { DateType } from '../../../common/types';\nimport { parseIntervals } from './util/parse-intervals';\nimport { getDefaultInterval } from './intervals';\n\nexport function toDayAndStart (start: DateType) {\n return {\n start: parseFloat(moment.utc(start).format('HH.mm')),\n day: getDayIndex(start)\n };\n}\n\nexport function parseEvents (\n events: ConnectedTypes.event[] | undefined,\n settings: ConnectedTypes.divisionSettings,\n options: Types.parsedToOptions,\n periodsMap: Map<string | undefined, number>\n): Types.event[] {\n const defaultInterval = getDefaultInterval(settings);\n\n return (events ?? [])\n .map((event): Types.event | undefined => {\n const id = getVertexId(event, options);\n const intervals = event.intervals ?? event.course?.intervals ?? defaultInterval;\n\n const duration = event.preferredDuration;\n if (!duration) throw new Error(`(RS::To::Events) Event \"${id}\" has no duration`);\n\n const forcedOverlapId = event.overlapSpecies?.species?.find(({ to }) => to == event)?.id;\n\n // if the event is linked, force duration variance to 0\n const maxLengthVariance = forcedOverlapId != null\n ? event.durationVariance ?? 0\n : 0;\n\n const doc: Types.event = {\n id: idOf.event(event, options),\n length: duration,\n maxLengthVariance: maxLengthVariance,\n weight: event.weight,\n density: event.density,\n potentialCenter: event.centerOfAttraction ? parseFloat(event.centerOfAttraction.replace(':', '.')) : undefined,\n forcedOverlapId: forcedOverlapId,\n period: getPeriodIndex (event.period, periodsMap, options),\n _period: event.period ? getVertexId(event.period, options) : undefined,\n days: parseDays (event.days, settings),\n dependencies: parseLocationReferences (event.locations, options),\n groups: parseGroupReferences ({ type: 'event', item: event }, options),\n intervals: options.oldFormat\n ? parseIntervals(intervals, undefined, settings)\n : idOf.intervalPairReference(intervals, undefined, options),\n lockedTimes: attachLockedTimes (event.lockedTimes, options),\n minBreakLength: parseMinimumBreakLength (event.minBreakLength),\n };\n\n // if the event has a fixed day and start\n if (event.fixedStart && event.start) {\n Object.assign(doc, toDayAndStart(event.start));\n }\n\n if (options.meta) {\n doc.meta = omitBy({\n name: event.displayName,\n ids: event.ids,\n parked: event.parked,\n visible: event.visible,\n start: event.start ? moment.utc(event.start) : undefined,\n end: event.end ? moment.utc(event.start) : undefined,\n course: event.course ? getVertexId(event.course, options) : undefined,\n inLocations: event.inLocations ? 
parseSelectedLocations(event, options) : undefined\n }, x => x == null);\n }\n\n ////\n //// filter events based on partialScheduleOptions\n ////\n if (options.partialScheduleOptions) {\n const { includedEvents, omittedEventsHandling } = options.partialScheduleOptions;\n if (includedEvents && !includedEvents.has(doc.id)) { // collection.id => take into account dynamic locked times too!\n\n if (omittedEventsHandling == 'ignore') return;\n\n if (omittedEventsHandling == 'freeze') {\n // must not be parked and have a start and duration to be frozen, otherwise it's ignored\n if (event.parked || !event.start || !event.duration) return;\n\n // fix day, start and end\n Object.assign(doc, toDayAndStart(event.start));\n doc.length = event.duration;\n doc.maxLengthVariance = 0;\n\n // override intervals and days to not cause conflicts\n const numDays = settings.numDays ?? 5;\n doc.days = Array.from({ length: numDays }, (_, i) => i);\n doc.intervals = Array.from({ length: numDays }, () => [{ beg: 0, end: 23.55 }]);\n\n // fix locations\n doc.dependencies = (event.inLocations ?? [])\n .filter((x): x is NonNullable<typeof x> => !!x)\n .map(x => {\n const id = getVertexId(x, options);\n\n // filter location references based on partial schedule options\n const includedLocations = options.partialScheduleOptions?.includedLocations;\n if (includedLocations && !includedLocations.has(id)) return;\n\n return [{ dependency: id }] as Types.availableDependency[];\n })\n .filter(x => x != null);\n }\n\n }\n }\n\n return omitBy(doc, x => x == null) as Types.event;\n })\n .filter((x): x is NonNullable<typeof x> => !!x);\n}"],"mappings":";;;;;;;;;;;;;AAeA,SAAgB,cAAe,OAAiB;AAC9C,QAAO;EACL,OAAO,WAAW,OAAO,IAAI,MAAM,CAAC,OAAO,QAAQ,CAAC;EACpD,KAAO,YAAY,MAAM;EAC1B;;AAGH,SAAgB,YACd,QACA,UACA,SACA,YACe;CACf,MAAM,kBAAkB,mBAAmB,SAAS;AAEpD,SAAQ,UAAU,EAAE,EACjB,KAAK,UAAmC;EACvC,MAAM,KAAY,YAAY,OAAO,QAAQ;EAC7C,MAAM,YAAY,MAAM,aAAa,MAAM,QAAQ,aAAa;EAEhE,MAAM,WAAW,MAAM;AACvB,MAAI,CAAC,SAAU,OAAM,IAAI,MAAM,2BAA2B,GAAG,mBAAmB;EAEhF,MAAM,kBAAkB,MAAM,gBAAgB,SAAS,MAAM,EAAE,SAAS,MAAM,MAAM,EAAE;EAGtF,MAAM,oBAAoB,mBAAmB,OACzC,MAAM,oBAAoB,IAC1B;EAEJ,MAAMA,MAAmB;GACvB,IAAmB,KAAK,MAAM,OAAO,QAAQ;GAC7C,QAAmB;GACA;GACnB,QAAmB,MAAM;GACzB,SAAmB,MAAM;GACzB,iBAAmB,MAAM,qBAAqB,WAAW,MAAM,mBAAmB,QAAQ,KAAK,IAAI,CAAC,GAAG;GACpF;GACnB,QAAmB,eAA2B,MAAM,QAAQ,YAAY,QAAQ;GAChF,SAAmB,MAAM,SAAS,YAAY,MAAM,QAAQ,QAAQ,GAAG;GACvE,MAAmB,UAA2B,MAAM,MAAM,SAAS;GACnE,cAAmB,wBAA2B,MAAM,WAAW,QAAQ;GACvE,QAAmB,qBAA2B;IAAE,MAAM;IAAS,MAAM;IAAO,EAAE,QAAQ;GACtF,WAAmB,QAAQ,YACvB,eAAe,WAAW,QAAW,SAAS,GAC9C,KAAK,sBAAsB,WAAW,QAAW,QAAQ;GAC7D,aAAgB,kBAA2B,MAAM,aAAa,QAAQ;GACtE,gBAAgB,wBAA2B,MAAM,eAAe;GACjE;AAGD,MAAI,MAAM,cAAc,MAAM,MAC5B,QAAO,OAAO,KAAK,cAAc,MAAM,MAAM,CAAC;AAGhD,MAAI,QAAQ,KACV,KAAI,OAAO,OAAO;GAChB,MAAa,MAAM;GACnB,KAAa,MAAM;GACnB,QAAa,MAAM;GACnB,SAAa,MAAM;GACnB,OAAa,MAAM,QAAc,OAAO,IAAI,MAAM,MAAM,GAAkB;GAC1E,KAAa,MAAM,MAAc,OAAO,IAAI,MAAM,MAAM,GAAkB;GAC1E,QAAa,MAAM,SAAc,YAAY,MAAM,QAAQ,QAAQ,GAAO;GAC1E,aAAa,MAAM,cAAc,uBAAuB,OAAO,QAAQ,GAAG;GAC3E,GAAE,MAAK,KAAK,KAAK;AAMpB,MAAI,QAAQ,wBAAwB;GAClC,MAAM,EAAE,gBAAgB,0BAA0B,QAAQ;AAC1D,OAAI,kBAAkB,CAAC,eAAe,IAAI,IAAI,GAAG,EAAE;AAEjD,QAAI,yBAAyB,SAAU;AAEvC,QAAI,yBAAyB,UAAU;AAErC,SAAI,MAAM,UAAU,CAAC,MAAM,SAAS,CAAC,MAAM,SAAU;AAGrD,YAAO,OAAO,KAAK,cAAc,MAAM,MAAM,CAAC;AAC9C,SAAI,SAAS,MAAM;AACnB,SAAI,oBAAoB;KAGxB,MAAM,UAAU,SAAS,WAAW;AACpC,SAAI,OAAY,MAAM,KAAK,EAAE,QAAQ,SAAS,GAAG,GAAG,MAAM,EAAE;AAC5D,SAAI,YAAY,MAAM,KAAK,EAAE,QAAQ,SAAS,QAAQ,CAAC;MAAE,KAAK;MAAG,KAAK;MAAO,CAAC,CAAC;AAG/E,SAAI,gBAAgB,MAAM,eAAe,EAAE,EACxC,QAAQ,MAAkC,CAAC,CAAC,EAAE,CAC9C,K
AAI,MAAK;MACR,MAAMC,OAAK,YAAY,GAAG,QAAQ;MAGlC,MAAM,oBAAoB,QAAQ,wBAAwB;AAC1D,UAAI,qBAAqB,CAAC,kBAAkB,IAAIA,KAAG,CAAE;AAErD,aAAO,CAAC,EAAE,YAAYA,MAAI,CAAC;OAC3B,CACD,QAAO,MAAK,KAAK,KAAK;;;;AAM/B,SAAO,OAAO,MAAK,MAAK,KAAK,KAAK;GAClC,CACD,QAAQ,MAAkC,CAAC,CAAC,EAAE"}
1
+ {"version":3,"file":"events.js","names":["doc: Types.event","id"],"sources":["../../../../src/RS/to/input/events.ts"],"sourcesContent":["import moment from 'moment';\nimport { omitBy } from 'lodash-es';\nimport { parseMinimumBreakLength } from './util/parse-minimum-break-length';\nimport { attachLockedTimes } from './util/attach-locked-times';\nimport type { Types } from '../../types';\nimport { parseGroupReferences } from './util/parse-group-references';\nimport { parseLocationReferences, parseSelectedLocations } from './util/parse-location-references';\nimport type { ConnectedTypes } from '../../make-connected';\nimport { getDayIndex, getVertexId } from '../../../core/util';\nimport { getPeriodIndex, idOf } from './util/util';\nimport { parseDays } from './util/parse-days';\nimport type { DateType } from '../../../common/types';\nimport { parseIntervals } from './util/parse-intervals';\nimport { getDefaultInterval } from './intervals';\n\nexport function toDayAndStart (start: DateType) {\n return {\n start: parseFloat(moment.utc(start).format('HH.mm')),\n day: getDayIndex(start)\n };\n}\n\nexport function parseEvents (\n events: ConnectedTypes.event[] | undefined,\n settings: ConnectedTypes.divisionSettings,\n options: Types.parsedToOptions,\n periodsMap: Map<string | undefined, number>\n): Types.event[] {\n const defaultInterval = getDefaultInterval(settings);\n\n return (events ?? [])\n .map((event): Types.event | undefined => {\n const id = getVertexId(event, options);\n const intervals = event.intervals ?? event.course?.intervals ?? defaultInterval;\n\n const duration = event.preferredDuration;\n if (!duration) throw new Error(`(RS::To::Events) Event \"${id}\" has no duration`);\n\n const forcedOverlapId = event.overlapSpecies?.species?.find(({ to }) => to == event)?.id;\n\n // if the event is linked, force duration variance to 0\n const maxLengthVariance = forcedOverlapId != null\n ? event.durationVariance ?? 0\n : 0;\n\n const doc: Types.event = {\n id: idOf.event(event, options),\n length: duration,\n maxLengthVariance: maxLengthVariance,\n weight: event.weight,\n density: event.density,\n potentialCenter: event.centerOfAttraction ? parseFloat(event.centerOfAttraction.replace(':', '.')) : undefined,\n forcedOverlapId: forcedOverlapId,\n period: getPeriodIndex (event.period, periodsMap, options),\n _period: event.period ? getVertexId(event.period, options) : undefined,\n days: parseDays (event.days, settings),\n dependencies: parseLocationReferences (event.locations, options),\n groups: parseGroupReferences ({ type: 'event', item: event }, options),\n intervals: options.oldFormat\n ? parseIntervals(intervals, undefined, settings)\n : idOf.intervalPairReference(intervals, undefined, options),\n lockedTimes: attachLockedTimes (event.lockedTimes, options),\n minBreakLength: parseMinimumBreakLength (event.minBreakLength),\n };\n\n // if the event has a fixed day and start\n if (event.fixedStart && event.start) {\n Object.assign(doc, toDayAndStart(event.start));\n }\n\n if (options.includeEntityMeta) {\n doc.meta = omitBy({\n name: event.displayName,\n ids: event.ids,\n parked: event.parked,\n visible: event.visible,\n start: event.start ? moment.utc(event.start) : undefined,\n end: event.end ? moment.utc(event.start) : undefined,\n course: event.course ? getVertexId(event.course, options) : undefined,\n inLocations: event.inLocations ? 
parseSelectedLocations(event, options) : undefined\n }, x => x == null);\n }\n\n ////\n //// filter events based on partialScheduleOptions\n ////\n if (options.partialScheduleOptions) {\n const { includedEvents, omittedEventsHandling } = options.partialScheduleOptions;\n if (includedEvents && !includedEvents.has(doc.id)) { // collection.id => take into account dynamic locked times too!\n\n if (omittedEventsHandling == 'ignore') return;\n\n if (omittedEventsHandling == 'freeze') {\n // must not be parked and have a start and duration to be frozen, otherwise it's ignored\n if (event.parked || !event.start || !event.duration) return;\n\n // fix day, start and end\n Object.assign(doc, toDayAndStart(event.start));\n doc.length = event.duration;\n doc.maxLengthVariance = 0;\n\n // override intervals and days to not cause conflicts\n const numDays = settings.numDays ?? 5;\n doc.days = Array.from({ length: numDays }, (_, i) => i);\n doc.intervals = Array.from({ length: numDays }, () => [{ beg: 0, end: 23.55 }]);\n\n // fix locations\n doc.dependencies = (event.inLocations ?? [])\n .filter((x): x is NonNullable<typeof x> => !!x)\n .map(x => {\n const id = getVertexId(x, options);\n\n // filter location references based on partial schedule options\n const includedLocations = options.partialScheduleOptions?.includedLocations;\n if (includedLocations && !includedLocations.has(id)) return;\n\n return [{ dependency: id }] as Types.availableDependency[];\n })\n .filter(x => x != null);\n }\n\n }\n }\n\n return omitBy(doc, x => x == null) as Types.event;\n })\n .filter((x): x is NonNullable<typeof x> => !!x);\n}"],"mappings":";;;;;;;;;;;;;AAeA,SAAgB,cAAe,OAAiB;AAC9C,QAAO;EACL,OAAO,WAAW,OAAO,IAAI,MAAM,CAAC,OAAO,QAAQ,CAAC;EACpD,KAAO,YAAY,MAAM;EAC1B;;AAGH,SAAgB,YACd,QACA,UACA,SACA,YACe;CACf,MAAM,kBAAkB,mBAAmB,SAAS;AAEpD,SAAQ,UAAU,EAAE,EACjB,KAAK,UAAmC;EACvC,MAAM,KAAY,YAAY,OAAO,QAAQ;EAC7C,MAAM,YAAY,MAAM,aAAa,MAAM,QAAQ,aAAa;EAEhE,MAAM,WAAW,MAAM;AACvB,MAAI,CAAC,SAAU,OAAM,IAAI,MAAM,2BAA2B,GAAG,mBAAmB;EAEhF,MAAM,kBAAkB,MAAM,gBAAgB,SAAS,MAAM,EAAE,SAAS,MAAM,MAAM,EAAE;EAGtF,MAAM,oBAAoB,mBAAmB,OACzC,MAAM,oBAAoB,IAC1B;EAEJ,MAAMA,MAAmB;GACvB,IAAmB,KAAK,MAAM,OAAO,QAAQ;GAC7C,QAAmB;GACA;GACnB,QAAmB,MAAM;GACzB,SAAmB,MAAM;GACzB,iBAAmB,MAAM,qBAAqB,WAAW,MAAM,mBAAmB,QAAQ,KAAK,IAAI,CAAC,GAAG;GACpF;GACnB,QAAmB,eAA2B,MAAM,QAAQ,YAAY,QAAQ;GAChF,SAAmB,MAAM,SAAS,YAAY,MAAM,QAAQ,QAAQ,GAAG;GACvE,MAAmB,UAA2B,MAAM,MAAM,SAAS;GACnE,cAAmB,wBAA2B,MAAM,WAAW,QAAQ;GACvE,QAAmB,qBAA2B;IAAE,MAAM;IAAS,MAAM;IAAO,EAAE,QAAQ;GACtF,WAAmB,QAAQ,YACvB,eAAe,WAAW,QAAW,SAAS,GAC9C,KAAK,sBAAsB,WAAW,QAAW,QAAQ;GAC7D,aAAgB,kBAA2B,MAAM,aAAa,QAAQ;GACtE,gBAAgB,wBAA2B,MAAM,eAAe;GACjE;AAGD,MAAI,MAAM,cAAc,MAAM,MAC5B,QAAO,OAAO,KAAK,cAAc,MAAM,MAAM,CAAC;AAGhD,MAAI,QAAQ,kBACV,KAAI,OAAO,OAAO;GAChB,MAAa,MAAM;GACnB,KAAa,MAAM;GACnB,QAAa,MAAM;GACnB,SAAa,MAAM;GACnB,OAAa,MAAM,QAAc,OAAO,IAAI,MAAM,MAAM,GAAkB;GAC1E,KAAa,MAAM,MAAc,OAAO,IAAI,MAAM,MAAM,GAAkB;GAC1E,QAAa,MAAM,SAAc,YAAY,MAAM,QAAQ,QAAQ,GAAO;GAC1E,aAAa,MAAM,cAAc,uBAAuB,OAAO,QAAQ,GAAG;GAC3E,GAAE,MAAK,KAAK,KAAK;AAMpB,MAAI,QAAQ,wBAAwB;GAClC,MAAM,EAAE,gBAAgB,0BAA0B,QAAQ;AAC1D,OAAI,kBAAkB,CAAC,eAAe,IAAI,IAAI,GAAG,EAAE;AAEjD,QAAI,yBAAyB,SAAU;AAEvC,QAAI,yBAAyB,UAAU;AAErC,SAAI,MAAM,UAAU,CAAC,MAAM,SAAS,CAAC,MAAM,SAAU;AAGrD,YAAO,OAAO,KAAK,cAAc,MAAM,MAAM,CAAC;AAC9C,SAAI,SAAS,MAAM;AACnB,SAAI,oBAAoB;KAGxB,MAAM,UAAU,SAAS,WAAW;AACpC,SAAI,OAAY,MAAM,KAAK,EAAE,QAAQ,SAAS,GAAG,GAAG,MAAM,EAAE;AAC5D,SAAI,YAAY,MAAM,KAAK,EAAE,QAAQ,SAAS,QAAQ,CAAC;MAAE,KAAK;MAAG,KAAK;MAAO,CAAC,CAAC;AAG/E,SAAI,gBAAgB,MAAM,eAAe,EAAE,EACxC,QAAQ,MAAkC,CAAC,CAAC,EAAE,CAC9C,
KAAI,MAAK;MACR,MAAMC,OAAK,YAAY,GAAG,QAAQ;MAGlC,MAAM,oBAAoB,QAAQ,wBAAwB;AAC1D,UAAI,qBAAqB,CAAC,kBAAkB,IAAIA,KAAG,CAAE;AAErD,aAAO,CAAC,EAAE,YAAYA,MAAI,CAAC;OAC3B,CACD,QAAO,MAAK,KAAK,KAAK;;;;AAM/B,SAAO,OAAO,MAAK,MAAK,KAAK,KAAK;GAClC,CACD,QAAQ,MAAkC,CAAC,CAAC,EAAE"}
@@ -27,7 +27,7 @@ function fromGroups(groups, settings, options) {
27
27
  minBreakLength: parseMinimumBreakLength(group.minBreakLength),
28
28
  ...parseMaxWorkingHours(group, options)
29
29
  };
30
- if (options.meta) doc.meta = omitBy({
30
+ if (options.includeEntityMeta) doc.meta = omitBy({
31
31
  ids: group.ids,
32
32
  name: group.displayName
33
33
  }, (x) => x == null);
@@ -1 +1 @@
1
- {"version":3,"file":"groups.js","names":["doc: Types.group"],"sources":["../../../../src/RS/to/input/groups.ts"],"sourcesContent":["import { omitBy } from 'lodash-es';\nimport type { Types } from '../../types';\nimport type { ConnectedTypes } from '../../make-connected';\nimport { parseDays } from './util/parse-days';\nimport { attachLockedTimes } from './util/attach-locked-times';\nimport { parseMinimumBreakLength } from './util/parse-minimum-break-length';\nimport { parseMaxWorkingHours } from './util/parse-max-working-hours';\nimport { idOf } from './util/util';\nimport { parseIntervals } from './util/parse-intervals';\nimport { getDefaultInterval } from './intervals';\n\nexport function fromGroups (\n groups: ConnectedTypes.group[],\n settings: ConnectedTypes.divisionSettings,\n options: Types.parsedToOptions\n): Types.group[] {\n const defaultInterval = getDefaultInterval(settings);\n\n return groups\n .filter(group => group.species == 'class')\n .map(group => {\n const intervals = group.intervals ?? defaultInterval;\n const rootInterval = group.rootInterval ?? settings.defaultRootInterval;\n\n const doc: Types.group = {\n id: idOf.group(group, options),\n group_type: 'classes',\n weight: group.weight,\n minimizeGaps: true,\n minimizeDependencyAlternation: false,\n forbidOverlappingEvents: options.oldFormat ? group.forbidOverlappingEvents : undefined, // Deprecated in v3\n disableDayLengthPunishment: options.oldFormat ? group.disableDayLengthPunishment : undefined, // Deprecated in v3\n intervals: options.oldFormat\n ? parseIntervals(intervals, rootInterval, settings)\n : idOf.intervalPairReference(intervals, rootInterval, options),\n days: parseDays (group.days, settings),\n lockedTimes: attachLockedTimes (group.lockedTimes, options),\n minBreakLength: parseMinimumBreakLength (group.minBreakLength),\n\n ...parseMaxWorkingHours(group, options),\n };\n\n if (options.meta) {\n doc.meta = omitBy({\n ids: group.ids,\n name: group.displayName,\n }, x => x == null);\n }\n\n return omitBy(doc, x => x == null) as Types.group;\n });\n};\n"],"mappings":";;;;;;;;;;AAWA,SAAgB,WACd,QACA,UACA,SACe;CACf,MAAM,kBAAkB,mBAAmB,SAAS;AAEpD,QAAO,OACJ,QAAO,UAAS,MAAM,WAAW,QAAQ,CACzC,KAAI,UAAS;EACZ,MAAM,YAAe,MAAM,aAAgB;EAC3C,MAAM,eAAe,MAAM,gBAAgB,SAAS;EAEpD,MAAMA,MAAmB;GACvB,IAA+B,KAAK,MAAM,OAAO,QAAQ;GACzD,YAA+B;GAC/B,QAA+B,MAAM;GACrC,cAA+B;GAC/B,+BAA+B;GAC/B,yBAA+B,QAAQ,YAAY,MAAM,0BAA0B;GACnF,4BAA+B,QAAQ,YAAY,MAAM,6BAA6B;GACtF,WAA+B,QAAQ,YACnC,eAAe,WAAW,cAAc,SAAS,GACjD,KAAK,sBAAsB,WAAW,cAAc,QAAQ;GAChE,MAAgB,UAA2B,MAAM,MAAM,SAAS;GAChE,aAAgB,kBAA2B,MAAM,aAAa,QAAQ;GACtE,gBAAgB,wBAA2B,MAAM,eAAe;GAEhE,GAAG,qBAAqB,OAAO,QAAQ;GACxC;AAED,MAAI,QAAQ,KACV,KAAI,OAAO,OAAO;GAChB,KAAM,MAAM;GACZ,MAAM,MAAM;GACb,GAAE,MAAK,KAAK,KAAK;AAGpB,SAAO,OAAO,MAAK,MAAK,KAAK,KAAK;GAClC"}
1
+ {"version":3,"file":"groups.js","names":["doc: Types.group"],"sources":["../../../../src/RS/to/input/groups.ts"],"sourcesContent":["import { omitBy } from 'lodash-es';\nimport type { Types } from '../../types';\nimport type { ConnectedTypes } from '../../make-connected';\nimport { parseDays } from './util/parse-days';\nimport { attachLockedTimes } from './util/attach-locked-times';\nimport { parseMinimumBreakLength } from './util/parse-minimum-break-length';\nimport { parseMaxWorkingHours } from './util/parse-max-working-hours';\nimport { idOf } from './util/util';\nimport { parseIntervals } from './util/parse-intervals';\nimport { getDefaultInterval } from './intervals';\n\nexport function fromGroups (\n groups: ConnectedTypes.group[],\n settings: ConnectedTypes.divisionSettings,\n options: Types.parsedToOptions\n): Types.group[] {\n const defaultInterval = getDefaultInterval(settings);\n\n return groups\n .filter(group => group.species == 'class')\n .map(group => {\n const intervals = group.intervals ?? defaultInterval;\n const rootInterval = group.rootInterval ?? settings.defaultRootInterval;\n\n const doc: Types.group = {\n id: idOf.group(group, options),\n group_type: 'classes',\n weight: group.weight,\n minimizeGaps: true,\n minimizeDependencyAlternation: false,\n forbidOverlappingEvents: options.oldFormat ? group.forbidOverlappingEvents : undefined, // Deprecated in v3\n disableDayLengthPunishment: options.oldFormat ? group.disableDayLengthPunishment : undefined, // Deprecated in v3\n intervals: options.oldFormat\n ? parseIntervals(intervals, rootInterval, settings)\n : idOf.intervalPairReference(intervals, rootInterval, options),\n days: parseDays (group.days, settings),\n lockedTimes: attachLockedTimes (group.lockedTimes, options),\n minBreakLength: parseMinimumBreakLength (group.minBreakLength),\n\n ...parseMaxWorkingHours(group, options),\n };\n\n if (options.includeEntityMeta) {\n doc.meta = omitBy({\n ids: group.ids,\n name: group.displayName,\n }, x => x == null);\n }\n\n return omitBy(doc, x => x == null) as Types.group;\n });\n};\n"],"mappings":";;;;;;;;;;AAWA,SAAgB,WACd,QACA,UACA,SACe;CACf,MAAM,kBAAkB,mBAAmB,SAAS;AAEpD,QAAO,OACJ,QAAO,UAAS,MAAM,WAAW,QAAQ,CACzC,KAAI,UAAS;EACZ,MAAM,YAAe,MAAM,aAAgB;EAC3C,MAAM,eAAe,MAAM,gBAAgB,SAAS;EAEpD,MAAMA,MAAmB;GACvB,IAA+B,KAAK,MAAM,OAAO,QAAQ;GACzD,YAA+B;GAC/B,QAA+B,MAAM;GACrC,cAA+B;GAC/B,+BAA+B;GAC/B,yBAA+B,QAAQ,YAAY,MAAM,0BAA0B;GACnF,4BAA+B,QAAQ,YAAY,MAAM,6BAA6B;GACtF,WAA+B,QAAQ,YACnC,eAAe,WAAW,cAAc,SAAS,GACjD,KAAK,sBAAsB,WAAW,cAAc,QAAQ;GAChE,MAAgB,UAA2B,MAAM,MAAM,SAAS;GAChE,aAAgB,kBAA2B,MAAM,aAAa,QAAQ;GACtE,gBAAgB,wBAA2B,MAAM,eAAe;GAEhE,GAAG,qBAAqB,OAAO,QAAQ;GACxC;AAED,MAAI,QAAQ,kBACV,KAAI,OAAO,OAAO;GAChB,KAAM,MAAM;GACZ,MAAM,MAAM;GACb,GAAE,MAAK,KAAK,KAAK;AAGpB,SAAO,OAAO,MAAK,MAAK,KAAK,KAAK;GAClC"}
@@ -10,7 +10,7 @@ function fromPersonsToIndividuals(persons, options) {
10
10
  id: idOf.person(person, options),
11
11
  group: person.group ? `groups.${getVertexId(person.group, options)}` : void 0
12
12
  };
13
- if (options.meta) doc.meta = omitBy({
13
+ if (options.includeEntityMeta) doc.meta = omitBy({
14
14
  ids: person.ids,
15
15
  name: person.displayName
16
16
  }, (x) => x == null);
@@ -24,7 +24,7 @@ function fromGroupToIndividualsSet(id, reference, options) {
24
24
  id,
25
25
  individuals: [...members.difference(excluded)].map((x) => idOf.person(x, options))
26
26
  };
27
- if (options.meta) doc.meta = omitBy({ name: reference.to.displayName + (reference.exclude?.length ? ` (excl. ${reference.exclude.map((x) => x.displayName).join(", ")})` : "") }, (x) => x == null);
27
+ if (options.includeEntityMeta) doc.meta = omitBy({ name: reference.to.displayName + (reference.exclude?.length ? ` (excl. ${reference.exclude.map((x) => x.displayName).join(", ")})` : "") }, (x) => x == null);
28
28
  return doc;
29
29
  }
30
30
  function extractUniqueIndividuals(persons, courses, events, lockedTimes, options) {
@@ -1 +1 @@
1
- {"version":3,"file":"individuals.js","names":["doc: Types.individual","doc: Types.individualsSet","x"],"sources":["../../../../src/RS/to/input/individuals.ts"],"sourcesContent":["import { omitBy, uniqBy } from 'lodash-es';\nimport type { Types } from '../../types';\nimport type { ConnectedTypes } from '../../make-connected';\nimport { getVertexId } from '../../../core/util';\nimport { idOf } from './util/util';\nimport { makeChainable } from '../../../common/make-chainable';\nimport type { GroupWithExclude } from '../../../core/interfaces/vertices/util/edges';\n\n\nfunction fromPersonsToIndividuals (\n persons: ConnectedTypes.person[],\n options: Types.parsedToOptions\n): Types.individual[] {\n return persons.map(person => {\n const doc: Types.individual = {\n id: idOf.person(person, options),\n group: person.group ? `groups.${ getVertexId(person.group, options) }` : undefined,\n };\n\n if (options.meta) {\n doc.meta = omitBy({\n ids: person.ids,\n name: person.displayName,\n }, x => x == null);\n }\n\n return omitBy(doc, x => x == null) as Types.individual;\n });\n}\n\nfunction fromGroupToIndividualsSet (\n id: string,\n reference: GroupWithExclude.Util.Value<ConnectedTypes.group, ConnectedTypes.person>,\n options: Types.parsedToOptions\n): Types.individualsSet {\n const members = new Set(reference.to.members?.map(x => getVertexId(x, options)));\n const excluded = new Set(reference.exclude?.map(x => getVertexId(x, options)));\n const remaining = members.difference(excluded);\n\n const doc: Types.individualsSet = {\n id: id,\n individuals: [...remaining].map(x => idOf.person(x, options)),\n };\n\n if (options.meta) {\n doc.meta = omitBy({\n name: reference.to.displayName + (reference.exclude?.length ? ` (excl. ${ reference.exclude.map(x => x.displayName).join(', ') })` : ''),\n }, x => x == null);\n }\n\n return doc;\n}\n\n// function fromParticipantsToIndividualsSet (\n// id: string,\n// reference: PersonReference<ConnectedTypes.person>[],\n// options: OutOptions\n// ): _IndividualsSet {\n// const idKey = getIdKey(options);\n\n// const doc: _IndividualsSet = {\n// id: id,\n// individuals: reference.map(x => getVertexId(x.to, idKey))\n// };\n\n// if (options.meta) {\n// doc.meta = omitBy({\n// name: reference.map(x => x.to.displayName).join(', '),\n// }, x => x == null);\n// }\n\n// return doc;\n// }\n\nexport function extractUniqueIndividuals (\n persons: ConnectedTypes.person[],\n courses: ConnectedTypes.course[],\n events: ConnectedTypes.event[],\n lockedTimes: ConnectedTypes.lockedTime[],\n options: Types.parsedToOptions\n): (Types.individual | Types.individualsSet)[] {\n const individuals = fromPersonsToIndividuals(persons, options);\n\n const uniqueIndividualsSetsFromGroupReferences = makeChainable([...courses, ...events, ...lockedTimes])\n .chain(\n x => x\n .map((item): GroupWithExclude.Util.Value<ConnectedTypes.group, ConnectedTypes.person>[] | undefined => {\n if ('groups' in item) return item.groups; // we may ignore the parent groups as they are all plain group references\n if ('coalesced' in item) return item.coalesced\n ?.filter(x => x.toModel == 'groups');\n return;\n })\n .flatMap(references => (references ?? 
[])\n .filter(x => x.to.species != 'class' || x.exclude?.length) // if plain group reference we will use the group directly\n .map(x => ({ id: idOf.groupReference(x, options), value: x }))\n ),\n x => uniqBy(x, x => x.id)\n .map(x => fromGroupToIndividualsSet(x.id, x.value, options))\n )\n .value;\n\n ////\n //// NOT NEEDED AS WE USE THE INDIVIDUALS DIRECTLY\n ////\n // const uniqueIndividualsSetsFromParticipants = chain([...courses, ...events])\n // .map(item => {\n // if ('participants' in item) return item.participants;\n // return;\n // })\n // .filter(x => x != null)\n // .filter(x => x.length > 1) // if a single participant we will use the individual directly\n // .map(x => ({ id: idOf.participantsReference(x, options), value: x }))\n // .uniqBy(x => x.id)\n // .map(x => fromParticipantsToIndividualsSet(x.id, x.value, options))\n // .value();\n\n return individuals\n .concat(uniqueIndividualsSetsFromGroupReferences);\n // .concat(uniqueIndividualsSetsFromParticipants);\n};\n"],"mappings":";;;;;;AASA,SAAS,yBACP,SACA,SACoB;AACpB,QAAO,QAAQ,KAAI,WAAU;EAC3B,MAAMA,MAAwB;GAC5B,IAAO,KAAK,OAAO,QAAQ,QAAQ;GACnC,OAAO,OAAO,QAAQ,UAAW,YAAY,OAAO,OAAO,QAAQ,KAAM;GAC1E;AAED,MAAI,QAAQ,KACV,KAAI,OAAO,OAAO;GAChB,KAAM,OAAO;GACb,MAAM,OAAO;GACd,GAAE,MAAK,KAAK,KAAK;AAGpB,SAAO,OAAO,MAAK,MAAK,KAAK,KAAK;GAClC;;AAGJ,SAAS,0BACP,IACA,WACA,SACsB;CACtB,MAAM,UAAY,IAAI,IAAI,UAAU,GAAG,SAAS,KAAI,MAAK,YAAY,GAAG,QAAQ,CAAC,CAAC;CAClF,MAAM,WAAY,IAAI,IAAI,UAAU,SAAS,KAAI,MAAK,YAAY,GAAG,QAAQ,CAAC,CAAC;CAG/E,MAAMC,MAA4B;EACnB;EACb,aAAa,CAAC,GAJE,QAAQ,WAAW,SAAS,CAIjB,CAAC,KAAI,MAAK,KAAK,OAAO,GAAG,QAAQ,CAAC;EAC9D;AAED,KAAI,QAAQ,KACV,KAAI,OAAO,OAAO,EAChB,MAAM,UAAU,GAAG,eAAe,UAAU,SAAS,SAAS,WAAY,UAAU,QAAQ,KAAI,MAAK,EAAE,YAAY,CAAC,KAAK,KAAK,CAAE,KAAK,KACtI,GAAE,MAAK,KAAK,KAAK;AAGpB,QAAO;;AAwBT,SAAgB,yBACd,SACA,SACA,QACA,aACA,SAC6C;CAC7C,MAAM,cAAc,yBAAyB,SAAS,QAAQ;CAE9D,MAAM,2CAA2C,cAAc;EAAC,GAAG;EAAS,GAAG;EAAQ,GAAG;EAAY,CAAC,CACpG,OACC,MAAK,EACF,KAAK,SAAiG;AACrG,MAAI,YAAe,KAAM,QAAO,KAAK;AACrC,MAAI,eAAe,KAAM,QAAO,KAAK,WACjC,QAAO,QAAKC,IAAE,WAAW,SAAS;GAEtC,CACD,SAAQ,gBAAe,cAAc,EAAE,EACrC,QAAO,QAAKA,IAAE,GAAG,WAAW,WAAWA,IAAE,SAAS,OAAO,CACzD,KAAI,SAAM;EAAE,IAAI,KAAK,eAAeA,KAAG,QAAQ;EAAE,OAAOA;EAAG,EAAE,CAC/D,GACH,MAAK,OAAO,IAAG,QAAKA,IAAE,GAAG,CACtB,KAAI,QAAK,0BAA0BA,IAAE,IAAIA,IAAE,OAAO,QAAQ,CAAC,CAC/D,CACA;AAiBH,QAAO,YACJ,OAAO,yCAAyC"}
1
+ {"version":3,"file":"individuals.js","names":["doc: Types.individual","doc: Types.individualsSet","x"],"sources":["../../../../src/RS/to/input/individuals.ts"],"sourcesContent":["import { omitBy, uniqBy } from 'lodash-es';\nimport type { Types } from '../../types';\nimport type { ConnectedTypes } from '../../make-connected';\nimport { getVertexId } from '../../../core/util';\nimport { idOf } from './util/util';\nimport { makeChainable } from '../../../common/make-chainable';\nimport type { GroupWithExclude } from '../../../core/interfaces/vertices/util/edges';\n\n\nfunction fromPersonsToIndividuals (\n persons: ConnectedTypes.person[],\n options: Types.parsedToOptions\n): Types.individual[] {\n return persons.map(person => {\n const doc: Types.individual = {\n id: idOf.person(person, options),\n group: person.group ? `groups.${ getVertexId(person.group, options) }` : undefined,\n };\n\n if (options.includeEntityMeta) {\n doc.meta = omitBy({\n ids: person.ids,\n name: person.displayName,\n }, x => x == null);\n }\n\n return omitBy(doc, x => x == null) as Types.individual;\n });\n}\n\nfunction fromGroupToIndividualsSet (\n id: string,\n reference: GroupWithExclude.Util.Value<ConnectedTypes.group, ConnectedTypes.person>,\n options: Types.parsedToOptions\n): Types.individualsSet {\n const members = new Set(reference.to.members?.map(x => getVertexId(x, options)));\n const excluded = new Set(reference.exclude?.map(x => getVertexId(x, options)));\n const remaining = members.difference(excluded);\n\n const doc: Types.individualsSet = {\n id: id,\n individuals: [...remaining].map(x => idOf.person(x, options)),\n };\n\n if (options.includeEntityMeta) {\n doc.meta = omitBy({\n name: reference.to.displayName + (reference.exclude?.length ? ` (excl. ${ reference.exclude.map(x => x.displayName).join(', ') })` : ''),\n }, x => x == null);\n }\n\n return doc;\n}\n\n// function fromParticipantsToIndividualsSet (\n// id: string,\n// reference: PersonReference<ConnectedTypes.person>[],\n// options: OutOptions\n// ): _IndividualsSet {\n// const idKey = getIdKey(options);\n\n// const doc: _IndividualsSet = {\n// id: id,\n// individuals: reference.map(x => getVertexId(x.to, idKey))\n// };\n\n// if (options.meta) {\n// doc.meta = omitBy({\n// name: reference.map(x => x.to.displayName).join(', '),\n// }, x => x == null);\n// }\n\n// return doc;\n// }\n\nexport function extractUniqueIndividuals (\n persons: ConnectedTypes.person[],\n courses: ConnectedTypes.course[],\n events: ConnectedTypes.event[],\n lockedTimes: ConnectedTypes.lockedTime[],\n options: Types.parsedToOptions\n): (Types.individual | Types.individualsSet)[] {\n const individuals = fromPersonsToIndividuals(persons, options);\n\n const uniqueIndividualsSetsFromGroupReferences = makeChainable([...courses, ...events, ...lockedTimes])\n .chain(\n x => x\n .map((item): GroupWithExclude.Util.Value<ConnectedTypes.group, ConnectedTypes.person>[] | undefined => {\n if ('groups' in item) return item.groups; // we may ignore the parent groups as they are all plain group references\n if ('coalesced' in item) return item.coalesced\n ?.filter(x => x.toModel == 'groups');\n return;\n })\n .flatMap(references => (references ?? 
[])\n .filter(x => x.to.species != 'class' || x.exclude?.length) // if plain group reference we will use the group directly\n .map(x => ({ id: idOf.groupReference(x, options), value: x }))\n ),\n x => uniqBy(x, x => x.id)\n .map(x => fromGroupToIndividualsSet(x.id, x.value, options))\n )\n .value;\n\n ////\n //// NOT NEEDED AS WE USE THE INDIVIDUALS DIRECTLY\n ////\n // const uniqueIndividualsSetsFromParticipants = chain([...courses, ...events])\n // .map(item => {\n // if ('participants' in item) return item.participants;\n // return;\n // })\n // .filter(x => x != null)\n // .filter(x => x.length > 1) // if a single participant we will use the individual directly\n // .map(x => ({ id: idOf.participantsReference(x, options), value: x }))\n // .uniqBy(x => x.id)\n // .map(x => fromParticipantsToIndividualsSet(x.id, x.value, options))\n // .value();\n\n return individuals\n .concat(uniqueIndividualsSetsFromGroupReferences);\n // .concat(uniqueIndividualsSetsFromParticipants);\n};\n"],"mappings":";;;;;;AASA,SAAS,yBACP,SACA,SACoB;AACpB,QAAO,QAAQ,KAAI,WAAU;EAC3B,MAAMA,MAAwB;GAC5B,IAAO,KAAK,OAAO,QAAQ,QAAQ;GACnC,OAAO,OAAO,QAAQ,UAAW,YAAY,OAAO,OAAO,QAAQ,KAAM;GAC1E;AAED,MAAI,QAAQ,kBACV,KAAI,OAAO,OAAO;GAChB,KAAM,OAAO;GACb,MAAM,OAAO;GACd,GAAE,MAAK,KAAK,KAAK;AAGpB,SAAO,OAAO,MAAK,MAAK,KAAK,KAAK;GAClC;;AAGJ,SAAS,0BACP,IACA,WACA,SACsB;CACtB,MAAM,UAAY,IAAI,IAAI,UAAU,GAAG,SAAS,KAAI,MAAK,YAAY,GAAG,QAAQ,CAAC,CAAC;CAClF,MAAM,WAAY,IAAI,IAAI,UAAU,SAAS,KAAI,MAAK,YAAY,GAAG,QAAQ,CAAC,CAAC;CAG/E,MAAMC,MAA4B;EACnB;EACb,aAAa,CAAC,GAJE,QAAQ,WAAW,SAAS,CAIjB,CAAC,KAAI,MAAK,KAAK,OAAO,GAAG,QAAQ,CAAC;EAC9D;AAED,KAAI,QAAQ,kBACV,KAAI,OAAO,OAAO,EAChB,MAAM,UAAU,GAAG,eAAe,UAAU,SAAS,SAAS,WAAY,UAAU,QAAQ,KAAI,MAAK,EAAE,YAAY,CAAC,KAAK,KAAK,CAAE,KAAK,KACtI,GAAE,MAAK,KAAK,KAAK;AAGpB,QAAO;;AAwBT,SAAgB,yBACd,SACA,SACA,QACA,aACA,SAC6C;CAC7C,MAAM,cAAc,yBAAyB,SAAS,QAAQ;CAE9D,MAAM,2CAA2C,cAAc;EAAC,GAAG;EAAS,GAAG;EAAQ,GAAG;EAAY,CAAC,CACpG,OACC,MAAK,EACF,KAAK,SAAiG;AACrG,MAAI,YAAe,KAAM,QAAO,KAAK;AACrC,MAAI,eAAe,KAAM,QAAO,KAAK,WACjC,QAAO,QAAKC,IAAE,WAAW,SAAS;GAEtC,CACD,SAAQ,gBAAe,cAAc,EAAE,EACrC,QAAO,QAAKA,IAAE,GAAG,WAAW,WAAWA,IAAE,SAAS,OAAO,CACzD,KAAI,SAAM;EAAE,IAAI,KAAK,eAAeA,KAAG,QAAQ;EAAE,OAAOA;EAAG,EAAE,CAC/D,GACH,MAAK,OAAO,IAAG,QAAKA,IAAE,GAAG,CACtB,KAAI,QAAK,0BAA0BA,IAAE,IAAIA,IAAE,OAAO,QAAQ,CAAC,CAC/D,CACA;AAiBH,QAAO,YACJ,OAAO,yCAAyC"}
@@ -27,7 +27,7 @@ function fromTeachers(teachers, settings, options) {
27
27
  minBreakLength: parseMinimumBreakLength(teacher.minBreakLength),
28
28
  ...parseMaxWorkingHours(teacher, options)
29
29
  };
30
- if (options.meta) doc.meta = omitBy({
30
+ if (options.includeEntityMeta) doc.meta = omitBy({
31
31
  ids: teacher.ids,
32
32
  name: teacher.displayName
33
33
  }, (x) => x == null);
@@ -1 +1 @@
1
- {"version":3,"file":"teachers.js","names":["doc: Types.group"],"sources":["../../../../src/RS/to/input/teachers.ts"],"sourcesContent":["import { omitBy } from 'lodash-es';\nimport type { Types } from '../../types';\nimport type { ConnectedTypes } from '../../make-connected';\nimport { parseMaxWorkingHours } from './util/parse-max-working-hours';\nimport { parseDays } from './util/parse-days';\nimport { attachLockedTimes } from './util/attach-locked-times';\nimport { parseMinimumBreakLength } from './util/parse-minimum-break-length';\nimport { idOf } from './util/util';\nimport { parseIntervals } from './util/parse-intervals';\nimport { getDefaultInterval } from './intervals';\n\nexport function fromTeachers (\n teachers: ConnectedTypes.teacher[],\n settings: ConnectedTypes.divisionSettings,\n options: Types.parsedToOptions\n): Types.group[] {\n const defaultInterval = getDefaultInterval(settings);\n\n return teachers\n .map(teacher => {\n const intervals = teacher.intervals ?? defaultInterval;\n const rootInterval = teacher.rootInterval ?? settings.defaultRootInterval;\n\n const doc: Types.group = {\n id: idOf.teacher(teacher, options),\n group_type: 'personal',\n minimizeGaps: false,\n minimizeDependencyAlternation: true,\n forbidOverlappingEvents: options.oldFormat ? teacher.forbidOverlappingEvents : undefined, // Deprecated in v3\n disableDayLengthPunishment: options.oldFormat ? teacher.disableDayLengthPunishment : undefined, // Deprecated in v3\n weight: teacher.weight,\n intervals: options.oldFormat\n ? parseIntervals(intervals, rootInterval, settings)\n : idOf.intervalPairReference(intervals, rootInterval, options),\n days: parseDays (teacher.days, settings),\n lockedTimes: attachLockedTimes (teacher.lockedTimes, options),\n minBreakLength: parseMinimumBreakLength (teacher.minBreakLength),\n\n ...parseMaxWorkingHours(teacher, options),\n };\n\n if (options.meta) {\n doc.meta = omitBy({\n ids: teacher.ids,\n name: teacher.displayName,\n }, x => x == null);\n }\n\n return omitBy(doc, x => x == null) as Types.group;\n });\n};\n"],"mappings":";;;;;;;;;;AAWA,SAAgB,aACd,UACA,UACA,SACe;CACf,MAAM,kBAAkB,mBAAmB,SAAS;AAEpD,QAAO,SACJ,KAAI,YAAW;EACd,MAAM,YAAe,QAAQ,aAAgB;EAC7C,MAAM,eAAe,QAAQ,gBAAgB,SAAS;EAEtD,MAAMA,MAAmB;GACvB,IAA+B,KAAK,QAAQ,SAAS,QAAQ;GAC7D,YAA+B;GAC/B,cAA+B;GAC/B,+BAA+B;GAC/B,yBAA+B,QAAQ,YAAY,QAAQ,0BAA0B;GACrF,4BAA+B,QAAQ,YAAY,QAAQ,6BAA6B;GACxF,QAA+B,QAAQ;GACvC,WAA+B,QAAQ,YACnC,eAAe,WAAW,cAAc,SAAS,GACjD,KAAK,sBAAsB,WAAW,cAAc,QAAQ;GAChE,MAAgB,UAA2B,QAAQ,MAAM,SAAS;GAClE,aAAgB,kBAA2B,QAAQ,aAAa,QAAQ;GACxE,gBAAgB,wBAA2B,QAAQ,eAAe;GAElE,GAAG,qBAAqB,SAAS,QAAQ;GAC1C;AAED,MAAI,QAAQ,KACV,KAAI,OAAO,OAAO;GAChB,KAAM,QAAQ;GACd,MAAM,QAAQ;GACf,GAAE,MAAK,KAAK,KAAK;AAGpB,SAAO,OAAO,MAAK,MAAK,KAAK,KAAK;GAClC"}
1
+ {"version":3,"file":"teachers.js","names":["doc: Types.group"],"sources":["../../../../src/RS/to/input/teachers.ts"],"sourcesContent":["import { omitBy } from 'lodash-es';\nimport type { Types } from '../../types';\nimport type { ConnectedTypes } from '../../make-connected';\nimport { parseMaxWorkingHours } from './util/parse-max-working-hours';\nimport { parseDays } from './util/parse-days';\nimport { attachLockedTimes } from './util/attach-locked-times';\nimport { parseMinimumBreakLength } from './util/parse-minimum-break-length';\nimport { idOf } from './util/util';\nimport { parseIntervals } from './util/parse-intervals';\nimport { getDefaultInterval } from './intervals';\n\nexport function fromTeachers (\n teachers: ConnectedTypes.teacher[],\n settings: ConnectedTypes.divisionSettings,\n options: Types.parsedToOptions\n): Types.group[] {\n const defaultInterval = getDefaultInterval(settings);\n\n return teachers\n .map(teacher => {\n const intervals = teacher.intervals ?? defaultInterval;\n const rootInterval = teacher.rootInterval ?? settings.defaultRootInterval;\n\n const doc: Types.group = {\n id: idOf.teacher(teacher, options),\n group_type: 'personal',\n minimizeGaps: false,\n minimizeDependencyAlternation: true,\n forbidOverlappingEvents: options.oldFormat ? teacher.forbidOverlappingEvents : undefined, // Deprecated in v3\n disableDayLengthPunishment: options.oldFormat ? teacher.disableDayLengthPunishment : undefined, // Deprecated in v3\n weight: teacher.weight,\n intervals: options.oldFormat\n ? parseIntervals(intervals, rootInterval, settings)\n : idOf.intervalPairReference(intervals, rootInterval, options),\n days: parseDays (teacher.days, settings),\n lockedTimes: attachLockedTimes (teacher.lockedTimes, options),\n minBreakLength: parseMinimumBreakLength (teacher.minBreakLength),\n\n ...parseMaxWorkingHours(teacher, options),\n };\n\n if (options.includeEntityMeta) {\n doc.meta = omitBy({\n ids: teacher.ids,\n name: teacher.displayName,\n }, x => x == null);\n }\n\n return omitBy(doc, x => x == null) as Types.group;\n });\n};\n"],"mappings":";;;;;;;;;;AAWA,SAAgB,aACd,UACA,UACA,SACe;CACf,MAAM,kBAAkB,mBAAmB,SAAS;AAEpD,QAAO,SACJ,KAAI,YAAW;EACd,MAAM,YAAe,QAAQ,aAAgB;EAC7C,MAAM,eAAe,QAAQ,gBAAgB,SAAS;EAEtD,MAAMA,MAAmB;GACvB,IAA+B,KAAK,QAAQ,SAAS,QAAQ;GAC7D,YAA+B;GAC/B,cAA+B;GAC/B,+BAA+B;GAC/B,yBAA+B,QAAQ,YAAY,QAAQ,0BAA0B;GACrF,4BAA+B,QAAQ,YAAY,QAAQ,6BAA6B;GACxF,QAA+B,QAAQ;GACvC,WAA+B,QAAQ,YACnC,eAAe,WAAW,cAAc,SAAS,GACjD,KAAK,sBAAsB,WAAW,cAAc,QAAQ;GAChE,MAAgB,UAA2B,QAAQ,MAAM,SAAS;GAClE,aAAgB,kBAA2B,QAAQ,aAAa,QAAQ;GACxE,gBAAgB,wBAA2B,QAAQ,eAAe;GAElE,GAAG,qBAAqB,SAAS,QAAQ;GAC1C;AAED,MAAI,QAAQ,kBACV,KAAI,OAAO,OAAO;GAChB,KAAM,QAAQ;GACd,MAAM,QAAQ;GACf,GAAE,MAAK,KAAK,KAAK;AAGpB,SAAO,OAAO,MAAK,MAAK,KAAK,KAAK;GAClC"}
@@ -13,7 +13,7 @@ function attachLockedTimes(lockedTimes, options) {
13
13
  start,
14
14
  length
15
15
  };
16
- if (options.meta) doc.meta = omitBy({
16
+ if (options.includeEntityMeta) doc.meta = omitBy({
17
17
  id: getVertexId(lockedTime, options),
18
18
  visible: lockedTime.visible,
19
19
  name: lockedTime.displayName
@@ -1 +1 @@
1
- {"version":3,"file":"attach-locked-times.js","names":["doc: Types.lockedTime"],"sources":["../../../../../src/RS/to/input/util/attach-locked-times.ts"],"sourcesContent":["import { omitBy } from 'lodash-es';\nimport type { Types } from '../../../types';\nimport type { ConnectedTypes } from '../../../make-connected';\nimport { getDayIndex, getVertexId, parseDateInterval } from '../../../../core/util';\n\nexport function attachLockedTimes (\n lockedTimes: ConnectedTypes.lockedTime[] | null | undefined,\n options: Types.parsedToOptions\n): Types.lockedTime[] | undefined {\n\n return lockedTimes?.map(lockedTime => {\n const { start: startDate, end: endDate } = parseDateInterval(lockedTime, 'locked time');\n\n const length = endDate.diff(startDate, 'minutes');\n if (length < 5) throw new Error('(RS::To::LockedTimes) Length of a locked time is less than 5 min');\n\n const start = parseFloat(startDate.format('HH.mm'));\n\n const doc: Types.lockedTime = {\n day: getDayIndex(startDate),\n start: start,\n length: length,\n };\n\n if (options.meta) {\n doc.meta = omitBy({\n id: getVertexId(lockedTime, options),\n visible: lockedTime.visible,\n name: lockedTime.displayName,\n }, x => x == null);\n }\n\n return omitBy(doc, x => x == null) as Types.lockedTime;\n });\n};"],"mappings":";;;;AAKA,SAAgB,kBACd,aACA,SACgC;AAEhC,QAAO,aAAa,KAAI,eAAc;EACpC,MAAM,EAAE,OAAO,WAAW,KAAK,YAAY,kBAAkB,YAAY,cAAc;EAEvF,MAAM,SAAS,QAAQ,KAAK,WAAW,UAAU;AACjD,MAAI,SAAS,EAAG,OAAM,IAAI,MAAM,mEAAmE;EAEnG,MAAM,QAAQ,WAAW,UAAU,OAAO,QAAQ,CAAC;EAEnD,MAAMA,MAAwB;GAC5B,KAAQ,YAAY,UAAU;GACtB;GACA;GACT;AAED,MAAI,QAAQ,KACV,KAAI,OAAO,OAAO;GAChB,IAAS,YAAY,YAAY,QAAQ;GACzC,SAAS,WAAW;GACpB,MAAS,WAAW;GACrB,GAAE,MAAK,KAAK,KAAK;AAGpB,SAAO,OAAO,MAAK,MAAK,KAAK,KAAK;GAClC"}
1
+ {"version":3,"file":"attach-locked-times.js","names":["doc: Types.lockedTime"],"sources":["../../../../../src/RS/to/input/util/attach-locked-times.ts"],"sourcesContent":["import { omitBy } from 'lodash-es';\nimport type { Types } from '../../../types';\nimport type { ConnectedTypes } from '../../../make-connected';\nimport { getDayIndex, getVertexId, parseDateInterval } from '../../../../core/util';\n\nexport function attachLockedTimes (\n lockedTimes: ConnectedTypes.lockedTime[] | null | undefined,\n options: Types.parsedToOptions\n): Types.lockedTime[] | undefined {\n\n return lockedTimes?.map(lockedTime => {\n const { start: startDate, end: endDate } = parseDateInterval(lockedTime, 'locked time');\n\n const length = endDate.diff(startDate, 'minutes');\n if (length < 5) throw new Error('(RS::To::LockedTimes) Length of a locked time is less than 5 min');\n\n const start = parseFloat(startDate.format('HH.mm'));\n\n const doc: Types.lockedTime = {\n day: getDayIndex(startDate),\n start: start,\n length: length,\n };\n\n if (options.includeEntityMeta) {\n doc.meta = omitBy({\n id: getVertexId(lockedTime, options),\n visible: lockedTime.visible,\n name: lockedTime.displayName,\n }, x => x == null);\n }\n\n return omitBy(doc, x => x == null) as Types.lockedTime;\n });\n};"],"mappings":";;;;AAKA,SAAgB,kBACd,aACA,SACgC;AAEhC,QAAO,aAAa,KAAI,eAAc;EACpC,MAAM,EAAE,OAAO,WAAW,KAAK,YAAY,kBAAkB,YAAY,cAAc;EAEvF,MAAM,SAAS,QAAQ,KAAK,WAAW,UAAU;AACjD,MAAI,SAAS,EAAG,OAAM,IAAI,MAAM,mEAAmE;EAEnG,MAAM,QAAQ,WAAW,UAAU,OAAO,QAAQ,CAAC;EAEnD,MAAMA,MAAwB;GAC5B,KAAQ,YAAY,UAAU;GACtB;GACA;GACT;AAED,MAAI,QAAQ,kBACV,KAAI,OAAO,OAAO;GAChB,IAAS,YAAY,YAAY,QAAQ;GACzC,SAAS,WAAW;GACpB,MAAS,WAAW;GACrB,GAAE,MAAK,KAAK,KAAK;AAGpB,SAAO,OAAO,MAAK,MAAK,KAAK,KAAK;GAClC"}
@@ -15,9 +15,13 @@ type PartialScheduleOptions<IDs extends Array<string> | Set<string>> = {
15
15
  omittedEventsHandling: 'ignore' | 'freeze';
16
16
  };
17
17
  interface ToOptions extends BaseOptions {
18
- meta?: boolean;
19
- isPrivateId?: boolean;
20
- isPublicId?: boolean;
18
+ /**
19
+ * If enabled, each entity in the output (such as groups, dependencies, collections, events, etc.)
20
+ * will include a `meta` object containing details like original IDs and display names.
21
+ * Note: This is distinct from the root-level `meta` object in the mapped output which is always included.
22
+ * @default false
23
+ */
24
+ includeEntityMeta?: boolean;
21
25
  /**
22
26
  * If true, the output will include the full core data. This enables the resulting output file to be uploaded as a new schedule.
23
27
  * @default false
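From the consumer side, the old `meta` flag is renamed to `includeEntityMeta`, and `isPrivateId`/`isPublicId` are dropped from ToOptions. A minimal sketch (the import path is an assumption, and `Partial` is used to sidestep any required BaseOptions members not shown in this diff):

    import type { ToOptions } from '@royalschedule/maps'; // hypothetical import path for the type

    // 4.0.16: const options: ToOptions = { meta: true };
    // 4.0.18:
    const options: Partial<ToOptions> = {
      includeEntityMeta: true, // attach per-entity meta (original ids, display names) to the mapped output
    };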
@@ -29,8 +33,9 @@ interface ToOptions extends BaseOptions {
29
33
  */
30
34
  appendOutput?: boolean;
31
35
  /**
32
- * @deprecated
33
- * If true, the mapping becomes the same as the previous version 2 one. That means:
36
+ * @deprecated the only purpose of this backwards compatibility is to support the input-analyzer module
37
+ *
38
+ * If not set to true, these new features are taken into account:
34
39
  *
35
40
  * - New root level entries: `intervals` and `individuals`
36
41
  * - As the there is now a root level `intervals` the former may be references from `Collection.intervals`, `Group.intervals` etc.
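For the deprecated `oldFormat` flag, a hedged example of opting back into the v2-style mapping (per the updated note, only the input-analyzer module should need this):

    // Sketch only.
    const legacyOptions: Partial<ToOptions> = {
      oldFormat: true, // no root-level `intervals`/`individuals`; keeps Group.forbidOverlappingEvents and disableDayLengthPunishment
    };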
@@ -56,7 +61,7 @@ interface ToOutput {
56
61
  structure?: Structure;
57
62
  division: Pick<Types.Deep.Division, 'displayName' | 'start' | 'end'>;
58
63
  };
59
- algorithmParameters?: {
64
+ algorithmParameters: {
60
65
  weights?: AlgorithmWeightParameters;
61
66
  };
62
67
  score?: [number, number, number, number];
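`ToOutput.algorithmParameters` changes from optional to required. Code that builds a ToOutput by hand now has to provide at least an empty object (sketch; the other ToOutput members are assumed to be filled in elsewhere):

    // 4.0.16: `algorithmParameters?` could be omitted entirely.
    // 4.0.18: it must be present, though `weights` stays optional.
    const partial: Pick<ToOutput, 'algorithmParameters'> = {
      algorithmParameters: {}, // or { weights: ... }
    };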
@@ -1 +1 @@
1
- {"version":3,"file":"to.js","names":[],"sources":["../../../src/RS/types/to.ts"],"sourcesContent":["import type { BaseOptions } from '../../common/types';\nimport type { CoreTypes } from '../../core';\nimport type { MixedScheduleData } from '../make-connected';\nimport type { Types } from './';\nimport type { AlgorithmWeightParameters } from './algorithm-parameters';\n\n\nexport type ToInput = MixedScheduleData;\n\ntype PartialScheduleOptions<IDs extends Array<string> | Set<string>> = {\n /** @description undefined means all included */\n includedEvents?: IDs;\n /** @description undefined means all included */\n includedLocations?: IDs;\n omittedEventsHandling: 'ignore' | 'freeze';\n};\n\nexport interface ToOptions extends BaseOptions {\n meta?: boolean;\n isPrivateId?: boolean;\n isPublicId?: boolean;\n /**\n * If true, the output will include the full core data. This enables the resulting output file to be uploaded as a new schedule.\n * @default false\n */\n appendCoreData?: boolean;\n /**\n * If true, the output will include current configuration of events and locked times as `output` entry.\n * @default false\n */\n appendOutput?: boolean;\n\n /**\n * @deprecated\n * If true, the mapping becomes the same as the previous version 2 one. That means:\n *\n * - New root level entries: `intervals` and `individuals`\n * - As the there is now a root level `intervals` the former may be references from `Collection.intervals`, `Group.intervals` etc.\n * - The `Collection.groups` and `Event.groups` supports referencing `individuals` (and `individuals sets`) in addition to `groups`\n * - The `Group.forbidOverlappingEvents` has been removed.\n * - The `Group.disableDayLengthPunishment` has been replaced by `Group.minimizeGaps`\n */\n oldFormat?: boolean;\n\n partialScheduleOptions?: PartialScheduleOptions<Array<string> | Set<string>>\n\n algorithmWeightParameters?: AlgorithmWeightParameters;\n\n /**\n * to be used before whe have migrated from \"maxNumWorkingHours\" and \"maxNumDailyWorkingHours\" to \"maximumScheduleSpan\"\n */\n useMaximumScheduleSpan?: boolean;\n}\n\nexport interface ParsedToOptions extends Omit<ToOptions, 'partialScheduleOptions'> {\n partialScheduleOptions?: PartialScheduleOptions<Set<string>>\n}\n\nexport const structure = 'RS/algorithm-5.0.2';\nexport type Structure = typeof structure;\nexport interface ToOutput {\n meta: {\n structure?: Structure;\n division: Pick<CoreTypes.Deep.Division, 'displayName' | 'start' | 'end'>;\n };\n algorithmParameters?: {\n weights?: AlgorithmWeightParameters;\n };\n score?: [number, number, number, number];\n input: Types.scheduleData;\n output?: Types.configuration[];\n coreData?: Partial<CoreTypes.SerializedWithOptionalId.Schedule>;\n}"],"mappings":";AA0DA,MAAa,YAAY"}
1
+ {"version":3,"file":"to.js","names":[],"sources":["../../../src/RS/types/to.ts"],"sourcesContent":["import type { BaseOptions } from '../../common/types';\nimport type { CoreTypes } from '../../core';\nimport type { MixedScheduleData } from '../make-connected';\nimport type { Types } from './';\nimport type { AlgorithmWeightParameters } from './algorithm-parameters';\n\n\nexport type ToInput = MixedScheduleData;\n\ntype PartialScheduleOptions<IDs extends Array<string> | Set<string>> = {\n /** @description undefined means all included */\n includedEvents?: IDs;\n /** @description undefined means all included */\n includedLocations?: IDs;\n omittedEventsHandling: 'ignore' | 'freeze';\n};\n\nexport interface ToOptions extends BaseOptions {\n /**\n * If enabled, each entity in the output (such as groups, dependencies, collections, events, etc.)\n * will include a `meta` object containing details like original IDs and display names.\n * Note: This is distinct from the root-level `meta` object in the mapped output which is always included.\n * @default false\n */\n includeEntityMeta?: boolean;\n\n /**\n * If true, the output will include the full core data. This enables the resulting output file to be uploaded as a new schedule.\n * @default false\n */\n appendCoreData?: boolean;\n\n /**\n * If true, the output will include current configuration of events and locked times as `output` entry.\n * @default false\n */\n appendOutput?: boolean;\n\n /**\n * @deprecated the only purpose of this backwards compatibility is to support the input-analyzer module\n *\n * If not set to true, these new features are taken into account:\n *\n * - New root level entries: `intervals` and `individuals`\n * - As the there is now a root level `intervals` the former may be references from `Collection.intervals`, `Group.intervals` etc.\n * - The `Collection.groups` and `Event.groups` supports referencing `individuals` (and `individuals sets`) in addition to `groups`\n * - The `Group.forbidOverlappingEvents` has been removed.\n * - The `Group.disableDayLengthPunishment` has been replaced by `Group.minimizeGaps`\n */\n oldFormat?: boolean;\n\n partialScheduleOptions?: PartialScheduleOptions<Array<string> | Set<string>>\n\n algorithmWeightParameters?: AlgorithmWeightParameters;\n\n /**\n * to be used before whe have migrated from \"maxNumWorkingHours\" and \"maxNumDailyWorkingHours\" to \"maximumScheduleSpan\"\n */\n useMaximumScheduleSpan?: boolean;\n}\n\nexport interface ParsedToOptions extends Omit<ToOptions, 'partialScheduleOptions'> {\n partialScheduleOptions?: PartialScheduleOptions<Set<string>>\n}\n\nexport const structure = 'RS/algorithm-5.0.2';\nexport type Structure = typeof structure;\nexport interface ToOutput {\n meta: {\n structure?: Structure;\n division: Pick<CoreTypes.Deep.Division, 'displayName' | 'start' | 'end'>;\n };\n algorithmParameters: {\n weights?: AlgorithmWeightParameters;\n };\n score?: [number, number, number, number];\n input: Types.scheduleData;\n output?: Types.configuration[];\n coreData?: Partial<CoreTypes.SerializedWithOptionalId.Schedule>;\n}"],"mappings":";AAiEA,MAAa,YAAY"}
@@ -80,7 +80,7 @@ type Infer<T extends VertexQuery$1<any, any>> = T extends VertexQuery$1<infer C,
80
80
  */
81
81
  declare namespace createVertexQuery {
82
82
  function division<Q$1 extends VertexQuery<DivisionVertex>>(query: Q$1): VertexQuery$1<"divisions", Q$1>;
83
- function settings<Q$1 extends VertexQuery<DivisionSettingsVertex>>(query: Q$1): VertexQuery$1<"divisionSettings", Q$1>;
83
+ function settings<Q$1 extends VertexQuery<DivisionSettingsVertex>>(query: Q$1): VertexQuery$1<"settings", Q$1>;
84
84
  function exception<Q$1 extends VertexQuery<ExceptionVertex>>(query: Q$1): VertexQuery$1<"exceptions", Q$1>;
85
85
  function event<Q$1 extends VertexQuery<EventVertex>>(query: Q$1): VertexQuery$1<"events", Q$1>;
86
86
  function scheduleEvent<Q$1 extends VertexQuery<ScheduleEventVertex>>(query: Q$1): VertexQuery$1<"scheduleEvents", Q$1>;
@@ -24,7 +24,7 @@ let createVertexQuery;
24
24
  }
25
25
  _createVertexQuery.division = division;
26
26
  function settings(query) {
27
- return new VertexQuery("divisionSettings", query);
27
+ return new VertexQuery("settings", query);
28
28
  }
29
29
  _createVertexQuery.settings = settings;
30
30
  function exception(query) {
@@ -1 +1 @@
1
- {"version":3,"file":"vertex-query.js","names":["_collection: C","_query: Q"],"sources":["../../../../src/core/interfaces/vertices/vertex-query.ts"],"sourcesContent":["import type { Collection, CustomVertexExport } from './util';\nimport type { TeacherVertex } from './teachers';\nimport type { ExceptionVertex } from './exceptions';\nimport type { EventVertex, ScheduleEventVertex, CalendarEventVertex } from './events';\nimport type { CourseVertex } from './courses';\nimport type { DivisionSettingsVertex } from './division-settings';\nimport type { DivisionVertex } from './divisions';\nimport type { GenerationVertex } from './generations';\nimport type { GroupVertex } from './groups';\nimport type { LocationVertex } from './locations';\nimport type { LockedTimeVertex } from './locked-times';\nimport type { OverlapGroupVertex } from './overlap-groups';\nimport type { PeriodVertex } from './periods';\nimport type { PersonVertex } from './persons';\nimport type { RootIntervalVertex } from './root-intervals';\nimport type { SyllabusVertex } from './syllabuses';\nimport type { ConfigurationVertex } from './configurations';\nimport type { CompanyVertex } from './companies';\nimport type { UserVertex } from './users';\nimport type { VertexQuery as Query } from './util/vertex-query';\nimport type { TypeEqual } from 'ts-expect';\nimport { expectType } from 'ts-expect';\n\n\ntype VertexRecord = {\n divisions: DivisionVertex;\n divisionSettings: DivisionSettingsVertex;\n settings: DivisionSettingsVertex;\n teachers: TeacherVertex;\n exceptions: ExceptionVertex;\n events: EventVertex;\n scheduleEvents: ScheduleEventVertex;\n calendarEvents: CalendarEventVertex;\n courses: CourseVertex;\n generations: GenerationVertex;\n groups: GroupVertex;\n locations: LocationVertex;\n lockedTimes: LockedTimeVertex;\n overlapGroups: OverlapGroupVertex;\n periods: PeriodVertex;\n persons: PersonVertex;\n students: PersonVertex;\n rootIntervals: RootIntervalVertex;\n syllabuses: SyllabusVertex;\n configurations: ConfigurationVertex;\n companies: CompanyVertex;\n users: UserVertex;\n};\nexport type BaseQueryRecord<C extends keyof VertexRecord> = Query<VertexRecord[C]>;\n{\n // the keys must all be a collection\n type Mismatches = keyof {\n [K in keyof VertexRecord as K extends Collection ? never : K]: true\n };\n expectType<TypeEqual<Mismatches, never>>(true);\n}\n\n/**\n * See {@link createVertexQuery}\n */\nexport class VertexQuery <\n C extends keyof VertexRecord,\n Q extends BaseQueryRecord<C>\n> {\n constructor (\n private readonly _collection: C,\n private readonly _query: Q\n ) {}\n\n get collection () { return this._collection }\n get query () { return this._query }\n}\n\n\n/**\n * Infers the result type for a query created via createVertexQuery.\n *\n * @example\n * const personQuery = createVertexQuery('persons', { select: ['id', 'firstName'] });\n * type PersonType = Infer<typeof personQuery>;\n */\nexport type Infer<T extends VertexQuery<any, any>> =\n T extends VertexQuery<infer C, infer Q>\n ? 
CustomVertexExport<VertexRecord[C], Q>\n : never;\n\n\n/**\n * Builds a typed vertex query for a given vertex type using a select/expand shape.\n *\n * Use together with Infer<T> to obtain the serialized result type.\n *\n * @typeParam Q - The select/expand query shape for that vertex.\n *\n * @example\n * const groupQuery = createVertexQuery.group({\n * select: ['id', 'displayName'],\n * expand: { members: { select: ['id'] } }\n * });\n * type GroupType = Infer<typeof groupQuery>;\n */\nexport namespace createVertexQuery {\n //\n // The following did not work: it seems we need to define Q as an completely independent type parameter (?)\n //\n // export function createVertexQuery <\n // C extends keyof VertexRecord,\n // const Q extends Query<VertexRecord[C]>\n // > (\n // collection: C,\n // query: Q\n // ) {\n // return new VertexQuery(collection, query);\n // }\n\n export function division <Q extends Query<DivisionVertex >> (query: Q) { return new VertexQuery('divisions', query) }\n export function settings <Q extends Query<DivisionSettingsVertex>> (query: Q) { return new VertexQuery('divisionSettings', query) }\n export function exception <Q extends Query<ExceptionVertex >> (query: Q) { return new VertexQuery('exceptions', query) }\n export function event <Q extends Query<EventVertex >> (query: Q) { return new VertexQuery('events', query) }\n export function scheduleEvent<Q extends Query<ScheduleEventVertex >> (query: Q) { return new VertexQuery('scheduleEvents', query) }\n export function calendarEvent<Q extends Query<CalendarEventVertex >> (query: Q) { return new VertexQuery('calendarEvents', query) }\n export function course <Q extends Query<CourseVertex >> (query: Q) { return new VertexQuery('courses', query) }\n export function generation <Q extends Query<GenerationVertex >> (query: Q) { return new VertexQuery('generations', query) }\n export function location <Q extends Query<LocationVertex >> (query: Q) { return new VertexQuery('locations', query) }\n export function lockedTime <Q extends Query<LockedTimeVertex >> (query: Q) { return new VertexQuery('lockedTimes', query) }\n export function overlapGroup <Q extends Query<OverlapGroupVertex >> (query: Q) { return new VertexQuery('overlapGroups', query) }\n export function period <Q extends Query<PeriodVertex >> (query: Q) { return new VertexQuery('periods', query) }\n export function person <Q extends Query<PersonVertex >> (query: Q) { return new VertexQuery('persons', query) }\n export function rootInterval <Q extends Query<RootIntervalVertex >> (query: Q) { return new VertexQuery('rootIntervals', query) }\n export function syllabus <Q extends Query<SyllabusVertex >> (query: Q) { return new VertexQuery('syllabuses', query) }\n export function group <Q extends Query<GroupVertex >> (query: Q) { return new VertexQuery('groups', query) }\n export function teacher <Q extends Query<TeacherVertex >> (query: Q) { return new VertexQuery('teachers', query) 
}\n}"],"mappings":";;;AAsDE,WAAyC,KAAK;;;;AAMhD,IAAa,cAAb,MAGE;CACA,YACE,AAAiBA,aACjB,AAAiBC,QACjB;EAFiB;EACA;;CAGnB,IAAI,aAAc;AAAE,SAAO,KAAK;;CAChC,IAAI,QAAc;AAAE,SAAO,KAAK;;;;;CA6CzB,SAAS,SAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,aAAoB,MAAM;;;CAC9H,SAAS,SAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,oBAAoB,MAAM;;;CAC9H,SAAS,UAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,cAAoB,MAAM;;;CAC9H,SAAS,MAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,UAAoB,MAAM;;;CAC9H,SAAS,cAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,kBAAoB,MAAM;;;CAC9H,SAAS,cAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,kBAAoB,MAAM;;;CAC9H,SAAS,OAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,WAAoB,MAAM;;;CAC9H,SAAS,WAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,eAAoB,MAAM;;;CAC9H,SAAS,SAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,aAAoB,MAAM;;;CAC9H,SAAS,WAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,eAAoB,MAAM;;;CAC9H,SAAS,aAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,iBAAoB,MAAM;;;CAC9H,SAAS,OAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,WAAoB,MAAM;;;CAC9H,SAAS,OAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,WAAoB,MAAM;;;CAC9H,SAAS,aAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,iBAAoB,MAAM;;;CAC9H,SAAS,SAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,cAAoB,MAAM;;;CAC9H,SAAS,MAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,UAAoB,MAAM;;;CAC9H,SAAS,QAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,YAAoB,MAAM"}
1
+ {"version":3,"file":"vertex-query.js","names":["_collection: C","_query: Q"],"sources":["../../../../src/core/interfaces/vertices/vertex-query.ts"],"sourcesContent":["import type { Collection, CustomVertexExport } from './util';\nimport type { TeacherVertex } from './teachers';\nimport type { ExceptionVertex } from './exceptions';\nimport type { EventVertex, ScheduleEventVertex, CalendarEventVertex } from './events';\nimport type { CourseVertex } from './courses';\nimport type { DivisionSettingsVertex } from './division-settings';\nimport type { DivisionVertex } from './divisions';\nimport type { GenerationVertex } from './generations';\nimport type { GroupVertex } from './groups';\nimport type { LocationVertex } from './locations';\nimport type { LockedTimeVertex } from './locked-times';\nimport type { OverlapGroupVertex } from './overlap-groups';\nimport type { PeriodVertex } from './periods';\nimport type { PersonVertex } from './persons';\nimport type { RootIntervalVertex } from './root-intervals';\nimport type { SyllabusVertex } from './syllabuses';\nimport type { ConfigurationVertex } from './configurations';\nimport type { CompanyVertex } from './companies';\nimport type { UserVertex } from './users';\nimport type { VertexQuery as Query } from './util/vertex-query';\nimport type { TypeEqual } from 'ts-expect';\nimport { expectType } from 'ts-expect';\n\n\ntype VertexRecord = {\n divisions: DivisionVertex;\n divisionSettings: DivisionSettingsVertex;\n settings: DivisionSettingsVertex;\n teachers: TeacherVertex;\n exceptions: ExceptionVertex;\n events: EventVertex;\n scheduleEvents: ScheduleEventVertex;\n calendarEvents: CalendarEventVertex;\n courses: CourseVertex;\n generations: GenerationVertex;\n groups: GroupVertex;\n locations: LocationVertex;\n lockedTimes: LockedTimeVertex;\n overlapGroups: OverlapGroupVertex;\n periods: PeriodVertex;\n persons: PersonVertex;\n students: PersonVertex;\n rootIntervals: RootIntervalVertex;\n syllabuses: SyllabusVertex;\n configurations: ConfigurationVertex;\n companies: CompanyVertex;\n users: UserVertex;\n};\nexport type BaseQueryRecord<C extends keyof VertexRecord> = Query<VertexRecord[C]>;\n{\n // the keys must all be a collection\n type Mismatches = keyof {\n [K in keyof VertexRecord as K extends Collection ? never : K]: true\n };\n expectType<TypeEqual<Mismatches, never>>(true);\n}\n\n/**\n * See {@link createVertexQuery}\n */\nexport class VertexQuery <\n C extends keyof VertexRecord,\n Q extends BaseQueryRecord<C>\n> {\n constructor (\n private readonly _collection: C,\n private readonly _query: Q\n ) {}\n\n get collection () { return this._collection }\n get query () { return this._query }\n}\n\n\n/**\n * Infers the result type for a query created via createVertexQuery.\n *\n * @example\n * const personQuery = createVertexQuery('persons', { select: ['id', 'firstName'] });\n * type PersonType = Infer<typeof personQuery>;\n */\nexport type Infer<T extends VertexQuery<any, any>> =\n T extends VertexQuery<infer C, infer Q>\n ? 
CustomVertexExport<VertexRecord[C], Q>\n : never;\n\n\n/**\n * Builds a typed vertex query for a given vertex type using a select/expand shape.\n *\n * Use together with Infer<T> to obtain the serialized result type.\n *\n * @typeParam Q - The select/expand query shape for that vertex.\n *\n * @example\n * const groupQuery = createVertexQuery.group({\n * select: ['id', 'displayName'],\n * expand: { members: { select: ['id'] } }\n * });\n * type GroupType = Infer<typeof groupQuery>;\n */\nexport namespace createVertexQuery {\n //\n // The following did not work: it seems we need to define Q as an completely independent type parameter (?)\n //\n // export function createVertexQuery <\n // C extends keyof VertexRecord,\n // const Q extends Query<VertexRecord[C]>\n // > (\n // collection: C,\n // query: Q\n // ) {\n // return new VertexQuery(collection, query);\n // }\n\n export function division <Q extends Query<DivisionVertex >> (query: Q) { return new VertexQuery('divisions', query) }\n export function settings <Q extends Query<DivisionSettingsVertex>> (query: Q) { return new VertexQuery('settings', query) }\n export function exception <Q extends Query<ExceptionVertex >> (query: Q) { return new VertexQuery('exceptions', query) }\n export function event <Q extends Query<EventVertex >> (query: Q) { return new VertexQuery('events', query) }\n export function scheduleEvent<Q extends Query<ScheduleEventVertex >> (query: Q) { return new VertexQuery('scheduleEvents', query) }\n export function calendarEvent<Q extends Query<CalendarEventVertex >> (query: Q) { return new VertexQuery('calendarEvents', query) }\n export function course <Q extends Query<CourseVertex >> (query: Q) { return new VertexQuery('courses', query) }\n export function generation <Q extends Query<GenerationVertex >> (query: Q) { return new VertexQuery('generations', query) }\n export function location <Q extends Query<LocationVertex >> (query: Q) { return new VertexQuery('locations', query) }\n export function lockedTime <Q extends Query<LockedTimeVertex >> (query: Q) { return new VertexQuery('lockedTimes', query) }\n export function overlapGroup <Q extends Query<OverlapGroupVertex >> (query: Q) { return new VertexQuery('overlapGroups', query) }\n export function period <Q extends Query<PeriodVertex >> (query: Q) { return new VertexQuery('periods', query) }\n export function person <Q extends Query<PersonVertex >> (query: Q) { return new VertexQuery('persons', query) }\n export function rootInterval <Q extends Query<RootIntervalVertex >> (query: Q) { return new VertexQuery('rootIntervals', query) }\n export function syllabus <Q extends Query<SyllabusVertex >> (query: Q) { return new VertexQuery('syllabuses', query) }\n export function group <Q extends Query<GroupVertex >> (query: Q) { return new VertexQuery('groups', query) }\n export function teacher <Q extends Query<TeacherVertex >> (query: Q) { return new VertexQuery('teachers', query) 
}\n}"],"mappings":";;;AAsDE,WAAyC,KAAK;;;;AAMhD,IAAa,cAAb,MAGE;CACA,YACE,AAAiBA,aACjB,AAAiBC,QACjB;EAFiB;EACA;;CAGnB,IAAI,aAAc;AAAE,SAAO,KAAK;;CAChC,IAAI,QAAc;AAAE,SAAO,KAAK;;;;;CA6CzB,SAAS,SAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,aAAoB,MAAM;;;CAC9H,SAAS,SAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,YAAoB,MAAM;;;CAC9H,SAAS,UAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,cAAoB,MAAM;;;CAC9H,SAAS,MAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,UAAoB,MAAM;;;CAC9H,SAAS,cAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,kBAAoB,MAAM;;;CAC9H,SAAS,cAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,kBAAoB,MAAM;;;CAC9H,SAAS,OAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,WAAoB,MAAM;;;CAC9H,SAAS,WAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,eAAoB,MAAM;;;CAC9H,SAAS,SAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,aAAoB,MAAM;;;CAC9H,SAAS,WAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,eAAoB,MAAM;;;CAC9H,SAAS,aAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,iBAAoB,MAAM;;;CAC9H,SAAS,OAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,WAAoB,MAAM;;;CAC9H,SAAS,OAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,WAAoB,MAAM;;;CAC9H,SAAS,aAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,iBAAoB,MAAM;;;CAC9H,SAAS,SAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,cAAoB,MAAM;;;CAC9H,SAAS,MAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,UAAoB,MAAM;;;CAC9H,SAAS,QAAwD,OAAU;AAAE,SAAO,IAAI,YAAY,YAAoB,MAAM"}
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@royalschedule/maps",
3
3
  "description": "",
4
- "version": "4.0.16",
4
+ "version": "4.0.18",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",
7
7
  "types": "dist/index.d.ts",