@royalschedule/maps 4.0.6 → 4.0.8
- package/dist/Additio/to/schedules.js +2 -2
- package/dist/Additio/to/schedules.js.map +1 -1
- package/dist/RS/to/input/collections.js +2 -5
- package/dist/RS/to/input/collections.js.map +1 -1
- package/dist/RS/to/input/dynamic-locked-times.js +1 -1
- package/dist/RS/to/input/dynamic-locked-times.js.map +1 -1
- package/dist/RS/to/input/input.js +29 -28
- package/dist/RS/to/input/input.js.map +1 -1
- package/dist/RS/to/input/util/parse-intervals.js +14 -7
- package/dist/RS/to/input/util/parse-intervals.js.map +1 -1
- package/dist/RS/to/input/util/parse-location-references.js +1 -1
- package/dist/RS/to/input/util/parse-location-references.js.map +1 -1
- package/dist/RS/to/input/util/util.js +1 -2
- package/dist/RS/to/input/util/util.js.map +1 -1
- package/package.json +1 -1

package/dist/Additio/to/schedules.js
@@ -15,13 +15,13 @@ const header = [
 "startTime",
 "length",
 "subject",
+"inweek",
 "roomid",
 "room",
 "teacherid",
 "teacher",
 "classid",
-"class"
-"inweek"
+"class"
 ];
 var schedules_default = (schedule, _options = {}) => {
 const options = _options;
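
Note on the hunk above: the only edit to the compiled Additio export is that the "inweek" column moves from the tail of the header array to directly after "subject", so the column order of the exported sheet changes rather than the data. A minimal TypeScript sketch of how that header array drives the xlsx output (the trimmed OutEvent shape and the sample row are hypothetical):

    import XLSX from 'xlsx';

    // Trimmed-down stand-in for the real OutEvent type; only three columns shown.
    type OutEvent = { subject?: string; inweek?: string; roomid?: string };

    // Column order of the exported sheet: "inweek" now sits right after "subject".
    const header: (keyof OutEvent)[] = ['subject', 'inweek', 'roomid'];

    const rows: OutEvent[] = [{ subject: 'Math', inweek: '34, 35', roomid: 'R1' }];

    // json_to_sheet honours the header option, so the sheet columns follow it.
    const sheet = XLSX.utils.json_to_sheet(rows, { header });
    console.log(XLSX.utils.sheet_to_csv(sheet)); // first row: subject,inweek,roomid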

package/dist/Additio/to/schedules.js.map
@@ -1 +1 @@
-
{"version":3,"file":"schedules.js","names":["header: (keyof Required<OutEvent>)[]","weeks","out: OutEvent"],"sources":["../../../src/Additio/to/schedules.ts"],"sourcesContent":["import XLSX from 'xlsx';\nimport moment from 'moment';\nimport { getDayIndex, getVertexId } from '../../core/util';\nimport type { BaseOptions } from '../../common/types';\nimport type { Types as CoreTypes } from '../../core/types';\nimport type { Types } from '../types';\n\ntype OutEvent = {\n id?: string;\n externalid?: string;\n groupid?: string;\n group?: string;\n dayid?: number;\n startTime?: string;\n length?: number;\n subject?: string;\n roomid?: string;\n room?: string;\n teacherid?: string;\n teacher?: string;\n classid?: string;\n class?: string;\n inweek?: string;\n};\n\n/**\n * Specifies the header and the order of the columns in the exported Excel file.\n */\nconst header: (keyof Required<OutEvent>)[] = [\n 'id',\n 'externalid',\n 'groupid',\n 'group',\n 'dayid',\n 'startTime',\n 'length',\n 'subject',\n 'roomid',\n 'room',\n 'teacherid',\n 'teacher',\n 'classid',\n 'class'
+
{"version":3,"file":"schedules.js","names":["header: (keyof Required<OutEvent>)[]","weeks","out: OutEvent"],"sources":["../../../src/Additio/to/schedules.ts"],"sourcesContent":["import XLSX from 'xlsx';\nimport moment from 'moment';\nimport { getDayIndex, getVertexId } from '../../core/util';\nimport type { BaseOptions } from '../../common/types';\nimport type { Types as CoreTypes } from '../../core/types';\nimport type { Types } from '../types';\n\ntype OutEvent = {\n id?: string;\n externalid?: string;\n groupid?: string;\n group?: string;\n dayid?: number;\n startTime?: string;\n length?: number;\n subject?: string;\n roomid?: string;\n room?: string;\n teacherid?: string;\n teacher?: string;\n classid?: string;\n class?: string;\n inweek?: string;\n};\n\n/**\n * Specifies the header and the order of the columns in the exported Excel file.\n */\nconst header: (keyof Required<OutEvent>)[] = [\n 'id',\n 'externalid',\n 'groupid',\n 'group',\n 'dayid',\n 'startTime',\n 'length',\n 'subject',\n 'inweek',\n 'roomid',\n 'room',\n 'teacherid',\n 'teacher',\n 'classid',\n 'class'\n];\n\ntype Schedule = {\n division: CoreTypes.mixed.division;\n settings: CoreTypes.mixed.divisionSettings;\n periods: CoreTypes.mixed.period[];\n locations: CoreTypes.mixed.location[];\n groups: CoreTypes.mixed.group[];\n teachers: CoreTypes.mixed.teacher[];\n courses: CoreTypes.mixed.course[];\n events: CoreTypes.mixed.event[];\n lockedTimes: CoreTypes.mixed.lockedTime[];\n};\n\n\nexport default (\n schedule: Schedule,\n _options: Types.options = { }\n): XLSX.WorkBook => {\n const options = _options as BaseOptions & Types.options;\n\n // a map of period.id -> period\n const periodsMap = new Map(schedule.periods.map(x => [getVertexId(x, options) as string | undefined, x as Omit<typeof x, 'id'>] as const));\n periodsMap.set(undefined, { ranges: [{ start: schedule.division.start, end: schedule.division.end }] });\n\n // a map of period.id -> inWeeks\n const inWeeksMap = new Map<string | undefined, string>();\n periodsMap.forEach((period, id) => {\n const weeks = period.ranges\n .map(x => {\n // fetch all weeks between start and end\n const start = moment.utc(x.start);\n const end = moment.utc(x.end);\n const weeks = new Array<number>();\n while (start.isBefore(end)) {\n weeks.push(start.week());\n start.add(1, 'week');\n }\n return weeks;\n })\n .flat()\n .join(', ');\n\n inWeeksMap.set(id, weeks);\n });\n\n // a map of location.id -> location\n const locationsMap = new Map(schedule.locations.map(x => [getVertexId(x, options), x] as const));\n\n // a map of group.id -> group\n const groupsMap = new Map(schedule.groups.map(x => [getVertexId(x, options), x] as const));\n\n // a map of teacher.id -> teacher\n const teachersMap = new Map(schedule.teachers.map(x => [getVertexId(x, options), x] as const));\n\n // a map of course.id -> course\n const coursesMap = new Map(schedule.courses.map(x => [getVertexId(x, options), x] as const));\n\n\n const events = (options.includeEvents ?? true ? schedule.events : [])\n .map(event => {\n // ignore parked events\n if (event.parked) return;\n if (!event.start || !event.end) return;\n\n // must have a course\n const course = event.course ? coursesMap.get(getVertexId(event.course, options)) : undefined;\n if (!course) return;\n\n const start = moment.utc(event.start);\n const end = moment.utc(event.end);\n\n const period = event.period ?? course.period ?? schedule.settings.period;\n const periodId = period ? 
getVertexId(period, options) : undefined;\n const inWeeks = inWeeksMap.get(periodId);\n\n const locations = (event.inLocations ?? []).map(x => x ? locationsMap.get(getVertexId(x, options)) : null).filter(x => !!x);\n const teachers = (event.teachers ?? course.teachers ?? []).map(x => teachersMap .get(getVertexId(x.to, options)) ).filter(x => !!x);\n const groups = (event.groups ?? course.groups ?? []).map(x => groupsMap .get(getVertexId(x.to, options)) ).filter(x => !!x);\n\n const out: OutEvent = {\n id: getVertexId(event, options),\n externalid: event.ids?.toString(),\n groupid: course.ids,\n group: course.displayName,\n dayid: getDayIndex(start),\n startTime: start.format('HHmm'),\n length: end.diff(start, 'minutes'),\n subject: course.subject,\n inweek: inWeeks,\n roomid: locations.map(x => x.ids ).join(', '),\n room: locations.map(x => x.displayName).join(', '),\n teacherid: teachers .map(x => x.ids ).join(', '),\n teacher: teachers .map(x => x.displayName).join(', '),\n classid: groups .map(x => x.ids ).join(', '),\n class: groups .map(x => x.displayName).join(', ')\n };\n return out;\n })\n .filter(x => x != null);\n\n const complementaryTimes = (options.includeComplementaryTimes ?? true ? schedule.lockedTimes : [])\n .filter(x => x.type == 'COMPLEMENTARY_TIME')\n .map(lockedTime => {\n // ignore parked events\n if (lockedTime.parked) return;\n if (!lockedTime.start || !lockedTime.end) return;\n\n const start = moment.utc(lockedTime.start);\n const end = moment.utc(lockedTime.end);\n\n const teachers = (lockedTime.coalesced ?? [])\n .filter(x => x.toModel == 'teachers')\n .map(x => teachersMap.get(getVertexId(x.to, options)))\n .filter(x => !!x);\n\n const subject = lockedTime.tags?.find(x => x.type == 'COMPLEMENTARY_TIME')?.value;\n\n // the full schedule range\n const inweek = inWeeksMap.get(undefined);\n\n const out: OutEvent = {\n id: getVertexId(lockedTime, options),\n dayid: (start.day() + 6) % 7,\n startTime: start.format('HHmm'),\n length: end.diff(start, 'minutes'),\n subject: subject,\n inweek: inweek,\n teacherid: teachers .map(x => x.ids ).join(', '),\n teacher: teachers .map(x => x.displayName).join(', '),\n };\n return out;\n })\n .filter(x => !!x);\n\n const wb = XLSX.utils.book_new();\n\n const sheet = XLSX.utils.json_to_sheet([...events, ...complementaryTimes], { header });\n XLSX.utils.book_append_sheet(wb, sheet, 'royal schedule export');\n\n return 
wb;\n};\n"],"mappings":";;;;;;;;AA4BA,MAAMA,SAAuC;CAC3C;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;;AAgBF,yBACE,UACA,WAA0B,OACR;CAClB,MAAM,UAAU;CAGhB,MAAM,aAAa,IAAI,IAAI,SAAS,QAAQ,KAAI,MAAK,CAAC,YAAY,GAAG,UAAgC;AACrG,YAAW,IAAI,QAAW,EAAE,QAAQ,CAAC;EAAE,OAAO,SAAS,SAAS;EAAO,KAAK,SAAS,SAAS;;CAG9F,MAAM,6BAAa,IAAI;AACvB,YAAW,SAAS,QAAQ,OAAO;EACjC,MAAM,QAAQ,OAAO,OAClB,KAAI,MAAK;GAER,MAAM,QAAQ,OAAO,IAAI,EAAE;GAC3B,MAAM,MAAQ,OAAO,IAAI,EAAE;GAC3B,MAAMC,UAAQ,IAAI;AAClB,UAAO,MAAM,SAAS,MAAM;AAC1B,YAAM,KAAK,MAAM;AACjB,UAAM,IAAI,GAAG;;AAEf,UAAOA;KAER,OACA,KAAK;AAER,aAAW,IAAI,IAAI;;CAIrB,MAAM,eAAe,IAAI,IAAI,SAAS,UAAU,KAAI,MAAK,CAAC,YAAY,GAAG,UAAU;CAGnF,MAAM,YAAY,IAAI,IAAI,SAAS,OAAO,KAAI,MAAK,CAAC,YAAY,GAAG,UAAU;CAG7E,MAAM,cAAc,IAAI,IAAI,SAAS,SAAS,KAAI,MAAK,CAAC,YAAY,GAAG,UAAU;CAGjF,MAAM,aAAa,IAAI,IAAI,SAAS,QAAQ,KAAI,MAAK,CAAC,YAAY,GAAG,UAAU;CAG/E,MAAM,UAAU,QAAQ,iBAAiB,OAAO,SAAS,SAAS,IAC/D,KAAI,UAAS;AAEZ,MAAI,MAAM,OAAQ;AAClB,MAAI,CAAC,MAAM,SAAS,CAAC,MAAM,IAAK;EAGhC,MAAM,SAAS,MAAM,SAAS,WAAW,IAAI,YAAY,MAAM,QAAQ,YAAY;AACnF,MAAI,CAAC,OAAQ;EAEb,MAAM,QAAQ,OAAO,IAAI,MAAM;EAC/B,MAAM,MAAQ,OAAO,IAAI,MAAM;EAE/B,MAAM,SAAS,MAAM,UAAU,OAAO,UAAU,SAAS,SAAS;EAClE,MAAM,WAAW,SAAS,YAAY,QAAQ,WAAW;EACzD,MAAM,UAAU,WAAW,IAAI;EAE/B,MAAM,aAAa,MAAM,eAA+B,IAAI,KAAI,MAAM,IAAI,aAAa,IAAI,YAAY,GAAM,YAAY,MAAM,QAAO,MAAK,CAAC,CAAC;EAC7I,MAAM,YAAa,MAAM,YAAY,OAAO,YAAY,IAAI,KAAI,MAAU,YAAa,IAAI,YAAY,EAAE,IAAI,WAAkB,QAAO,MAAK,CAAC,CAAC;EAC7I,MAAM,UAAa,MAAM,UAAY,OAAO,UAAY,IAAI,KAAI,MAAU,UAAa,IAAI,YAAY,EAAE,IAAI,WAAkB,QAAO,MAAK,CAAC,CAAC;EAE7I,MAAMC,MAAgB;GACpB,IAAY,YAAY,OAAO;GAC/B,YAAY,MAAM,KAAK;GACvB,SAAY,OAAO;GACnB,OAAY,OAAO;GACnB,OAAY,YAAY;GACxB,WAAY,MAAM,OAAO;GACzB,QAAY,IAAI,KAAK,OAAO;GAC5B,SAAY,OAAO;GACnB,QAAY;GACZ,QAAY,UAAU,KAAI,MAAK,EAAE,KAAa,KAAK;GACnD,MAAY,UAAU,KAAI,MAAK,EAAE,aAAa,KAAK;GACnD,WAAY,SAAU,KAAI,MAAK,EAAE,KAAa,KAAK;GACnD,SAAY,SAAU,KAAI,MAAK,EAAE,aAAa,KAAK;GACnD,SAAY,OAAU,KAAI,MAAK,EAAE,KAAa,KAAK;GACnD,OAAY,OAAU,KAAI,MAAK,EAAE,aAAa,KAAK;;AAErD,SAAO;IAER,QAAO,MAAK,KAAK;CAEpB,MAAM,sBAAsB,QAAQ,6BAA6B,OAAO,SAAS,cAAc,IAC5F,QAAO,MAAK,EAAE,QAAQ,sBACtB,KAAI,eAAc;AAEjB,MAAI,WAAW,OAAQ;AACvB,MAAI,CAAC,WAAW,SAAS,CAAC,WAAW,IAAK;EAE1C,MAAM,QAAQ,OAAO,IAAI,WAAW;EACpC,MAAM,MAAQ,OAAO,IAAI,WAAW;EAEpC,MAAM,YAAY,WAAW,aAAa,IACvC,QAAO,MAAK,EAAE,WAAW,YACzB,KAAI,MAAK,YAAY,IAAI,YAAY,EAAE,IAAI,WAC3C,QAAO,MAAK,CAAC,CAAC;EAEjB,MAAM,UAAU,WAAW,MAAM,MAAK,MAAK,EAAE,QAAQ,uBAAuB;EAG5E,MAAM,SAAS,WAAW,IAAI;EAE9B,MAAMA,MAAgB;GACpB,IAAW,YAAY,YAAY;GACnC,QAAY,MAAM,QAAQ,KAAK;GAC/B,WAAW,MAAM,OAAO;GACxB,QAAW,IAAI,KAAK,OAAO;GAChB;GACA;GACX,WAAW,SAAU,KAAI,MAAK,EAAE,KAAa,KAAK;GAClD,SAAW,SAAU,KAAI,MAAK,EAAE,aAAa,KAAK;;AAEpD,SAAO;IAER,QAAO,MAAK,CAAC,CAAC;CAEjB,MAAM,KAAK,KAAK,MAAM;CAEtB,MAAM,QAAQ,KAAK,MAAM,cAAc,CAAC,GAAG,QAAQ,GAAG,qBAAqB,EAAE;AAC7E,MAAK,MAAM,kBAAkB,IAAI,OAAO;AAExC,QAAO"}

package/dist/RS/to/input/collections.js
@@ -9,7 +9,7 @@ import { getDefaultInterval } from "./intervals.js";
 import { parseGroupReferences } from "./util/parse-group-references.js";
 import { parseLocationReferences } from "./util/parse-location-references.js";
 import { parseEvents } from "./events.js";
-import { groupBy, omitBy, values } from "lodash-es";
+import { groupBy, omit, omitBy, values } from "lodash-es";
 
 //#region src/RS/to/input/collections.ts
 function fromCollections(courses, settings, options, periodsMap) {
@@ -44,10 +44,7 @@ function fromCollections(courses, settings, options, periodsMap) {
 doc.overlapGroupId = course.overlapGroup ? getVertexId(course.overlapGroup, options) : void 0;
 return omitBy(doc, (x) => x == null);
 });
-const overlapping = makeChainable(collections).chain((x) => x.filter((x$1) => x$1.overlapGroupId != null), (x) => groupBy(x, (x$1) => x$1.overlapGroupId), (x) => values(x).
-delete x$1.overlapGroupId;
-return x$1;
-}))).value;
+const overlapping = makeChainable(collections).chain((x) => x.filter((x$1) => x$1.overlapGroupId != null), (x) => groupBy(x, (x$1) => x$1.overlapGroupId), (x) => values(x).flatMap((xs) => xs.map((x$1) => omit(x$1, "overlapGroupId")))).value;
 const plain = collections.filter((x) => x.overlapGroupId == null).map((x) => {
 delete x.overlapGroupId;
 return x;
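
For context on the second hunk: the previous build stripped the temporary overlapGroupId by mutating each grouped collection with delete inside the chain, while 4.0.8 imports omit from lodash-es and returns a copy instead. A small sketch of the difference, using a hypothetical collection object:

    import { omit } from 'lodash-es';

    // Hypothetical stand-in for a Types.collection carrying the temporary key.
    const doc = { id: 'course-1', overlapGroupId: 'g1' };

    // omit() returns a new object without the key, leaving the original intact,
    // whereas `delete doc.overlapGroupId` would mutate the shared object.
    const cleaned = omit(doc, 'overlapGroupId');

    console.log(cleaned); // { id: 'course-1' }
    console.log(doc);     // { id: 'course-1', overlapGroupId: 'g1' }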

package/dist/RS/to/input/collections.js.map
@@ -1 +1 @@
-
{"version":3,"file":"collections.js","names":["doc: Types.collection & { overlapGroupId?: string }","x"],"sources":["../../../../src/RS/to/input/collections.ts"],"sourcesContent":["import { groupBy, omitBy, values } from 'lodash-es';\nimport { attachLockedTimes } from './util/attach-locked-times';\nimport { parseEvents } from './events';\nimport { parseMinimumBreakLength } from './util/parse-minimum-break-length';\nimport { parseGroupReferences } from './util/parse-group-references';\nimport type { ConnectedTypes } from '../../make-connected';\nimport { getVertexId } from '../../../core/util';\nimport { getPeriodIndex, idOf } from './util/util';\nimport { parseLocationReferences } from './util/parse-location-references';\nimport { parseDays } from './util/parse-days';\nimport { parseIntervals } from './util/parse-intervals';\nimport type { Types } from '../../types';\nimport { makeChainable } from '../../../common/make-chainable';\nimport { getDefaultInterval } from './intervals';\n\nexport function fromCollections (\n courses: ConnectedTypes.course[],\n settings: ConnectedTypes.divisionSettings,\n options: Types.parsedToOptions,\n periodsMap: Map<string | undefined, number>,\n): (Types.collection[] | Types.collection)[] {\n const defaultInterval = getDefaultInterval(settings);\n\n const collections = courses\n .map(course => {\n const id = getVertexId(course, options);\n const intervals = course.intervals ?? defaultInterval;\n\n const doc: Types.collection & { overlapGroupId?: string } = {\n id,\n\n weight: course.weight,\n density: course.density,\n maxEventLengthVariance: course.eventDurationVariance,\n potentialCenter: course.centerOfAttraction ? parseFloat(course.centerOfAttraction.replace(':', '.')) : undefined,\n distributionKey: id,\n\n events: parseEvents (course.events, settings, options, periodsMap),\n dependencies: parseLocationReferences (course.locations, options),\n groups: parseGroupReferences ({ type: 'course', item: course }, options),\n intervals: options.oldFormat\n ? parseIntervals(intervals, undefined, settings)\n : idOf.intervalPairReference(intervals, undefined, options),\n days: parseDays (course.days, settings),\n minBreakLength: parseMinimumBreakLength (course.minBreakLength),\n lockedTimes: attachLockedTimes (course.lockedTimes, options),\n period: getPeriodIndex (course.period, periodsMap, options),\n };\n\n if (options.meta) {\n doc.meta = omitBy({\n color: course.color,\n ids: course.ids,\n name: course.displayName,\n }, x => x == null);\n }\n\n // temporarily attach overlap group\n doc.overlapGroupId = course.overlapGroup\n ? getVertexId(course.overlapGroup, options)\n : undefined;\n\n return omitBy(doc, x => x == null) as Types.collection & { overlapGroupId?: string };\n });\n\n const overlapping = makeChainable(collections)\n .chain(\n x => x.filter(x => x.overlapGroupId != null),\n x => groupBy(x, x => x.overlapGroupId),\n x => values(x)\n .
+
{"version":3,"file":"collections.js","names":["doc: Types.collection & { overlapGroupId?: string }","x"],"sources":["../../../../src/RS/to/input/collections.ts"],"sourcesContent":["import { groupBy, omit, omitBy, values } from 'lodash-es';\nimport { attachLockedTimes } from './util/attach-locked-times';\nimport { parseEvents } from './events';\nimport { parseMinimumBreakLength } from './util/parse-minimum-break-length';\nimport { parseGroupReferences } from './util/parse-group-references';\nimport type { ConnectedTypes } from '../../make-connected';\nimport { getVertexId } from '../../../core/util';\nimport { getPeriodIndex, idOf } from './util/util';\nimport { parseLocationReferences } from './util/parse-location-references';\nimport { parseDays } from './util/parse-days';\nimport { parseIntervals } from './util/parse-intervals';\nimport type { Types } from '../../types';\nimport { makeChainable } from '../../../common/make-chainable';\nimport { getDefaultInterval } from './intervals';\n\nexport function fromCollections (\n courses: ConnectedTypes.course[],\n settings: ConnectedTypes.divisionSettings,\n options: Types.parsedToOptions,\n periodsMap: Map<string | undefined, number>,\n): (Types.collection[] | Types.collection)[] {\n const defaultInterval = getDefaultInterval(settings);\n\n const collections = courses\n .map(course => {\n const id = getVertexId(course, options);\n const intervals = course.intervals ?? defaultInterval;\n\n const doc: Types.collection & { overlapGroupId?: string } = {\n id,\n\n weight: course.weight,\n density: course.density,\n maxEventLengthVariance: course.eventDurationVariance,\n potentialCenter: course.centerOfAttraction ? parseFloat(course.centerOfAttraction.replace(':', '.')) : undefined,\n distributionKey: id,\n\n events: parseEvents (course.events, settings, options, periodsMap),\n dependencies: parseLocationReferences (course.locations, options),\n groups: parseGroupReferences ({ type: 'course', item: course }, options),\n intervals: options.oldFormat\n ? parseIntervals(intervals, undefined, settings)\n : idOf.intervalPairReference(intervals, undefined, options),\n days: parseDays (course.days, settings),\n minBreakLength: parseMinimumBreakLength (course.minBreakLength),\n lockedTimes: attachLockedTimes (course.lockedTimes, options),\n period: getPeriodIndex (course.period, periodsMap, options),\n };\n\n if (options.meta) {\n doc.meta = omitBy({\n color: course.color,\n ids: course.ids,\n name: course.displayName,\n }, x => x == null);\n }\n\n // temporarily attach overlap group\n doc.overlapGroupId = course.overlapGroup\n ? 
getVertexId(course.overlapGroup, options)\n : undefined;\n\n return omitBy(doc, x => x == null) as Types.collection & { overlapGroupId?: string };\n });\n\n const overlapping = makeChainable(collections)\n .chain(\n x => x.filter(x => x.overlapGroupId != null),\n x => groupBy(x, x => x.overlapGroupId),\n x => values(x)\n .flatMap(xs => xs.map(x => omit(x, 'overlapGroupId') as Types.collection))\n )\n .value;\n\n const plain = collections\n .filter(x => x.overlapGroupId == null)\n .map(x => {\n delete x.overlapGroupId; // remove overlapGroupId from individual collections\n return x as Types.collection;\n });\n\n return overlapping.concat(plain);\n};"],"mappings":";;;;;;;;;;;;;;AAeA,SAAgB,gBACd,SACA,UACA,SACA,YAC2C;CAC3C,MAAM,kBAAkB,mBAAmB;CAE3C,MAAM,cAAc,QACjB,KAAI,WAAU;EACb,MAAM,KAAY,YAAY,QAAQ;EACtC,MAAM,YAAY,OAAO,aAAa;EAEtC,MAAMA,MAAsD;GAC1D;GAEA,QAAwB,OAAO;GAC/B,SAAwB,OAAO;GAC/B,wBAAwB,OAAO;GAC/B,iBAAwB,OAAO,qBAAqB,WAAW,OAAO,mBAAmB,QAAQ,KAAK,QAAQ;GAC9G,iBAAwB;GAExB,QAAc,YAA2B,OAAO,QAAQ,UAAU,SAAS;GAC3E,cAAc,wBAA2B,OAAO,WAAW;GAC3D,QAAc,qBAA2B;IAAE,MAAM;IAAU,MAAM;MAAU;GAC3E,WAAc,QAAQ,YAClB,eAAe,WAAW,QAAW,YACrC,KAAK,sBAAsB,WAAW,QAAW;GACrD,MAAgB,UAA2B,OAAO,MAAM;GACxD,gBAAgB,wBAA2B,OAAO;GAClD,aAAgB,kBAA2B,OAAO,aAAa;GAC/D,QAAgB,eAA2B,OAAO,QAAQ,YAAY;;AAGxE,MAAI,QAAQ,KACV,KAAI,OAAO,OAAO;GAChB,OAAO,OAAO;GACd,KAAO,OAAO;GACd,MAAO,OAAO;MACb,MAAK,KAAK;AAIf,MAAI,iBAAiB,OAAO,eACxB,YAAY,OAAO,cAAc,WACjC;AAEJ,SAAO,OAAO,MAAK,MAAK,KAAK;;CAGjC,MAAM,cAAc,cAAc,aAC/B,OACC,MAAK,EAAE,QAAO,QAAKC,IAAE,kBAAkB,QACvC,MAAK,QAAQ,IAAG,QAAKA,IAAE,kBACvB,MAAK,OAAO,GACT,SAAQ,OAAM,GAAG,KAAI,QAAK,KAAKA,KAAG,qBAEtC;CAEH,MAAM,QAAQ,YACX,QAAO,MAAK,EAAE,kBAAkB,MAChC,KAAI,MAAK;AACR,SAAO,EAAE;AACT,SAAO;;AAGX,QAAO,YAAY,OAAO"}

package/dist/RS/to/input/dynamic-locked-times.js
@@ -58,7 +58,7 @@ function parse(lockedTime, settings, options, defaultInterval) {
 function fromDynamicLockedTimes(lockedTimes, settings, options) {
 const defaultInterval = getDefaultInterval(settings);
 const complementaryHours = makeChainable(lockedTimes).chain((x) => x.filter((x$1) => x$1.type == "COMPLEMENTARY_TIME").map((x$1) => parse(x$1, settings, options, defaultInterval)).filter((x$1) => !!x$1), (x) => groupBy(x, (x$1) => [x$1.distributionKey].flat().toSorted().join("|")), (x) => Object.entries(x).map(([id, events]) => ({
-id
+id: `COMPLEMENTARY_TIME.${id}`,
 events
 }))).value;
 const lunches = lockedTimes.filter((x) => x.type == "LUNCH").map((lockedTime) => parse(lockedTime, settings, options, defaultInterval)).filter((x) => !!x);
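
The single-line change above gives each grouped complementary-time collection an id of the form COMPLEMENTARY_TIME.<group key> instead of the bare key produced by groupBy, presumably to namespace the generated ids. A sketch with hypothetical locked-time events and a simplified group key (in the real code the key is built from each event's distributionKey):

    import { groupBy } from 'lodash-es';

    // Hypothetical parsed locked times.
    const events = [
      { id: 'lockedtimes.1', key: 'mentoring' },
      { id: 'lockedtimes.2', key: 'mentoring' },
      { id: 'lockedtimes.3', key: 'grading' },
    ];

    const complementaryHours = Object.entries(groupBy(events, x => x.key))
      .map(([id, grouped]) => ({ id: `COMPLEMENTARY_TIME.${id}`, events: grouped }));

    console.log(complementaryHours.map(x => x.id));
    // -> [ 'COMPLEMENTARY_TIME.mentoring', 'COMPLEMENTARY_TIME.grading' ]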

package/dist/RS/to/input/dynamic-locked-times.js.map
@@ -1 +1 @@
-
{"version":3,"file":"dynamic-locked-times.js","names":["x","doc: Types.event"],"sources":["../../../../src/RS/to/input/dynamic-locked-times.ts"],"sourcesContent":["import { groupBy, omitBy, uniq } from 'lodash-es';\nimport type { ConnectedTypes } from '../../make-connected';\nimport { getDayIndex, getVertexId } from '../../../core/util';\nimport { idOf } from './util/util';\nimport { parseGroupReferences } from './util/parse-group-references';\nimport { parseIntervals } from './util/parse-intervals';\nimport type { Types } from '../../types';\nimport { makeChainable } from '../../../common/make-chainable';\nimport { toDayAndStart } from './events';\nimport { getDefaultInterval } from './intervals';\nimport type { AllowedInterval } from '../../../core/types/common/intervals';\n\nfunction parse (\n lockedTime: ConnectedTypes.lockedTime,\n settings: ConnectedTypes.divisionSettings,\n options: Types.parsedToOptions,\n defaultInterval: AllowedInterval[]\n) {\n const duration = lockedTime.duration;\n if (!duration || duration < 5) throw new Error('(RS::To::DynamicLockedTimes) Length of a locked time is less than 5 min');\n\n const dependencyReferences = makeChainable(lockedTime.coalesced ?? [])\n .chain(\n x => x\n .filter(x => x.toModel == 'locations')\n .map(x => getVertexId(x.to, options)),\n x => uniq(x),\n x => x.map(x => [x])\n )\n .value;\n\n const days = lockedTime.intervals\n ? makeChainable(lockedTime.intervals)\n .chain(\n x => x.map(x => getDayIndex(x.start)),\n x => uniq(x),\n )\n .value\n : undefined;\n const day = days?.length == 1 ? days[0] : undefined;\n\n // ignore locked times that reside on days that are not part of any coalesced's days\n if (day != null && lockedTime.coalesced) {\n const allowedByEvery = lockedTime.coalesced.every(x => !x.to.days?.length || x.to.days.some(x => x.day == day));\n if (!allowedByEvery) return;\n }\n\n // each type (tag) of complementary hours should distribute evenly\n const distributionKey = lockedTime.type == 'COMPLEMENTARY_TIME'\n ? 'COMPLEMENTARY_TIME.' + lockedTime.tags?.at(0)?.value\n : undefined;\n\n const intervals = lockedTime.intervals ?? defaultInterval;\n\n const doc: Types.event = {\n id: idOf.lockedTime(lockedTime, options),\n length: duration,\n minBreakLength: false,\n maxLengthVariance: lockedTime.durationVariance,\n groups: parseGroupReferences({ type: 'lockedTime', item: lockedTime }, options),\n dependencies: dependencyReferences,\n intervals: options.oldFormat\n ? parseIntervals(intervals, undefined, settings)\n : idOf.intervalPairReference(intervals, undefined, options),\n distributionKey,\n ...days && { days },\n ...day && { day },\n };\n\n\n ////\n //// filter based on partialScheduleOptions\n ////\n if (options.partialScheduleOptions) {\n const { includedEvents, omittedEventsHandling } = options.partialScheduleOptions;\n if (includedEvents && !includedEvents.has(doc.id)) {\n\n if (omittedEventsHandling == 'ignore') return;\n\n if (omittedEventsHandling == 'freeze') {\n // must not be parked and have a start and duration to be frozen, otherwise it's ignored\n if (lockedTime.parked || !lockedTime.start || !lockedTime.duration) return;\n\n // fix day, start and end\n Object.assign(doc, toDayAndStart(lockedTime.start));\n doc.length = lockedTime.duration;\n doc.maxLengthVariance = 0;\n\n // override intervals and days to not cause conflicts\n const numDays = settings.numDays ?? 
5;\n doc.days = Array.from({ length: numDays }, (_, i) => i);\n doc.intervals = Array.from({ length: numDays }, () => [{ beg: 0, end: 23.55 }]);\n\n // fix locations\n // > locations are always fixed for locked times\n }\n }\n }\n\n return omitBy(doc, x => x == null) as Types.event;\n}\n\nexport function fromDynamicLockedTimes (\n lockedTimes: ConnectedTypes.lockedTime[],\n settings: ConnectedTypes.divisionSettings,\n options: Types.parsedToOptions,\n): (Types.collection | Types.event)[] {\n const defaultInterval = getDefaultInterval(settings);\n\n const complementaryHours = makeChainable(lockedTimes)\n .chain(\n x => x\n .filter(x => x.type == 'COMPLEMENTARY_TIME')\n .map(x => parse(x, settings, options, defaultInterval))\n .filter(x => !!x),\n x => groupBy(x, x => [x.distributionKey].flat().toSorted().join('|')),\n x => Object.entries(x)\n .map(([id, events]) => ({ id
+
{"version":3,"file":"dynamic-locked-times.js","names":["x","doc: Types.event"],"sources":["../../../../src/RS/to/input/dynamic-locked-times.ts"],"sourcesContent":["import { groupBy, omitBy, uniq } from 'lodash-es';\nimport type { ConnectedTypes } from '../../make-connected';\nimport { getDayIndex, getVertexId } from '../../../core/util';\nimport { idOf } from './util/util';\nimport { parseGroupReferences } from './util/parse-group-references';\nimport { parseIntervals } from './util/parse-intervals';\nimport type { Types } from '../../types';\nimport { makeChainable } from '../../../common/make-chainable';\nimport { toDayAndStart } from './events';\nimport { getDefaultInterval } from './intervals';\nimport type { AllowedInterval } from '../../../core/types/common/intervals';\n\nfunction parse (\n lockedTime: ConnectedTypes.lockedTime,\n settings: ConnectedTypes.divisionSettings,\n options: Types.parsedToOptions,\n defaultInterval: AllowedInterval[]\n) {\n const duration = lockedTime.duration;\n if (!duration || duration < 5) throw new Error('(RS::To::DynamicLockedTimes) Length of a locked time is less than 5 min');\n\n const dependencyReferences = makeChainable(lockedTime.coalesced ?? [])\n .chain(\n x => x\n .filter(x => x.toModel == 'locations')\n .map(x => getVertexId(x.to, options)),\n x => uniq(x),\n x => x.map(x => [x])\n )\n .value;\n\n const days = lockedTime.intervals\n ? makeChainable(lockedTime.intervals)\n .chain(\n x => x.map(x => getDayIndex(x.start)),\n x => uniq(x),\n )\n .value\n : undefined;\n const day = days?.length == 1 ? days[0] : undefined;\n\n // ignore locked times that reside on days that are not part of any coalesced's days\n if (day != null && lockedTime.coalesced) {\n const allowedByEvery = lockedTime.coalesced.every(x => !x.to.days?.length || x.to.days.some(x => x.day == day));\n if (!allowedByEvery) return;\n }\n\n // each type (tag) of complementary hours should distribute evenly\n const distributionKey = lockedTime.type == 'COMPLEMENTARY_TIME'\n ? 'COMPLEMENTARY_TIME.' + lockedTime.tags?.at(0)?.value\n : undefined;\n\n const intervals = lockedTime.intervals ?? defaultInterval;\n\n const doc: Types.event = {\n id: idOf.lockedTime(lockedTime, options),\n length: duration,\n minBreakLength: false,\n maxLengthVariance: lockedTime.durationVariance,\n groups: parseGroupReferences({ type: 'lockedTime', item: lockedTime }, options),\n dependencies: dependencyReferences,\n intervals: options.oldFormat\n ? parseIntervals(intervals, undefined, settings)\n : idOf.intervalPairReference(intervals, undefined, options),\n distributionKey,\n ...days && { days },\n ...day && { day },\n };\n\n\n ////\n //// filter based on partialScheduleOptions\n ////\n if (options.partialScheduleOptions) {\n const { includedEvents, omittedEventsHandling } = options.partialScheduleOptions;\n if (includedEvents && !includedEvents.has(doc.id)) {\n\n if (omittedEventsHandling == 'ignore') return;\n\n if (omittedEventsHandling == 'freeze') {\n // must not be parked and have a start and duration to be frozen, otherwise it's ignored\n if (lockedTime.parked || !lockedTime.start || !lockedTime.duration) return;\n\n // fix day, start and end\n Object.assign(doc, toDayAndStart(lockedTime.start));\n doc.length = lockedTime.duration;\n doc.maxLengthVariance = 0;\n\n // override intervals and days to not cause conflicts\n const numDays = settings.numDays ?? 
5;\n doc.days = Array.from({ length: numDays }, (_, i) => i);\n doc.intervals = Array.from({ length: numDays }, () => [{ beg: 0, end: 23.55 }]);\n\n // fix locations\n // > locations are always fixed for locked times\n }\n }\n }\n\n return omitBy(doc, x => x == null) as Types.event;\n}\n\nexport function fromDynamicLockedTimes (\n lockedTimes: ConnectedTypes.lockedTime[],\n settings: ConnectedTypes.divisionSettings,\n options: Types.parsedToOptions,\n): (Types.collection | Types.event)[] {\n const defaultInterval = getDefaultInterval(settings);\n\n const complementaryHours = makeChainable(lockedTimes)\n .chain(\n x => x\n .filter(x => x.type == 'COMPLEMENTARY_TIME')\n .map(x => parse(x, settings, options, defaultInterval))\n .filter(x => !!x),\n x => groupBy(x, x => [x.distributionKey].flat().toSorted().join('|')),\n x => Object.entries(x)\n .map(([id, events]) => ({ id: `COMPLEMENTARY_TIME.${id}`, events }) as Types.collection),\n )\n .value;\n\n const lunches = lockedTimes\n .filter(x => x.type == 'LUNCH')\n .map(lockedTime => parse(lockedTime, settings, options, defaultInterval))\n .filter(x => !!x);\n\n return [...complementaryHours, ...lunches];\n};"],"mappings":";;;;;;;;;;AAYA,SAAS,MACP,YACA,UACA,SACA,iBACA;CACA,MAAM,WAAW,WAAW;AAC5B,KAAI,CAAC,YAAY,WAAW,EAAG,OAAM,IAAI,MAAM;CAE/C,MAAM,uBAAuB,cAAc,WAAW,aAAa,IAChE,OACC,MAAK,EACF,QAAO,QAAKA,IAAE,WAAW,aACzB,KAAI,QAAK,YAAYA,IAAE,IAAI,YAC9B,MAAK,KAAK,KACV,MAAK,EAAE,KAAI,QAAK,CAACA,OAElB;CAEH,MAAM,OAAO,WAAW,YACpB,cAAc,WAAW,WACxB,OACC,MAAK,EAAE,KAAI,QAAK,YAAYA,IAAE,UAC9B,MAAK,KAAK,IAEX,QACD;CACJ,MAAM,MAAM,MAAM,UAAU,IAAI,KAAK,KAAK;AAG1C,KAAI,OAAO,QAAQ,WAAW,WAAW;EACvC,MAAM,iBAAiB,WAAW,UAAU,OAAM,MAAK,CAAC,EAAE,GAAG,MAAM,UAAU,EAAE,GAAG,KAAK,MAAK,QAAKA,IAAE,OAAO;AAC1G,MAAI,CAAC,eAAgB;;CAIvB,MAAM,kBAAkB,WAAW,QAAQ,uBACvC,wBAAwB,WAAW,MAAM,GAAG,IAAI,QAChD;CAEJ,MAAM,YAAY,WAAW,aAAa;CAE1C,MAAMC,MAAmB;EACvB,IAAmB,KAAK,WAAW,YAAY;EAC/C,QAAmB;EACnB,gBAAmB;EACnB,mBAAmB,WAAW;EAC9B,QAAmB,qBAAqB;GAAE,MAAM;GAAc,MAAM;KAAc;EAClF,cAAmB;EACnB,WAAmB,QAAQ,YACvB,eAAe,WAAW,QAAW,YACrC,KAAK,sBAAsB,WAAW,QAAW;EACrD;EACA,GAAG,QAAQ,EAAE;EACb,GAAG,OAAO,EAAE;;AAOd,KAAI,QAAQ,wBAAwB;EAClC,MAAM,EAAE,gBAAgB,0BAA0B,QAAQ;AAC1D,MAAI,kBAAkB,CAAC,eAAe,IAAI,IAAI,KAAK;AAEjD,OAAI,yBAAyB,SAAU;AAEvC,OAAI,yBAAyB,UAAU;AAErC,QAAI,WAAW,UAAU,CAAC,WAAW,SAAS,CAAC,WAAW,SAAU;AAGpE,WAAO,OAAO,KAAK,cAAc,WAAW;AAC5C,QAAI,SAAS,WAAW;AACxB,QAAI,oBAAoB;IAGxB,MAAM,UAAU,SAAS,WAAW;AACpC,QAAI,OAAY,MAAM,KAAK,EAAE,QAAQ,YAAY,GAAG,MAAM;AAC1D,QAAI,YAAY,MAAM,KAAK,EAAE,QAAQ,iBAAiB,CAAC;KAAE,KAAK;KAAG,KAAK;;;;;AAQ5E,QAAO,OAAO,MAAK,MAAK,KAAK;;AAG/B,SAAgB,uBACd,aACA,UACA,SACoC;CACpC,MAAM,kBAAkB,mBAAmB;CAE3C,MAAM,qBAAqB,cAAc,aACtC,OACC,MAAK,EACF,QAAO,QAAKD,IAAE,QAAQ,sBACtB,KAAI,QAAK,MAAMA,KAAG,UAAU,SAAS,kBACrC,QAAO,QAAK,CAAC,CAACA,OACjB,MAAK,QAAQ,IAAG,QAAK,CAACA,IAAE,iBAAiB,OAAO,WAAW,KAAK,QAChE,MAAK,OAAO,QAAQ,GACjB,KAAK,CAAC,IAAI,aAAa;EAAE,IAAI,sBAAsB;EAAM;MAE7D;CAEH,MAAM,UAAU,YACb,QAAO,MAAK,EAAE,QAAQ,SACtB,KAAI,eAAc,MAAM,YAAY,UAAU,SAAS,kBACvD,QAAO,MAAK,CAAC,CAAC;AAEjB,QAAO,CAAC,GAAG,oBAAoB,GAAG"}

package/dist/RS/to/input/input.js
@@ -10,42 +10,43 @@ import { fromGroups } from "./groups.js";
 import { extractUniqueIndividuals } from "./individuals.js";
 
 //#region src/RS/to/input/input.ts
-function parseInput(
-const
-const
-const
-const
-const
-const
-const
-const
-const
-const
+function parseInput(data, options = {}) {
+const _settings = data.settings;
+const _division = data.division;
+const _groups = data.groups ?? [];
+const _teachers = data.teachers ?? [];
+const _locations = data.locations ?? [];
+const _events = data.events ?? [];
+const _lockedTimes = data.lockedTimes ?? [];
+const _courses = data.courses ?? [];
+const _persons = data.persons ?? [];
+const _periods = data.periods ?? [];
 [
-
-
-
-
-
-
+_groups,
+_teachers,
+_locations,
+_courses,
+_events,
+_lockedTimes
 ].forEach((entities) => {
 entities.forEach((entity) => {
 if (entity.intervals && entity.intervals.length == 0) entity.intervals = void 0;
 else if (entity.intervals === null) entity.intervals = void 0;
 });
 });
-const { map: periodsMap, matrix: periodsMatrix } = parsePeriods(
-const
-
-
-
-
-
-
-
+const { map: periodsMap, matrix: periodsMatrix } = parsePeriods(_periods, _division, options);
+const periods = periodsMatrix?.length ? periodsMatrix : void 0;
+const out = {
+settings: parseSettings(_settings),
+default: parseDefault(_settings, periodsMap, options),
+periods,
+intervals: options.oldFormat ? void 0 : extractUniqueIntervals(_settings, _groups, _teachers, _courses, _events, _lockedTimes, options),
+dependencies: fromLocations(_locations, _settings, options),
+groups: fromGroups(_groups, _settings, options).concat(fromTeachers(_teachers, _settings, options)),
+individuals: options.oldFormat ? void 0 : extractUniqueIndividuals(_persons, _courses, _events, _lockedTimes, options),
+events: [...fromCollections(_courses, _settings, options, periodsMap), ...fromDynamicLockedTimes(_lockedTimes, _settings, options)]
 };
-return
+return out;
 }
 
 //#endregion
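
As emitted in 4.0.8, parseInput reads each collection off the connected schedule data with a ?? [] fallback and normalises empty or null interval lists to undefined before the per-entity parsers run. A short sketch of that normalisation step with hypothetical entities:

    // Hypothetical entity shape; only the intervals field matters here.
    type Entity = { id: string; intervals?: { start: string; end: string }[] | null };

    const entities: Entity[] = [
      { id: 'a', intervals: [] },
      { id: 'b', intervals: null },
      { id: 'c', intervals: [{ start: '2024-01-01T08:00Z', end: '2024-01-01T09:00Z' }] },
    ];

    // Empty or null interval lists become undefined, matching the loop above.
    entities.forEach(entity => {
      if (entity.intervals && entity.intervals.length == 0) entity.intervals = undefined;
      else if (entity.intervals === null) entity.intervals = undefined;
    });

    console.log(entities.map(x => x.intervals)); // [ undefined, undefined, [ {...} ] ]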

package/dist/RS/to/input/input.js.map
@@ -1 +1 @@
-
{"version":3,"file":"input.js","names":["
+
{"version":3,"file":"input.js","names":["out: Types.scheduleData"],"sources":["../../../../src/RS/to/input/input.ts"],"sourcesContent":["import type { Types } from '../../types';\nimport { fromLocations } from './dependencies';\nimport { fromTeachers } from './teachers';\nimport { fromCollections } from './collections';\nimport { parseSettings } from './settings';\nimport { parseDefault } from './default';\nimport { fromDynamicLockedTimes } from './dynamic-locked-times';\nimport { parsePeriods } from './periods';\nimport type { ConnectedScheduleData } from '../../make-connected';\nimport { fromGroups } from './groups';\nimport { extractUniqueIntervals } from './intervals';\nimport { extractUniqueIndividuals } from './individuals';\n\nexport function parseInput (\n data: ConnectedScheduleData,\n options: Types.parsedToOptions = {}\n): Types.scheduleData {\n\n const _settings = data.settings;\n const _division = data.division;\n const _groups = data.groups ?? [];\n const _teachers = data.teachers ?? [];\n const _locations = data.locations ?? [];\n const _events = data.events ?? [];\n const _lockedTimes = data.lockedTimes ?? [];\n const _courses = data.courses ?? [];\n const _persons = data.persons ?? [];\n const _periods = data.periods ?? [];\n\n\n ////\n //// replace empty/null intervals with undefined\n ////\n [_groups, _teachers, _locations, _courses, _events, _lockedTimes].forEach(entities => {\n entities.forEach(entity => {\n if (entity.intervals && entity.intervals.length == 0) entity.intervals = undefined;\n else if (entity.intervals === null ) entity.intervals = undefined;\n });\n });\n\n\n const { map: periodsMap, matrix: periodsMatrix } = parsePeriods(_periods, _division, options);\n const periods = periodsMatrix?.length ? periodsMatrix : undefined; // cannot be an empty string\n\n const out: Types.scheduleData = {\n settings: parseSettings(_settings),\n default: parseDefault (_settings, periodsMap, options),\n\n periods: periods,\n\n intervals: options.oldFormat ? undefined : extractUniqueIntervals(_settings, _groups, _teachers, _courses, _events, _lockedTimes, options),\n\n dependencies: fromLocations(_locations, _settings, options),\n\n groups: fromGroups(_groups, _settings, options)\n .concat(fromTeachers(_teachers, _settings, options)),\n\n individuals: options.oldFormat ? undefined : extractUniqueIndividuals(_persons, _courses, _events, _lockedTimes, options),\n\n events: [\n ...fromCollections(_courses, _settings, options, periodsMap),\n ...fromDynamicLockedTimes(_lockedTimes, _settings, options)\n ]\n };\n return out;\n};"],"mappings":";;;;;;;;;;;;AAaA,SAAgB,WACd,MACA,UAAiC,IACb;CAEpB,MAAM,YAAe,KAAK;CAC1B,MAAM,YAAe,KAAK;CAC1B,MAAM,UAAe,KAAK,UAAe;CACzC,MAAM,YAAe,KAAK,YAAe;CACzC,MAAM,aAAe,KAAK,aAAe;CACzC,MAAM,UAAe,KAAK,UAAe;CACzC,MAAM,eAAe,KAAK,eAAe;CACzC,MAAM,WAAe,KAAK,WAAe;CACzC,MAAM,WAAe,KAAK,WAAe;CACzC,MAAM,WAAe,KAAK,WAAe;AAMzC;EAAC;EAAS;EAAW;EAAY;EAAU;EAAS;GAAc,SAAQ,aAAY;AACpF,WAAS,SAAQ,WAAU;AACzB,OAAS,OAAO,aAAa,OAAO,UAAU,UAAU,EAAG,QAAO,YAAY;YACrE,OAAO,cAAc,KAA6B,QAAO,YAAY;;;CAKlF,MAAM,EAAE,KAAK,YAAY,QAAQ,kBAAkB,aAAa,UAAU,WAAW;CACrF,MAAM,UAAU,eAAe,SAAS,gBAAgB;CAExD,MAAMA,MAA0B;EAC9B,UAAU,cAAc;EACxB,SAAU,aAAc,WAAW,YAAY;EAEtC;EAET,WAAW,QAAQ,YAAY,SAAY,uBAAuB,WAAW,SAAS,WAAW,UAAU,SAAS,cAAc;EAElI,cAAc,cAAc,YAAY,WAAW;EAEnD,QAAQ,WAAW,SAAS,WAAW,SACpC,OAAO,aAAa,WAAW,WAAW;EAE7C,aAAa,QAAQ,YAAY,SAAY,yBAAyB,UAAU,UAAU,SAAS,cAAc;EAEjH,QAAQ,CACN,GAAG,gBAAgB,UAAU,WAAW,SAAS,aACjD,GAAG,uBAAuB,cAAc,WAAW;;AAGvD,QAAO"}

package/dist/RS/to/input/util/parse-intervals.js
@@ -4,27 +4,34 @@ import { groupBy } from "lodash-es";
 import moment from "moment";
 
 //#region src/RS/to/input/util/parse-intervals.ts
+function parseDate(date) {
+if (typeof date == "string") {
+if (/^\d{1,2}:\d{2}$/.test(date)) return moment.utc(date, "HH:mm");
+return moment.utc(date);
+}
+return moment.utc(date);
+}
 function getDayLimits(intervals, settings) {
 if (!intervals) {
-const start =
-const end =
+const start = parseDate(settings.dayStart);
+const end = parseDate(settings.dayEnd);
 intervals = Array.from({ length: settings.numDays }, () => ({
 start: start.clone(),
 end: end.clone()
 }));
 } else if (intervals.length == 1) {
 const i = intervals[0];
-const start =
-const end =
+const start = parseDate(i.start);
+const end = parseDate(i.end);
 intervals = Array.from({ length: settings.numDays }, () => ({
 start: start.clone(),
 end: end.clone()
 }));
-} else throw new Error(`(RS::To::Intervals) Intervals length ${intervals.length} does not match settings.numDays ${settings.numDays}`);
+} else if (intervals.length != settings.numDays) throw new Error(`(RS::To::Intervals) Intervals length ${intervals.length} does not match settings.numDays ${settings.numDays}`);
 return intervals.map((i, day) => {
 if (!i.start || !i.end) throw new Error(`(RS::To::Intervals) Interval missing start or end for day ${day}`);
-const start = parseFloat(
-const end = parseFloat(
+const start = parseFloat(parseDate(i.start).format("HH.mm"));
+const end = parseFloat(parseDate(i.end).format("HH.mm"));
 return {
 beg: start,
 end
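
The new parseDate helper is the core of this hunk: bare "HH:mm" strings (for example a settings.dayStart such as "8:15") are parsed as a time of day, while anything else keeps going through moment's regular UTC parsing, and the result is then formatted with "HH.mm" to obtain the float day limits. A self-contained sketch of that behaviour (the real helper accepts the package's DateType; Date is used here as a simplification):

    import moment from 'moment';

    function parseDate(date: string | Date): moment.Moment {
      // Bare "HH:mm" strings are a time of day, not a full date.
      if (typeof date == 'string' && /^\d{1,2}:\d{2}$/.test(date)) {
        return moment.utc(date, 'HH:mm');
      }
      return moment.utc(date);
    }

    console.log(parseDate('8:15').format('HH.mm'));                 // "08.15"
    console.log(parseDate('2024-08-19T08:15:00Z').format('HH.mm')); // "08.15"
    console.log(parseFloat(parseDate('8:15').format('HH.mm')));     // 8.15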

package/dist/RS/to/input/util/parse-intervals.js.map
@@ -1 +1 @@
-
{"version":3,"file":"parse-intervals.js","names":["x"],"sources":["../../../../../src/RS/to/input/util/parse-intervals.ts"],"sourcesContent":["import moment from 'moment';\nimport { groupBy } from 'lodash-es';\nimport type { ConnectedTypes } from '../../../make-connected';\nimport type { AllowedInterval } from '../../../../core/types/common/intervals';\nimport { getDayIndex } from '../../../../core/util';\nimport type { Types } from '../../../types';\nimport { makeChainable } from '../../../../common/make-chainable';\n\nfunction getDayLimits (\n intervals: AllowedInterval[] | undefined,\n settings: ConnectedTypes.divisionSettings\n): { beg: number, end: number }[] {\n // remove short-hand notation (a single interval for all days)\n if (!intervals) {\n const start =
+
{"version":3,"file":"parse-intervals.js","names":["x"],"sources":["../../../../../src/RS/to/input/util/parse-intervals.ts"],"sourcesContent":["import moment from 'moment';\nimport { groupBy } from 'lodash-es';\nimport type { ConnectedTypes } from '../../../make-connected';\nimport type { AllowedInterval } from '../../../../core/types/common/intervals';\nimport { getDayIndex } from '../../../../core/util';\nimport type { Types } from '../../../types';\nimport { makeChainable } from '../../../../common/make-chainable';\nimport type { DateType } from '../../../../common/types';\n\nfunction parseDate (date: string | DateType): moment.Moment {\n if (typeof date == 'string') {\n // in case of HH:mm format, parse as UTC time on epoch date\n if (/^\\d{1,2}:\\d{2}$/.test(date)) {\n return moment.utc(date, 'HH:mm');\n }\n\n // otherwise parse as full date\n return moment.utc(date);\n }\n\n return moment.utc(date);\n}\n\nfunction getDayLimits (\n intervals: AllowedInterval[] | undefined,\n settings: ConnectedTypes.divisionSettings\n): { beg: number, end: number }[] {\n // remove short-hand notation (a single interval for all days)\n if (!intervals) {\n const start = parseDate(settings.dayStart);\n const end = parseDate(settings.dayEnd);\n intervals = Array.from({ length: settings.numDays }, () => ({ start: start.clone(), end: end.clone() }));\n }\n else if (intervals.length == 1) {\n const i = intervals[0];\n const start = parseDate(i.start);\n const end = parseDate(i.end);\n intervals = Array.from({ length: settings.numDays }, () => ({ start: start.clone(), end: end.clone() }));\n } else if (intervals.length != settings.numDays) {\n throw new Error(`(RS::To::Intervals) Intervals length ${intervals.length} does not match settings.numDays ${settings.numDays}`);\n }\n\n // the day start and end times of each day\n return intervals.map((i, day: number) => {\n if (!i.start || !i.end) throw new Error(`(RS::To::Intervals) Interval missing start or end for day ${day}`);\n const start = parseFloat(parseDate(i.start).format('HH.mm'));\n const end = parseFloat(parseDate(i.end ).format('HH.mm'));\n return { beg: start, end: end };\n });\n}\n\n\n/**\n * @deprecated The old way of parsing intervals\n */\nexport function parseIntervals (\n intervals: AllowedInterval[] | undefined,\n rootInterval: ConnectedTypes.rootInterval | undefined,\n settings: ConnectedTypes.divisionSettings\n): Types.interval[][] | undefined {\n // if only intervals are provided\n if (intervals && !rootInterval) return getDayLimits(intervals, settings).map(i => [i]);\n\n // if root intervals are present\n if (rootInterval) {\n const dayLimits = getDayLimits(intervals, settings);\n return makeChainable(rootInterval.intervals)\n .chain(\n x => groupBy(x, x => getDayIndex(x.start)),\n x => Object.entries(x)\n .map(([day, xs]) => {\n const limit = dayLimits.at(parseInt(day));\n if (!limit) throw new Error(`(RS::To::Intervals) Day ${day} not found in dayStartAndEnds`);\n\n // remove all block intervals that lay outside the day start and end\n return xs\n .map(x => ({\n beg: parseFloat(moment.utc(x.start).format('HH.mm')),\n end: parseFloat(moment.utc(x.end ).format('HH.mm')),\n binary: true\n } satisfies Types.interval))\n .filter(x => x.beg >= limit.beg && x.end <= limit.end);\n })\n )\n .value;\n }\n\n // only remaining case here is \"!intervals && !rootInterval\"\n 
return;\n};\n"],"mappings":";;;;;;AASA,SAAS,UAAW,MAAwC;AAC1D,KAAI,OAAO,QAAQ,UAAU;AAE3B,MAAI,kBAAkB,KAAK,MACzB,QAAO,OAAO,IAAI,MAAM;AAI1B,SAAO,OAAO,IAAI;;AAGpB,QAAO,OAAO,IAAI;;AAGpB,SAAS,aACP,WACA,UACgC;AAEhC,KAAI,CAAC,WAAW;EACd,MAAM,QAAQ,UAAU,SAAS;EACjC,MAAM,MAAQ,UAAU,SAAS;AACjC,cAAY,MAAM,KAAK,EAAE,QAAQ,SAAS,kBAAkB;GAAE,OAAO,MAAM;GAAS,KAAK,IAAI;;YAEtF,UAAU,UAAU,GAAG;EAC9B,MAAM,IAAI,UAAU;EACpB,MAAM,QAAQ,UAAU,EAAE;EAC1B,MAAM,MAAQ,UAAU,EAAE;AAC1B,cAAY,MAAM,KAAK,EAAE,QAAQ,SAAS,kBAAkB;GAAE,OAAO,MAAM;GAAS,KAAK,IAAI;;YACpF,UAAU,UAAU,SAAS,QACtC,OAAM,IAAI,MAAM,wCAAwC,UAAU,OAAO,mCAAmC,SAAS;AAIvH,QAAO,UAAU,KAAK,GAAG,QAAgB;AACvC,MAAI,CAAC,EAAE,SAAS,CAAC,EAAE,IAAK,OAAM,IAAI,MAAM,6DAA6D;EACrG,MAAM,QAAQ,WAAW,UAAU,EAAE,OAAO,OAAO;EACnD,MAAM,MAAQ,WAAW,UAAU,EAAE,KAAO,OAAO;AACnD,SAAO;GAAE,KAAK;GAAY;;;;;;;AAQ9B,SAAgB,eACd,WACA,cACA,UACgC;AAEhC,KAAI,aAAa,CAAC,aAAc,QAAO,aAAa,WAAW,UAAU,KAAI,MAAK,CAAC;AAGnF,KAAI,cAAc;EAChB,MAAM,YAAY,aAAa,WAAW;AAC1C,SAAO,cAAc,aAAa,WAC/B,OACC,MAAK,QAAQ,IAAG,QAAK,YAAYA,IAAE,UACnC,MAAK,OAAO,QAAQ,GACjB,KAAK,CAAC,KAAK,QAAQ;GAClB,MAAM,QAAQ,UAAU,GAAG,SAAS;AACpC,OAAI,CAAC,MAAO,OAAM,IAAI,MAAM,2BAA2B,IAAI;AAG3D,UAAO,GACJ,KAAI,SAAM;IACT,KAAQ,WAAW,OAAO,IAAIA,IAAE,OAAO,OAAO;IAC9C,KAAQ,WAAW,OAAO,IAAIA,IAAE,KAAO,OAAO;IAC9C,QAAQ;OAET,QAAO,QAAKA,IAAE,OAAO,MAAM,OAAOA,IAAE,OAAO,MAAM;MAGzD"}

package/dist/RS/to/input/util/parse-location-references.js
@@ -15,7 +15,7 @@ function parseLocationReferences(references, options) {
 return includedLocations.has(x$1.dependency);
 }), (x) => groupBy(x, (x$1) => x$1.groupIndex), (x) => Object.values(x).map((xs) => xs.map((x$1) => ({
 dependency: x$1.dependency,
-rank: x$1.rank
+...x$1.rank != null && { rank: x$1.rank }
 })))).value;
 }
 function parseSelectedLocations(event, options) {
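
The one-line change above switches from always emitting a rank key (possibly set to undefined) to a conditional object spread, so a nullish rank simply leaves the key off the dependency reference. A minimal sketch of the pattern with hypothetical dependency ids:

    function toReference(x: { dependency: string; rank?: number | null }) {
      return {
        dependency: x.dependency,
        // Spreading `false` is a no-op, so the key only appears when rank is set.
        ...x.rank != null && { rank: x.rank },
      };
    }

    console.log(toReference({ dependency: 'locations.12', rank: 1 }));
    // -> { dependency: 'locations.12', rank: 1 }
    console.log(toReference({ dependency: 'locations.13', rank: null }));
    // -> { dependency: 'locations.13' }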

package/dist/RS/to/input/util/parse-location-references.js.map
@@ -1 +1 @@
-
{"version":3,"file":"parse-location-references.js","names":["x"],"sources":["../../../../../src/RS/to/input/util/parse-location-references.ts"],"sourcesContent":["import { groupBy } from 'lodash-es';\nimport type { ConnectedTypes } from '../../../make-connected';\nimport type { Types } from '../../../types';\nimport { getVertexId } from '../../../../core/util';\nimport type { AvailableLocation } from '../../../../core/types/common';\nimport { makeChainable } from '../../../../common/make-chainable';\n\nexport function parseLocationReferences (\n references: AvailableLocation<ConnectedTypes.location>[] | null | undefined,\n options: Types.parsedToOptions\n) {\n if (!references) return;\n\n return makeChainable(references)\n .chain(\n x => x\n .map(({ locations, groupIndex, rank }) => ({\n dependency: getVertexId(locations[0], options),\n rank: rank,\n groupIndex: groupIndex\n }))\n .filter(x => {\n // filter location references based on partial schedule options\n const includedLocations = options.partialScheduleOptions?.includedLocations;\n if (!includedLocations) return true;\n return includedLocations.has(x.dependency);\n }),\n x => groupBy(x, x => x.groupIndex),\n x => Object.values(x)\n .map(xs => xs.map(x => ({\n dependency: x.dependency,\n rank:
+
{"version":3,"file":"parse-location-references.js","names":["x"],"sources":["../../../../../src/RS/to/input/util/parse-location-references.ts"],"sourcesContent":["import { groupBy } from 'lodash-es';\nimport type { ConnectedTypes } from '../../../make-connected';\nimport type { Types } from '../../../types';\nimport { getVertexId } from '../../../../core/util';\nimport type { AvailableLocation } from '../../../../core/types/common';\nimport { makeChainable } from '../../../../common/make-chainable';\n\nexport function parseLocationReferences (\n references: AvailableLocation<ConnectedTypes.location>[] | null | undefined,\n options: Types.parsedToOptions\n) {\n if (!references) return;\n\n return makeChainable(references)\n .chain(\n x => x\n .map(({ locations, groupIndex, rank }) => ({\n dependency: getVertexId(locations[0], options),\n rank: rank,\n groupIndex: groupIndex\n }))\n .filter(x => {\n // filter location references based on partial schedule options\n const includedLocations = options.partialScheduleOptions?.includedLocations;\n if (!includedLocations) return true;\n return includedLocations.has(x.dependency);\n }),\n x => groupBy(x, x => x.groupIndex),\n x => Object.values(x)\n .map(xs => xs.map(x => ({\n dependency: x.dependency,\n ...x.rank != null && { rank: x.rank },\n }) as Types.availableDependency))\n )\n .value;\n}\n\nexport function parseSelectedLocations (event: ConnectedTypes.event, options: Types.parsedToOptions) {\n\n const locationsRef = event.locations ?? event.course?.locations;\n const dependencies = parseLocationReferences(locationsRef, options);\n const inLocations = event.inLocations ?? [];\n\n // simply use inLocations if they match the number of dependencies\n if (inLocations.length === dependencies?.length) {\n return inLocations.map(x => x ? getVertexId(x, options) : null);\n }\n\n // otherwise, fill inLocations with nulls to match the number of dependencies\n return inLocations\n .map(x => x ? getVertexId(x, options) : null)\n .concat(Array\n .from({ length: Math.max((dependencies?.length ?? 0) - inLocations.length, 0) })\n .map(() => null as string | null)\n );\n}"],"mappings":";;;;;AAOA,SAAgB,wBACd,YACA,SACA;AACA,KAAI,CAAC,WAAY;AAEjB,QAAO,cAAc,YAClB,OACC,MAAK,EACF,KAAK,EAAE,WAAW,YAAY,YAAY;EACzC,YAAY,YAAY,UAAU,IAAI;EAC1B;EACA;KAEb,QAAO,QAAK;EAEX,MAAM,oBAAoB,QAAQ,wBAAwB;AAC1D,MAAI,CAAC,kBAAmB,QAAO;AAC/B,SAAO,kBAAkB,IAAIA,IAAE;MAEnC,MAAK,QAAQ,IAAG,QAAKA,IAAE,cACvB,MAAK,OAAO,OAAO,GAChB,KAAI,OAAM,GAAG,KAAI,SAAM;EACtB,YAAYA,IAAE;EACd,GAAGA,IAAE,QAAQ,QAAQ,EAAE,MAAMA,IAAE;OAGpC;;AAGL,SAAgB,uBAAwB,OAA6B,SAAgC;CAEnG,MAAM,eAAe,MAAM,aAAa,MAAM,QAAQ;CACtD,MAAM,eAAe,wBAAwB,cAAc;CAC3D,MAAM,cAAe,MAAM,eAAe;AAG1C,KAAI,YAAY,WAAW,cAAc,OACvC,QAAO,YAAY,KAAI,MAAM,IAAI,YAAY,GAAG,WAAW;AAI7D,QAAO,YACJ,KAAI,MAAM,IAAI,YAAY,GAAG,WAAW,MACxC,OAAO,MACL,KAAK,EAAE,QAAQ,KAAK,KAAK,cAAc,UAAU,KAAK,YAAY,QAAQ,MAC1E,UAAU"}

package/dist/RS/to/input/util/util.js
@@ -11,8 +11,7 @@ function toTimeFloat(str) {
 return parseFloat(str.replace(":", "."));
 }
 function getPeriodIndex(period, periodsMap, options) {
-
-const id = getVertexId(period, options);
+const id = period ? getVertexId(period, options) : void 0;
 const periodIndex = periodsMap.get(id);
 if (periodIndex === void 0) throw new Error(`(RS::To::getPeriodIndex) Period "${id}" is not in periodsMap`);
 return periodIndex;
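
getPeriodIndex now guards against a missing period before calling getVertexId, falling back to the undefined key of periodsMap. A sketch of the lookup with a hypothetical map whose undefined entry is assumed to hold the default period index (the real function takes a period vertex and resolves its id via getVertexId(period, options); a plain id is used here):

    const periodsMap = new Map<string | undefined, number>([
      [undefined, 0],
      ['periods.7', 1],
    ]);

    function getPeriodIndex(periodId: string | null | undefined): number {
      // A missing period resolves to the map entry registered under undefined.
      const id = periodId ?? undefined;
      const periodIndex = periodsMap.get(id);
      if (periodIndex === undefined) throw new Error(`(RS::To::getPeriodIndex) Period "${id}" is not in periodsMap`);
      return periodIndex;
    }

    console.log(getPeriodIndex('periods.7')); // 1
    console.log(getPeriodIndex(null));        // 0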

package/dist/RS/to/input/util/util.js.map
@@ -1 +1 @@
-
{"version":3,"file":"util.js","names":["person","group","teacher","event","lockedTime"],"sources":["../../../../../src/RS/to/input/util/util.ts"],"sourcesContent":["import type { ConnectedTypes } from '../../../make-connected';\nimport type { Types } from '../../../types';\nimport type { Collection, GroupReference } from '../../../../core/types/common';\nimport { getVertexId } from '../../../../core/util';\nimport type { AllowedInterval } from '../../../../core/types/common/intervals';\n\nexport function min2hrs (min: number | undefined | null) {\n return min ? min / 60 : undefined;\n}\n\n/**\n * Converts a time string in the format \"HH:MM\" to a float representation, e.g., \"12:30\" becomes 12.3.\n */\nexport function toTimeFloat (str: string) {\n return parseFloat(str.replace(':', '.'));\n}\n\nexport function getPeriodIndex (\n period: ConnectedTypes.period | undefined | null,\n periodsMap: Map<string | undefined, number>,\n options: Types.parsedToOptions\n): number | undefined {\n
+
{"version":3,"file":"util.js","names":["person","group","teacher","event","lockedTime"],"sources":["../../../../../src/RS/to/input/util/util.ts"],"sourcesContent":["import type { ConnectedTypes } from '../../../make-connected';\nimport type { Types } from '../../../types';\nimport type { Collection, GroupReference } from '../../../../core/types/common';\nimport { getVertexId } from '../../../../core/util';\nimport type { AllowedInterval } from '../../../../core/types/common/intervals';\n\nexport function min2hrs (min: number | undefined | null) {\n return min ? min / 60 : undefined;\n}\n\n/**\n * Converts a time string in the format \"HH:MM\" to a float representation, e.g., \"12:30\" becomes 12.3.\n */\nexport function toTimeFloat (str: string) {\n return parseFloat(str.replace(':', '.'));\n}\n\nexport function getPeriodIndex (\n period: ConnectedTypes.period | undefined | null,\n periodsMap: Map<string | undefined, number>,\n options: Types.parsedToOptions\n): number | undefined {\n const id = period ? getVertexId(period, options) : undefined;\n const periodIndex = periodsMap.get(id);\n if (periodIndex === undefined) throw new Error(`(RS::To::getPeriodIndex) Period \"${id}\" is not in periodsMap`);\n return periodIndex;\n}\n\nexport const COLLECTION_ID = {\n persons: 'persons',\n groups: 'groups',\n teachers: 'teachers',\n events: 'events',\n lockedTimes: 'lockedtimes',\n} satisfies Partial<Record<Collection, string>>;\n\n/**\n * returns a combined id for the vertex or edge by combining the type(s) and id(s).\n */\nexport namespace idOf {\n /** `persons.id` */\n export function person (\n person: ConnectedTypes.person | string,\n options: Types.parsedToOptions\n ): string {\n return `${COLLECTION_ID.persons}.${ getVertexId(person, options) }`;\n }\n\n /** `groups.id` */\n export function group (\n group: ConnectedTypes.group,\n options: Types.parsedToOptions\n ): string {\n return `${COLLECTION_ID.groups}.${ getVertexId(group, options) }`;\n }\n\n /** `teachers.id` */\n export function teacher (\n teacher: ConnectedTypes.teacher,\n options: Types.parsedToOptions\n ): string {\n return `${COLLECTION_ID.teachers}.${ getVertexId(teacher, options) }`;\n }\n\n /** `events.id` */\n export function event (\n event: ConnectedTypes.event,\n options: Types.parsedToOptions\n ): string {\n return `${COLLECTION_ID.events}.${ getVertexId(event, options) }`;\n }\n\n /** `lockedtimes.id` */\n export function lockedTime (\n lockedTime: ConnectedTypes.lockedTime,\n options: Types.parsedToOptions\n ): string {\n return `${COLLECTION_ID.lockedTimes}.${ getVertexId(lockedTime, options) }`;\n }\n\n /** `groups.id<.exclude.id1.id2...>` */\n export function groupReference (\n group: GroupReference<ConnectedTypes.group, ConnectedTypes.person>,\n options: Types.parsedToOptions\n ): string {\n return `${COLLECTION_ID.groups}.${ getVertexId(group.to, options) }`\n + (group.exclude?.length\n ? '.exclude.' 
+ group.exclude\n .map(x => getVertexId(x, options))\n .sort()\n .join('.')\n : '');\n }\n\n /** `<rootIntervalsId&>JSON.stringify(intervals)>` */\n export function intervalPairReference (\n interval: AllowedInterval[] | undefined,\n rootInterval: ConnectedTypes.rootInterval | undefined,\n options: Types.parsedToOptions\n ): string {\n // replace null with undefined\n if (interval === null) interval = undefined;\n if (rootInterval === null) rootInterval = undefined;\n\n // replace empty intervals with undefined\n if (interval && interval .length == 0) interval = undefined;\n if (rootInterval && rootInterval.intervals.length == 0) rootInterval = undefined;\n\n const out = (rootInterval ? getVertexId(rootInterval, options) + '&' : '') + JSON.stringify(interval);\n return out;\n }\n}\n"],"mappings":";;;AAMA,SAAgB,QAAS,KAAgC;AACvD,QAAO,MAAM,MAAM,KAAK;;;;;AAM1B,SAAgB,YAAa,KAAa;AACxC,QAAO,WAAW,IAAI,QAAQ,KAAK;;AAGrC,SAAgB,eACd,QACA,YACA,SACoB;CACpB,MAAM,KAAK,SAAS,YAAY,QAAQ,WAAW;CACnD,MAAM,cAAc,WAAW,IAAI;AACnC,KAAI,gBAAgB,OAAW,OAAM,IAAI,MAAM,oCAAoC,GAAG;AACtF,QAAO;;AAGT,MAAa,gBAAgB;CAC3B,SAAa;CACb,QAAa;CACb,UAAa;CACb,QAAa;CACb,aAAa;;;;CAQN,SAAS,OACd,UACA,SACQ;AACR,SAAO,GAAG,cAAc,QAAQ,GAAI,YAAYA,UAAQ;;;CAInD,SAAS,MACd,SACA,SACQ;AACR,SAAO,GAAG,cAAc,OAAO,GAAI,YAAYC,SAAO;;;CAIjD,SAAS,QACd,WACA,SACQ;AACR,SAAO,GAAG,cAAc,SAAS,GAAI,YAAYC,WAAS;;;CAIrD,SAAS,MACd,SACA,SACQ;AACR,SAAO,GAAG,cAAc,OAAO,GAAI,YAAYC,SAAO;;;CAIjD,SAAS,WACd,cACA,SACQ;AACR,SAAO,GAAG,cAAc,YAAY,GAAI,YAAYC,cAAY;;;CAI3D,SAAS,eACd,SACA,SACQ;AACR,SAAO,GAAG,cAAc,OAAO,GAAI,YAAYH,QAAM,IAAI,cACpDA,QAAM,SAAS,SACd,cAAcA,QAAM,QACnB,KAAI,MAAK,YAAY,GAAG,UACxB,OACA,KAAK,OACN;;;CAID,SAAS,sBACd,UACA,cACA,SACQ;AAER,MAAI,aAAiB,KAAM,YAAe;AAC1C,MAAI,iBAAiB,KAAM,gBAAe;AAG1C,MAAI,YAAgB,SAAuB,UAAU,EAAG,YAAe;AACvE,MAAI,gBAAgB,aAAa,UAAU,UAAU,EAAG,gBAAe;EAEvE,MAAM,OAAO,eAAe,YAAY,cAAc,WAAW,MAAM,MAAM,KAAK,UAAU;AAC5F,SAAO"}