@mrtdown/core 2.0.0-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (182) hide show
  1. package/README.md +107 -0
  2. package/dist/constants.d.ts +10 -0
  3. package/dist/constants.js +11 -0
  4. package/dist/constants.js.map +1 -0
  5. package/dist/helpers/calculateDurationWithinServiceHours.d.ts +2 -0
  6. package/dist/helpers/calculateDurationWithinServiceHours.js +13 -0
  7. package/dist/helpers/calculateDurationWithinServiceHours.js.map +1 -0
  8. package/dist/helpers/calculateDurationWithinServiceHours.test.d.ts +1 -0
  9. package/dist/helpers/calculateDurationWithinServiceHours.test.js +83 -0
  10. package/dist/helpers/calculateDurationWithinServiceHours.test.js.map +1 -0
  11. package/dist/helpers/computeImpactFromEvidenceClaims.d.ts +21 -0
  12. package/dist/helpers/computeImpactFromEvidenceClaims.js +293 -0
  13. package/dist/helpers/computeImpactFromEvidenceClaims.js.map +1 -0
  14. package/dist/helpers/computeImpactFromEvidenceClaims.test.d.ts +1 -0
  15. package/dist/helpers/computeImpactFromEvidenceClaims.test.js +544 -0
  16. package/dist/helpers/computeImpactFromEvidenceClaims.test.js.map +1 -0
  17. package/dist/helpers/computeStartOfDaysWithinInterval.d.ts +2 -0
  18. package/dist/helpers/computeStartOfDaysWithinInterval.js +15 -0
  19. package/dist/helpers/computeStartOfDaysWithinInterval.js.map +1 -0
  20. package/dist/helpers/computeStartOfDaysWithinInterval.test.d.ts +1 -0
  21. package/dist/helpers/computeStartOfDaysWithinInterval.test.js +126 -0
  22. package/dist/helpers/computeStartOfDaysWithinInterval.test.js.map +1 -0
  23. package/dist/helpers/estimateOpenAICost.d.ts +40 -0
  24. package/dist/helpers/estimateOpenAICost.js +55 -0
  25. package/dist/helpers/estimateOpenAICost.js.map +1 -0
  26. package/dist/helpers/keyForAffectedEntity.d.ts +7 -0
  27. package/dist/helpers/keyForAffectedEntity.js +14 -0
  28. package/dist/helpers/keyForAffectedEntity.js.map +1 -0
  29. package/dist/helpers/normalizeRecurringPeriod.d.ts +7 -0
  30. package/dist/helpers/normalizeRecurringPeriod.js +118 -0
  31. package/dist/helpers/normalizeRecurringPeriod.js.map +1 -0
  32. package/dist/helpers/normalizeRecurringPeriod.test.d.ts +1 -0
  33. package/dist/helpers/normalizeRecurringPeriod.test.js +93 -0
  34. package/dist/helpers/normalizeRecurringPeriod.test.js.map +1 -0
  35. package/dist/helpers/resolvePeriods.d.ts +224 -0
  36. package/dist/helpers/resolvePeriods.js +207 -0
  37. package/dist/helpers/resolvePeriods.js.map +1 -0
  38. package/dist/helpers/resolvePeriods.test.d.ts +1 -0
  39. package/dist/helpers/resolvePeriods.test.js +239 -0
  40. package/dist/helpers/resolvePeriods.test.js.map +1 -0
  41. package/dist/helpers/splitIntervalByServiceHours.d.ts +2 -0
  42. package/dist/helpers/splitIntervalByServiceHours.js +30 -0
  43. package/dist/helpers/splitIntervalByServiceHours.js.map +1 -0
  44. package/dist/helpers/splitIntervalByServiceHours.test.d.ts +1 -0
  45. package/dist/helpers/splitIntervalByServiceHours.test.js +152 -0
  46. package/dist/helpers/splitIntervalByServiceHours.test.js.map +1 -0
  47. package/dist/helpers/sumIntervalDuration.d.ts +2 -0
  48. package/dist/helpers/sumIntervalDuration.js +9 -0
  49. package/dist/helpers/sumIntervalDuration.js.map +1 -0
  50. package/dist/index.d.ts +18 -0
  51. package/dist/index.js +19 -0
  52. package/dist/index.js.map +1 -0
  53. package/dist/repo/MRTDownRepository.d.ts +23 -0
  54. package/dist/repo/MRTDownRepository.js +28 -0
  55. package/dist/repo/MRTDownRepository.js.map +1 -0
  56. package/dist/repo/common/FileStore.d.ts +12 -0
  57. package/dist/repo/common/FileStore.js +27 -0
  58. package/dist/repo/common/FileStore.js.map +1 -0
  59. package/dist/repo/common/StandardRepository.d.ts +32 -0
  60. package/dist/repo/common/StandardRepository.js +58 -0
  61. package/dist/repo/common/StandardRepository.js.map +1 -0
  62. package/dist/repo/common/store.d.ts +29 -0
  63. package/dist/repo/common/store.js +2 -0
  64. package/dist/repo/common/store.js.map +1 -0
  65. package/dist/repo/issue/IssueRepository.d.ts +36 -0
  66. package/dist/repo/issue/IssueRepository.js +177 -0
  67. package/dist/repo/issue/IssueRepository.js.map +1 -0
  68. package/dist/repo/issue/helpers/deriveCurrentState.d.ts +51 -0
  69. package/dist/repo/issue/helpers/deriveCurrentState.js +113 -0
  70. package/dist/repo/issue/helpers/deriveCurrentState.js.map +1 -0
  71. package/dist/repo/issue/helpers/deriveCurrentState.test.d.ts +1 -0
  72. package/dist/repo/issue/helpers/deriveCurrentState.test.js +477 -0
  73. package/dist/repo/issue/helpers/deriveCurrentState.test.js.map +1 -0
  74. package/dist/repo/landmark/LandmarkRepository.d.ts +7 -0
  75. package/dist/repo/landmark/LandmarkRepository.js +12 -0
  76. package/dist/repo/landmark/LandmarkRepository.js.map +1 -0
  77. package/dist/repo/line/LineRepository.d.ts +13 -0
  78. package/dist/repo/line/LineRepository.js +32 -0
  79. package/dist/repo/line/LineRepository.js.map +1 -0
  80. package/dist/repo/operator/OperatorRepository.d.ts +7 -0
  81. package/dist/repo/operator/OperatorRepository.js +12 -0
  82. package/dist/repo/operator/OperatorRepository.js.map +1 -0
  83. package/dist/repo/service/ServiceRepository.d.ts +19 -0
  84. package/dist/repo/service/ServiceRepository.js +39 -0
  85. package/dist/repo/service/ServiceRepository.js.map +1 -0
  86. package/dist/repo/station/StationRepository.d.ts +13 -0
  87. package/dist/repo/station/StationRepository.js +30 -0
  88. package/dist/repo/station/StationRepository.js.map +1 -0
  89. package/dist/repo/town/TownRepository.d.ts +7 -0
  90. package/dist/repo/town/TownRepository.js +12 -0
  91. package/dist/repo/town/TownRepository.js.map +1 -0
  92. package/dist/schema/Landmark.d.ts +11 -0
  93. package/dist/schema/Landmark.js +7 -0
  94. package/dist/schema/Landmark.js.map +1 -0
  95. package/dist/schema/Line.d.ts +58 -0
  96. package/dist/schema/Line.js +35 -0
  97. package/dist/schema/Line.js.map +1 -0
  98. package/dist/schema/Operator.d.ts +16 -0
  99. package/dist/schema/Operator.js +12 -0
  100. package/dist/schema/Operator.js.map +1 -0
  101. package/dist/schema/Service.d.ts +55 -0
  102. package/dist/schema/Service.js +24 -0
  103. package/dist/schema/Service.js.map +1 -0
  104. package/dist/schema/Station.d.ts +29 -0
  105. package/dist/schema/Station.js +25 -0
  106. package/dist/schema/Station.js.map +1 -0
  107. package/dist/schema/Town.d.ts +11 -0
  108. package/dist/schema/Town.js +7 -0
  109. package/dist/schema/Town.js.map +1 -0
  110. package/dist/schema/common.d.ts +24 -0
  111. package/dist/schema/common.js +23 -0
  112. package/dist/schema/common.js.map +1 -0
  113. package/dist/schema/issue/bundle.d.ts +239 -0
  114. package/dist/schema/issue/bundle.js +11 -0
  115. package/dist/schema/issue/bundle.js.map +1 -0
  116. package/dist/schema/issue/cause.d.ts +51 -0
  117. package/dist/schema/issue/cause.js +30 -0
  118. package/dist/schema/issue/cause.js.map +1 -0
  119. package/dist/schema/issue/claim.d.ts +149 -0
  120. package/dist/schema/issue/claim.js +36 -0
  121. package/dist/schema/issue/claim.js.map +1 -0
  122. package/dist/schema/issue/entity.d.ts +176 -0
  123. package/dist/schema/issue/entity.js +35 -0
  124. package/dist/schema/issue/entity.js.map +1 -0
  125. package/dist/schema/issue/evidence.d.ts +124 -0
  126. package/dist/schema/issue/evidence.js +30 -0
  127. package/dist/schema/issue/evidence.js.map +1 -0
  128. package/dist/schema/issue/facilityEffect.d.ts +15 -0
  129. package/dist/schema/issue/facilityEffect.js +12 -0
  130. package/dist/schema/issue/facilityEffect.js.map +1 -0
  131. package/dist/schema/issue/id.d.ts +3 -0
  132. package/dist/schema/issue/id.js +6 -0
  133. package/dist/schema/issue/id.js.map +1 -0
  134. package/dist/schema/issue/impactEvent.d.ts +373 -0
  135. package/dist/schema/issue/impactEvent.js +43 -0
  136. package/dist/schema/issue/impactEvent.js.map +1 -0
  137. package/dist/schema/issue/issue.d.ts +19 -0
  138. package/dist/schema/issue/issue.js +13 -0
  139. package/dist/schema/issue/issue.js.map +1 -0
  140. package/dist/schema/issue/issueType.d.ts +7 -0
  141. package/dist/schema/issue/issueType.js +3 -0
  142. package/dist/schema/issue/issueType.js.map +1 -0
  143. package/dist/schema/issue/period.d.ts +72 -0
  144. package/dist/schema/issue/period.js +32 -0
  145. package/dist/schema/issue/period.js.map +1 -0
  146. package/dist/schema/issue/serviceEffect.d.ts +29 -0
  147. package/dist/schema/issue/serviceEffect.js +21 -0
  148. package/dist/schema/issue/serviceEffect.js.map +1 -0
  149. package/dist/schema/issue/serviceScope.d.ts +38 -0
  150. package/dist/schema/issue/serviceScope.js +30 -0
  151. package/dist/schema/issue/serviceScope.js.map +1 -0
  152. package/dist/util/assert.d.ts +1 -0
  153. package/dist/util/assert.js +6 -0
  154. package/dist/util/assert.js.map +1 -0
  155. package/dist/util/ingestContent/helpers/getSlugDateTimeFromClaims.d.ts +7 -0
  156. package/dist/util/ingestContent/helpers/getSlugDateTimeFromClaims.js +24 -0
  157. package/dist/util/ingestContent/helpers/getSlugDateTimeFromClaims.js.map +1 -0
  158. package/dist/util/ingestContent/index.d.ts +12 -0
  159. package/dist/util/ingestContent/index.js +171 -0
  160. package/dist/util/ingestContent/index.js.map +1 -0
  161. package/dist/util/ingestContent/types.d.ts +32 -0
  162. package/dist/util/ingestContent/types.js +2 -0
  163. package/dist/util/ingestContent/types.js.map +1 -0
  164. package/dist/write/MRTDownWriter.d.ts +27 -0
  165. package/dist/write/MRTDownWriter.js +27 -0
  166. package/dist/write/MRTDownWriter.js.map +1 -0
  167. package/dist/write/common/FileWriteStore.d.ts +13 -0
  168. package/dist/write/common/FileWriteStore.js +31 -0
  169. package/dist/write/common/FileWriteStore.js.map +1 -0
  170. package/dist/write/common/StandardWriter.d.ts +14 -0
  171. package/dist/write/common/StandardWriter.js +17 -0
  172. package/dist/write/common/StandardWriter.js.map +1 -0
  173. package/dist/write/common/store.d.ts +32 -0
  174. package/dist/write/common/store.js +2 -0
  175. package/dist/write/common/store.js.map +1 -0
  176. package/dist/write/id/IdGenerator.d.ts +18 -0
  177. package/dist/write/id/IdGenerator.js +23 -0
  178. package/dist/write/id/IdGenerator.js.map +1 -0
  179. package/dist/write/issue/IssueWriter.d.ts +12 -0
  180. package/dist/write/issue/IssueWriter.js +33 -0
  181. package/dist/write/issue/IssueWriter.js.map +1 -0
  182. package/package.json +80 -0
@@ -0,0 +1,38 @@
import z from 'zod';
/**
 * Service Whole (service)
 *
 * Scope marker meaning the entire service is in scope.
 */
export declare const ServiceScopeWholeSchema: z.ZodObject<{
    type: z.ZodLiteral<"service.whole">;
}, z.z.core.$strip>;
export type ServiceScopeWhole = z.infer<typeof ServiceScopeWholeSchema>;
/**
 * Service segment (service-level)
 *
 * This should also be used when representing an entire service.
 *
 * NOTE(review): the sentence above overlaps with `service.whole`;
 * confirm which variant callers should use for a whole service.
 */
export declare const ServiceScopeSegmentSchema: z.ZodObject<{
    type: z.ZodLiteral<"service.segment">;
    fromStationId: z.ZodString;
    toStationId: z.ZodString;
}, z.z.core.$strip>;
export type ServiceScopeSegment = z.infer<typeof ServiceScopeSegmentSchema>;
/**
 * Service point (station-level)
 */
export declare const ServiceScopePointSchema: z.ZodObject<{
    type: z.ZodLiteral<"service.point">;
    stationId: z.ZodString;
}, z.z.core.$strip>;
export type ServiceScopePoint = z.infer<typeof ServiceScopePointSchema>;
/**
 * All service scope variants, discriminated on the `type` tag:
 * whole service, station-to-station segment, or single station.
 */
export declare const ServiceScopeSchema: z.ZodDiscriminatedUnion<[z.ZodObject<{
    type: z.ZodLiteral<"service.whole">;
}, z.z.core.$strip>, z.ZodObject<{
    type: z.ZodLiteral<"service.segment">;
    fromStationId: z.ZodString;
    toStationId: z.ZodString;
}, z.z.core.$strip>, z.ZodObject<{
    type: z.ZodLiteral<"service.point">;
    stationId: z.ZodString;
}, z.z.core.$strip>]>;
export type ServiceScope = z.infer<typeof ServiceScopeSchema>;
@@ -0,0 +1,30 @@
import z from 'zod';
/**
 * Scope variant: the whole service is affected (no finer granularity).
 */
export const ServiceScopeWholeSchema = z.object({
    type: z.literal('service.whole'),
});
/**
 * Scope variant: a contiguous stretch of the service between two stations.
 *
 * This should also be used when representing an entire service.
 */
export const ServiceScopeSegmentSchema = z.object({
    type: z.literal('service.segment'),
    fromStationId: z.string(),
    toStationId: z.string(),
});
/**
 * Scope variant: a single station on the service.
 */
export const ServiceScopePointSchema = z.object({
    type: z.literal('service.point'),
    stationId: z.string(),
});
// Every scope variant, keyed off the shared `type` discriminator.
const SCOPE_VARIANTS = [
    ServiceScopeWholeSchema,
    ServiceScopeSegmentSchema,
    ServiceScopePointSchema,
];
/**
 * Union of all service scope variants, discriminated on `type`.
 */
export const ServiceScopeSchema = z.discriminatedUnion('type', SCOPE_VARIANTS);
//# sourceMappingURL=serviceScope.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"serviceScope.js","sourceRoot":"/","sources":["schema/issue/serviceScope.ts"],"names":[],"mappings":"AAAA,OAAO,CAAC,MAAM,KAAK,CAAC;AAEpB;;GAEG;AACH,MAAM,CAAC,MAAM,uBAAuB,GAAG,CAAC,CAAC,MAAM,CAAC;IAC9C,IAAI,EAAE,CAAC,CAAC,OAAO,CAAC,eAAe,CAAC;CACjC,CAAC,CAAC;AAGH;;;;GAIG;AACH,MAAM,CAAC,MAAM,yBAAyB,GAAG,CAAC,CAAC,MAAM,CAAC;IAChD,IAAI,EAAE,CAAC,CAAC,OAAO,CAAC,iBAAiB,CAAC;IAClC,aAAa,EAAE,CAAC,CAAC,MAAM,EAAE;IACzB,WAAW,EAAE,CAAC,CAAC,MAAM,EAAE;CACxB,CAAC,CAAC;AAGH;;GAEG;AACH,MAAM,CAAC,MAAM,uBAAuB,GAAG,CAAC,CAAC,MAAM,CAAC;IAC9C,IAAI,EAAE,CAAC,CAAC,OAAO,CAAC,eAAe,CAAC;IAChC,SAAS,EAAE,CAAC,CAAC,MAAM,EAAE;CACtB,CAAC,CAAC;AAGH,MAAM,CAAC,MAAM,kBAAkB,GAAG,CAAC,CAAC,kBAAkB,CAAC,MAAM,EAAE;IAC7D,uBAAuB;IACvB,yBAAyB;IACzB,uBAAuB;CACxB,CAAC,CAAC","sourcesContent":["import z from 'zod';\n\n/**\n * Service Whole (service)\n */\nexport const ServiceScopeWholeSchema = z.object({\n type: z.literal('service.whole'),\n});\nexport type ServiceScopeWhole = z.infer<typeof ServiceScopeWholeSchema>;\n\n/**\n * Service segment (service-level)\n *\n * This should also be used when representing an entire service.\n */\nexport const ServiceScopeSegmentSchema = z.object({\n type: z.literal('service.segment'),\n fromStationId: z.string(),\n toStationId: z.string(),\n});\nexport type ServiceScopeSegment = z.infer<typeof ServiceScopeSegmentSchema>;\n\n/**\n * Service point (station-level)\n */\nexport const ServiceScopePointSchema = z.object({\n type: z.literal('service.point'),\n stationId: z.string(),\n});\nexport type ServiceScopePoint = z.infer<typeof ServiceScopePointSchema>;\n\nexport const ServiceScopeSchema = z.discriminatedUnion('type', [\n ServiceScopeWholeSchema,\n ServiceScopeSegmentSchema,\n ServiceScopePointSchema,\n]);\nexport type ServiceScope = z.infer<typeof ServiceScopeSchema>;\n"]}
@@ -0,0 +1 @@
/**
 * Throws an `Error` when `condition` is falsy; on return, TypeScript
 * narrows `condition` to be truthy (`asserts condition`).
 *
 * @param condition - The condition expected to hold.
 * @param message - Optional message for the thrown error.
 */
export declare function assert(condition: boolean, message?: string): asserts condition;
@@ -0,0 +1,6 @@
/**
 * Throw when a required condition does not hold.
 *
 * @param condition - The condition expected to be truthy.
 * @param message - Message for the thrown Error when the condition fails.
 * @throws Error when `condition` is falsy.
 */
export function assert(condition, message = 'Assertion failed') {
    if (condition) {
        return;
    }
    throw new Error(message);
}
//# sourceMappingURL=assert.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"assert.js","sourceRoot":"/","sources":["util/assert.ts"],"names":[],"mappings":"AAAA,MAAM,UAAU,MAAM,CACpB,SAAkB,EAClB,OAAO,GAAG,kBAAkB;IAE5B,IAAI,CAAC,SAAS,EAAE,CAAC;QACf,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC;IAC3B,CAAC;AACH,CAAC","sourcesContent":["export function assert(\n condition: boolean,\n message = 'Assertion failed',\n): asserts condition {\n if (!condition) {\n throw new Error(message);\n }\n}\n"]}
@@ -0,0 +1,7 @@
import type { Claim } from '#schema/issue/claim.js';
/**
 * Get the slug date time from the claims.
 *
 * Uses the `startAt` of the first claim carrying time hints
 * (both 'fixed' and 'recurring' hint kinds are accepted).
 *
 * @param claims - The claims.
 * @returns The slug date time or null if no time hints are found.
 */
export declare function getSlugDateTimeFromClaims(claims: Claim[]): string | null;
@@ -0,0 +1,24 @@
/**
 * Get the slug date time from the claims.
 *
 * Scans the claims in order and returns the `startAt` of the first claim
 * that carries time hints; both 'fixed' and 'recurring' hints expose
 * `startAt` with the same meaning here.
 *
 * @param claims - The claims.
 * @returns The slug date time or null if no time hints are found.
 */
export function getSlugDateTimeFromClaims(claims) {
    // First claim with time hints wins; `find` avoids building the
    // intermediate filtered/mapped arrays the result never uses.
    const hints = claims.find((claim) => claim.timeHints != null)?.timeHints;
    if (hints != null && (hints.kind === 'fixed' || hints.kind === 'recurring')) {
        return hints.startAt;
    }
    return null;
}
//# sourceMappingURL=getSlugDateTimeFromClaims.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"getSlugDateTimeFromClaims.js","sourceRoot":"/","sources":["util/ingestContent/helpers/getSlugDateTimeFromClaims.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,MAAM,EAAE,MAAM,iBAAiB,CAAC;AAEzC;;;;GAIG;AACH,MAAM,UAAU,yBAAyB,CAAC,MAAe;IACvD,MAAM,SAAS,GAAG,MAAM;SACrB,MAAM,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,SAAS,IAAI,IAAI,CAAC;SAC1C,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC;IAEnC,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACzB,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,IAAI,EAAE,qBAAqB,CAAC,CAAC;QACpD,QAAQ,SAAS,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC;YAC1B,KAAK,OAAO,CAAC,CAAC,CAAC;gBACb,OAAO,SAAS,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;YAC9B,CAAC;YACD,KAAK,WAAW,CAAC,CAAC,CAAC;gBACjB,OAAO,SAAS,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;YAC9B,CAAC;QACH,CAAC;IACH,CAAC;IAED,OAAO,IAAI,CAAC;AACd,CAAC","sourcesContent":["import type { Claim } from '#schema/issue/claim.js';\nimport { assert } from '#util/assert.js';\n\n/**\n * Get the slug date time from the claims.\n * @param claims - The claims.\n * @returns The slug date time or null if no time hints are found.\n */\nexport function getSlugDateTimeFromClaims(claims: Claim[]): string | null {\n const timeHints = claims\n .filter((claim) => claim.timeHints != null)\n .map((claim) => claim.timeHints);\n\n if (timeHints.length > 0) {\n assert(timeHints[0] != null, 'Expected time hints');\n switch (timeHints[0].kind) {\n case 'fixed': {\n return timeHints[0].startAt;\n }\n case 'recurring': {\n return timeHints[0].startAt;\n }\n }\n }\n\n return null;\n}\n"]}
@@ -0,0 +1,12 @@
import type { IngestContent } from './types.js';
/**
 * Ingests content from social media, news, or other sources into the MRTDown issue system.
 *
 * Triages the content to determine if it belongs to an existing issue or a new one, extracts
 * claims, computes impact (affected lines, stations, periods), and persists evidence and impact
 * events. Irrelevant content is ignored.
 *
 * @remarks Normalises `content.createdAt` to the Asia/Singapore timezone in place
 * (the caller-supplied object is mutated).
 *
 * @param content - The content to ingest (Reddit post, news article, or Twitter/Mastodon post).
 * @returns `null` when content is irrelevant or after successful ingestion.
 */
export declare function ingestContent(content: IngestContent): Promise<null>;
@@ -0,0 +1,171 @@
1
+ import { resolve } from 'node:path';
2
+ import { DateTime } from 'luxon';
3
+ import { ulid } from 'ulid';
4
+ import { computeImpactFromEvidenceClaims } from '#helpers/computeImpactFromEvidenceClaims.js';
5
+ import { extractClaimsFromNewEvidence } from '#llm/functions/extractClaimsFromNewEvidence/index.js';
6
+ import { generateIssueTitleAndSlug } from '#llm/functions/generateIssueTitleAndSlug/index.js';
7
+ import { translate } from '#llm/functions/translate/index.js';
8
+ import { triageNewEvidence } from '#llm/functions/triageNewEvidence/index.js';
9
+ import { FileStore } from '#repo/common/FileStore.js';
10
+ import { MRTDownRepository } from '#repo/MRTDownRepository.js';
11
+ import { FileWriteStore } from '#write/common/FileWriteStore.js';
12
+ import { MRTDownWriter } from '#write/MRTDownWriter.js';
13
+ import { assert } from '../assert.js';
14
+ import { getSlugDateTimeFromClaims } from './helpers/getSlugDateTimeFromClaims.js';
// Repository data directory, resolved relative to this compiled module.
// NOTE(review): assumes the dist layout places this file three levels below
// the directory containing `data/` — confirm against the build output.
const DATA_DIR = resolve(import.meta.dirname, '../../../data');
// Module-level singletons: the read side (repo) and the write side (writer)
// operate on the same DATA_DIR through their respective stores.
const store = new FileStore(DATA_DIR);
const writeStore = new FileWriteStore(DATA_DIR);
const repo = new MRTDownRepository({ store });
const writer = new MRTDownWriter({ store: writeStore });
/**
 * Ingests content from social media, news, or other sources into the MRTDown issue system.
 *
 * Triages the content to determine if it belongs to an existing issue or a new one, extracts
 * claims, computes impact (affected lines, stations, periods), and persists evidence and impact
 * events. Irrelevant content is ignored.
 *
 * @param content - The content to ingest (Reddit post, news article, or Twitter/Mastodon post).
 * @returns `null` when content is irrelevant or after successful ingestion.
 */
export async function ingestContent(content) {
    // --- Normalise input ---
    // HACK: Force `createdAt` to be Asia/Singapore timezone
    const createdAt = DateTime.fromISO(content.createdAt)
        .setZone('Asia/Singapore')
        .toISO();
    assert(createdAt != null, 'Expected valid createdAt');
    // NOTE: mutates the caller-supplied content object in place.
    content.createdAt = createdAt;
    console.log('[ingestContent]', content);
    // --- Triage: existing issue, new issue, or irrelevant ---
    const triageResult = await triageNewEvidence({
        newEvidence: {
            ts: content.createdAt,
            text: getText(content),
        },
        repo,
    });
    console.log('[ingestContent.triageNewEvidence]', triageResult);
    if (triageResult.result.kind === 'irrelevant-content') {
        console.log('[ingestContent] Nothing to do.');
        return null;
    }
    // --- Resolve issue bundle: fetch existing or create new ---
    // --- Extract structured claims (lines, stations, periods, effects) ---
    const { claims } = await extractClaimsFromNewEvidence({
        newEvidence: {
            ts: content.createdAt,
            text: getText(content),
        },
        repo,
    });
    console.log('[ingestContent.extractClaimsFromNewEvidence]', claims);
    // NOTE(review): if triage ever returns a kind other than the two handled
    // below, issueBundle stays undefined and the spread further down throws —
    // confirm the triage result kinds are exhaustive.
    let issueBundle;
    switch (triageResult.result.kind) {
        case 'part-of-existing-issue': {
            // Load full bundle (issue + evidence + impact) for impact computation
            const { issueId } = triageResult.result;
            const existingBundle = repo.issues.get(issueId);
            assert(existingBundle != null, `Expected issue for id=${issueId}`);
            issueBundle = existingBundle;
            break;
        }
        case 'part-of-new-issue': {
            // Create issue: derive date from claims, generate title/slug, translate, persist
            const slugDateTime = DateTime.fromISO(getSlugDateTimeFromClaims(claims) ?? content.createdAt);
            assert(slugDateTime.isValid, `Invalid date: ${content.createdAt}`);
            const { title, slug } = await generateIssueTitleAndSlug({
                text: getText(content),
            });
            console.log('[ingestContent.generateSlug]', slug);
            const translatedTitles = await translate(title);
            // Issue id is the claim-derived date plus the LLM-generated slug.
            const issueId = `${slugDateTime.toFormat('yyyy-MM-dd')}-${slug}`;
            const issue = {
                id: issueId,
                type: triageResult.result.issueType,
                title: translatedTitles,
                titleMeta: {
                    source: '@openai/gpt-5-nano',
                },
            };
            writer.issues.create(issue);
            issueBundle = {
                issue,
                evidence: [],
                impactEvents: [],
                path: DATA_DIR,
            };
            break;
        }
    }
    // --- Build evidence record ---
    const contentDateTime = DateTime.fromISO(content.createdAt);
    assert(contentDateTime.isValid, `Invalid date: ${content.createdAt}`);
    const evidence = {
        // Evidence id embeds a ULID seeded with the content timestamp.
        id: `ev_${ulid(contentDateTime.toMillis())}`,
        ts: contentDateTime.toISO({ includeOffset: true }),
        type: getEvidenceType(content),
        text: getText(content),
        sourceUrl: content.url,
        render: {
            text: await translate(getText(content)),
            source: '@openai/gpt-5-nano',
        },
    };
    // --- Compute impact events from claims (effects, scopes, periods) ---
    const { newImpactEvents } = computeImpactFromEvidenceClaims({
        issueBundle: {
            ...issueBundle,
            // Include the not-yet-persisted evidence so impact sees it.
            evidence: [...issueBundle.evidence, evidence],
        },
        evidenceId: evidence.id,
        evidenceTs: evidence.ts,
        claims,
    });
    // --- Persist to disk ---
    writer.issues.appendEvidence(issueBundle.issue.id, evidence);
    for (const impact of newImpactEvents) {
        writer.issues.appendImpact(issueBundle.issue.id, impact);
    }
    return null;
}
/**
 * Extracts the primary text content from an IngestContent item based on its source type.
 *
 * @param content - The content to extract text from.
 * @returns The text body (selftext for Reddit, summary for news, text for social).
 */
function getText(content) {
    if (content.source === 'reddit') {
        return content.selftext;
    }
    if (content.source === 'news-website') {
        return content.summary;
    }
    if (content.source === 'twitter' || content.source === 'mastodon') {
        return content.text;
    }
    // Unknown sources yield undefined.
}
/**
 * Maps IngestContent source type to the corresponding Evidence type for provenance tracking.
 *
 * NOTE(review): Reddit content is classified as 'official-statement' —
 * presumably the monitored account is operator-run; confirm this is intentional.
 *
 * @param content - The content to classify.
 * @returns The evidence type: official-statement (Reddit), media.report (news), or public.report (social).
 */
function getEvidenceType(content) {
    const typeBySource = {
        reddit: 'official-statement',
        'news-website': 'media.report',
        twitter: 'public.report',
        mastodon: 'public.report',
    };
    // Unknown sources yield undefined.
    return typeBySource[content.source];
}
171
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.js","sourceRoot":"/","sources":["util/ingestContent/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AACpC,OAAO,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AACjC,OAAO,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAC5B,OAAO,EAAE,+BAA+B,EAAE,MAAM,6CAA6C,CAAC;AAC9F,OAAO,EAAE,4BAA4B,EAAE,MAAM,sDAAsD,CAAC;AACpG,OAAO,EAAE,yBAAyB,EAAE,MAAM,mDAAmD,CAAC;AAC9F,OAAO,EAAE,SAAS,EAAE,MAAM,mCAAmC,CAAC;AAC9D,OAAO,EAAE,iBAAiB,EAAE,MAAM,2CAA2C,CAAC;AAC9E,OAAO,EAAE,SAAS,EAAE,MAAM,2BAA2B,CAAC;AACtD,OAAO,EAAE,iBAAiB,EAAE,MAAM,4BAA4B,CAAC;AAI/D,OAAO,EAAE,cAAc,EAAE,MAAM,iCAAiC,CAAC;AACjE,OAAO,EAAE,aAAa,EAAE,MAAM,yBAAyB,CAAC;AACxD,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AACtC,OAAO,EAAE,yBAAyB,EAAE,MAAM,wCAAwC,CAAC;AAGnF,MAAM,QAAQ,GAAG,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,CAAC,CAAC;AAE/D,MAAM,KAAK,GAAG,IAAI,SAAS,CAAC,QAAQ,CAAC,CAAC;AACtC,MAAM,UAAU,GAAG,IAAI,cAAc,CAAC,QAAQ,CAAC,CAAC;AAChD,MAAM,IAAI,GAAG,IAAI,iBAAiB,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC;AAC9C,MAAM,MAAM,GAAG,IAAI,aAAa,CAAC,EAAE,KAAK,EAAE,UAAU,EAAE,CAAC,CAAC;AAExD;;;;;;;;;GASG;AACH,MAAM,CAAC,KAAK,UAAU,aAAa,CAAC,OAAsB;IACxD,0BAA0B;IAC1B,wDAAwD;IACxD,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC;SAClD,OAAO,CAAC,gBAAgB,CAAC;SACzB,KAAK,EAAE,CAAC;IACX,MAAM,CAAC,SAAS,IAAI,IAAI,EAAE,0BAA0B,CAAC,CAAC;IAEtD,OAAO,CAAC,SAAS,GAAG,SAAS,CAAC;IAC9B,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;IAExC,2DAA2D;IAC3D,MAAM,YAAY,GAAG,MAAM,iBAAiB,CAAC;QAC3C,WAAW,EAAE;YACX,EAAE,EAAE,OAAO,CAAC,SAAS;YACrB,IAAI,EAAE,OAAO,CAAC,OAAO,CAAC;SACvB;QACD,IAAI;KACL,CAAC,CAAC;IACH,OAAO,CAAC,GAAG,CAAC,mCAAmC,EAAE,YAAY,CAAC,CAAC;IAE/D,IAAI,YAAY,CAAC,MAAM,CAAC,IAAI,KAAK,oBAAoB,EAAE,CAAC;QACtD,OAAO,CAAC,GAAG,CAAC,gCAAgC,CAAC,CAAC;QAC9C,OAAO,IAAI,CAAC;IACd,CAAC;IAED,wEAAwE;IACxE,MAAM,EAAE,MAAM,EAAE,GAAG,MAAM,4BAA4B,CAAC;QACpD,WAAW,EAAE;YACX,EAAE,EAAE,OAAO,CAAC,SAAS;YACrB,IAAI,EAAE,OAAO,CAAC,OAAO,CAAC;SACvB;QACD,IAAI;KACL,CAAC,CAAC;IACH,OAAO,CAAC,GAAG,CAAC,8CAA8C,EAAE,MAAM,CAAC,CAAC;IAEpE,6DAA6D;IAC7D,IAAI,WAAwB,CAAC;IAE7B,QAAQ,YAAY,CAAC,MA
AM,CAAC,IAAI,EAAE,CAAC;QACjC,KAAK,wBAAwB,CAAC,CAAC,CAAC;YAC9B,sEAAsE;YACtE,MAAM,EAAE,OAAO,EAAE,GAAG,YAAY,CAAC,MAAM,CAAC;YACxC,MAAM,cAAc,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;YAChD,MAAM,CAAC,cAAc,IAAI,IAAI,EAAE,yBAAyB,OAAO,EAAE,CAAC,CAAC;YACnE,WAAW,GAAG,cAAc,CAAC;YAC7B,MAAM;QACR,CAAC;QACD,KAAK,mBAAmB,CAAC,CAAC,CAAC;YACzB,iFAAiF;YACjF,MAAM,YAAY,GAAG,QAAQ,CAAC,OAAO,CACnC,yBAAyB,CAAC,MAAM,CAAC,IAAI,OAAO,CAAC,SAAS,CACvD,CAAC;YACF,MAAM,CAAC,YAAY,CAAC,OAAO,EAAE,iBAAiB,OAAO,CAAC,SAAS,EAAE,CAAC,CAAC;YAEnE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,GAAG,MAAM,yBAAyB,CAAC;gBACtD,IAAI,EAAE,OAAO,CAAC,OAAO,CAAC;aACvB,CAAC,CAAC;YACH,OAAO,CAAC,GAAG,CAAC,8BAA8B,EAAE,IAAI,CAAC,CAAC;YAElD,MAAM,gBAAgB,GAAG,MAAM,SAAS,CAAC,KAAK,CAAC,CAAC;YAEhD,MAAM,OAAO,GAAG,GAAG,YAAY,CAAC,QAAQ,CAAC,YAAY,CAAC,IAAI,IAAI,EAAE,CAAC;YAEjE,MAAM,KAAK,GAAU;gBACnB,EAAE,EAAE,OAAO;gBACX,IAAI,EAAE,YAAY,CAAC,MAAM,CAAC,SAAS;gBACnC,KAAK,EAAE,gBAAgB;gBACvB,SAAS,EAAE;oBACT,MAAM,EAAE,oBAAoB;iBAC7B;aACF,CAAC;YACF,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAE5B,WAAW,GAAG;gBACZ,KAAK;gBACL,QAAQ,EAAE,EAAE;gBACZ,YAAY,EAAE,EAAE;gBAChB,IAAI,EAAE,QAAQ;aACf,CAAC;YACF,MAAM;QACR,CAAC;IACH,CAAC;IAED,gCAAgC;IAChC,MAAM,eAAe,GAAG,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IAC5D,MAAM,CAAC,eAAe,CAAC,OAAO,EAAE,iBAAiB,OAAO,CAAC,SAAS,EAAE,CAAC,CAAC;IAEtE,MAAM,QAAQ,GAAa;QACzB,EAAE,EAAE,MAAM,IAAI,CAAC,eAAe,CAAC,QAAQ,EAAE,CAAC,EAAE;QAC5C,EAAE,EAAE,eAAe,CAAC,KAAK,CAAC,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC;QAClD,IAAI,EAAE,eAAe,CAAC,OAAO,CAAC;QAC9B,IAAI,EAAE,OAAO,CAAC,OAAO,CAAC;QACtB,SAAS,EAAE,OAAO,CAAC,GAAG;QACtB,MAAM,EAAE;YACN,IAAI,EAAE,MAAM,SAAS,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;YACvC,MAAM,EAAE,oBAAoB;SAC7B;KACF,CAAC;IAEF,uEAAuE;IACvE,MAAM,EAAE,eAAe,EAAE,GAAG,+BAA+B,CAAC;QAC1D,WAAW,EAAE;YACX,GAAG,WAAW;YACd,QAAQ,EAAE,CAAC,GAAG,WAAW,CAAC,QAAQ,EAAE,QAAQ,CAAC;SAC9C;QACD,UAAU,EAAE,QAAQ,CAAC,EAAE;QACvB,UAAU,EAAE,QAAQ,CAAC,EAAE;QACvB,MAAM;KACP,CAAC,CAAC;IAEH,0BAA0B;IAC1B,MAAM,CAAC,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,KAAK,CAAC,EAAE,EAAE,QAAQ,CAAC,CAAC;IAC7D,KAAK,MAAM
,MAAM,IAAI,eAAe,EAAE,CAAC;QACrC,MAAM,CAAC,MAAM,CAAC,YAAY,CAAC,WAAW,CAAC,KAAK,CAAC,EAAE,EAAE,MAAM,CAAC,CAAC;IAC3D,CAAC;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;;;GAKG;AACH,SAAS,OAAO,CAAC,OAAsB;IACrC,QAAQ,OAAO,CAAC,MAAM,EAAE,CAAC;QACvB,KAAK,QAAQ,CAAC,CAAC,CAAC;YACd,OAAO,OAAO,CAAC,QAAQ,CAAC;QAC1B,CAAC;QACD,KAAK,cAAc,CAAC,CAAC,CAAC;YACpB,OAAO,OAAO,CAAC,OAAO,CAAC;QACzB,CAAC;QACD,KAAK,SAAS,CAAC;QACf,KAAK,UAAU,CAAC,CAAC,CAAC;YAChB,OAAO,OAAO,CAAC,IAAI,CAAC;QACtB,CAAC;IACH,CAAC;AACH,CAAC;AAED;;;;;GAKG;AACH,SAAS,eAAe,CAAC,OAAsB;IAC7C,QAAQ,OAAO,CAAC,MAAM,EAAE,CAAC;QACvB,KAAK,QAAQ,CAAC,CAAC,CAAC;YACd,OAAO,oBAAoB,CAAC;QAC9B,CAAC;QACD,KAAK,cAAc,CAAC,CAAC,CAAC;YACpB,OAAO,cAAc,CAAC;QACxB,CAAC;QACD,KAAK,SAAS,CAAC;QACf,KAAK,UAAU,CAAC,CAAC,CAAC;YAChB,OAAO,eAAe,CAAC;QACzB,CAAC;IACH,CAAC;AACH,CAAC","sourcesContent":["import { resolve } from 'node:path';\nimport { DateTime } from 'luxon';\nimport { ulid } from 'ulid';\nimport { computeImpactFromEvidenceClaims } from '#helpers/computeImpactFromEvidenceClaims.js';\nimport { extractClaimsFromNewEvidence } from '#llm/functions/extractClaimsFromNewEvidence/index.js';\nimport { generateIssueTitleAndSlug } from '#llm/functions/generateIssueTitleAndSlug/index.js';\nimport { translate } from '#llm/functions/translate/index.js';\nimport { triageNewEvidence } from '#llm/functions/triageNewEvidence/index.js';\nimport { FileStore } from '#repo/common/FileStore.js';\nimport { MRTDownRepository } from '#repo/MRTDownRepository.js';\nimport type { IssueBundle } from '#schema/issue/bundle.js';\nimport type { Evidence } from '#schema/issue/evidence.js';\nimport type { Issue } from '#schema/issue/issue.js';\nimport { FileWriteStore } from '#write/common/FileWriteStore.js';\nimport { MRTDownWriter } from '#write/MRTDownWriter.js';\nimport { assert } from '../assert.js';\nimport { getSlugDateTimeFromClaims } from './helpers/getSlugDateTimeFromClaims.js';\nimport type { IngestContent } from './types.js';\n\nconst DATA_DIR = resolve(import.meta.dirname, 
'../../../data');\n\nconst store = new FileStore(DATA_DIR);\nconst writeStore = new FileWriteStore(DATA_DIR);\nconst repo = new MRTDownRepository({ store });\nconst writer = new MRTDownWriter({ store: writeStore });\n\n/**\n * Ingests content from social media, news, or other sources into the MRTDown issue system.\n *\n * Triages the content to determine if it belongs to an existing issue or a new one, extracts\n * claims, computes impact (affected lines, stations, periods), and persists evidence and impact\n * events. Irrelevant content is ignored.\n *\n * @param content - The content to ingest (Reddit post, news article, or Twitter/Mastodon post).\n * @returns `null` when content is irrelevant or after successful ingestion.\n */\nexport async function ingestContent(content: IngestContent) {\n // --- Normalise input ---\n // HACK: Force `createdAt` to be Asia/Singapore timezone\n const createdAt = DateTime.fromISO(content.createdAt)\n .setZone('Asia/Singapore')\n .toISO();\n assert(createdAt != null, 'Expected valid createdAt');\n\n content.createdAt = createdAt;\n console.log('[ingestContent]', content);\n\n // --- Triage: existing issue, new issue, or irrelevant ---\n const triageResult = await triageNewEvidence({\n newEvidence: {\n ts: content.createdAt,\n text: getText(content),\n },\n repo,\n });\n console.log('[ingestContent.triageNewEvidence]', triageResult);\n\n if (triageResult.result.kind === 'irrelevant-content') {\n console.log('[ingestContent] Nothing to do.');\n return null;\n }\n\n // --- Extract structured claims (lines, stations, periods, effects) ---\n const { claims } = await extractClaimsFromNewEvidence({\n newEvidence: {\n ts: content.createdAt,\n text: getText(content),\n },\n repo,\n });\n console.log('[ingestContent.extractClaimsFromNewEvidence]', claims);\n\n // --- Resolve issue bundle: fetch existing or create new ---\n let issueBundle: IssueBundle;\n\n switch (triageResult.result.kind) {\n case 'part-of-existing-issue': {\n // Load full 
bundle (issue + evidence + impact) for impact computation\n const { issueId } = triageResult.result;\n const existingBundle = repo.issues.get(issueId);\n assert(existingBundle != null, `Expected issue for id=${issueId}`);\n issueBundle = existingBundle;\n break;\n }\n case 'part-of-new-issue': {\n // Create issue: derive date from claims, generate title/slug, translate, persist\n const slugDateTime = DateTime.fromISO(\n getSlugDateTimeFromClaims(claims) ?? content.createdAt,\n );\n assert(slugDateTime.isValid, `Invalid date: ${content.createdAt}`);\n\n const { title, slug } = await generateIssueTitleAndSlug({\n text: getText(content),\n });\n console.log('[ingestContent.generateSlug]', slug);\n\n const translatedTitles = await translate(title);\n\n const issueId = `${slugDateTime.toFormat('yyyy-MM-dd')}-${slug}`;\n\n const issue: Issue = {\n id: issueId,\n type: triageResult.result.issueType,\n title: translatedTitles,\n titleMeta: {\n source: '@openai/gpt-5-nano',\n },\n };\n writer.issues.create(issue);\n\n issueBundle = {\n issue,\n evidence: [],\n impactEvents: [],\n path: DATA_DIR,\n };\n break;\n }\n }\n\n // --- Build evidence record ---\n const contentDateTime = DateTime.fromISO(content.createdAt);\n assert(contentDateTime.isValid, `Invalid date: ${content.createdAt}`);\n\n const evidence: Evidence = {\n id: `ev_${ulid(contentDateTime.toMillis())}`,\n ts: contentDateTime.toISO({ includeOffset: true }),\n type: getEvidenceType(content),\n text: getText(content),\n sourceUrl: content.url,\n render: {\n text: await translate(getText(content)),\n source: '@openai/gpt-5-nano',\n },\n };\n\n // --- Compute impact events from claims (effects, scopes, periods) ---\n const { newImpactEvents } = computeImpactFromEvidenceClaims({\n issueBundle: {\n ...issueBundle,\n evidence: [...issueBundle.evidence, evidence],\n },\n evidenceId: evidence.id,\n evidenceTs: evidence.ts,\n claims,\n });\n\n // --- Persist to disk ---\n writer.issues.appendEvidence(issueBundle.issue.id, 
evidence);\n for (const impact of newImpactEvents) {\n writer.issues.appendImpact(issueBundle.issue.id, impact);\n }\n\n return null;\n}\n\n/**\n * Extracts the primary text content from an IngestContent item based on its source type.\n *\n * @param content - The content to extract text from.\n * @returns The text body (selftext for Reddit, summary for news, text for social).\n */\nfunction getText(content: IngestContent) {\n switch (content.source) {\n case 'reddit': {\n return content.selftext;\n }\n case 'news-website': {\n return content.summary;\n }\n case 'twitter':\n case 'mastodon': {\n return content.text;\n }\n }\n}\n\n/**\n * Maps IngestContent source type to the corresponding Evidence type for provenance tracking.\n *\n * @param content - The content to classify.\n * @returns The evidence type: official-statement (Reddit), media.report (news), or public.report (social).\n */\nfunction getEvidenceType(content: IngestContent) {\n switch (content.source) {\n case 'reddit': {\n return 'official-statement';\n }\n case 'news-website': {\n return 'media.report';\n }\n case 'twitter':\n case 'mastodon': {\n return 'public.report';\n }\n }\n}\n"]}
@@ -0,0 +1,32 @@
1
+ import type { z } from 'zod';
2
+ export type IngestContentTwitter = {
3
+ source: 'twitter' | 'mastodon';
4
+ accountName: string;
5
+ text: string;
6
+ url: string;
7
+ createdAt: string;
8
+ };
9
+ export type IngestContentReddit = {
10
+ source: 'reddit';
11
+ subreddit: string;
12
+ title: string;
13
+ selftext: string;
14
+ url: string;
15
+ createdAt: string;
16
+ thumbnailUrl: string | null;
17
+ };
18
+ export type IngestContentNewsArticle = {
19
+ source: 'news-website';
20
+ title: string;
21
+ summary: string;
22
+ url: string;
23
+ createdAt: string;
24
+ };
25
+ export type IngestContent = IngestContentTwitter | IngestContentReddit | IngestContentNewsArticle;
26
+ export type Tool<TParams = any> = {
27
+ name: string;
28
+ description: string;
29
+ paramSchema: z.ZodType<TParams>;
30
+ runner: (param: TParams) => Promise<string>;
31
+ };
32
+ export type ToolRegistry = Record<string, Tool>;
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=types.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"types.js","sourceRoot":"/","sources":["util/ingestContent/types.ts"],"names":[],"mappings":"","sourcesContent":["import type { z } from 'zod';\n\nexport type IngestContentTwitter = {\n source: 'twitter' | 'mastodon';\n accountName: string;\n text: string;\n url: string;\n createdAt: string;\n};\n\nexport type IngestContentReddit = {\n source: 'reddit';\n subreddit: string;\n title: string;\n selftext: string;\n url: string;\n createdAt: string;\n thumbnailUrl: string | null;\n};\n\nexport type IngestContentNewsArticle = {\n source: 'news-website';\n title: string;\n summary: string;\n url: string;\n createdAt: string;\n};\n\nexport type IngestContent =\n | IngestContentTwitter\n | IngestContentReddit\n | IngestContentNewsArticle;\n\nexport type Tool<TParams = any> = {\n name: string;\n description: string;\n paramSchema: z.ZodType<TParams>;\n runner: (param: TParams) => Promise<string>;\n};\n\nexport type ToolRegistry = Record<string, Tool>;\n"]}
@@ -0,0 +1,27 @@
1
+ import type { Landmark } from '../schema/Landmark.js';
2
+ import type { Line } from '../schema/Line.js';
3
+ import type { Operator } from '../schema/Operator.js';
4
+ import type { Service } from '../schema/Service.js';
5
+ import type { Station } from '../schema/Station.js';
6
+ import type { Town } from '../schema/Town.js';
7
+ import { StandardWriter } from './common/StandardWriter.js';
8
+ import type { IWriteStore } from './common/store.js';
9
+ import { IssueWriter } from './issue/IssueWriter.js';
10
+ type MRTDownWriterParams = {
11
+ store: IWriteStore;
12
+ };
13
+ /**
14
+ * A writer for the MRTDown data.
15
+ */
16
+ export declare class MRTDownWriter {
17
+ private readonly store;
18
+ readonly issues: IssueWriter;
19
+ readonly stations: StandardWriter<Station>;
20
+ readonly lines: StandardWriter<Line>;
21
+ readonly operators: StandardWriter<Operator>;
22
+ readonly services: StandardWriter<Service>;
23
+ readonly landmarks: StandardWriter<Landmark>;
24
+ readonly towns: StandardWriter<Town>;
25
+ constructor(params: MRTDownWriterParams);
26
+ }
27
+ export {};
@@ -0,0 +1,27 @@
1
+ import { DIR_LANDMARK, DIR_LINE, DIR_OPERATOR, DIR_SERVICE, DIR_STATION, DIR_TOWN, } from '../constants.js';
2
+ import { StandardWriter } from './common/StandardWriter.js';
3
+ import { IssueWriter } from './issue/IssueWriter.js';
4
+ /**
5
+ * A writer for the MRTDown data.
6
+ */
7
+ export class MRTDownWriter {
8
+ store;
9
+ issues;
10
+ stations;
11
+ lines;
12
+ operators;
13
+ services;
14
+ landmarks;
15
+ towns;
16
+ constructor(params) {
17
+ this.store = params.store;
18
+ this.issues = new IssueWriter(this.store);
19
+ this.stations = new StandardWriter(this.store, DIR_STATION);
20
+ this.lines = new StandardWriter(this.store, DIR_LINE);
21
+ this.operators = new StandardWriter(this.store, DIR_OPERATOR);
22
+ this.services = new StandardWriter(this.store, DIR_SERVICE);
23
+ this.landmarks = new StandardWriter(this.store, DIR_LANDMARK);
24
+ this.towns = new StandardWriter(this.store, DIR_TOWN);
25
+ }
26
+ }
27
+ //# sourceMappingURL=MRTDownWriter.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"MRTDownWriter.js","sourceRoot":"/","sources":["write/MRTDownWriter.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,YAAY,EACZ,QAAQ,EACR,YAAY,EACZ,WAAW,EACX,WAAW,EACX,QAAQ,GACT,MAAM,iBAAiB,CAAC;AAOzB,OAAO,EAAE,cAAc,EAAE,MAAM,4BAA4B,CAAC;AAE5D,OAAO,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAMrD;;GAEG;AACH,MAAM,OAAO,aAAa;IACP,KAAK,CAAc;IAC3B,MAAM,CAAc;IACpB,QAAQ,CAA0B;IAClC,KAAK,CAAuB;IAC5B,SAAS,CAA2B;IACpC,QAAQ,CAA0B;IAClC,SAAS,CAA2B;IACpC,KAAK,CAAuB;IAErC,YAAY,MAA2B;QACrC,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC;QAC1B,IAAI,CAAC,MAAM,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAC1C,IAAI,CAAC,QAAQ,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,WAAW,CAAC,CAAC;QAC5D,IAAI,CAAC,KAAK,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;QACtD,IAAI,CAAC,SAAS,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,YAAY,CAAC,CAAC;QAC9D,IAAI,CAAC,QAAQ,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,WAAW,CAAC,CAAC;QAC5D,IAAI,CAAC,SAAS,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,YAAY,CAAC,CAAC;QAC9D,IAAI,CAAC,KAAK,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;IACxD,CAAC;CACF","sourcesContent":["import {\n DIR_LANDMARK,\n DIR_LINE,\n DIR_OPERATOR,\n DIR_SERVICE,\n DIR_STATION,\n DIR_TOWN,\n} from '../constants.js';\nimport type { Landmark } from '../schema/Landmark.js';\nimport type { Line } from '../schema/Line.js';\nimport type { Operator } from '../schema/Operator.js';\nimport type { Service } from '../schema/Service.js';\nimport type { Station } from '../schema/Station.js';\nimport type { Town } from '../schema/Town.js';\nimport { StandardWriter } from './common/StandardWriter.js';\nimport type { IWriteStore } from './common/store.js';\nimport { IssueWriter } from './issue/IssueWriter.js';\n\ntype MRTDownWriterParams = {\n store: IWriteStore;\n};\n\n/**\n * A writer for the MRTDown data.\n */\nexport class MRTDownWriter {\n private readonly store: IWriteStore;\n readonly issues: IssueWriter;\n readonly stations: StandardWriter<Station>;\n readonly lines: StandardWriter<Line>;\n readonly 
operators: StandardWriter<Operator>;\n readonly services: StandardWriter<Service>;\n readonly landmarks: StandardWriter<Landmark>;\n readonly towns: StandardWriter<Town>;\n\n constructor(params: MRTDownWriterParams) {\n this.store = params.store;\n this.issues = new IssueWriter(this.store);\n this.stations = new StandardWriter(this.store, DIR_STATION);\n this.lines = new StandardWriter(this.store, DIR_LINE);\n this.operators = new StandardWriter(this.store, DIR_OPERATOR);\n this.services = new StandardWriter(this.store, DIR_SERVICE);\n this.landmarks = new StandardWriter(this.store, DIR_LANDMARK);\n this.towns = new StandardWriter(this.store, DIR_TOWN);\n }\n}\n"]}
@@ -0,0 +1,13 @@
1
+ import type { IWriteStore } from './store.js';
2
+ /**
3
+ * A write store that writes to the file system.
4
+ */
5
+ export declare class FileWriteStore implements IWriteStore {
6
+ private readonly rootDir;
7
+ constructor(rootDir: string);
8
+ writeText(path: string, text: string): void;
9
+ writeJson(path: string, json: unknown): void;
10
+ appendText(path: string, text: string): void;
11
+ ensureDir(path: string): void;
12
+ delete(path: string): void;
13
+ }
@@ -0,0 +1,31 @@
1
+ import { appendFileSync, mkdirSync, rmSync, writeFileSync } from 'node:fs';
2
+ import { join } from 'node:path';
3
+ /**
4
+ * A write store that writes to the file system.
5
+ */
6
+ export class FileWriteStore {
7
+ rootDir;
8
+ constructor(rootDir) {
9
+ this.rootDir = rootDir;
10
+ }
11
+ writeText(path, text) {
12
+ const fullPath = join(this.rootDir, path);
13
+ writeFileSync(fullPath, text);
14
+ }
15
+ writeJson(path, json) {
16
+ this.writeText(path, JSON.stringify(json, null, 2));
17
+ }
18
+ appendText(path, text) {
19
+ const fullPath = join(this.rootDir, path);
20
+ appendFileSync(fullPath, text);
21
+ }
22
+ ensureDir(path) {
23
+ const fullPath = join(this.rootDir, path);
24
+ mkdirSync(fullPath, { recursive: true });
25
+ }
26
+ delete(path) {
27
+ const fullPath = join(this.rootDir, path);
28
+ rmSync(fullPath);
29
+ }
30
+ }
31
+ //# sourceMappingURL=FileWriteStore.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"FileWriteStore.js","sourceRoot":"/","sources":["write/common/FileWriteStore.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,SAAS,EAAE,MAAM,EAAE,aAAa,EAAE,MAAM,SAAS,CAAC;AAC3E,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AAGjC;;GAEG;AACH,MAAM,OAAO,cAAc;IACI;IAA7B,YAA6B,OAAe;QAAf,YAAO,GAAP,OAAO,CAAQ;IAAG,CAAC;IAEhD,SAAS,CAAC,IAAY,EAAE,IAAY;QAClC,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QAC1C,aAAa,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;IAChC,CAAC;IAED,SAAS,CAAC,IAAY,EAAE,IAAa;QACnC,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC;IACtD,CAAC;IAED,UAAU,CAAC,IAAY,EAAE,IAAY;QACnC,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QAC1C,cAAc,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;IACjC,CAAC;IAED,SAAS,CAAC,IAAY;QACpB,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QAC1C,SAAS,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAC3C,CAAC;IAED,MAAM,CAAC,IAAY;QACjB,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QAC1C,MAAM,CAAC,QAAQ,CAAC,CAAC;IACnB,CAAC;CACF","sourcesContent":["import { appendFileSync, mkdirSync, rmSync, writeFileSync } from 'node:fs';\nimport { join } from 'node:path';\nimport type { IWriteStore } from './store.js';\n\n/**\n * A write store that writes to the file system.\n */\nexport class FileWriteStore implements IWriteStore {\n constructor(private readonly rootDir: string) {}\n\n writeText(path: string, text: string): void {\n const fullPath = join(this.rootDir, path);\n writeFileSync(fullPath, text);\n }\n\n writeJson(path: string, json: unknown): void {\n this.writeText(path, JSON.stringify(json, null, 2));\n }\n\n appendText(path: string, text: string): void {\n const fullPath = join(this.rootDir, path);\n appendFileSync(fullPath, text);\n }\n\n ensureDir(path: string): void {\n const fullPath = join(this.rootDir, path);\n mkdirSync(fullPath, { recursive: true });\n }\n\n delete(path: string): void {\n const fullPath = join(this.rootDir, path);\n 
rmSync(fullPath);\n }\n}\n"]}
@@ -0,0 +1,14 @@
1
+ import type { IWriteStore } from './store.js';
2
+ type Item = {
3
+ id: string;
4
+ };
5
+ /**
6
+ * A standard writer for items represented by single JSON files that are stored in a directory.
7
+ */
8
+ export declare class StandardWriter<T extends Item> {
9
+ private readonly store;
10
+ private readonly dirPath;
11
+ constructor(store: IWriteStore, dirPath: string);
12
+ create(item: T): void;
13
+ }
14
+ export {};
@@ -0,0 +1,17 @@
1
+ import { join } from 'node:path';
2
+ /**
3
+ * A standard writer for items represented by single JSON files that are stored in a directory.
4
+ */
5
+ export class StandardWriter {
6
+ store;
7
+ dirPath;
8
+ constructor(store, dirPath) {
9
+ this.store = store;
10
+ this.dirPath = dirPath;
11
+ }
12
+ create(item) {
13
+ this.store.ensureDir(this.dirPath);
14
+ this.store.writeJson(join(this.dirPath, `${item.id}.json`), item);
15
+ }
16
+ }
17
+ //# sourceMappingURL=StandardWriter.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"StandardWriter.js","sourceRoot":"/","sources":["write/common/StandardWriter.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AAOjC;;GAEG;AACH,MAAM,OAAO,cAAc;IAEN;IACA;IAFnB,YACmB,KAAkB,EAClB,OAAe;QADf,UAAK,GAAL,KAAK,CAAa;QAClB,YAAO,GAAP,OAAO,CAAQ;IAC/B,CAAC;IAEJ,MAAM,CAAC,IAAO;QACZ,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACnC,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,EAAE,OAAO,CAAC,EAAE,IAAI,CAAC,CAAC;IACpE,CAAC;CACF","sourcesContent":["import { join } from 'node:path';\nimport type { IWriteStore } from './store.js';\n\ntype Item = {\n id: string;\n};\n\n/**\n * A standard writer for items represented by single JSON files that are stored in a directory.\n */\nexport class StandardWriter<T extends Item> {\n constructor(\n private readonly store: IWriteStore,\n private readonly dirPath: string,\n ) {}\n\n create(item: T): void {\n this.store.ensureDir(this.dirPath);\n this.store.writeJson(join(this.dirPath, `${item.id}.json`), item);\n }\n}\n"]}
@@ -0,0 +1,32 @@
1
+ export interface IWriteStore {
2
+ /**
3
+ * Write a text file.
4
+ * @param path
5
+ * @param text
6
+ */
7
+ writeText(path: string, text: string): void;
8
+ /**
9
+ * Write a JSON file.
10
+ * @param path
11
+ * @param json
12
+ */
13
+ writeJson(path: string, json: unknown): void;
14
+ /**
15
+ * Append text to a file.
16
+ * @param path
17
+ * @param text
18
+ */
19
+ appendText(path: string, text: string): void;
20
+ /**
21
+ * Ensure a directory exists.
22
+ * @param path
23
+ * @returns
24
+ */
25
+ ensureDir(path: string): void;
26
+ /**
27
+ * Delete a file or directory.
28
+ * @param path
29
+ * @returns
30
+ */
31
+ delete?(path: string): void;
32
+ }
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=store.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"store.js","sourceRoot":"/","sources":["write/common/store.ts"],"names":[],"mappings":"","sourcesContent":["export interface IWriteStore {\n /**\n * Write a text file.\n * @param path\n * @param text\n */\n writeText(path: string, text: string): void;\n /**\n * Write a JSON file.\n * @param path\n * @param json\n */\n writeJson(path: string, json: unknown): void;\n /**\n * Append text to a file.\n * @param path\n * @param text\n */\n appendText(path: string, text: string): void;\n /**\n * Ensure a directory exists.\n * @param path\n * @returns\n */\n ensureDir(path: string): void;\n /**\n * Delete a file or directory.\n * @param path\n * @returns\n */\n delete?(path: string): void;\n}\n"]}
@@ -0,0 +1,18 @@
1
+ import type { DateTime } from 'luxon';
2
+ /**
3
+ * A utility for generating unique IDs.
4
+ */
5
+ export declare const IdGenerator: {
6
+ /**
7
+ * Generate a unique ID for an evidence.
8
+ * @param ts - The timestamp of the evidence.
9
+ * @returns
10
+ */
11
+ evidenceId(ts?: DateTime): string;
12
+ /**
13
+ * Generate a unique ID for an impact event.
14
+ * @param ts - The timestamp of the impact event.
15
+ * @returns
16
+ */
17
+ impactEventId(ts?: DateTime): string;
18
+ };
@@ -0,0 +1,23 @@
1
+ import { ulid } from 'ulid';
2
+ /**
3
+ * A utility for generating unique IDs.
4
+ */
5
+ export const IdGenerator = {
6
+ /**
7
+ * Generate a unique ID for an evidence.
8
+ * @param ts - The timestamp of the evidence.
9
+ * @returns
10
+ */
11
+ evidenceId(ts) {
12
+ return `ev_${ulid(ts?.toMillis?.() ?? undefined)}`;
13
+ },
14
+ /**
15
+ * Generate a unique ID for an impact event.
16
+ * @param ts - The timestamp of the impact event.
17
+ * @returns
18
+ */
19
+ impactEventId(ts) {
20
+ return `ie_${ulid(ts?.toMillis?.() ?? undefined)}`;
21
+ },
22
+ };
23
+ //# sourceMappingURL=IdGenerator.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"IdGenerator.js","sourceRoot":"/","sources":["write/id/IdGenerator.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAE5B;;GAEG;AACH,MAAM,CAAC,MAAM,WAAW,GAAG;IACzB;;;;OAIG;IACH,UAAU,CAAC,EAAa;QACtB,OAAO,MAAM,IAAI,CAAC,EAAE,EAAE,QAAQ,EAAE,EAAE,IAAI,SAAS,CAAC,EAAE,CAAC;IACrD,CAAC;IAED;;;;OAIG;IACH,aAAa,CAAC,EAAa;QACzB,OAAO,MAAM,IAAI,CAAC,EAAE,EAAE,QAAQ,EAAE,EAAE,IAAI,SAAS,CAAC,EAAE,CAAC;IACrD,CAAC;CACF,CAAC","sourcesContent":["import type { DateTime } from 'luxon';\nimport { ulid } from 'ulid';\n\n/**\n * A utility for generating unique IDs.\n */\nexport const IdGenerator = {\n /**\n * Generate a unique ID for an evidence.\n * @param ts - The timestamp of the evidence.\n * @returns\n */\n evidenceId(ts?: DateTime) {\n return `ev_${ulid(ts?.toMillis?.() ?? undefined)}`;\n },\n\n /**\n * Generate a unique ID for an impact event.\n * @param ts - The timestamp of the impact event.\n * @returns\n */\n impactEventId(ts?: DateTime) {\n return `ie_${ulid(ts?.toMillis?.() ?? undefined)}`;\n },\n};\n"]}
@@ -0,0 +1,12 @@
1
+ import type { Evidence } from '#schema/issue/evidence.js';
2
+ import type { ImpactEvent } from '#schema/issue/impactEvent.js';
3
+ import type { Issue } from '#schema/issue/issue.js';
4
+ import type { IWriteStore } from '../common/store.js';
5
+ export declare class IssueWriter {
6
+ private readonly store;
7
+ constructor(store: IWriteStore);
8
+ create(issue: Issue): void;
9
+ appendEvidence(issueId: string, evidence: Evidence): void;
10
+ appendImpact(issueId: string, impact: ImpactEvent): void;
11
+ private getIssueDir;
12
+ }