@lssm/example.analytics-dashboard 0.0.0-canary-20251217080011 → 0.0.0-canary-20251219202229

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (193)
  1. package/LICENSE +21 -0
  2. package/README.md +2 -0
  3. package/dist/dashboard/dashboard.contracts.d.ts +132 -131
  4. package/dist/dashboard/dashboard.contracts.d.ts.map +1 -0
  5. package/dist/dashboard/dashboard.contracts.js +2 -1
  6. package/dist/dashboard/dashboard.contracts.js.map +1 -0
  7. package/dist/dashboard/dashboard.enum.d.ts +6 -5
  8. package/dist/dashboard/dashboard.enum.d.ts.map +1 -0
  9. package/dist/dashboard/dashboard.enum.js +6 -6
  10. package/dist/dashboard/dashboard.enum.js.map +1 -0
  11. package/dist/dashboard/dashboard.presentation.d.ts +2 -1
  12. package/dist/dashboard/dashboard.presentation.d.ts.map +1 -0
  13. package/dist/dashboard/dashboard.presentation.js +2 -1
  14. package/dist/dashboard/dashboard.presentation.js.map +1 -0
  15. package/dist/dashboard/dashboard.schema.d.ts +81 -80
  16. package/dist/dashboard/dashboard.schema.d.ts.map +1 -0
  17. package/dist/dashboard/dashboard.schema.js +44 -45
  18. package/dist/dashboard/dashboard.schema.js.map +1 -0
  19. package/dist/dashboard.feature.d.ts +2 -1
  20. package/dist/dashboard.feature.d.ts.map +1 -0
  21. package/dist/dashboard.feature.js +2 -1
  22. package/dist/dashboard.feature.js.map +1 -0
  23. package/dist/docs/analytics-dashboard.docblock.js +2 -1
  24. package/dist/docs/analytics-dashboard.docblock.js.map +1 -0
  25. package/dist/events.d.ts +41 -40
  26. package/dist/events.d.ts.map +1 -0
  27. package/dist/events.js +22 -23
  28. package/dist/events.js.map +1 -0
  29. package/dist/example.d.ts +2 -1
  30. package/dist/example.d.ts.map +1 -0
  31. package/dist/example.js +2 -1
  32. package/dist/example.js.map +1 -0
  33. package/dist/libs/contracts/dist/capabilities/openbanking.js +2 -1
  34. package/dist/libs/contracts/dist/capabilities/openbanking.js.map +1 -0
  35. package/dist/libs/contracts/dist/contract-registry/schemas.js +2 -1
  36. package/dist/libs/contracts/dist/contract-registry/schemas.js.map +1 -0
  37. package/dist/libs/contracts/dist/docs/accessibility_wcag_compliance_specs.docblock.js +2 -1
  38. package/dist/libs/contracts/dist/docs/accessibility_wcag_compliance_specs.docblock.js.map +1 -0
  39. package/dist/libs/contracts/dist/docs/index.js +2 -6
  40. package/dist/libs/contracts/dist/docs/meta.docs.js +30 -0
  41. package/dist/libs/contracts/dist/docs/meta.docs.js.map +1 -0
  42. package/dist/libs/contracts/dist/docs/presentations.js +2 -1
  43. package/dist/libs/contracts/dist/docs/presentations.js.map +1 -0
  44. package/dist/libs/contracts/dist/docs/registry.js +2 -1
  45. package/dist/libs/contracts/dist/docs/registry.js.map +1 -0
  46. package/dist/libs/contracts/dist/docs/tech/auth/better-auth-nextjs.docblock.js +2 -1
  47. package/dist/libs/contracts/dist/docs/tech/auth/better-auth-nextjs.docblock.js.map +1 -0
  48. package/dist/libs/contracts/dist/docs/tech/contracts/openapi-export.docblock.js +2 -1
  49. package/dist/libs/contracts/dist/docs/tech/contracts/openapi-export.docblock.js.map +1 -0
  50. package/dist/libs/contracts/dist/docs/tech/lifecycle-stage-system.docblock.js +2 -1
  51. package/dist/libs/contracts/dist/docs/tech/lifecycle-stage-system.docblock.js.map +1 -0
  52. package/dist/libs/contracts/dist/docs/tech/llm/llm-integration.docblock.js +2 -1
  53. package/dist/libs/contracts/dist/docs/tech/llm/llm-integration.docblock.js.map +1 -0
  54. package/dist/libs/contracts/dist/docs/tech/mcp-endpoints.docblock.js +2 -1
  55. package/dist/libs/contracts/dist/docs/tech/mcp-endpoints.docblock.js.map +1 -0
  56. package/dist/libs/contracts/dist/docs/tech/presentation-runtime.docblock.js +2 -1
  57. package/dist/libs/contracts/dist/docs/tech/presentation-runtime.docblock.js.map +1 -0
  58. package/dist/libs/contracts/dist/docs/tech/schema/README.docblock.js +2 -1
  59. package/dist/libs/contracts/dist/docs/tech/schema/README.docblock.js.map +1 -0
  60. package/dist/libs/contracts/dist/docs/tech/studio/learning-events.docblock.js +2 -1
  61. package/dist/libs/contracts/dist/docs/tech/studio/learning-events.docblock.js.map +1 -0
  62. package/dist/libs/contracts/dist/docs/tech/studio/learning-journeys.docblock.js +2 -1
  63. package/dist/libs/contracts/dist/docs/tech/studio/learning-journeys.docblock.js.map +1 -0
  64. package/dist/libs/contracts/dist/docs/tech/studio/platform-admin-panel.docblock.js +2 -1
  65. package/dist/libs/contracts/dist/docs/tech/studio/platform-admin-panel.docblock.js.map +1 -0
  66. package/dist/libs/contracts/dist/docs/tech/studio/project-access-teams.docblock.js +2 -1
  67. package/dist/libs/contracts/dist/docs/tech/studio/project-access-teams.docblock.js.map +1 -0
  68. package/dist/libs/contracts/dist/docs/tech/studio/project-routing.docblock.js +2 -1
  69. package/dist/libs/contracts/dist/docs/tech/studio/project-routing.docblock.js.map +1 -0
  70. package/dist/libs/contracts/dist/docs/tech/studio/sandbox-unlogged.docblock.js +2 -1
  71. package/dist/libs/contracts/dist/docs/tech/studio/sandbox-unlogged.docblock.js.map +1 -0
  72. package/dist/libs/contracts/dist/docs/tech/studio/team-invitations.docblock.js +2 -1
  73. package/dist/libs/contracts/dist/docs/tech/studio/team-invitations.docblock.js.map +1 -0
  74. package/dist/libs/contracts/dist/docs/tech/studio/workspace-ops.docblock.js +2 -1
  75. package/dist/libs/contracts/dist/docs/tech/studio/workspace-ops.docblock.js.map +1 -0
  76. package/dist/libs/contracts/dist/docs/tech/studio/workspaces.docblock.js +2 -1
  77. package/dist/libs/contracts/dist/docs/tech/studio/workspaces.docblock.js.map +1 -0
  78. package/dist/libs/contracts/dist/docs/tech/telemetry-ingest.docblock.js +2 -1
  79. package/dist/libs/contracts/dist/docs/tech/telemetry-ingest.docblock.js.map +1 -0
  80. package/dist/libs/contracts/dist/docs/tech/templates/runtime.docblock.js +2 -1
  81. package/dist/libs/contracts/dist/docs/tech/templates/runtime.docblock.js.map +1 -0
  82. package/dist/libs/contracts/dist/docs/tech/vscode-extension.docblock.js +2 -1
  83. package/dist/libs/contracts/dist/docs/tech/vscode-extension.docblock.js.map +1 -0
  84. package/dist/libs/contracts/dist/docs/tech/workflows/overview.docblock.js +2 -1
  85. package/dist/libs/contracts/dist/docs/tech/workflows/overview.docblock.js.map +1 -0
  86. package/dist/libs/contracts/dist/docs/tech-contracts.docs.js +97 -0
  87. package/dist/libs/contracts/dist/docs/tech-contracts.docs.js.map +1 -0
  88. package/dist/libs/contracts/dist/events.js +2 -1
  89. package/dist/libs/contracts/dist/events.js.map +1 -0
  90. package/dist/libs/contracts/dist/index.js +2 -1
  91. package/dist/libs/contracts/dist/integrations/contracts.js +65 -66
  92. package/dist/libs/contracts/dist/integrations/contracts.js.map +1 -0
  93. package/dist/libs/contracts/dist/integrations/openbanking/contracts/accounts.js +31 -32
  94. package/dist/libs/contracts/dist/integrations/openbanking/contracts/accounts.js.map +1 -0
  95. package/dist/libs/contracts/dist/integrations/openbanking/contracts/balances.js +18 -19
  96. package/dist/libs/contracts/dist/integrations/openbanking/contracts/balances.js.map +1 -0
  97. package/dist/libs/contracts/dist/integrations/openbanking/contracts/transactions.js +32 -33
  98. package/dist/libs/contracts/dist/integrations/openbanking/contracts/transactions.js.map +1 -0
  99. package/dist/libs/contracts/dist/integrations/openbanking/models.js +60 -61
  100. package/dist/libs/contracts/dist/integrations/openbanking/models.js.map +1 -0
  101. package/dist/libs/contracts/dist/integrations/openbanking/telemetry.js +2 -1
  102. package/dist/libs/contracts/dist/integrations/openbanking/telemetry.js.map +1 -0
  103. package/dist/libs/contracts/dist/integrations/providers/elevenlabs.js +2 -1
  104. package/dist/libs/contracts/dist/integrations/providers/elevenlabs.js.map +1 -0
  105. package/dist/libs/contracts/dist/integrations/providers/gcs-storage.js +2 -1
  106. package/dist/libs/contracts/dist/integrations/providers/gcs-storage.js.map +1 -0
  107. package/dist/libs/contracts/dist/integrations/providers/gmail.js +2 -1
  108. package/dist/libs/contracts/dist/integrations/providers/gmail.js.map +1 -0
  109. package/dist/libs/contracts/dist/integrations/providers/google-calendar.js +2 -1
  110. package/dist/libs/contracts/dist/integrations/providers/google-calendar.js.map +1 -0
  111. package/dist/libs/contracts/dist/integrations/providers/mistral.js +2 -1
  112. package/dist/libs/contracts/dist/integrations/providers/mistral.js.map +1 -0
  113. package/dist/libs/contracts/dist/integrations/providers/postmark.js +2 -1
  114. package/dist/libs/contracts/dist/integrations/providers/postmark.js.map +1 -0
  115. package/dist/libs/contracts/dist/integrations/providers/powens.js +2 -1
  116. package/dist/libs/contracts/dist/integrations/providers/powens.js.map +1 -0
  117. package/dist/libs/contracts/dist/integrations/providers/qdrant.js +2 -1
  118. package/dist/libs/contracts/dist/integrations/providers/qdrant.js.map +1 -0
  119. package/dist/libs/contracts/dist/integrations/providers/stripe.js +2 -1
  120. package/dist/libs/contracts/dist/integrations/providers/stripe.js.map +1 -0
  121. package/dist/libs/contracts/dist/integrations/providers/twilio-sms.js +2 -1
  122. package/dist/libs/contracts/dist/integrations/providers/twilio-sms.js.map +1 -0
  123. package/dist/libs/contracts/dist/knowledge/contracts.js +44 -45
  124. package/dist/libs/contracts/dist/knowledge/contracts.js.map +1 -0
  125. package/dist/libs/contracts/dist/knowledge/spaces/email-threads.js +2 -1
  126. package/dist/libs/contracts/dist/knowledge/spaces/email-threads.js.map +1 -0
  127. package/dist/libs/contracts/dist/knowledge/spaces/financial-docs.js +2 -1
  128. package/dist/libs/contracts/dist/knowledge/spaces/financial-docs.js.map +1 -0
  129. package/dist/libs/contracts/dist/knowledge/spaces/financial-overview.js +2 -1
  130. package/dist/libs/contracts/dist/knowledge/spaces/financial-overview.js.map +1 -0
  131. package/dist/libs/contracts/dist/knowledge/spaces/product-canon.js +2 -1
  132. package/dist/libs/contracts/dist/knowledge/spaces/product-canon.js.map +1 -0
  133. package/dist/libs/contracts/dist/knowledge/spaces/support-faq.js +2 -1
  134. package/dist/libs/contracts/dist/knowledge/spaces/support-faq.js.map +1 -0
  135. package/dist/libs/contracts/dist/knowledge/spaces/uploaded-docs.js +2 -1
  136. package/dist/libs/contracts/dist/knowledge/spaces/uploaded-docs.js.map +1 -0
  137. package/dist/libs/contracts/dist/llm/exporters.js +2 -1
  138. package/dist/libs/contracts/dist/llm/exporters.js.map +1 -0
  139. package/dist/libs/contracts/dist/onboarding-base.js +22 -23
  140. package/dist/libs/contracts/dist/onboarding-base.js.map +1 -0
  141. package/dist/libs/contracts/dist/ownership.js +4 -2
  142. package/dist/libs/contracts/dist/ownership.js.map +1 -0
  143. package/dist/libs/contracts/dist/presentations.v2.js +2 -1
  144. package/dist/libs/contracts/dist/presentations.v2.js.map +1 -0
  145. package/dist/libs/contracts/dist/regenerator/service.js +2 -1
  146. package/dist/libs/contracts/dist/regenerator/service.js.map +1 -0
  147. package/dist/libs/contracts/dist/schema/dist/index.js +3873 -6
  148. package/dist/libs/contracts/dist/schema/dist/index.js.map +1 -0
  149. package/dist/libs/contracts/dist/spec.js +2 -1
  150. package/dist/libs/contracts/dist/spec.js.map +1 -0
  151. package/dist/libs/schema/dist/index.js +4717 -6
  152. package/dist/libs/schema/dist/index.js.map +1 -0
  153. package/dist/query/query.contracts.d.ts +46 -45
  154. package/dist/query/query.contracts.d.ts.map +1 -0
  155. package/dist/query/query.contracts.js +2 -1
  156. package/dist/query/query.contracts.js.map +1 -0
  157. package/dist/query/query.enum.d.ts +4 -3
  158. package/dist/query/query.enum.d.ts.map +1 -0
  159. package/dist/query/query.enum.js +4 -4
  160. package/dist/query/query.enum.js.map +1 -0
  161. package/dist/query/query.presentation.d.ts +2 -1
  162. package/dist/query/query.presentation.d.ts.map +1 -0
  163. package/dist/query/query.presentation.js +2 -1
  164. package/dist/query/query.presentation.js.map +1 -0
  165. package/dist/query/query.schema.d.ts +36 -35
  166. package/dist/query/query.schema.d.ts.map +1 -0
  167. package/dist/query/query.schema.js +34 -35
  168. package/dist/query/query.schema.js.map +1 -0
  169. package/dist/query-engine/index.d.ts +2 -1
  170. package/dist/query-engine/index.d.ts.map +1 -0
  171. package/dist/query-engine/index.js +2 -1
  172. package/dist/query-engine/index.js.map +1 -0
  173. package/package.json +12 -11
  174. package/dist/libs/contracts/dist/docs/PUBLISHING.docblock.js +0 -16
  175. package/dist/libs/contracts/dist/docs/tech/PHASE_1_QUICKSTART.docblock.js +0 -16
  176. package/dist/libs/contracts/dist/docs/tech/PHASE_2_AI_NATIVE_OPERATIONS.docblock.js +0 -16
  177. package/dist/libs/contracts/dist/docs/tech/PHASE_3_AUTO_EVOLUTION.docblock.js +0 -16
  178. package/dist/libs/contracts/dist/docs/tech/PHASE_4_PERSONALIZATION_ENGINE.docblock.js +0 -16
  179. package/dist/libs/contracts/dist/docs/tech/PHASE_5_ZERO_TOUCH_OPERATIONS.docblock.js +0 -16
  180. package/dist/libs/contracts/dist/schema/dist/EnumType.js +0 -2
  181. package/dist/libs/contracts/dist/schema/dist/FieldType.js +0 -49
  182. package/dist/libs/contracts/dist/schema/dist/ScalarTypeEnum.js +0 -236
  183. package/dist/libs/contracts/dist/schema/dist/SchemaModel.js +0 -34
  184. package/dist/libs/contracts/dist/schema/dist/entity/defineEntity.js +0 -1
  185. package/dist/libs/contracts/dist/schema/dist/entity/index.js +0 -2
  186. package/dist/libs/contracts/dist/schema/dist/entity/types.js +0 -1
  187. package/dist/libs/schema/dist/EnumType.js +0 -56
  188. package/dist/libs/schema/dist/FieldType.js +0 -49
  189. package/dist/libs/schema/dist/ScalarTypeEnum.js +0 -236
  190. package/dist/libs/schema/dist/SchemaModel.js +0 -39
  191. package/dist/libs/schema/dist/entity/defineEntity.js +0 -1
  192. package/dist/libs/schema/dist/entity/index.js +0 -2
  193. package/dist/libs/schema/dist/entity/types.js +0 -1
@@ -1,25 +1,23 @@
- import { ScalarTypeEnum } from "../libs/schema/dist/ScalarTypeEnum.js";
- import { defineSchemaModel } from "../libs/schema/dist/SchemaModel.js";
- import "../libs/schema/dist/index.js";
+ import { E5, K5 } from "../libs/schema/dist/index.js";
  import { QueryTypeEnum } from "./query.enum.js";

  //#region src/query/query.schema.ts
  /**
  * A data query.
  */
- const QueryModel = defineSchemaModel({
+ const QueryModel = K5({
  name: "QueryModel",
  fields: {
  id: {
- type: ScalarTypeEnum.String_unsecure(),
+ type: E5.String_unsecure(),
  isOptional: false
  },
  name: {
- type: ScalarTypeEnum.String_unsecure(),
+ type: E5.String_unsecure(),
  isOptional: false
  },
  description: {
- type: ScalarTypeEnum.String_unsecure(),
+ type: E5.String_unsecure(),
  isOptional: true
  },
  type: {
@@ -27,23 +25,23 @@ const QueryModel = defineSchemaModel({
  isOptional: false
  },
  definition: {
- type: ScalarTypeEnum.JSON(),
+ type: E5.JSON(),
  isOptional: false
  },
  sql: {
- type: ScalarTypeEnum.String_unsecure(),
+ type: E5.String_unsecure(),
  isOptional: true
  },
  cacheTtlSeconds: {
- type: ScalarTypeEnum.Int_unsecure(),
+ type: E5.Int_unsecure(),
  isOptional: false
  },
  isShared: {
- type: ScalarTypeEnum.Boolean(),
+ type: E5.Boolean(),
  isOptional: false
  },
  createdAt: {
- type: ScalarTypeEnum.DateTime(),
+ type: E5.DateTime(),
  isOptional: false
  }
  }
@@ -51,35 +49,35 @@ const QueryModel = defineSchemaModel({
  /**
  * Query execution result.
  */
- const QueryResultModel = defineSchemaModel({
+ const QueryResultModel = K5({
  name: "QueryResultModel",
  fields: {
  queryId: {
- type: ScalarTypeEnum.String_unsecure(),
+ type: E5.String_unsecure(),
  isOptional: false
  },
  data: {
- type: ScalarTypeEnum.JSON(),
+ type: E5.JSON(),
  isOptional: false
  },
  columns: {
- type: ScalarTypeEnum.JSON(),
+ type: E5.JSON(),
  isOptional: false
  },
  rowCount: {
- type: ScalarTypeEnum.Int_unsecure(),
+ type: E5.Int_unsecure(),
  isOptional: false
  },
  executionTimeMs: {
- type: ScalarTypeEnum.Int_unsecure(),
+ type: E5.Int_unsecure(),
  isOptional: false
  },
  cachedAt: {
- type: ScalarTypeEnum.DateTime(),
+ type: E5.DateTime(),
  isOptional: true
  },
  error: {
- type: ScalarTypeEnum.String_unsecure(),
+ type: E5.String_unsecure(),
  isOptional: true
  }
  }
@@ -87,15 +85,15 @@ const QueryResultModel = defineSchemaModel({
  /**
  * Input for creating a query.
  */
- const CreateQueryInputModel = defineSchemaModel({
+ const CreateQueryInputModel = K5({
  name: "CreateQueryInput",
  fields: {
  name: {
- type: ScalarTypeEnum.NonEmptyString(),
+ type: E5.NonEmptyString(),
  isOptional: false
  },
  description: {
- type: ScalarTypeEnum.String_unsecure(),
+ type: E5.String_unsecure(),
  isOptional: true
  },
  type: {
@@ -103,24 +101,24 @@ const CreateQueryInputModel = defineSchemaModel({
  isOptional: false
  },
  definition: {
- type: ScalarTypeEnum.JSON(),
+ type: E5.JSON(),
  isOptional: false
  },
  sql: {
- type: ScalarTypeEnum.String_unsecure(),
+ type: E5.String_unsecure(),
  isOptional: true
  },
  metricIds: {
- type: ScalarTypeEnum.String_unsecure(),
+ type: E5.String_unsecure(),
  isArray: true,
  isOptional: true
  },
  cacheTtlSeconds: {
- type: ScalarTypeEnum.Int_unsecure(),
+ type: E5.Int_unsecure(),
  isOptional: true
  },
  isShared: {
- type: ScalarTypeEnum.Boolean(),
+ type: E5.Boolean(),
  isOptional: true
  }
  }
@@ -128,31 +126,32 @@ const CreateQueryInputModel = defineSchemaModel({
  /**
  * Input for executing a query.
  */
- const ExecuteQueryInputModel = defineSchemaModel({
+ const ExecuteQueryInputModel = K5({
  name: "ExecuteQueryInput",
  fields: {
  queryId: {
- type: ScalarTypeEnum.String_unsecure(),
+ type: E5.String_unsecure(),
  isOptional: false
  },
  parameters: {
- type: ScalarTypeEnum.JSON(),
+ type: E5.JSON(),
  isOptional: true
  },
  dateRange: {
- type: ScalarTypeEnum.JSON(),
+ type: E5.JSON(),
  isOptional: true
  },
  filters: {
- type: ScalarTypeEnum.JSON(),
+ type: E5.JSON(),
  isOptional: true
  },
  forceRefresh: {
- type: ScalarTypeEnum.Boolean(),
+ type: E5.Boolean(),
  isOptional: true
  }
  }
  });

  //#endregion
- export { CreateQueryInputModel, ExecuteQueryInputModel, QueryModel, QueryResultModel };
+ export { CreateQueryInputModel, ExecuteQueryInputModel, QueryModel, QueryResultModel };
+ //# sourceMappingURL=query.schema.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"query.schema.js","names":["defineSchemaModel","ScalarTypeEnum"],"sources":["../../src/query/query.schema.ts"],"sourcesContent":["import { defineSchemaModel, ScalarTypeEnum } from '@lssm/lib.schema';\nimport { QueryTypeEnum } from './query.enum';\n\n/**\n * A data query.\n */\nexport const QueryModel = defineSchemaModel({\n name: 'QueryModel',\n fields: {\n id: { type: ScalarTypeEnum.String_unsecure(), isOptional: false },\n name: { type: ScalarTypeEnum.String_unsecure(), isOptional: false },\n description: { type: ScalarTypeEnum.String_unsecure(), isOptional: true },\n type: { type: QueryTypeEnum, isOptional: false },\n definition: { type: ScalarTypeEnum.JSON(), isOptional: false },\n sql: { type: ScalarTypeEnum.String_unsecure(), isOptional: true },\n cacheTtlSeconds: { type: ScalarTypeEnum.Int_unsecure(), isOptional: false },\n isShared: { type: ScalarTypeEnum.Boolean(), isOptional: false },\n createdAt: { type: ScalarTypeEnum.DateTime(), isOptional: false },\n },\n});\n\n/**\n * Query execution result.\n */\nexport const QueryResultModel = defineSchemaModel({\n name: 'QueryResultModel',\n fields: {\n queryId: { type: ScalarTypeEnum.String_unsecure(), isOptional: false },\n data: { type: ScalarTypeEnum.JSON(), isOptional: false },\n columns: { type: ScalarTypeEnum.JSON(), isOptional: false },\n rowCount: { type: ScalarTypeEnum.Int_unsecure(), isOptional: false },\n executionTimeMs: { type: ScalarTypeEnum.Int_unsecure(), isOptional: false },\n cachedAt: { type: ScalarTypeEnum.DateTime(), isOptional: true },\n error: { type: ScalarTypeEnum.String_unsecure(), isOptional: true },\n },\n});\n\n/**\n * Input for creating a query.\n */\nexport const CreateQueryInputModel = defineSchemaModel({\n name: 'CreateQueryInput',\n fields: {\n name: { type: ScalarTypeEnum.NonEmptyString(), isOptional: false },\n description: { type: ScalarTypeEnum.String_unsecure(), isOptional: true },\n type: { type: QueryTypeEnum, isOptional: false },\n definition: { type: ScalarTypeEnum.JSON(), isOptional: false },\n sql: { type: ScalarTypeEnum.String_unsecure(), isOptional: true },\n metricIds: {\n type: ScalarTypeEnum.String_unsecure(),\n isArray: true,\n isOptional: true,\n },\n cacheTtlSeconds: { type: ScalarTypeEnum.Int_unsecure(), isOptional: true },\n isShared: { type: ScalarTypeEnum.Boolean(), isOptional: true },\n },\n});\n\n/**\n * Input for executing a query.\n */\nexport const ExecuteQueryInputModel = defineSchemaModel({\n name: 'ExecuteQueryInput',\n fields: {\n queryId: { type: ScalarTypeEnum.String_unsecure(), isOptional: false },\n parameters: { type: ScalarTypeEnum.JSON(), isOptional: true },\n dateRange: { type: ScalarTypeEnum.JSON(), isOptional: true },\n filters: { type: ScalarTypeEnum.JSON(), isOptional: true },\n forceRefresh: { type: ScalarTypeEnum.Boolean(), isOptional: true },\n 
},\n});\n"],"mappings":";;;;;;;AAMA,MAAa,aAAaA,GAAkB;CAC1C,MAAM;CACN,QAAQ;EACN,IAAI;GAAE,MAAMC,GAAe,iBAAiB;GAAE,YAAY;GAAO;EACjE,MAAM;GAAE,MAAMA,GAAe,iBAAiB;GAAE,YAAY;GAAO;EACnE,aAAa;GAAE,MAAMA,GAAe,iBAAiB;GAAE,YAAY;GAAM;EACzE,MAAM;GAAE,MAAM;GAAe,YAAY;GAAO;EAChD,YAAY;GAAE,MAAMA,GAAe,MAAM;GAAE,YAAY;GAAO;EAC9D,KAAK;GAAE,MAAMA,GAAe,iBAAiB;GAAE,YAAY;GAAM;EACjE,iBAAiB;GAAE,MAAMA,GAAe,cAAc;GAAE,YAAY;GAAO;EAC3E,UAAU;GAAE,MAAMA,GAAe,SAAS;GAAE,YAAY;GAAO;EAC/D,WAAW;GAAE,MAAMA,GAAe,UAAU;GAAE,YAAY;GAAO;EAClE;CACF,CAAC;;;;AAKF,MAAa,mBAAmBD,GAAkB;CAChD,MAAM;CACN,QAAQ;EACN,SAAS;GAAE,MAAMC,GAAe,iBAAiB;GAAE,YAAY;GAAO;EACtE,MAAM;GAAE,MAAMA,GAAe,MAAM;GAAE,YAAY;GAAO;EACxD,SAAS;GAAE,MAAMA,GAAe,MAAM;GAAE,YAAY;GAAO;EAC3D,UAAU;GAAE,MAAMA,GAAe,cAAc;GAAE,YAAY;GAAO;EACpE,iBAAiB;GAAE,MAAMA,GAAe,cAAc;GAAE,YAAY;GAAO;EAC3E,UAAU;GAAE,MAAMA,GAAe,UAAU;GAAE,YAAY;GAAM;EAC/D,OAAO;GAAE,MAAMA,GAAe,iBAAiB;GAAE,YAAY;GAAM;EACpE;CACF,CAAC;;;;AAKF,MAAa,wBAAwBD,GAAkB;CACrD,MAAM;CACN,QAAQ;EACN,MAAM;GAAE,MAAMC,GAAe,gBAAgB;GAAE,YAAY;GAAO;EAClE,aAAa;GAAE,MAAMA,GAAe,iBAAiB;GAAE,YAAY;GAAM;EACzE,MAAM;GAAE,MAAM;GAAe,YAAY;GAAO;EAChD,YAAY;GAAE,MAAMA,GAAe,MAAM;GAAE,YAAY;GAAO;EAC9D,KAAK;GAAE,MAAMA,GAAe,iBAAiB;GAAE,YAAY;GAAM;EACjE,WAAW;GACT,MAAMA,GAAe,iBAAiB;GACtC,SAAS;GACT,YAAY;GACb;EACD,iBAAiB;GAAE,MAAMA,GAAe,cAAc;GAAE,YAAY;GAAM;EAC1E,UAAU;GAAE,MAAMA,GAAe,SAAS;GAAE,YAAY;GAAM;EAC/D;CACF,CAAC;;;;AAKF,MAAa,yBAAyBD,GAAkB;CACtD,MAAM;CACN,QAAQ;EACN,SAAS;GAAE,MAAMC,GAAe,iBAAiB;GAAE,YAAY;GAAO;EACtE,YAAY;GAAE,MAAMA,GAAe,MAAM;GAAE,YAAY;GAAM;EAC7D,WAAW;GAAE,MAAMA,GAAe,MAAM;GAAE,YAAY;GAAM;EAC5D,SAAS;GAAE,MAAMA,GAAe,MAAM;GAAE,YAAY;GAAM;EAC1D,cAAc;GAAE,MAAMA,GAAe,SAAS;GAAE,YAAY;GAAM;EACnE;CACF,CAAC"}
@@ -102,4 +102,5 @@ declare class BasicQueryEngine implements IQueryEngine {
  }
  declare function createQueryEngine(cache?: IQueryCache): IQueryEngine;
  //#endregion
- export { AggregationDefinition, BasicQueryEngine, ColumnDefinition, CustomQueryDefinition, DimensionDefinition, FilterDefinition, IQueryCache, IQueryEngine, InMemoryQueryCache, MeasureDefinition, OrderByDefinition, QueryDefinition, QueryParameters, QueryResult, createQueryEngine };
+ export { AggregationDefinition, BasicQueryEngine, ColumnDefinition, CustomQueryDefinition, DimensionDefinition, FilterDefinition, IQueryCache, IQueryEngine, InMemoryQueryCache, MeasureDefinition, OrderByDefinition, QueryDefinition, QueryParameters, QueryResult, createQueryEngine };
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","names":[],"sources":["../../src/query-engine/index.ts"],"sourcesContent":[],"mappings":";;AAQA;AAQA;;;AAIY,UAZK,eAAA,CAYL;EACA,IAAA,EAAA,KAAA,GAAA,QAAA,GAAA,aAAA,GAAA,QAAA;EAAiB,GAAA,CAAA,EAAA,MAAA;EAIZ,SAAA,CAAA,EAAA,MAAA,EAAiB;EAOjB,WAAA,CAAA,EApBD,qBAoBoB;EAOnB,MAAA,CAAA,EA1BN,qBA0BsB;AAgBjC;AAKiB,UA5CA,qBAAA,CA8CH;EAGG,MAAA,EAAA,MAAA;EAEN,QAAA,EAjDC,iBAiDD,EAAA;EACF,UAAA,EAjDK,mBAiDL,EAAA;EAGG,OAAA,CAAA,EAnDA,gBAmDA,EAAA;EACG,OAAA,CAAA,EAnDH,iBAmDG,EAAA;EAAM,KAAA,CAAA,EAAA,MAAA;AAGrB;AACQ,UAnDS,iBAAA,CAmDT;EACG,IAAA,EAAA,MAAA;EAIE,KAAA,EAAA,MAAA;EAAI,WAAA,EAAA,OAAA,GAAA,KAAA,GAAA,KAAA,GAAA,KAAA,GAAA,KAAA,GAAA,gBAAA;EAIA,MAAA,CAAA,EAAA,MAAA;AASjB;AAEgB,UAhEC,mBAAA,CAgED;EACJ,IAAA,EAAA,MAAA;EACC,KAAA,EAAA,MAAA;EAAR,IAAA,CAAA,EAAA,MAAA,GAAA,QAAA,GAAA,QAAA;EACuB,WAAA,CAAA,EAAA,MAAA,GAAA,KAAA,GAAA,MAAA,GAAA,OAAA,GAAA,MAAA;;AAQX,UApEA,gBAAA,CAoEW;EACA,KAAA,EAAA,MAAA;EAAR,QAAA,EAAA,IAAA,GAAA,KAAA,GAAA,IAAA,GAAA,KAAA,GAAA,IAAA,GAAA,KAAA,GAAA,IAAA,GAAA,KAAA,GAAA,UAAA,GAAA,SAAA;EACO,KAAA,EAAA,OAAA;;AACI,UAvDd,iBAAA,CAuDc;EAAO,KAAA,EAAA,MAAA;EAGzB,SAAA,EAAA,KAAA,GAAA,MAAmB;;AAGN,UAxDT,qBAAA,CAwDS;EAYd,OAAA,EAAA,MAAA;EAEP,UAAA,EApES,MAoET,CAAA,MAAA,EAAA,OAAA,CAAA;;AAjBsC,UAhD1B,eAAA,CAgD0B;EAAW,SAAA,CAAA,EAAA;IAkCzC,KAAA,EAhFF,IAgFE;IAGS,GAAA,EAlFb,IAkFa;IAKN,WAAA,CAAA,EAAA,MAAA;EACJ,CAAA;EACC,OAAA,CAAA,EAtFD,gBAsFC,EAAA;EAAR,UAAA,CAAA,EArFU,MAqFV,CAAA,MAAA,EAAA,OAAA,CAAA;;AAVoC,UAxExB,WAAA,CAwEwB;EAAY,IAAA,EAvE7C,MAuE6C,CAAA,MAAA,EAAA,OAAA,CAAA,EAAA;EAoOrC,OAAA,EA1SL,gBA0SsB,EAAA;;;;aAtSpB;;;UAII,gBAAA;;;;;;UASA,YAAA;sBAED,yBACJ,kBACP,QAAQ;4BACe;;;;;UAQX,WAAA;oBACG,QAAQ;2BACD,kCAAkC;+BAC9B;;cAGlB,kBAAA,YAA8B;;oBAGjB,QAAQ;2BAYtB,kCAEP;+BAKgC;;cAYxB,gBAAA,YAA4B;;sBAGnB;sBAKN,yBACJ,kBACP,QAAQ;4BAuDe;;;;;;;;;;iBAmKZ,iBAAA,SAA0B,cAAc"}
@@ -182,4 +182,5 @@ function createQueryEngine(cache) {
  }

  //#endregion
- export { BasicQueryEngine, InMemoryQueryCache, createQueryEngine };
+ export { BasicQueryEngine, InMemoryQueryCache, createQueryEngine };
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"index.js","names":["result: QueryResult","errors: string[]","columns: ColumnDefinition[]","data: Record<string, unknown>[]","row: Record<string, unknown>"],"sources":["../../src/query-engine/index.ts"],"sourcesContent":["/**\n * Analytics Query Engine\n *\n * Provides query execution and caching for analytics dashboards.\n */\n\n// ============ Types ============\n\nexport interface QueryDefinition {\n type: 'SQL' | 'METRIC' | 'AGGREGATION' | 'CUSTOM';\n sql?: string;\n metricIds?: string[];\n aggregation?: AggregationDefinition;\n custom?: CustomQueryDefinition;\n}\n\nexport interface AggregationDefinition {\n source: string;\n measures: MeasureDefinition[];\n dimensions: DimensionDefinition[];\n filters?: FilterDefinition[];\n orderBy?: OrderByDefinition[];\n limit?: number;\n}\n\nexport interface MeasureDefinition {\n name: string;\n field: string;\n aggregation: 'COUNT' | 'SUM' | 'AVG' | 'MIN' | 'MAX' | 'COUNT_DISTINCT';\n format?: string;\n}\n\nexport interface DimensionDefinition {\n name: string;\n field: string;\n type?: 'TIME' | 'STRING' | 'NUMBER';\n granularity?: 'HOUR' | 'DAY' | 'WEEK' | 'MONTH' | 'YEAR';\n}\n\nexport interface FilterDefinition {\n field: string;\n operator:\n | 'eq'\n | 'neq'\n | 'gt'\n | 'gte'\n | 'lt'\n | 'lte'\n | 'in'\n | 'nin'\n | 'contains'\n | 'between';\n value: unknown;\n}\n\nexport interface OrderByDefinition {\n field: string;\n direction: 'ASC' | 'DESC';\n}\n\nexport interface CustomQueryDefinition {\n handler: string;\n parameters: Record<string, unknown>;\n}\n\nexport interface QueryParameters {\n dateRange?: {\n start: Date;\n end: Date;\n granularity?: string;\n };\n filters?: FilterDefinition[];\n parameters?: Record<string, unknown>;\n}\n\nexport interface QueryResult {\n data: Record<string, unknown>[];\n columns: ColumnDefinition[];\n rowCount: number;\n executionTimeMs: number;\n cached: boolean;\n cachedAt?: Date;\n error?: string;\n}\n\nexport interface ColumnDefinition {\n name: string;\n type: 'STRING' | 'NUMBER' | 'DATE' | 'BOOLEAN';\n label?: string;\n format?: string;\n}\n\n// ============ Query Engine Interface ============\n\nexport interface IQueryEngine {\n execute(\n definition: QueryDefinition,\n params: QueryParameters\n ): Promise<QueryResult>;\n validateQuery(definition: QueryDefinition): {\n valid: boolean;\n errors: string[];\n };\n}\n\n// ============ Query Cache ============\n\nexport interface IQueryCache {\n get(key: string): Promise<QueryResult | null>;\n set(key: string, result: QueryResult, ttlSeconds: number): Promise<void>;\n invalidate(pattern: string): Promise<void>;\n}\n\nexport class InMemoryQueryCache implements IQueryCache {\n private cache = new Map<string, { result: QueryResult; expiresAt: Date }>();\n\n async get(key: string): Promise<QueryResult | null> {\n const entry = this.cache.get(key);\n if (!entry) return null;\n if (entry.expiresAt < new Date()) {\n this.cache.delete(key);\n return null;\n }\n return { ...entry.result, cached: true, cachedAt: entry.expiresAt };\n }\n\n async set(\n key: string,\n result: QueryResult,\n ttlSeconds: number\n ): Promise<void> {\n const expiresAt = new Date(Date.now() + ttlSeconds * 1000);\n this.cache.set(key, { result, expiresAt });\n }\n\n async invalidate(pattern: string): Promise<void> {\n const regex = new RegExp(pattern);\n for (const key of this.cache.keys()) {\n if (regex.test(key)) {\n this.cache.delete(key);\n }\n }\n }\n}\n\n// ============ Basic Query Engine ============\n\nexport class BasicQueryEngine implements IQueryEngine {\n 
private cache: IQueryCache;\n\n constructor(cache?: IQueryCache) {\n this.cache = cache ?? new InMemoryQueryCache();\n }\n\n async execute(\n definition: QueryDefinition,\n params: QueryParameters\n ): Promise<QueryResult> {\n const startTime = Date.now();\n\n // Validate query\n const validation = this.validateQuery(definition);\n if (!validation.valid) {\n return {\n data: [],\n columns: [],\n rowCount: 0,\n executionTimeMs: Date.now() - startTime,\n cached: false,\n error: validation.errors.join(', '),\n };\n }\n\n // Check cache\n const cacheKey = this.buildCacheKey(definition, params);\n const cachedResult = await this.cache.get(cacheKey);\n if (cachedResult) {\n return cachedResult;\n }\n\n // Execute query based on type\n let result: QueryResult;\n switch (definition.type) {\n case 'AGGREGATION':\n result = await this.executeAggregation(definition.aggregation!, params);\n break;\n case 'METRIC':\n result = await this.executeMetric(definition.metricIds!, params);\n break;\n case 'SQL':\n result = await this.executeSql(definition.sql!, params);\n break;\n default:\n result = {\n data: [],\n columns: [],\n rowCount: 0,\n executionTimeMs: Date.now() - startTime,\n cached: false,\n error: `Unknown query type: ${definition.type}`,\n };\n }\n\n result.executionTimeMs = Date.now() - startTime;\n result.cached = false;\n\n // Cache result\n await this.cache.set(cacheKey, result, 300);\n\n return result;\n }\n\n validateQuery(definition: QueryDefinition): {\n valid: boolean;\n errors: string[];\n } {\n const errors: string[] = [];\n\n if (!definition.type) {\n errors.push('Query type is required');\n }\n\n switch (definition.type) {\n case 'SQL':\n if (!definition.sql) {\n errors.push('SQL query is required for SQL type');\n }\n break;\n case 'METRIC':\n if (!definition.metricIds || definition.metricIds.length === 0) {\n errors.push('Metric IDs are required for METRIC type');\n }\n break;\n case 'AGGREGATION':\n if (!definition.aggregation) {\n errors.push(\n 'Aggregation definition is required for AGGREGATION type'\n );\n } else {\n if (!definition.aggregation.source) {\n errors.push('Aggregation source is required');\n }\n if (\n !definition.aggregation.measures ||\n definition.aggregation.measures.length === 0\n ) {\n errors.push('At least one measure is required');\n }\n }\n break;\n }\n\n return { valid: errors.length === 0, errors };\n }\n\n private buildCacheKey(\n definition: QueryDefinition,\n params: QueryParameters\n ): string {\n return JSON.stringify({ definition, params });\n }\n\n private async executeAggregation(\n aggregation: AggregationDefinition,\n params: QueryParameters\n ): Promise<QueryResult> {\n // In production, this would execute against a data warehouse\n // For demo, return mock data\n const columns: ColumnDefinition[] = [\n ...aggregation.dimensions.map((d) => ({\n name: d.name,\n type: (d.type === 'NUMBER'\n ? 'NUMBER'\n : d.type === 'TIME'\n ? 
'DATE'\n : 'STRING') as ColumnDefinition['type'],\n label: d.name,\n })),\n ...aggregation.measures.map((m) => ({\n name: m.name,\n type: 'NUMBER' as const,\n label: m.name,\n format: m.format,\n })),\n ];\n\n // Mock data generation\n const data = this.generateMockData(aggregation, params);\n\n return {\n data,\n columns,\n rowCount: data.length,\n executionTimeMs: 0,\n cached: false,\n };\n }\n\n private async executeMetric(\n metricIds: string[],\n _params: QueryParameters\n ): Promise<QueryResult> {\n // In production, this would fetch from metering service\n const data = metricIds.map((id) => ({\n metricId: id,\n value: Math.random() * 1000,\n change: (Math.random() - 0.5) * 20,\n }));\n\n return {\n data,\n columns: [\n { name: 'metricId', type: 'STRING' },\n { name: 'value', type: 'NUMBER' },\n { name: 'change', type: 'NUMBER' },\n ],\n rowCount: data.length,\n executionTimeMs: 0,\n cached: false,\n };\n }\n\n private async executeSql(\n _sql: string,\n _params: QueryParameters\n ): Promise<QueryResult> {\n // In production, this would execute SQL against a database\n return {\n data: [],\n columns: [],\n rowCount: 0,\n executionTimeMs: 0,\n cached: false,\n error: 'SQL execution not implemented in demo',\n };\n }\n\n private generateMockData(\n aggregation: AggregationDefinition,\n params: QueryParameters\n ): Record<string, unknown>[] {\n const data: Record<string, unknown>[] = [];\n const rowCount = 10;\n\n // Generate time series data if there's a time dimension\n const timeDimension = aggregation.dimensions.find((d) => d.type === 'TIME');\n\n for (let i = 0; i < rowCount; i++) {\n const row: Record<string, unknown> = {};\n\n for (const dim of aggregation.dimensions) {\n if (dim.type === 'TIME') {\n const date = new Date(params.dateRange?.start ?? new Date());\n date.setDate(date.getDate() + i);\n row[dim.name] = date.toISOString().split('T')[0];\n } else {\n row[dim.name] = `${dim.name}_${i % 5}`;\n }\n }\n\n for (const measure of aggregation.measures) {\n const baseValue = timeDimension ? 
100 + i * 10 : Math.random() * 1000;\n const noise = (Math.random() - 0.5) * 20;\n row[measure.name] = Math.round((baseValue + noise) * 100) / 100;\n }\n\n data.push(row);\n }\n\n return data;\n }\n}\n\n// ============ Factory ============\n\nexport function createQueryEngine(cache?: IQueryCache): IQueryEngine {\n return new BasicQueryEngine(cache);\n}\n"],"mappings":";AAiHA,IAAa,qBAAb,MAAuD;CACrD,AAAQ,wBAAQ,IAAI,KAAuD;CAE3E,MAAM,IAAI,KAA0C;EAClD,MAAM,QAAQ,KAAK,MAAM,IAAI,IAAI;AACjC,MAAI,CAAC,MAAO,QAAO;AACnB,MAAI,MAAM,4BAAY,IAAI,MAAM,EAAE;AAChC,QAAK,MAAM,OAAO,IAAI;AACtB,UAAO;;AAET,SAAO;GAAE,GAAG,MAAM;GAAQ,QAAQ;GAAM,UAAU,MAAM;GAAW;;CAGrE,MAAM,IACJ,KACA,QACA,YACe;EACf,MAAM,YAAY,IAAI,KAAK,KAAK,KAAK,GAAG,aAAa,IAAK;AAC1D,OAAK,MAAM,IAAI,KAAK;GAAE;GAAQ;GAAW,CAAC;;CAG5C,MAAM,WAAW,SAAgC;EAC/C,MAAM,QAAQ,IAAI,OAAO,QAAQ;AACjC,OAAK,MAAM,OAAO,KAAK,MAAM,MAAM,CACjC,KAAI,MAAM,KAAK,IAAI,CACjB,MAAK,MAAM,OAAO,IAAI;;;AAQ9B,IAAa,mBAAb,MAAsD;CACpD,AAAQ;CAER,YAAY,OAAqB;AAC/B,OAAK,QAAQ,SAAS,IAAI,oBAAoB;;CAGhD,MAAM,QACJ,YACA,QACsB;EACtB,MAAM,YAAY,KAAK,KAAK;EAG5B,MAAM,aAAa,KAAK,cAAc,WAAW;AACjD,MAAI,CAAC,WAAW,MACd,QAAO;GACL,MAAM,EAAE;GACR,SAAS,EAAE;GACX,UAAU;GACV,iBAAiB,KAAK,KAAK,GAAG;GAC9B,QAAQ;GACR,OAAO,WAAW,OAAO,KAAK,KAAK;GACpC;EAIH,MAAM,WAAW,KAAK,cAAc,YAAY,OAAO;EACvD,MAAM,eAAe,MAAM,KAAK,MAAM,IAAI,SAAS;AACnD,MAAI,aACF,QAAO;EAIT,IAAIA;AACJ,UAAQ,WAAW,MAAnB;GACE,KAAK;AACH,aAAS,MAAM,KAAK,mBAAmB,WAAW,aAAc,OAAO;AACvE;GACF,KAAK;AACH,aAAS,MAAM,KAAK,cAAc,WAAW,WAAY,OAAO;AAChE;GACF,KAAK;AACH,aAAS,MAAM,KAAK,WAAW,WAAW,KAAM,OAAO;AACvD;GACF,QACE,UAAS;IACP,MAAM,EAAE;IACR,SAAS,EAAE;IACX,UAAU;IACV,iBAAiB,KAAK,KAAK,GAAG;IAC9B,QAAQ;IACR,OAAO,uBAAuB,WAAW;IAC1C;;AAGL,SAAO,kBAAkB,KAAK,KAAK,GAAG;AACtC,SAAO,SAAS;AAGhB,QAAM,KAAK,MAAM,IAAI,UAAU,QAAQ,IAAI;AAE3C,SAAO;;CAGT,cAAc,YAGZ;EACA,MAAMC,SAAmB,EAAE;AAE3B,MAAI,CAAC,WAAW,KACd,QAAO,KAAK,yBAAyB;AAGvC,UAAQ,WAAW,MAAnB;GACE,KAAK;AACH,QAAI,CAAC,WAAW,IACd,QAAO,KAAK,qCAAqC;AAEnD;GACF,KAAK;AACH,QAAI,CAAC,WAAW,aAAa,WAAW,UAAU,WAAW,EAC3D,QAAO,KAAK,0CAA0C;AAExD;GACF,KAAK;AACH,QAAI,CAAC,WAAW,YACd,QAAO,KACL,0DACD;SACI;AACL,SAAI,CAAC,WAAW,YAAY,OAC1B,QAAO,KAAK,iCAAiC;AAE/C,SACE,CAAC,WAAW,YAAY,YACxB,WAAW,YAAY,SAAS,WAAW,EAE3C,QAAO,KAAK,mCAAmC;;AAGnD;;AAGJ,SAAO;GAAE,OAAO,OAAO,WAAW;GAAG;GAAQ;;CAG/C,AAAQ,cACN,YACA,QACQ;AACR,SAAO,KAAK,UAAU;GAAE;GAAY;GAAQ,CAAC;;CAG/C,MAAc,mBACZ,aACA,QACsB;EAGtB,MAAMC,UAA8B,CAClC,GAAG,YAAY,WAAW,KAAK,OAAO;GACpC,MAAM,EAAE;GACR,MAAO,EAAE,SAAS,WACd,WACA,EAAE,SAAS,SACT,SACA;GACN,OAAO,EAAE;GACV,EAAE,EACH,GAAG,YAAY,SAAS,KAAK,OAAO;GAClC,MAAM,EAAE;GACR,MAAM;GACN,OAAO,EAAE;GACT,QAAQ,EAAE;GACX,EAAE,CACJ;EAGD,MAAM,OAAO,KAAK,iBAAiB,aAAa,OAAO;AAEvD,SAAO;GACL;GACA;GACA,UAAU,KAAK;GACf,iBAAiB;GACjB,QAAQ;GACT;;CAGH,MAAc,cACZ,WACA,SACsB;EAEtB,MAAM,OAAO,UAAU,KAAK,QAAQ;GAClC,UAAU;GACV,OAAO,KAAK,QAAQ,GAAG;GACvB,SAAS,KAAK,QAAQ,GAAG,MAAO;GACjC,EAAE;AAEH,SAAO;GACL;GACA,SAAS;IACP;KAAE,MAAM;KAAY,MAAM;KAAU;IACpC;KAAE,MAAM;KAAS,MAAM;KAAU;IACjC;KAAE,MAAM;KAAU,MAAM;KAAU;IACnC;GACD,UAAU,KAAK;GACf,iBAAiB;GACjB,QAAQ;GACT;;CAGH,MAAc,WACZ,MACA,SACsB;AAEtB,SAAO;GACL,MAAM,EAAE;GACR,SAAS,EAAE;GACX,UAAU;GACV,iBAAiB;GACjB,QAAQ;GACR,OAAO;GACR;;CAGH,AAAQ,iBACN,aACA,QAC2B;EAC3B,MAAMC,OAAkC,EAAE;EAC1C,MAAM,WAAW;EAGjB,MAAM,gBAAgB,YAAY,WAAW,MAAM,MAAM,EAAE,SAAS,OAAO;AAE3E,OAAK,IAAI,IAAI,GAAG,IAAI,UAAU,KAAK;GACjC,MAAMC,MAA+B,EAAE;AAEvC,QAAK,MAAM,OAAO,YAAY,WAC5B,KAAI,IAAI,SAAS,QAAQ;IACvB,MAAM,OAAO,IAAI,KAAK,OAAO,WAAW,yBAAS,IAAI,MAAM,CAAC;AAC5D,SAAK,QAAQ,KAAK,SAAS,GAAG,EAAE;AAChC,QAAI,IAAI,QAAQ,KAAK,aAAa,CAAC,MAAM,IAAI,CAAC;SAE9C,KAAI,IAAI,QAAQ,GAAG,IAAI,KAAK,GAAG,IAAI;AAIvC,QAAK,MAAM,WAAW,YAAY,UAAU;IAC1C,MAAM,YAAY,gBAAgB,MAAM,IAAI
,KAAK,KAAK,QAAQ,GAAG;IACjE,MAAM,SAAS,KAAK,QAAQ,GAAG,MAAO;AACtC,QAAI,QAAQ,QAAQ,KAAK,OAAO,YAAY,SAAS,IAAI,GAAG;;AAG9D,QAAK,KAAK,IAAI;;AAGhB,SAAO;;;AAMX,SAAgB,kBAAkB,OAAmC;AACnE,QAAO,IAAI,iBAAiB,MAAM"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@lssm/example.analytics-dashboard",
- "version": "0.0.0-canary-20251217080011",
+ "version": "0.0.0-canary-20251219202229",
  "description": "Analytics Dashboard example with widgets and query engine for ContractSpec",
  "main": "./dist/index.js",
  "types": "./dist/index.d.ts",
@@ -18,18 +18,18 @@
  "lint:check": "eslint src"
  },
  "dependencies": {
- "@lssm/lib.schema": "0.0.0-canary-20251217080011",
- "@lssm/lib.contracts": "0.0.0-canary-20251217080011",
- "@lssm/lib.bus": "0.0.0-canary-20251217080011",
- "@lssm/lib.identity-rbac": "0.0.0-canary-20251217080011",
- "@lssm/lib.metering": "0.0.0-canary-20251217080011",
- "@lssm/lib.jobs": "0.0.0-canary-20251217080011",
- "@lssm/module.audit-trail": "0.0.0-canary-20251217080011",
+ "@lssm/lib.schema": "0.0.0-canary-20251219202229",
+ "@lssm/lib.contracts": "0.0.0-canary-20251219202229",
+ "@lssm/lib.bus": "0.0.0-canary-20251219202229",
+ "@lssm/lib.identity-rbac": "0.0.0-canary-20251219202229",
+ "@lssm/lib.metering": "0.0.0-canary-20251219202229",
+ "@lssm/lib.jobs": "0.0.0-canary-20251219202229",
+ "@lssm/module.audit-trail": "0.0.0-canary-20251219202229",
  "zod": "^4.1.13"
  },
  "devDependencies": {
- "@lssm/tool.typescript": "0.0.0-canary-20251217080011",
- "@lssm/tool.tsdown": "0.0.0-canary-20251217080011",
+ "@lssm/tool.typescript": "0.0.0-canary-20251219202229",
+ "@lssm/tool.tsdown": "0.0.0-canary-20251219202229",
  "typescript": "^5.9.3"
  },
  "exports": {
@@ -79,5 +79,6 @@
  "./query/query.schema": "./dist/query/query.schema.js",
  "./*": "./*"
  }
- }
+ },
+ "license": "MIT"
  }
@@ -1,16 +0,0 @@
- import { registerDocBlocks } from "./registry.js";
-
- //#region ../../libs/contracts/dist/docs/PUBLISHING.docblock.js
- const PUBLISHING_DocBlocks = [{
- id: "docs.PUBLISHING",
- title: "Publishing ContractSpec Libraries",
- summary: "This guide describes how we release the ContractSpec libraries to npm. We use a dual-track release system: **Stable** (manual) and **Canary** (automatic).",
- kind: "reference",
- visibility: "public",
- route: "/docs/PUBLISHING",
- tags: ["PUBLISHING"],
- body: "# Publishing ContractSpec Libraries\n\nThis guide describes how we release the ContractSpec libraries to npm. We use a dual-track release system: **Stable** (manual) and **Canary** (automatic).\n\n## Release Tracks\n\n| Track | Branch | npm Tag | Frequency | Versioning | Use Case |\n|-------|--------|---------|-----------|------------|----------|\n| **Stable** | `release` | `latest` | Manual | SemVer (e.g., `1.7.4`) | Production, external users |\n| **Canary** | `main` | `canary` | Every Push | Snapshot (e.g., `1.7.4-canary...`) | Dev, internal testing |\n\n## Prerequisites\n\n- ✅ `NPM_TOKEN` secret is configured in GitHub (owner or automation token with _publish_ scope).\n- ✅ `GITHUB_TOKEN` (built-in) has permissions to create PRs (enabled by default in new repos).\n- ✅ For stable releases: `release` branch exists and is protected.\n\n## Canary Workflow (Automatic)\n\nEvery commit pushed to `main` triggers the `.github/workflows/publish-canary.yml` workflow.\n\n1. **Trigger**: Push to `main`.\n2. **Versioning**: Runs `changeset version --snapshot canary` to generate a temporary snapshot version.\n3. **Publish**: Packages are published to npm with the `canary` tag using `changeset publish --tag canary`.\n\n### Consuming Canary Builds\n\nTo install the latest bleeding-edge version:\n\n```bash\nnpm install @lssm/lib.contracts@canary\n# or\nbun add @lssm/lib.contracts@canary\n```\n\n## Stable Release Workflow (Manual)\n\nStable releases are managed via the `release` branch using the standard [Changesets Action](https://github.com/changesets/action).\n\n1. **Develop on `main`**: Create features and fixes.\n2. **Add Changesets**: Run `bun changeset` to document changes and impact (major/minor/patch).\n3. **Merge to `release`**: When ready to ship, open a PR from `main` to `release` or merge manually.\n4. **\"Version Packages\" PR**:\n - The GitHub Action detects new changesets and automatically creates a Pull Request titled **\"Version Packages\"**.\n - This PR contains the version bumps and updated `CHANGELOG.md` files.\n5. **Merge & Publish**:\n - Review and merge the \"Version Packages\" PR.\n - The Action runs again, detects the versions have been bumped, builds the libraries, and publishes them to npm with the `latest` tag.\n\n### Publishing Steps\n\n1. Ensure all changesets are present on `main`.\n2. Merge `main` into `release`:\n ```bash\n git checkout release\n git pull origin release\n git merge main\n git push origin release\n ```\n3. Go to GitHub Pull Requests. You will see a **\"Version Packages\"** PR created by the bot.\n4. Merge that PR.\n5. The release is now live on npm!\n\n## Manual Verification (Optional)\n\nBefore publishing a new version you can run:\n\n```bash\nbun run build:not-apps\nnpx npm-packlist --json packages/libs/contracts\n```\n\n## Rollback\n\nIf a publish fails mid-way, re-run the workflow once the issue is fixed. Already published packages are skipped automatically. Use `npm deprecate <package>@<version>` if we need to warn consumers about a broken release.\n"
- }];
- registerDocBlocks(PUBLISHING_DocBlocks);
-
- //#endregion
@@ -1,16 +0,0 @@
- import { registerDocBlocks } from "../registry.js";
-
- //#region ../../libs/contracts/dist/docs/tech/PHASE_1_QUICKSTART.docblock.js
- const tech_PHASE_1_QUICKSTART_DocBlocks = [{
- id: "docs.tech.PHASE_1_QUICKSTART",
- title: "Phase 1: API Reference Index",
- summary: "Quick reference for all new Phase 1 APIs.",
- kind: "reference",
- visibility: "public",
- route: "/docs/tech/PHASE_1_QUICKSTART",
- tags: ["tech", "PHASE_1_QUICKSTART"],
- body: "# Phase 1: API Reference Index\n\nQuick reference for all new Phase 1 APIs.\n\n---\n\n## @lssm/lib.multi-tenancy\n\n### RLS\n```typescript\nimport { createRlsMiddleware, type TenantIdProvider } from '@lssm/lib.multi-tenancy/rls';\n```\n\n### Provisioning\n```typescript\nimport { \n TenantProvisioningService,\n type CreateTenantInput,\n type TenantProvisioningConfig \n} from '@lssm/lib.multi-tenancy/provisioning';\n```\n\n### Isolation\n```typescript\nimport { IsolationValidator } from '@lssm/lib.multi-tenancy/isolation';\n```\n\n---\n\n## @lssm/lib.observability\n\n### Tracing\n```typescript\nimport { \n getTracer,\n traceAsync,\n traceSync,\n createTracingMiddleware \n} from '@lssm/lib.observability/tracing';\n```\n\n### Metrics\n```typescript\nimport {\n getMeter,\n createCounter,\n createUpDownCounter,\n createHistogram,\n standardMetrics\n} from '@lssm/lib.observability/metrics';\n```\n\n### Logging\n```typescript\nimport {\n Logger,\n logger,\n type LogLevel,\n type LogEntry\n} from '@lssm/lib.observability/logging';\n```\n\n---\n\n## @lssm/lib.resilience\n\n### Circuit Breaker\n```typescript\nimport {\n CircuitBreaker,\n type CircuitState,\n type CircuitBreakerConfig\n} from '@lssm/lib.resilience/circuit-breaker';\n```\n\n### Retry\n```typescript\nimport { retry } from '@lssm/lib.resilience/retry';\n```\n\n### Timeout\n```typescript\nimport { timeout } from '@lssm/lib.resilience/timeout';\n```\n\n### Fallback\n```typescript\nimport { fallback } from '@lssm/lib.resilience/fallback';\n```\n\n---\n\n## Enhanced: @lssm/lib.contracts\n\n### DataViews\n```typescript\nimport { DataViewQueryGenerator } from '@lssm/lib.contracts/data-views/query-generator';\nimport { DataViewRuntime } from '@lssm/lib.contracts/data-views/runtime';\n```\n\n### Workflows\n```typescript\nimport { SLAMonitor, type SLABreachEvent } from '@lssm/lib.contracts/workflow/sla-monitor';\nimport { PrismaStateStore } from '@lssm/lib.contracts/workflow/adapters/db-adapter';\n```\n\n---\n\n## Enhanced: @lssm/lib.design-system\n\n### DataView Components\n```typescript\nimport { DataViewRenderer } from '@lssm/lib.design-system/components/data-view/DataViewRenderer';\n// Also available: DataViewList, DataViewTable, DataViewDetail\n```\n\n---\n\n## Usage Examples\n\n### Complete Workflow with All Features\n\n```typescript\nimport { WorkflowRunner } from '@lssm/lib.contracts/workflow/runner';\nimport { PrismaStateStore } from '@lssm/lib.contracts/workflow/adapters/db-adapter';\nimport { SLAMonitor } from '@lssm/lib.contracts/workflow/sla-monitor';\nimport { CircuitBreaker } from '@lssm/lib.resilience/circuit-breaker';\nimport { traceAsync } from '@lssm/lib.observability/tracing';\n\nconst runner = new WorkflowRunner({\n registry,\n stateStore: new PrismaStateStore(db),\n opExecutor: async (op, input, ctx) => {\n return traceAsync(`op.${op.name}`, async (span) => {\n span.setAttribute('operation', op.name);\n const breaker = getCircuitBreaker(op.name);\n return breaker.execute(() => executeOperation(op, input, ctx));\n });\n },\n eventEmitter: (event, payload) => {\n if (event.startsWith('workflow.')) {\n logger.info(event, payload);\n }\n },\n});\n\nconst monitor = new SLAMonitor((event, payload) => {\n logger.warn('SLA_BREACH', payload);\n alertOps(payload);\n});\n\n// Start workflow\nconst workflowId = await runner.start('payment.flow', 1);\n\n// Monitor SLA\nconst state = await runner.getState(workflowId);\nconst spec = registry.get('payment.flow', 1);\nmonitor.check(state, spec!);\n```\n\n### Complete DataView with 
Observability\n\n```typescript\nimport { DataViewRenderer } from '@lssm/lib.design-system';\nimport { DataViewQueryGenerator } from '@lssm/lib.contracts/data-views/query-generator';\nimport { traceAsync } from '@lssm/lib.observability/tracing';\nimport { MyDataView } from './specs/users.data-view';\n\nexport function UserListPage() {\n const [page, setPage] = useState(1);\n const [users, setUsers] = useState([]);\n\n const loadUsers = async () => {\n return traceAsync('load_users', async (span) => {\n const generator = new DataViewQueryGenerator(MyDataView);\n const query = generator.generate({ pagination: { page, pageSize: 20 } });\n \n span.setAttribute('page', page);\n const result = await api.execute(query);\n setUsers(result.data);\n });\n };\n\n return (\n <DataViewRenderer\n spec={MyDataView}\n items={users}\n pagination={{ page, pageSize: 20, total: users.length }}\n onPageChange={setPage}\n />\n );\n}\n```\n\n### Complete Multi-Tenant Setup\n\n```typescript\n// 1. RLS Middleware\nimport { createRlsMiddleware } from '@lssm/lib.multi-tenancy/rls';\ndb.$use(createRlsMiddleware(() => req.tenantId));\n\n// 2. Tenant Provisioning\nimport { TenantProvisioningService } from '@lssm/lib.multi-tenancy/provisioning';\nconst service = new TenantProvisioningService({ db });\n\n// 3. Create new tenant\nawait service.provision({\n id: 'acme',\n name: 'Acme Corp',\n slug: 'acme',\n ownerEmail: 'admin@acme.com',\n});\n\n// 4. Validate isolation in tests\nimport { IsolationValidator } from '@lssm/lib.multi-tenancy/isolation';\n\ntest('queries are isolated', () => {\n const isValid = IsolationValidator.validateQuery(\n 'User',\n 'findMany',\n { where: { tenantId: 'acme' } },\n 'acme'\n );\n expect(isValid).toBe(true);\n});\n```\n\n---\n\n## Testing\n\n### Test Circuit Breakers\n\n```typescript\nimport { CircuitBreaker } from '@lssm/lib.resilience/circuit-breaker';\n\ntest('circuit opens after threshold', async () => {\n const breaker = new CircuitBreaker({\n failureThreshold: 3,\n resetTimeoutMs: 5000,\n });\n\n // Trigger failures\n for (let i = 0; i < 3; i++) {\n await expect(\n breaker.execute(() => Promise.reject('error'))\n ).rejects.toThrow();\n }\n\n // Circuit should be open\n await expect(\n breaker.execute(() => Promise.resolve('ok'))\n ).rejects.toThrow('CircuitBreaker is OPEN');\n});\n```\n\n### Test Workflow Retry\n\n```typescript\ntest('workflow retries on failure', async () => {\n let attempts = 0;\n const opExecutor = async () => {\n attempts++;\n if (attempts < 3) throw new Error('fail');\n return 'success';\n };\n\n const runner = new WorkflowRunner({ /* ... 
*/ opExecutor });\n await runner.executeStep(workflowId);\n \n expect(attempts).toBe(3);\n});\n```\n\n---\n\n## Common Patterns\n\n### Pattern: Resilient External Call\n\n```typescript\nimport { CircuitBreaker } from '@lssm/lib.resilience/circuit-breaker';\nimport { retry } from '@lssm/lib.resilience/retry';\nimport { timeout } from '@lssm/lib.resilience/timeout';\nimport { traceAsync } from '@lssm/lib.observability/tracing';\n\nconst breaker = new CircuitBreaker({ failureThreshold: 5, resetTimeoutMs: 30000 });\n\nexport async function callExternalAPI(input: any) {\n return traceAsync('external_api_call', async (span) => {\n span.setAttribute('service', 'stripe');\n \n return breaker.execute(() =>\n retry(\n () => timeout(() => stripe.api.call(input), 5000),\n 3,\n 1000,\n true\n )\n );\n });\n}\n```\n\n**Benefits**: Circuit breaker + retry + timeout + tracing in one place.\n\n---\n\n### Pattern: Tenant-Aware Operation\n\n```typescript\nimport { traceAsync } from '@lssm/lib.observability/tracing';\n\nexport async function listUsers(tenantId: string) {\n return traceAsync('list_users', async (span) => {\n span.setAttribute('tenant_id', tenantId);\n \n // RLS middleware will inject WHERE tenantId = ?\n return db.user.findMany();\n });\n}\n```\n\n---\n\n### Pattern: Monitored Workflow\n\n```typescript\nimport { WorkflowRunner } from '@lssm/lib.contracts/workflow/runner';\nimport { SLAMonitor } from '@lssm/lib.contracts/workflow/sla-monitor';\nimport { logger } from '@lssm/lib.observability/logging';\n\nconst monitor = new SLAMonitor((event, payload) => {\n logger.warn('workflow.sla_breach', payload);\n});\n\n// In workflow poller\nconst state = await runner.getState(workflowId);\nconst spec = registry.get(state.workflowName, state.workflowVersion);\nif (spec) {\n monitor.check(state, spec);\n}\n```\n\n---\n\n## Next Steps\n\n1. **Implement one quick win** (30 minutes)\n2. **Add tests for new functionality** (1 hour)\n3. **Deploy to staging and verify observability** (1 hour)\n4. **Roll out to production** (monitor closely)\n5. **Read full documentation** at https://contractspec.lssm.tech/docs\n\n---\n\n**Questions?** See `/docs/guides/phase-1-migration` or reach out via https://contractspec.lssm.tech/contact\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"
- }];
- registerDocBlocks(tech_PHASE_1_QUICKSTART_DocBlocks);
-
- //#endregion
@@ -1,16 +0,0 @@
- import { registerDocBlocks } from "../registry.js";
-
- //#region ../../libs/contracts/dist/docs/tech/PHASE_2_AI_NATIVE_OPERATIONS.docblock.js
- const tech_PHASE_2_AI_NATIVE_OPERATIONS_DocBlocks = [{
- id: "docs.tech.PHASE_2_AI_NATIVE_OPERATIONS",
- title: "Phase 2: AI-Native Operations",
- summary: "_Last updated: 2025-11-20_",
- kind: "reference",
- visibility: "public",
- route: "/docs/tech/PHASE_2_AI_NATIVE_OPERATIONS",
- tags: ["tech", "PHASE_2_AI_NATIVE_OPERATIONS"],
- body: "# Phase 2: AI-Native Operations\n\n_Last updated: 2025-11-20_\n\nPhase 2 turns ContractSpec into an AI-first operations stack. The new libraries below are the building blocks used by support bots, growth agents, and human-in-the-loop flows.\n\n## Libraries\n\n### @lssm/lib.ai-agent\n\n- **Spec + Registry**: `defineAgent`, `AgentRegistry` keep agent definitions type-safe.\n- **Runner**: `AgentRunner` drives LLM conversations, tool calls, retries, escalation, and telemetry hooks.\n- **Tools**: `ToolExecutor` standardizes schema validation + timeouts.\n- **Memory**: `InMemoryAgentMemory` + interfaces for plugging persistent stores.\n- **Approvals**: new `ApprovalWorkflow` + `InMemoryApprovalStore` capture low-confidence decisions and surface them to reviewers.\n\n### @lssm/lib.support-bot\n\nComposable support automation primitives:\n\n- `TicketClassifier` → heuristics + optional LLM validation for category, priority, sentiment.\n- `TicketResolver` → RAG pipeline backed by knowledge spaces.\n- `AutoResponder` → tone-aware drafts with citations.\n- `SupportFeedbackLoop` → tracks resolution rates.\n- `createSupportTools` → ready-made tool definitions for AgentRunner.\n\n### @lssm/lib.content-gen\n\nContent generators that consume a `ContentBrief` and output production-ready assets:\n\n- `BlogGenerator`, `LandingPageGenerator`, `EmailCampaignGenerator`, `SocialPostGenerator`.\n- `SeoOptimizer` builds metadata + schema markup.\n\n### @lssm/lib.analytics\n\nQueryless analytics helpers:\n\n- `FunnelAnalyzer` – conversion/drop-off per step.\n- `CohortTracker` – retention + LTV per cohort.\n- `ChurnPredictor` – recency/frequency/error scoring.\n- `GrowthHypothesisGenerator` – surfaces experiment ideas from metric trends.\n\n### @lssm/lib.growth\n\nA/B testing toolkit:\n\n- `ExperimentRegistry` + `ExperimentRunner` – deterministic bucketing.\n- `ExperimentTracker` – persist exposures + metrics.\n- `StatsEngine` – Welch’s t-test + improvement calculations.\n\n### Human-in-the-loop UI\n\n`@lssm/lib.design-system` now exposes:\n\n- `ApprovalQueue` – list + act on pending approvals.\n- `AgentMonitor` – live view of agent sessions with confidence + status.\n\n## Examples\n\n- `examples/ai-support-bot/setup.ts` shows ticket classification → resolution → response draft.\n- `examples/content-generation/generate.ts` produces blog, landing, email, social, SEO output from one brief.\n\n## Next Steps\n\n1. Wire these libraries into vertical apps (H-Circle, ArtisanOS, etc.).\n2. Add background workers that consume the new analytics/growth trackers.\n3. Expand web-landing to highlight these Phase 2 capabilities (see separate TODO).\n"
- }];
- registerDocBlocks(tech_PHASE_2_AI_NATIVE_OPERATIONS_DocBlocks);
-
- //#endregion
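
The Phase 2 notes removed above describe `StatsEngine` as "Welch's t-test + improvement calculations". For readers unfamiliar with the test, here is a minimal self-contained sketch of that computation; the types and function names are illustrative assumptions, not the `@lssm/lib.growth` API.

```typescript
// Welch's t-test over two metric samples (a sketch, not the library implementation).
interface VariantSample {
  values: number[];
}

function mean(xs: number[]): number {
  return xs.reduce((acc, x) => acc + x, 0) / xs.length;
}

// Sample variance (n - 1 denominator); assumes at least two values per arm.
function variance(xs: number[]): number {
  const m = mean(xs);
  return xs.reduce((acc, x) => acc + (x - m) ** 2, 0) / (xs.length - 1);
}

/** Welch's t statistic, degrees of freedom, and relative improvement of variant over control. */
function welchTTest(control: VariantSample, variant: VariantSample) {
  const m1 = mean(control.values);
  const m2 = mean(variant.values);
  const se1 = variance(control.values) / control.values.length;
  const se2 = variance(variant.values) / variant.values.length;
  const t = (m2 - m1) / Math.sqrt(se1 + se2);
  // Welch-Satterthwaite approximation of the degrees of freedom.
  const df =
    (se1 + se2) ** 2 /
    (se1 ** 2 / (control.values.length - 1) + se2 ** 2 / (variant.values.length - 1));
  return { t, df, improvement: (m2 - m1) / m1 };
}

// Example: per-bucket conversion rates for control vs variant.
console.log(
  welchTTest(
    { values: [0.12, 0.1, 0.11, 0.13, 0.09] },
    { values: [0.14, 0.15, 0.12, 0.16, 0.13] }
  )
);
```

Welch's form is a natural fit for A/B arms because it does not assume equal variance or equal sample size between control and variant.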
@@ -1,16 +0,0 @@
- import { registerDocBlocks } from "../registry.js";
-
- //#region ../../libs/contracts/dist/docs/tech/PHASE_3_AUTO_EVOLUTION.docblock.js
- const tech_PHASE_3_AUTO_EVOLUTION_DocBlocks = [{
- id: "docs.tech.PHASE_3_AUTO_EVOLUTION",
- title: "Phase 3: Auto-Evolution Technical Notes",
- summary: "**Status**: In progress",
- kind: "reference",
- visibility: "public",
- route: "/docs/tech/PHASE_3_AUTO_EVOLUTION",
- tags: ["tech", "PHASE_3_AUTO_EVOLUTION"],
- body: "# Phase 3: Auto-Evolution Technical Notes\n\n**Status**: In progress \n**Last updated**: 2025-11-21 \n\nPhase 3 introduces self-learning capabilities that analyze production telemetry, suggest new specs, safely roll out variants, and generate golden tests from real traffic. This document captures the main building blocks delivered in this iteration.\n\n---\n\n## 1. Libraries\n\n### @lssm/lib.evolution\n\n- `SpecAnalyzer` converts raw telemetry samples into usage stats + anomalies.\n- `SpecGenerator` produces `SpecSuggestion` objects and validates confidence thresholds.\n- `SpecSuggestionOrchestrator` routes proposals through the AI approval workflow and writes approved specs to `packages/libs/contracts/src/generated`.\n- Storage adapters:\n - `InMemorySpecSuggestionRepository` for tests.\n - `PrismaSpecSuggestionRepository` persists to the new Prisma model (see §4).\n - `FileSystemSuggestionWriter` emits JSON envelopes for git review.\n\n### @lssm/lib.observability\n\n- Added intent detection modules:\n - `IntentAggregator` batches telemetry into rolling windows.\n - `IntentDetector` surfaces latency/error/throughput regressions and sequential intents.\n- `EvolutionPipeline` orchestrates aggregation → detection → intent events and exposes hooks for downstream orchestrators.\n- `createTracingMiddleware` now accepts `resolveOperation`/`onSample` hooks to feed telemetry samples into the pipeline.\n\n### @lssm/lib.growth\n\n- New `spec-experiments` module:\n - `SpecExperimentRegistry`, `SpecExperimentRunner`, `SpecExperimentAdapter`.\n - `SpecExperimentAnalyzer` + `SpecExperimentController` handle guardrails and staged rollouts.\n - Helper `createSpecVariantResolver` plugs directly into `HandlerCtx.specVariantResolver`.\n- `SpecVariantResolver` is now a first-class concept in `@lssm/lib.contracts`. The runtime will attempt to execute variant specs before falling back to the registered handler.\n\n### @lssm/lib.testing\n\n- `TrafficRecorder` + `TrafficStore` capture production requests with sampling and sanitization hooks.\n- `GoldenTestGenerator` converts `TrafficSnapshot`s into Vitest/Jest suites.\n- `generateVitestSuite` / `generateJestSuite` output self-contained test files, and `runGoldenTests` offers a programmatic harness for CI pipelines.\n\n---\n\n## 2. Telemetry → Intent → Spec Pipeline\n\n1. `createTracingMiddleware({ onSample })` emits `TelemetrySample`s for every HTTP request.\n2. `IntentAggregator` groups samples into statistical windows (default 15 minutes).\n3. `IntentDetector` raises signals for:\n - Error spikes\n - Latency regressions\n - Throughput drops\n - Sequential workflows that hint at missing specs\n4. `EvolutionPipeline` emits `intent.detected` events and hands them to `SpecGenerator`.\n5. `SpecSuggestionOrchestrator` persists suggestions, triggers approval workflows, and—upon approval—writes JSON envelopes to `packages/.../contracts/src/generated`.\n\n---\n\n## 3. Spec Experiments & Rollouts\n\n1. Register spec experiments in `SpecExperimentRegistry` with control + variant bindings.\n2. Expose bucketed specs by attaching `createSpecVariantResolver` to `HandlerCtx.specVariantResolver` inside adapters.\n3. Record outcomes via `SpecExperimentAdapter.trackOutcome()` (latency + error metrics).\n4. `SpecExperimentController` uses guardrails from config and `SpecExperimentAnalyzer` to:\n - Auto-rollback on error/latency breaches.\n - Advance rollout stages (1% → 10% → 50% → 100%) when metrics stay green.\n\n---\n\n## 4. 
Data Models (Prisma)\n\nFile: `packages/libs/database/prisma/schema.prisma`\n\n- `SpecSuggestion` – stores serialized suggestion payloads + statuses.\n- `IntentSnapshot` – captured detector output for auditing/training.\n- `TrafficSnapshot` – persisted production traffic (input/output/error blobs).\n- `SpecExperiment` / `SpecExperimentMetric` – rollout state + metrics for each variant.\n\n> Run `bun database generate` after pulling to refresh the Prisma client.\n\n---\n\n## 5. Golden Test Workflow\n\n1. Capture traffic via middleware or direct `TrafficRecorder.record`.\n2. Use the new CLI command to materialize suites:\n\n```bash\ncontractspec test generate \\\n --operation billing.createInvoice \\\n --output tests/billing.createInvoice.golden.test.ts \\\n --runner-import ./tests/run-operation \\\n --runner-fn runBillingCommand \\\n --from-production \\\n --days 7 \\\n --sample-rate 0.05\n```\n\n3. Generated files import your runner and assert against recorded outputs (or expected errors for negative paths).\n\n---\n\n## 6. Operational Notes\n\n- **Approvals**: By default, every suggestion still requires human approval. `EvolutionConfig.autoApproveThreshold` can be tuned per environment but should remain conservative (<0.3) until OverlaySpec tooling lands.\n- **Sampling**: Keep `TrafficRecorder.sampleRate` ≤ 0.05 in production to avoid sensitive payload storage; scrub PII through the `sanitize` callback before persistence.\n- **Rollouts**: Guardrails default to 5% error-rate and 750ms P99 latency. Override per experiment to match SLOs.\n\n---\n\n## 7. Next Steps\n\n1. Wire `SpecExperimentAdapter.trackOutcome` into adapters (REST, GraphQL, Workers) so every execution logs metrics automatically.\n2. Add a UI for reviewing `SpecSuggestion` objects alongside approval status.\n3. Expand `TrafficRecorder` to ship directly to the Prisma-backed store (currently in-memory by default).\n4. Integrate `EvolutionPipeline` events with the Regenerator to close the loop (auto-open proposals + attach evidence).\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"
- }];
- registerDocBlocks(tech_PHASE_3_AUTO_EVOLUTION_DocBlocks);
-
- //#endregion
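
The Phase 3 notes removed above hinge on a telemetry → intent pipeline: `IntentAggregator` batches samples into rolling windows (default 15 minutes) and `IntentDetector` flags error spikes and latency regressions. A rough sketch of that windowing-and-detection step, assuming simplified sample shapes rather than the `@lssm/lib.observability` types:

```typescript
// Windowed aggregation + error-spike detection (a sketch with simplified shapes).
interface TelemetrySample {
  operation: string;
  timestampMs: number;
  durationMs: number;
  ok: boolean;
}

interface WindowStats {
  operation: string;
  windowStartMs: number;
  count: number;
  errorRate: number;
  p99Ms: number;
}

// Bucket samples into fixed windows per operation (15 minutes by default, as in the notes).
// Uses '|' as a key separator, assuming operation names never contain it.
function aggregate(samples: TelemetrySample[], windowMs = 15 * 60_000): WindowStats[] {
  const buckets = new Map<string, TelemetrySample[]>();
  for (const sample of samples) {
    const windowStartMs = Math.floor(sample.timestampMs / windowMs) * windowMs;
    const key = `${sample.operation}|${windowStartMs}`;
    const group = buckets.get(key) ?? [];
    group.push(sample);
    buckets.set(key, group);
  }
  return [...buckets.entries()].map(([key, group]) => {
    const [operation, start] = key.split('|');
    const durations = group.map((s) => s.durationMs).sort((a, b) => a - b);
    const p99Index = Math.min(durations.length - 1, Math.ceil(durations.length * 0.99) - 1);
    return {
      operation,
      windowStartMs: Number(start),
      count: group.length,
      errorRate: group.filter((s) => !s.ok).length / group.length,
      p99Ms: durations[p99Index],
    };
  });
}

// Flag windows whose error rate exceeds the baseline by a relative multiple,
// skipping low-traffic windows where the rate is too noisy to trust.
function detectErrorSpikes(
  windows: WindowStats[],
  baselineErrorRate: number,
  multiplier = 2,
  minCount = 10
): WindowStats[] {
  return windows.filter((w) => w.count >= minCount && w.errorRate > baselineErrorRate * multiplier);
}
```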
@@ -1,16 +0,0 @@
- import { registerDocBlocks } from "../registry.js";
-
- //#region ../../libs/contracts/dist/docs/tech/PHASE_4_PERSONALIZATION_ENGINE.docblock.js
- const tech_PHASE_4_PERSONALIZATION_ENGINE_DocBlocks = [{
- id: "docs.tech.PHASE_4_PERSONALIZATION_ENGINE",
- title: "Phase 4: Personalization Engine",
- summary: "**Status**: Complete",
- kind: "reference",
- visibility: "public",
- route: "/docs/tech/PHASE_4_PERSONALIZATION_ENGINE",
- tags: ["tech", "PHASE_4_PERSONALIZATION_ENGINE"],
- body: "# Phase 4: Personalization Engine\n\n**Status**: Complete \n**Last updated**: 2025-11-21\n\nPhase 4 unlocks tenant-scoped personalization with zero bespoke code. We shipped three new libraries, a signing-aware Overlay editor, and the persistence layer required to observe usage and apply overlays safely.\n\n---\n\n## 1. Libraries\n\n### @lssm/lib.overlay-engine\n\n- OverlaySpec types + validator mirror the public spec.\n- Cryptographic signer (`ed25519`, `rsa-pss-sha256`) with canonical JSON serialization.\n- Registry that merges tenant/role/user/device overlays with predictable specificity.\n- React hooks (`useOverlay`, `useOverlayFields`) for client-side rendering.\n- Runtime engine audits every applied overlay for traceability.\n\n### @lssm/lib.personalization\n\n- Behavior tracker buffers field/feature/workflow events and exports OTel metrics.\n- Analyzer summarizes field usage and workflow drop-offs into actionable insights.\n- Adapter translates insights into overlay suggestions or workflow tweaks.\n- In-memory store implementation + interface for plugging Prisma/ClickHouse later.\n\n### @lssm/lib.workflow-composer\n\n- `WorkflowComposer` merges base workflows with tenant/role/device extensions.\n- Step injection utilities keep transitions intact and validate anchor steps.\n- Template helpers for common tenant review/approval, plus merge helpers for multi-scope extensions.\n\n---\n\n## 2. Overlay Editor App\n\nPath: `packages/apps/overlay-editor`\n\n- Next.js App Router UI for toggling field visibility, renaming labels, and reordering lists.\n- Live JSON preview powered by `defineOverlay`.\n- Server action that signs overlays via PEM private keys (Ed25519 by default) using the overlay engine signer.\n\n---\n\n## 3. Persistence\n\nAdded Prisma models (see `packages/libs/database/prisma/schema.prisma`):\n\n- `UserBehaviorEvent` – field/feature/workflow telemetry.\n- `OverlaySigningKey` – tenant managed signing keys with revocation timestamps.\n- `Overlay` – stored overlays (tenant/user/role/device scope) plus signature metadata.\n\n---\n\n## 4. Integration Steps\n\n1. Track usage inside apps via `createBehaviorTracker`.\n2. Periodically run `BehaviorAnalyzer.analyze` to generate insights.\n3. Convert insights into OverlaySpecs or Workflow extensions.\n4. Register tenant overlays in `OverlayRegistry` and serve via presentation runtimes.\n5. Compose workflows per tenant using `WorkflowComposer`.\n\nSee the `docs/tech/personalization/*` guides for concrete examples.\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"
- }];
- registerDocBlocks(tech_PHASE_4_PERSONALIZATION_ENGINE_DocBlocks);
-
- //#endregion
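
The Phase 4 notes removed above say the overlay registry "merges tenant/role/user/device overlays with predictable specificity". A minimal sketch of such a merge, assuming a tenant < role < device < user precedence and a simplified field-patch shape (both are assumptions, not the `@lssm/lib.overlay-engine` types):

```typescript
// Overlay merge by scope specificity (a sketch; the scope ordering is an assumption).
type OverlayScope = 'tenant' | 'role' | 'device' | 'user';

interface FieldOverlay {
  hidden?: boolean;
  label?: string;
  order?: number;
}

interface Overlay {
  scope: OverlayScope;
  fields: Record<string, FieldOverlay>;
}

// Higher number = more specific = applied later, so it wins per field.
const SPECIFICITY: Record<OverlayScope, number> = { tenant: 0, role: 1, device: 2, user: 3 };

function mergeOverlays(overlays: Overlay[]): Record<string, FieldOverlay> {
  const merged: Record<string, FieldOverlay> = {};
  const ordered = [...overlays].sort((a, b) => SPECIFICITY[a.scope] - SPECIFICITY[b.scope]);
  for (const overlay of ordered) {
    for (const [field, patch] of Object.entries(overlay.fields)) {
      merged[field] = { ...merged[field], ...patch };
    }
  }
  return merged;
}

// A user-level rename overrides the tenant default; unrelated tenant settings survive.
const resolved = mergeOverlays([
  { scope: 'tenant', fields: { email: { label: 'E-mail' }, ssn: { hidden: true } } },
  { scope: 'user', fields: { email: { label: 'Work email' } } },
]);
console.log(resolved.email.label); // "Work email"
console.log(resolved.ssn.hidden); // true
```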
@@ -1,16 +0,0 @@
- import { registerDocBlocks } from "../registry.js";
-
- //#region ../../libs/contracts/dist/docs/tech/PHASE_5_ZERO_TOUCH_OPERATIONS.docblock.js
- const tech_PHASE_5_ZERO_TOUCH_OPERATIONS_DocBlocks = [{
- id: "docs.tech.PHASE_5_ZERO_TOUCH_OPERATIONS",
- title: "Phase 5: Zero-Touch Operations",
- summary: "**Status**: In progress",
- kind: "reference",
- visibility: "public",
- route: "/docs/tech/PHASE_5_ZERO_TOUCH_OPERATIONS",
- tags: ["tech", "PHASE_5_ZERO_TOUCH_OPERATIONS"],
- body: "# Phase 5: Zero-Touch Operations\n\n**Status**: In progress \n**Last updated**: 2025-11-21\n\nPhase 5 delivers progressive delivery, SLO intelligence, cost attribution, and anomaly-driven remediation so the platform can deploy continuously without pager rotations.\n\n---\n\n## 1. New Libraries\n\n### @lssm/lib.progressive-delivery\n- `DeploymentStrategy` types capture canary vs blue-green rollouts.\n- `CanaryController` + `CanaryAnalyzer` orchestrate stage evaluation against telemetry thresholds.\n- `TrafficShifter` keeps stable/candidate splits in sync with feature-flag or router state.\n- `DeploymentCoordinator` drives stage progression, emits events, and triggers rollbacks.\n- `RollbackManager` encapsulates safe revert hooks (spec version revert, traffic shift, etc.).\n\n### @lssm/lib.slo\n- Declarative `SLODefinition` with latency + availability targets per capability/spec.\n- `SLOTracker` stores rolling snapshots + error budget positions.\n- `BurnRateCalculator` implements multi-window burn computations (fast vs slow burn).\n- `SLOMonitor` pushes incidents to Ops tooling automatically when burn exceeds thresholds.\n\n### @lssm/lib.cost-tracking\n- `CostTracker` normalizes DB/API/compute metrics into per-operation cost totals.\n- `BudgetAlertManager` raises tenant budget warnings (80% default) with contextual payloads.\n- `OptimizationRecommender` suggests batching, caching, or contract tweaks to cut spend.\n\n### Observability Anomaly Toolkit\n- `BaselineCalculator` establishes rolling intent metrics (latency, error rate, throughput).\n- `AnomalyDetector` flags spikes/drops via relative deltas after 10+ samples.\n- `RootCauseAnalyzer` correlates anomalies with recent deployments.\n- `AlertManager` deduplicates notifications and feeds MCP/SRE transports.\n\n---\n\n## 2. Data Model Additions\n\nFile: `packages/libs/database/prisma/schema.prisma`\n\n| Model | Purpose |\n| --- | --- |\n| `SLODefinition`, `SLOSnapshot`, `ErrorBudget`, `SLOIncident` | Persist definitions, rolling windows, and incidents. |\n| `OperationCost`, `TenantBudget`, `CostAlert`, `OptimizationSuggestion` | Track per-operation costs, budgets, and generated recommendations. |\n| `Deployment`, `DeploymentStage`, `RollbackEvent` | Audit progressive delivery runs and automated rollbacks. |\n| `MetricBaseline`, `AnomalyEvent` | Store computed baselines and anomaly evidence for training/analytics. |\n\nRun `bun database generate` after pulling to refresh the Prisma client.\n\n---\n\n## 3. Operational Flow\n\n1. **Deploy**: Define a `DeploymentStrategy` and feed telemetry via `@lssm/lib.observability`. Canary stages run automatically.\n2. **Protect**: `CanaryAnalyzer` evaluates error rate + latency thresholds. Failures trigger `RollbackManager`.\n3. **Observe**: `SLOMonitor` consumes snapshots and opens incidents when burn rate exceeds thresholds.\n4. **Optimize**: `CostTracker` aggregates spend per tenant + capability, while `OptimizationRecommender` surfaces fixes.\n5. **Detect**: Anomaly signals route to `RootCauseAnalyzer`, which links them to specific deployments for auto-rollback.\n\n---\n\n## 4. Integration Checklist\n\n1. Instrument adapters with `createTracingMiddleware({ onSample })` to feed metric points into `AnomalyDetector`.\n2. Register SLOs per critical operation (`billing.charge`, `knowledge.search`) and wire monitors to Ops notifications.\n3. Attach `CostTracker.recordSample` to workflow runners (DB instrumentation + external call wrappers).\n4. 
Store deployment metadata using the new Prisma models for auditing + UI surfacing.\n5. Update `@lssm/app.ops-console` (next iteration) to list deployments, SLO status, costs, and anomalies in one timeline.\n\n---\n\n## 5. Next Steps\n\n- Wire `DeploymentCoordinator` into the Contracts CLI so `contractspec deploy` can run staged rollouts.\n- Add UI for SLO dashboards (burn rate sparkline + incident feed).\n- Ship budget suggestions into Growth Agent for automated cost optimizations.\n- Connect `AnomalyEvent` stream to MCP agents for root-cause playbooks.\n"
- }];
- registerDocBlocks(tech_PHASE_5_ZERO_TOUCH_OPERATIONS_DocBlocks);
-
- //#endregion
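
The Phase 5 notes removed above mention `BurnRateCalculator` and its "multi-window burn computations (fast vs slow burn)". A compact sketch of the underlying arithmetic, using illustrative window sizes and thresholds rather than the `@lssm/lib.slo` defaults:

```typescript
// Multi-window burn-rate check (a sketch; window sizes and the 14.4x threshold are illustrative).
interface WindowErrorRate {
  windowMinutes: number;
  errorRate: number; // fraction of failed events observed in the window
}

// Burn rate = observed error rate divided by the error budget rate (1 - SLO target).
function burnRate(window: WindowErrorRate, sloTarget: number): number {
  const allowed = 1 - sloTarget;
  return allowed === 0 ? Number.POSITIVE_INFINITY : window.errorRate / allowed;
}

// "Fast burn" fires only when both the short and the long window exceed the threshold,
// which filters out brief blips while still catching sustained budget exhaustion.
function isFastBurn(
  shortWindow: WindowErrorRate,
  longWindow: WindowErrorRate,
  sloTarget: number,
  threshold = 14.4
): boolean {
  return burnRate(shortWindow, sloTarget) >= threshold && burnRate(longWindow, sloTarget) >= threshold;
}

// Example: a 99.9% availability SLO with 5-minute and 1-hour windows.
console.log(
  isFastBurn({ windowMinutes: 5, errorRate: 0.02 }, { windowMinutes: 60, errorRate: 0.018 }, 0.999)
); // true: both windows burn budget roughly 18-20x faster than allowed
```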
@@ -1,2 +0,0 @@
- import "zod";
- import "graphql";
@@ -1,49 +0,0 @@
- import "zod";
- import { GraphQLScalarType } from "graphql";
-
- //#region ../../libs/contracts/dist/schema/dist/FieldType.js
- /**
- * GraphQL scalar wrapper that carries zod and JSON Schema metadata.
- *
- * TInternal is the runtime representation; TExternal is the GraphQL output.
- */
- var FieldType = class extends GraphQLScalarType {
- zodSchema;
- jsonSchemaDef;
- constructor(config) {
- super(config);
- this.zodSchema = config.zod;
- this.jsonSchemaDef = config.jsonSchema;
- }
- /** Return the attached zod schema for validation. */
- getZod() {
- return this.zodSchema;
- }
- /** GraphQL scalar instance usable by Pothos or vanilla GraphQL. */
- getPothos() {
- return this;
- }
- /** Return the JSON Schema (evaluates factory if provided). */
- getJson() {
- return typeof this.jsonSchemaDef === "function" ? this.jsonSchemaDef() : this.jsonSchemaDef;
- }
- getJsonSchemaDef() {
- return this.jsonSchemaDef;
- }
- getJsonSchema() {
- const deepResolve = (v) => {
- const value = typeof v === "function" ? v() : v;
- if (Array.isArray(value)) return value.map((item) => deepResolve(item));
- if (value && typeof value === "object") {
- const obj = {};
- for (const [k, val] of Object.entries(value)) obj[k] = deepResolve(val);
- return obj;
- }
- return value;
- };
- return deepResolve(this.getJson());
- }
- };
-
- //#endregion
- export { FieldType };
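
For context on the deleted `FieldType` scalar: it attaches a zod schema and a (possibly lazy) JSON Schema definition to a GraphQL scalar, and `getJsonSchema()` resolves nested factories recursively. A hedged usage sketch follows; the import path and the `Email` config are illustrative, and only `zod` and `jsonSchema` are read by `FieldType` itself (everything else is passed through to `GraphQLScalarType`).

```typescript
// Usage sketch for the FieldType scalar shown in the diff above.
// The import path is illustrative; adjust it to wherever FieldType is exported from.
import { z } from 'zod';
import { FieldType } from './FieldType.js';

const EmailType = new FieldType({
  // Standard GraphQLScalarType config, passed through via super(config).
  name: 'Email',
  description: 'RFC 5322 e-mail address',
  serialize: (value) => String(value),
  // Extra metadata read by FieldType itself.
  zod: z.string().email(),
  // A factory is allowed; getJsonSchema() resolves nested factories recursively.
  jsonSchema: () => ({ type: 'string', format: 'email' }),
});

EmailType.getZod().parse('user@example.com'); // runtime validation via zod
console.log(EmailType.getJsonSchema()); // { type: 'string', format: 'email' }
```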