@superblocksteam/vite-plugin-file-sync 2.0.41-next.52 → 2.0.41-next.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (270)
  1. package/dist/ai-service/agent/apis-system-prompt.d.ts +1 -1
  2. package/dist/ai-service/agent/apis-system-prompt.d.ts.map +1 -1
  3. package/dist/ai-service/agent/apis-system-prompt.js +21 -50
  4. package/dist/ai-service/agent/apis-system-prompt.js.map +1 -1
  5. package/dist/ai-service/agent/apis.d.ts +0 -10
  6. package/dist/ai-service/agent/apis.d.ts.map +1 -1
  7. package/dist/ai-service/agent/apis.js +3 -25
  8. package/dist/ai-service/agent/apis.js.map +1 -1
  9. package/dist/ai-service/agent/tool-message-utils.d.ts.map +1 -1
  10. package/dist/ai-service/agent/tool-message-utils.js +2 -5
  11. package/dist/ai-service/agent/tool-message-utils.js.map +1 -1
  12. package/dist/ai-service/agent/tools/build-debug.d.ts +1 -1
  13. package/dist/ai-service/agent/tools/build-debug.d.ts.map +1 -1
  14. package/dist/ai-service/agent/tools/build-debug.js +23 -24
  15. package/dist/ai-service/agent/tools/build-debug.js.map +1 -1
  16. package/dist/ai-service/agent/tools/build-edit-file.d.ts.map +1 -1
  17. package/dist/ai-service/agent/tools/build-edit-file.js +12 -7
  18. package/dist/ai-service/agent/tools/build-edit-file.js.map +1 -1
  19. package/dist/ai-service/agent/tools/build-finalize.d.ts.map +1 -1
  20. package/dist/ai-service/agent/tools/build-finalize.js +2 -23
  21. package/dist/ai-service/agent/tools/build-finalize.js.map +1 -1
  22. package/dist/ai-service/agent/tools/build-multi-edit-file.d.ts.map +1 -1
  23. package/dist/ai-service/agent/tools/build-multi-edit-file.js +9 -4
  24. package/dist/ai-service/agent/tools/build-multi-edit-file.js.map +1 -1
  25. package/dist/ai-service/agent/tools/build-write-file.d.ts.map +1 -1
  26. package/dist/ai-service/agent/tools/build-write-file.js +9 -4
  27. package/dist/ai-service/agent/tools/build-write-file.js.map +1 -1
  28. package/dist/ai-service/agent/tools/debug-cache.d.ts +13 -95
  29. package/dist/ai-service/agent/tools/debug-cache.d.ts.map +1 -1
  30. package/dist/ai-service/agent/tools/debug-cache.js +38 -215
  31. package/dist/ai-service/agent/tools/debug-cache.js.map +1 -1
  32. package/dist/ai-service/agent/tools/index.d.ts +0 -1
  33. package/dist/ai-service/agent/tools/index.d.ts.map +1 -1
  34. package/dist/ai-service/agent/tools/index.js +0 -1
  35. package/dist/ai-service/agent/tools/index.js.map +1 -1
  36. package/dist/ai-service/agent/tools.d.ts +1 -22
  37. package/dist/ai-service/agent/tools.d.ts.map +1 -1
  38. package/dist/ai-service/agent/tools.js +1 -3
  39. package/dist/ai-service/agent/tools.js.map +1 -1
  40. package/dist/ai-service/agent/utils.d.ts.map +1 -1
  41. package/dist/ai-service/agent/utils.js +159 -170
  42. package/dist/ai-service/agent/utils.js.map +1 -1
  43. package/dist/ai-service/app-interface/file-system-interface.d.ts.map +1 -1
  44. package/dist/ai-service/app-interface/file-system-interface.js +68 -117
  45. package/dist/ai-service/app-interface/file-system-interface.js.map +1 -1
  46. package/dist/ai-service/app-interface/linter.d.ts.map +1 -1
  47. package/dist/ai-service/app-interface/linter.js +54 -103
  48. package/dist/ai-service/app-interface/linter.js.map +1 -1
  49. package/dist/ai-service/app-interface/shell.d.ts.map +1 -1
  50. package/dist/ai-service/app-interface/shell.js +160 -227
  51. package/dist/ai-service/app-interface/shell.js.map +1 -1
  52. package/dist/ai-service/artifacts/bolt.d.ts.map +1 -1
  53. package/dist/ai-service/artifacts/bolt.js +30 -88
  54. package/dist/ai-service/artifacts/bolt.js.map +1 -1
  55. package/dist/ai-service/chat/chat-session-store.d.ts +2 -12
  56. package/dist/ai-service/chat/chat-session-store.d.ts.map +1 -1
  57. package/dist/ai-service/chat/chat-session-store.js +1 -45
  58. package/dist/ai-service/chat/chat-session-store.js.map +1 -1
  59. package/dist/ai-service/chat/extract-history.d.ts +1 -3
  60. package/dist/ai-service/chat/extract-history.d.ts.map +1 -1
  61. package/dist/ai-service/chat/extract-history.js +1 -6
  62. package/dist/ai-service/chat/extract-history.js.map +1 -1
  63. package/dist/ai-service/context/app-context.d.ts.map +1 -1
  64. package/dist/ai-service/context/app-context.js +241 -305
  65. package/dist/ai-service/context/app-context.js.map +1 -1
  66. package/dist/ai-service/evals/content-matchers/index.d.ts.map +1 -1
  67. package/dist/ai-service/evals/content-matchers/index.js +0 -1
  68. package/dist/ai-service/evals/content-matchers/index.js.map +1 -1
  69. package/dist/ai-service/index.d.ts +0 -3
  70. package/dist/ai-service/index.d.ts.map +1 -1
  71. package/dist/ai-service/index.js +71 -105
  72. package/dist/ai-service/index.js.map +1 -1
  73. package/dist/ai-service/integration-validator.d.ts +1 -1
  74. package/dist/ai-service/integration-validator.d.ts.map +1 -1
  75. package/dist/ai-service/integration-validator.js +3 -6
  76. package/dist/ai-service/integration-validator.js.map +1 -1
  77. package/dist/ai-service/integrations/metadata/database.d.ts.map +1 -1
  78. package/dist/ai-service/integrations/metadata/database.js +1 -1
  79. package/dist/ai-service/integrations/metadata/database.js.map +1 -1
  80. package/dist/ai-service/integrations/metadata/graphql-based.js +1 -1
  81. package/dist/ai-service/integrations/metadata/graphql-based.js.map +1 -1
  82. package/dist/ai-service/integrations/metadata/open-api.js +1 -1
  83. package/dist/ai-service/integrations/metadata/open-api.js.map +1 -1
  84. package/dist/ai-service/integrations/store.d.ts.map +1 -1
  85. package/dist/ai-service/integrations/store.js +74 -135
  86. package/dist/ai-service/integrations/store.js.map +1 -1
  87. package/dist/ai-service/llmobs.d.ts +154 -0
  88. package/dist/ai-service/llmobs.d.ts.map +1 -0
  89. package/dist/ai-service/llmobs.js +216 -0
  90. package/dist/ai-service/llmobs.js.map +1 -0
  91. package/dist/ai-service/prompt-builder-service/fragment-generators/chakra-tokens.d.ts +1 -3
  92. package/dist/ai-service/prompt-builder-service/fragment-generators/chakra-tokens.d.ts.map +1 -1
  93. package/dist/ai-service/prompt-builder-service/fragment-generators/chakra-tokens.js +3 -5
  94. package/dist/ai-service/prompt-builder-service/fragment-generators/chakra-tokens.js.map +1 -1
  95. package/dist/ai-service/prompt-builder-service/fragment-generators/file-fragments.d.ts +1 -3
  96. package/dist/ai-service/prompt-builder-service/fragment-generators/file-fragments.d.ts.map +1 -1
  97. package/dist/ai-service/prompt-builder-service/fragment-generators/file-fragments.js +1 -3
  98. package/dist/ai-service/prompt-builder-service/fragment-generators/file-fragments.js.map +1 -1
  99. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/ButtonPropsDocs.js +1 -1
  100. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/CheckboxPropsDocs.js +1 -1
  101. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/ColumnPropsDocs.js +1 -1
  102. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/ContainerPropsDocs.js +1 -1
  103. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/DatePickerPropsDocs.js +1 -1
  104. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/DropdownPropsDocs.js +1 -1
  105. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/IconPropsDocs.js +1 -1
  106. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/ImagePropsDocs.js +1 -1
  107. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/InputPropsDocs.js +1 -1
  108. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/ModalPropsDocs.js +1 -1
  109. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/PagePropsDocs.js +1 -1
  110. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/SectionPropsDocs.js +1 -1
  111. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/SlideoutPropsDocs.js +1 -1
  112. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/SwitchPropsDocs.js +1 -1
  113. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/TablePropsDocs.js +1 -1
  114. package/dist/ai-service/prompt-builder-service/static-fragments/library-components/TextPropsDocs.js +1 -1
  115. package/dist/ai-service/prompt-builder-service/static-fragments/library-typedefs/Dim.js +1 -1
  116. package/dist/ai-service/prompt-builder-service/static-fragments/library-typedefs/EventFlow.js +1 -1
  117. package/dist/ai-service/prompt-builder-service/static-fragments/library-typedefs/TextStyleWithVariant.js +1 -1
  118. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/full-examples.js +1 -1
  119. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-api.js +1 -1
  120. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-components-rules.js +1 -1
  121. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-custom-components.js +1 -1
  122. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-data-filtering.js +1 -1
  123. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-event-flow.js +1 -1
  124. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-forms.js +1 -1
  125. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-layouts.js +1 -1
  126. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-page.js +1 -1
  127. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-rbac.js +1 -1
  128. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-routes.js +1 -1
  129. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-state.js +1 -1
  130. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/superblocks-theming-chakra-new.js +1 -1
  131. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/system-base.js +1 -1
  132. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/system-incremental.js +1 -1
  133. package/dist/ai-service/prompt-builder-service/static-fragments/platform-parts/system-specific-edit.js +1 -1
  134. package/dist/ai-service/prompt-builder-service/utils/integrations/to-sdk-prompt.d.ts +1 -3
  135. package/dist/ai-service/prompt-builder-service/utils/integrations/to-sdk-prompt.d.ts.map +1 -1
  136. package/dist/ai-service/prompt-builder-service/utils/integrations/to-sdk-prompt.js +2 -4
  137. package/dist/ai-service/prompt-builder-service/utils/integrations/to-sdk-prompt.js.map +1 -1
  138. package/dist/ai-service/prompt-builder-service/utils/markdown-loader.d.ts +1 -3
  139. package/dist/ai-service/prompt-builder-service/utils/markdown-loader.d.ts.map +1 -1
  140. package/dist/ai-service/prompt-builder-service/utils/markdown-loader.js +1 -3
  141. package/dist/ai-service/prompt-builder-service/utils/markdown-loader.js.map +1 -1
  142. package/dist/ai-service/state-machine/clark-fsm.d.ts +4 -16
  143. package/dist/ai-service/state-machine/clark-fsm.d.ts.map +1 -1
  144. package/dist/ai-service/state-machine/clark-fsm.js +0 -47
  145. package/dist/ai-service/state-machine/clark-fsm.js.map +1 -1
  146. package/dist/ai-service/state-machine/handlers/agent-planning.d.ts.map +1 -1
  147. package/dist/ai-service/state-machine/handlers/agent-planning.js +6 -6
  148. package/dist/ai-service/state-machine/handlers/agent-planning.js.map +1 -1
  149. package/dist/ai-service/state-machine/handlers/awaiting-user.d.ts.map +1 -1
  150. package/dist/ai-service/state-machine/handlers/awaiting-user.js +4 -5
  151. package/dist/ai-service/state-machine/handlers/awaiting-user.js.map +1 -1
  152. package/dist/ai-service/state-machine/handlers/idle.js +1 -1
  153. package/dist/ai-service/state-machine/handlers/idle.js.map +1 -1
  154. package/dist/ai-service/state-machine/handlers/llm-generating.d.ts.map +1 -1
  155. package/dist/ai-service/state-machine/handlers/llm-generating.js +266 -274
  156. package/dist/ai-service/state-machine/handlers/llm-generating.js.map +1 -1
  157. package/dist/ai-service/state-machine/handlers/post-processing.js +0 -2
  158. package/dist/ai-service/state-machine/handlers/post-processing.js.map +1 -1
  159. package/dist/ai-service/state-machine/handlers/simple-prompt-builder.d.ts.map +1 -1
  160. package/dist/ai-service/state-machine/handlers/simple-prompt-builder.js +0 -16
  161. package/dist/ai-service/state-machine/handlers/simple-prompt-builder.js.map +1 -1
  162. package/dist/ai-service/state-machine/traced-fsm.d.ts +70 -41
  163. package/dist/ai-service/state-machine/traced-fsm.d.ts.map +1 -1
  164. package/dist/ai-service/state-machine/traced-fsm.js +173 -66
  165. package/dist/ai-service/state-machine/traced-fsm.js.map +1 -1
  166. package/dist/ai-service/template-renderer.d.ts.map +1 -1
  167. package/dist/ai-service/template-renderer.js +56 -102
  168. package/dist/ai-service/template-renderer.js.map +1 -1
  169. package/dist/ai-service/test-utils/dd-trace-helpers.d.ts +0 -1
  170. package/dist/ai-service/test-utils/dd-trace-helpers.d.ts.map +1 -1
  171. package/dist/ai-service/test-utils/dd-trace-helpers.js.map +1 -1
  172. package/dist/ai-service/transform/api-builder/to-sdk-transformer.d.ts +1 -2
  173. package/dist/ai-service/transform/api-builder/to-sdk-transformer.d.ts.map +1 -1
  174. package/dist/ai-service/transform/api-builder/to-sdk-transformer.js +63 -120
  175. package/dist/ai-service/transform/api-builder/to-sdk-transformer.js.map +1 -1
  176. package/dist/ai-service/transform/api-builder/to-yaml-transformer.d.ts.map +1 -1
  177. package/dist/ai-service/transform/api-builder/to-yaml-transformer.js +47 -97
  178. package/dist/ai-service/transform/api-builder/to-yaml-transformer.js.map +1 -1
  179. package/dist/ai-service/transform/remove-api-builder-imports/transformer.d.ts.map +1 -1
  180. package/dist/ai-service/transform/remove-api-builder-imports/transformer.js +37 -88
  181. package/dist/ai-service/transform/remove-api-builder-imports/transformer.js.map +1 -1
  182. package/dist/ai-service/transform/shared.d.ts +2 -4
  183. package/dist/ai-service/transform/shared.d.ts.map +1 -1
  184. package/dist/ai-service/transform/shared.js +3 -6
  185. package/dist/ai-service/transform/shared.js.map +1 -1
  186. package/dist/ai-service/transform/workaround-missing-sbapi/transformer.d.ts.map +1 -1
  187. package/dist/ai-service/transform/workaround-missing-sbapi/transformer.js +26 -77
  188. package/dist/ai-service/transform/workaround-missing-sbapi/transformer.js.map +1 -1
  189. package/dist/ai-service/types.d.ts +0 -2
  190. package/dist/ai-service/types.d.ts.map +1 -1
  191. package/dist/ai-service/types.js.map +1 -1
  192. package/dist/ai-service/util/ddog-llmobs.d.ts +20 -0
  193. package/dist/ai-service/util/ddog-llmobs.d.ts.map +1 -0
  194. package/dist/ai-service/util/ddog-llmobs.js +197 -0
  195. package/dist/ai-service/util/ddog-llmobs.js.map +1 -0
  196. package/dist/binding-extraction/extract-identifiers.d.ts +2 -5
  197. package/dist/binding-extraction/extract-identifiers.d.ts.map +1 -1
  198. package/dist/binding-extraction/extract-identifiers.js +4 -7
  199. package/dist/binding-extraction/extract-identifiers.js.map +1 -1
  200. package/dist/file-sync-vite-plugin.d.ts.map +1 -1
  201. package/dist/file-sync-vite-plugin.js +1 -92
  202. package/dist/file-sync-vite-plugin.js.map +1 -1
  203. package/dist/file-system-helpers.d.ts +0 -3
  204. package/dist/file-system-helpers.d.ts.map +1 -1
  205. package/dist/file-system-helpers.js +0 -12
  206. package/dist/file-system-helpers.js.map +1 -1
  207. package/dist/file-system-manager.d.ts.map +1 -1
  208. package/dist/file-system-manager.js +1 -53
  209. package/dist/file-system-manager.js.map +1 -1
  210. package/dist/parsing/entity/to-value-entity.d.ts.map +1 -1
  211. package/dist/parsing/entity/to-value-entity.js +0 -1
  212. package/dist/parsing/entity/to-value-entity.js.map +1 -1
  213. package/dist/parsing/jsx.d.ts +0 -4
  214. package/dist/parsing/jsx.d.ts.map +1 -1
  215. package/dist/parsing/jsx.js +0 -33
  216. package/dist/parsing/jsx.js.map +1 -1
  217. package/dist/parsing/page.d.ts.map +1 -1
  218. package/dist/parsing/page.js +3 -48
  219. package/dist/parsing/page.js.map +1 -1
  220. package/dist/parsing/properties.d.ts.map +1 -1
  221. package/dist/parsing/properties.js +0 -10
  222. package/dist/parsing/properties.js.map +1 -1
  223. package/dist/socket-manager.d.ts.map +1 -1
  224. package/dist/socket-manager.js +1 -3
  225. package/dist/socket-manager.js.map +1 -1
  226. package/dist/source-tracker.d.ts +0 -4
  227. package/dist/source-tracker.d.ts.map +1 -1
  228. package/dist/source-tracker.js +5 -132
  229. package/dist/source-tracker.js.map +1 -1
  230. package/dist/sync-service/list-dir.d.ts +1 -1
  231. package/dist/sync-service/list-dir.d.ts.map +1 -1
  232. package/dist/sync-service/list-dir.js +9 -3
  233. package/dist/sync-service/list-dir.js.map +1 -1
  234. package/dist/util/logger.d.ts +0 -9
  235. package/dist/util/logger.d.ts.map +1 -1
  236. package/dist/util/logger.js +7 -64
  237. package/dist/util/logger.js.map +1 -1
  238. package/package.json +7 -9
  239. package/dist/ai-service/agent/tools/build-set-api-triggers.d.ts +0 -27
  240. package/dist/ai-service/agent/tools/build-set-api-triggers.d.ts.map +0 -1
  241. package/dist/ai-service/agent/tools/build-set-api-triggers.js +0 -265
  242. package/dist/ai-service/agent/tools/build-set-api-triggers.js.map +0 -1
  243. package/dist/ai-service/llmobs/helpers.d.ts +0 -20
  244. package/dist/ai-service/llmobs/helpers.d.ts.map +0 -1
  245. package/dist/ai-service/llmobs/helpers.js +0 -130
  246. package/dist/ai-service/llmobs/helpers.js.map +0 -1
  247. package/dist/ai-service/llmobs/index.d.ts +0 -8
  248. package/dist/ai-service/llmobs/index.d.ts.map +0 -1
  249. package/dist/ai-service/llmobs/index.js +0 -7
  250. package/dist/ai-service/llmobs/index.js.map +0 -1
  251. package/dist/ai-service/llmobs/middleware/stream-text.d.ts +0 -60
  252. package/dist/ai-service/llmobs/middleware/stream-text.d.ts.map +0 -1
  253. package/dist/ai-service/llmobs/middleware/stream-text.js +0 -601
  254. package/dist/ai-service/llmobs/middleware/stream-text.js.map +0 -1
  255. package/dist/ai-service/llmobs/tracer.d.ts +0 -95
  256. package/dist/ai-service/llmobs/tracer.d.ts.map +0 -1
  257. package/dist/ai-service/llmobs/tracer.js +0 -341
  258. package/dist/ai-service/llmobs/tracer.js.map +0 -1
  259. package/dist/ai-service/llmobs/types.d.ts +0 -47
  260. package/dist/ai-service/llmobs/types.d.ts.map +0 -1
  261. package/dist/ai-service/llmobs/types.js +0 -2
  262. package/dist/ai-service/llmobs/types.js.map +0 -1
  263. package/dist/ai-service/test-utils/span-interceptor.d.ts +0 -74
  264. package/dist/ai-service/test-utils/span-interceptor.d.ts.map +0 -1
  265. package/dist/ai-service/test-utils/span-interceptor.js +0 -272
  266. package/dist/ai-service/test-utils/span-interceptor.js.map +0 -1
  267. package/dist/ai-service/util/safe-stringify.d.ts +0 -2
  268. package/dist/ai-service/util/safe-stringify.d.ts.map +0 -1
  269. package/dist/ai-service/util/safe-stringify.js +0 -68
  270. package/dist/ai-service/util/safe-stringify.js.map +0 -1
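The hunks below appear to come from package/dist/ai-service/state-machine/handlers/llm-generating.js (entry 155 above, +266 -274). They show the main theme of this release: the handler drops the deleted ai-service/llmobs module (entries 247-262) and the local buildUserMessage/attachment helpers, and instead imports dd-trace directly plus tracedStreamText and traceWorkflow from the new util/ddog-llmobs helper (entries 192-195), wrapping the whole generation pass in a traced workflow. A hedged sketch of that wrapper pattern follows the diff.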
@@ -1,8 +1,9 @@
- import { hasToolCall, smoothStream, } from "ai";
+ import { hasToolCall, smoothStream } from "ai";
+ import ddTrace from "dd-trace";
  import { getLogger } from "../../../util/logger.js";
  import { buildTools } from "../../agent/tools.js";
  import { getAppState, getAvailableComponents, processStreamChunk, } from "../../agent/utils.js";
- import { tracedStreamText } from "../../llmobs/helpers.js";
+ import { tracedStreamText, traceWorkflow } from "../../util/ddog-llmobs.js";
  import { processLLMConfig } from "../../util/llm-config-utils.js";
  import { AGENT_PLANNED, APP_RUNTIME_UPDATED_WITHOUT_EDITS, V3_AGENT_FINISHED, } from "../clark-fsm.js";
  import { cacheBreakpointHelper } from "../helpers/cache-breakpoint.js";
@@ -39,37 +40,6 @@ function formatSummaryForAgents(latestSummary) {
  }
  return parts.length > 0 ? parts.join("\n\n") : null;
  }
- const buildUserPromptContentWithParts = (userPrompt, attachments) => {
- return [
- { type: "text", text: userPrompt },
- ...attachments.map((attachment) => {
- if (attachment.type === "image") {
- return {
- type: attachment.type,
- image: attachment.image,
- };
- }
- else {
- throw "Unsupported attachment type";
- }
- }),
- ];
- };
- const buildUserMessage = (userPrompt, promptContext, cache) => {
- if (promptContext?.attachments?.length) {
- return {
- role: "user",
- content: buildUserPromptContentWithParts(userPrompt, promptContext.attachments),
- providerOptions: cache(),
- };
- }
- else {
- return {
- role: "user",
- content: userPrompt,
- };
- }
- };
  export const doLLMGenerating = (clark, services) => {
  const { chatSessionStore } = services;
  const transitionTo = transitionFrom(clark);
@@ -97,7 +67,7 @@ export const doLLMGenerating = (clark, services) => {
  {
  role: "user",
  content: `<available_components>\nHere is a list of all the components that are available in the application.\n```json\n${JSON.stringify(availableComponents)}\n```\n</available_components>`,
- providerOptions: nextCacheBreakpoint(),
+ ...nextCacheBreakpoint(),
  },
  ...(latestSummary
  ? [
@@ -119,13 +89,16 @@ export const doLLMGenerating = (clark, services) => {
  },
  ]
  : []),
- buildUserMessage(userPrompt, promptContext, nextCacheBreakpoint),
+ {
+ role: "user",
+ content: userPrompt,
+ },
  ];
  const model = services.llmProvider.modelForClassification("broad_edit");
  // Process LLM configuration up front so we can log it once at the top
  const llmConfig = clark.context.llmConfig;
  const disabledTools = clark.context.llmConfig?.disabledTools;
- const { headers, thinkingEnabled, thinkingBudgetTokens, interleavedThinking, providerOptions, } = processLLMConfig(llmConfig, 5000, // default budget tokens
+ const { headers, thinkingEnabled, thinkingBudgetTokens, interleavedThinking, } = processLLMConfig(llmConfig, 5000, // default budget tokens
  `LLM Generating (model=${model.modelId}, disabledTools=${disabledTools?.length ? disabledTools.join(",") : "none"})`);
  const conversationId = Date.now();
  const startTimestamp = new Date().toISOString();
@@ -151,258 +124,277 @@ export const doLLMGenerating = (clark, services) => {
  });
  // Note: Input token count will be logged once LLM call begins
  const tools = buildTools(clark, services, promptContext, logRef, disabledTools);
- let stepCount = 0;
- // Track cumulative token usage across all steps
- let totalInputTokens = 0;
- let totalOutputTokens = 0;
- let totalCachedTokens = 0;
- services.clarkProfiler.startLLMWaiting({
- messages: messages,
- model: model.modelId,
- });
- let firstTokenReceived = false;
- let thinkingSpanActive = false;
- let textSpanActive = false;
- const allReasoningDeltas = [];
- let currentStepReasoningDeltas = [];
- let currentStepTextDeltas = [];
- const build = tracedStreamText({
- abortSignal: abortController?.signal,
- model,
- providerOptions,
- headers,
- experimental_transform: [smoothStream({ chunking: "line" })],
- messages,
- tools,
- prepareStep: breakpointAwarePrepareStep({
- getInputTokens: () => totalInputTokens,
- onAddBreakpoint: (stepNumber) => {
- logRef.content += `[CACHE] Added breakpoint at step ${stepNumber}, ${totalInputTokens} input tokens\n\n`;
- },
- }),
- stopWhen: hasToolCall("build_finalize"),
- onChunk: (chunkData) => {
- if (!firstTokenReceived) {
- firstTokenReceived = true;
- services.clarkProfiler.startLLMStreaming({
- firstChunk: chunkData.chunk.type === "text-delta"
- ? chunkData.chunk.text
- : `[${chunkData.chunk.type}]`,
- });
- }
- if (chunkData.chunk.type === "reasoning-delta") {
- const thinkingTrack = "thinking";
- services.clarkProfiler
- .getProfiler()
- .createTrack(thinkingTrack, "AI Thinking/Reasoning", "llm");
- allReasoningDeltas.push(chunkData.chunk.text);
- currentStepReasoningDeltas.push(chunkData.chunk.text);
- if (!thinkingSpanActive) {
- thinkingSpanActive = true;
+ await traceWorkflow("doLLMGenerating", async () => {
+ let stepCount = 0;
+ const llmConfig = clark.context.llmConfig;
+ const { providerOptions, headers } = processLLMConfig(llmConfig, 5000, `LLM Generating (model=${model.modelId}, disabledTools=${disabledTools?.length ? disabledTools.join(",") : "none"})`);
+ // Track cumulative token usage across all steps
+ let totalInputTokens = 0;
+ let totalOutputTokens = 0;
+ let totalCachedTokens = 0;
+ services.clarkProfiler.startLLMWaiting({
+ messages: messages,
+ model: model.modelId,
+ });
+ let firstTokenReceived = false;
+ let thinkingSpanActive = false;
+ let textSpanActive = false;
+ const allReasoningDeltas = [];
+ let currentStepReasoningDeltas = [];
+ let currentStepTextDeltas = [];
+ const build = tracedStreamText({
+ abortSignal: abortController?.signal,
+ model,
+ providerOptions,
+ headers,
+ experimental_transform: [smoothStream({ chunking: "line" })],
+ messages,
+ tools,
+ prepareStep: breakpointAwarePrepareStep({
+ getInputTokens: () => totalInputTokens,
+ onAddBreakpoint: (stepNumber) => {
+ logRef.content += `[CACHE] Added breakpoint at step ${stepNumber}, ${totalInputTokens} input tokens\n\n`;
+ },
+ }),
+ stopWhen: hasToolCall("build_finalize"),
+ onChunk: (chunkData) => {
+ if (!firstTokenReceived) {
+ firstTokenReceived = true;
+ services.clarkProfiler.startLLMStreaming({
+ firstChunk: chunkData.chunk.type === "text-delta"
+ ? chunkData.chunk.text
+ : `[${chunkData.chunk.type}]`,
+ });
+ }
+ if (chunkData.chunk.type === "reasoning-delta") {
+ const thinkingTrack = "thinking";
  services.clarkProfiler
  .getProfiler()
- .startFrame(`Thinking Step ${stepCount + 1}`, thinkingTrack, {
- stepNumber: stepCount + 1,
- chunkType: chunkData.chunk.type,
- });
+ .createTrack(thinkingTrack, "AI Thinking/Reasoning", "llm");
+ allReasoningDeltas.push(chunkData.chunk.text);
+ currentStepReasoningDeltas.push(chunkData.chunk.text);
+ if (!thinkingSpanActive) {
+ thinkingSpanActive = true;
+ services.clarkProfiler
+ .getProfiler()
+ .startFrame(`Thinking Step ${stepCount + 1}`, thinkingTrack, {
+ stepNumber: stepCount + 1,
+ chunkType: chunkData.chunk.type,
+ });
+ }
  }
- }
- if (chunkData.chunk.type === "text-delta") {
- const textTrack = "text_generation";
- services.clarkProfiler
- .getProfiler()
- .createTrack(textTrack, "Text Generation", "llm");
- currentStepTextDeltas.push(chunkData.chunk.text);
- if (!textSpanActive) {
- textSpanActive = true;
+ if (chunkData.chunk.type === "text-delta") {
+ const textTrack = "text_generation";
  services.clarkProfiler
  .getProfiler()
- .startFrame(`Text Generation Step ${stepCount + 1}`, textTrack, {
- stepNumber: stepCount + 1,
- firstTextDelta: chunkData.chunk.text.slice(0, 50) +
- (chunkData.chunk.text.length > 50 ? "..." : ""),
- });
+ .createTrack(textTrack, "Text Generation", "llm");
+ currentStepTextDeltas.push(chunkData.chunk.text);
+ if (!textSpanActive) {
+ textSpanActive = true;
+ services.clarkProfiler
+ .getProfiler()
+ .startFrame(`Text Generation Step ${stepCount + 1}`, textTrack, {
+ stepNumber: stepCount + 1,
+ firstTextDelta: chunkData.chunk.text.slice(0, 50) +
+ (chunkData.chunk.text.length > 50 ? "..." : ""),
+ });
+ }
  }
- }
- },
- onStepFinish: async (step) => {
- stepCount++;
- const stepTimestamp = new Date().toISOString();
- logRef.content += `--- OUTPUT STEP ${stepCount} [${stepTimestamp}] ---\n`;
- if (step.reasoning && thinkingSpanActive) {
- const thinkingTrack = "thinking";
- services.clarkProfiler
- .getProfiler()
- .updateActiveFrameArgs(thinkingTrack, {
- completeReasoningText: currentStepReasoningDeltas.join(" "),
- reasoningLength: step.reasoning.length,
- stepComplete: true,
- });
- services.clarkProfiler.getProfiler().endFrame(thinkingTrack);
- thinkingSpanActive = false;
- currentStepReasoningDeltas = [];
- }
- // Log token usage for this step and accumulate totals
- if (step.usage) {
- const stepInputTokens = step.usage.inputTokens ?? 0;
- const stepOutputTokens = step.usage.outputTokens ?? 0;
- const stepCachedTokens = step.usage.cachedInputTokens ?? 0;
- // Accumulate totals
- totalInputTokens += stepInputTokens;
- totalOutputTokens += stepOutputTokens;
- totalCachedTokens += stepCachedTokens;
- logRef.content += `[TOKEN USAGE] Input: ${stepInputTokens}, Output: ${stepOutputTokens}, Total: ${step.usage.totalTokens ?? 0}`;
- if (stepCachedTokens) {
- logRef.content += `, Cached: ${stepCachedTokens}`;
+ },
+ onStepFinish: async (step) => {
+ stepCount++;
+ const stepTimestamp = new Date().toISOString();
+ logRef.content += `--- OUTPUT STEP ${stepCount} [${stepTimestamp}] ---\n`;
+ if (step.reasoning && thinkingSpanActive) {
+ const thinkingTrack = "thinking";
+ services.clarkProfiler
+ .getProfiler()
+ .updateActiveFrameArgs(thinkingTrack, {
+ completeReasoningText: currentStepReasoningDeltas.join(" "),
+ reasoningLength: step.reasoning.length,
+ stepComplete: true,
+ });
+ services.clarkProfiler.getProfiler().endFrame(thinkingTrack);
+ thinkingSpanActive = false;
+ currentStepReasoningDeltas = [];
  }
- logRef.content += `\n`;
- }
- if (step.reasoning) {
- const reasoningLines = [
- "[REASONING]",
- ...step.reasoning.map(({ text }) => text),
- "",
- ];
- logRef.content += reasoningLines.join("\n");
- }
- if (step.text && textSpanActive) {
- const textTrack = "text_generation";
- services.clarkProfiler
- .getProfiler()
- .updateActiveFrameArgs(textTrack, {
- completeTextContent: currentStepTextDeltas.join(""),
- finalText: step.text,
- textLength: step.text.length,
- stepComplete: true,
- });
- services.clarkProfiler.getProfiler().endFrame(textTrack);
- textSpanActive = false;
- currentStepTextDeltas = [];
- }
- if (step.text) {
- logRef.content += `[ASSISTANT TEXT] ${step.text}\n`;
- void services.chatSessionStore.recordAssistant({
- type: "text",
- text: step.text,
- });
- }
- const toolsCalled = step.content
- .filter((c) => c.type === "tool-result")
- .map((c) => ({
- toolName: c.toolName,
- input: JSON.stringify(c.input),
- output: JSON.stringify(c.output, null, 2),
- }));
- if (toolsCalled.length > 0) {
- logRef.content += `[TOOLS CALLED]\n`;
- toolsCalled.forEach((tool, idx) => {
- logRef.content += ` Tool ${idx + 1}: ${tool.toolName}\n`;
- logRef.content += ` Input: ${tool.input}\n`;
- logRef.content += ` Output: ${tool.output}\n`;
- });
- toolsCalled.forEach((tool, idx) => {
- let parsedInput, parsedOutput;
- try {
- parsedInput = JSON.parse(tool.input);
- }
- catch {
- parsedInput = tool.input;
- }
- try {
- parsedOutput = JSON.parse(tool.output);
- }
- catch {
- parsedOutput = tool.output;
+ // Log token usage for this step and accumulate totals
+ if (step.usage) {
+ const stepInputTokens = step.usage.inputTokens ?? 0;
+ const stepOutputTokens = step.usage.outputTokens ?? 0;
+ const stepCachedTokens = step.usage.cachedInputTokens ?? 0;
+ // Accumulate totals
+ totalInputTokens += stepInputTokens;
+ totalOutputTokens += stepOutputTokens;
+ totalCachedTokens += stepCachedTokens;
+ logRef.content += `[TOKEN USAGE] Input: ${stepInputTokens}, Output: ${stepOutputTokens}, Total: ${step.usage.totalTokens ?? 0}`;
+ if (stepCachedTokens) {
+ logRef.content += `, Cached: ${stepCachedTokens}`;
  }
+ logRef.content += `\n`;
+ }
+ if (step.reasoning) {
+ const reasoningLines = [
+ "[REASONING]",
+ ...step.reasoning.map(({ text }) => text),
+ "",
+ ];
+ logRef.content += reasoningLines.join("\n");
+ }
+ if (step.text && textSpanActive) {
+ const textTrack = "text_generation";
  services.clarkProfiler
  .getProfiler()
- .addInstantEvent(`Tool Call: ${tool.toolName}`, "llm", {
- step: stepCount,
- toolIndex: idx + 1,
- toolName: tool.toolName,
- input: parsedInput,
- output: parsedOutput,
- inputSize: tool.input.length,
- outputSize: tool.output.length,
+ .updateActiveFrameArgs(textTrack, {
+ completeTextContent: currentStepTextDeltas.join(""),
+ finalText: step.text,
+ textLength: step.text.length,
+ stepComplete: true,
  });
- });
- }
- logRef.content += `\n`;
- },
- }, clark.tracer, clark.logger);
- for await (const chunk of build.fullStream) {
- await processStreamChunk(chunk, clark, services.chatSessionStore, logRef);
- }
- if (firstTokenReceived) {
- services.clarkProfiler.endFrame();
- }
- services.clarkProfiler.endFrame();
- if (thinkingSpanActive) {
- services.clarkProfiler.getProfiler().endFrame("thinking");
- }
- if (textSpanActive) {
- services.clarkProfiler.getProfiler().endFrame("text_generation");
- }
- const endTimestamp = new Date().toISOString();
- logRef.content += `=== LLM CONVERSATION END [${conversationId}] ===\n`;
- logRef.content += `End Timestamp: ${endTimestamp}\n`;
- logRef.content += `Total Steps: ${stepCount}\n`;
- // Log final token usage summary using accumulated totals
- const finalTotalTokens = totalInputTokens + totalOutputTokens;
- logRef.content += `[TOTAL TOKEN USAGE] Input: ${totalInputTokens}, Output: ${totalOutputTokens}, Total: ${finalTotalTokens}`;
- if (totalCachedTokens > 0) {
- logRef.content += `, Cached: ${totalCachedTokens}`;
- }
- logRef.content += `\n`;
- try {
- // Create TokenRequestData object from accumulated step data (not AI SDK usage which is only final step)
- const requestTokenData = {
- requestId: conversationId.toString(),
- inputTokens: totalInputTokens,
- outputTokens: totalOutputTokens,
- totalTokens: totalInputTokens + totalOutputTokens,
- cachedInputTokens: totalCachedTokens,
- model: model.modelId,
- startTime: startTimestamp,
- endTime: endTimestamp,
- };
- await clark.context.peer?.call.aiPushTokenUsage(requestTokenData);
- }
- catch (error) {
- // Token tracking is non-critical - log error but don't fail the AI request
- getLogger().warn("Failed to send token usage data", error instanceof Error ? error.message : String(error));
- }
- // Save the complete log using saveGeneratedArtifact
- try {
- const logArtifact = {
- type: "file",
- filePath: `llm-conversation-${conversationId}.log`,
- content: logRef.content,
- };
- const stepId = `llm-conversation-${conversationId}`;
- await services.appShell.saveGeneratedArtifact(logArtifact, stepId, runTimestamp);
- getLogger().debug("LLM conversation log saved");
- }
- catch (error) {
- getLogger().error("Failed to save LLM conversation log", {
- error: {
- kind: "SaveLogError",
- message: error instanceof Error ? error.message : String(error),
- stack: error instanceof Error ? error.stack : undefined,
+ services.clarkProfiler.getProfiler().endFrame(textTrack);
+ textSpanActive = false;
+ currentStepTextDeltas = [];
+ }
+ if (step.text) {
+ logRef.content += `[ASSISTANT TEXT] ${step.text}\n`;
+ void services.chatSessionStore.recordAssistant({
+ type: "text",
+ text: step.text,
+ });
+ }
+ const toolsCalled = step.content
+ .filter((c) => c.type === "tool-result")
+ .map((c) => ({
+ toolName: c.toolName,
+ input: JSON.stringify(c.input),
+ output: JSON.stringify(c.output, null, 2),
+ }));
+ if (toolsCalled.length > 0) {
+ logRef.content += `[TOOLS CALLED]\n`;
+ toolsCalled.forEach((tool, idx) => {
+ logRef.content += ` Tool ${idx + 1}: ${tool.toolName}\n`;
+ logRef.content += ` Input: ${tool.input}\n`;
+ logRef.content += ` Output: ${tool.output}\n`;
+ });
+ toolsCalled.forEach((tool, idx) => {
+ let parsedInput, parsedOutput;
+ try {
+ parsedInput = JSON.parse(tool.input);
+ }
+ catch {
+ parsedInput = tool.input;
+ }
+ try {
+ parsedOutput = JSON.parse(tool.output);
+ }
+ catch {
+ parsedOutput = tool.output;
+ }
+ services.clarkProfiler
+ .getProfiler()
+ .addInstantEvent(`Tool Call: ${tool.toolName}`, "llm", {
+ step: stepCount,
+ toolIndex: idx + 1,
+ toolName: tool.toolName,
+ input: parsedInput,
+ output: parsedOutput,
+ inputSize: tool.input.length,
+ outputSize: tool.output.length,
+ });
+ });
+ }
+ logRef.content += `\n`;
  },
  });
- }
- const hasLocalDraft = await services.draftInterface.hasLocalDraftChanges();
- if (hasLocalDraft) {
- void transitionTo({
- type: V3_AGENT_FINISHED,
- });
- }
- else {
- void transitionTo({
- type: APP_RUNTIME_UPDATED_WITHOUT_EDITS,
+ for await (const chunk of build.fullStream) {
+ await processStreamChunk(chunk, clark, services.chatSessionStore, logRef);
+ }
+ if (firstTokenReceived) {
+ services.clarkProfiler.endFrame();
+ }
+ services.clarkProfiler.endFrame();
+ if (thinkingSpanActive) {
+ services.clarkProfiler.getProfiler().endFrame("thinking");
+ }
+ if (textSpanActive) {
+ services.clarkProfiler.getProfiler().endFrame("text_generation");
+ }
+ const usage = await build.usage;
+ const { llmobs } = ddTrace;
+ llmobs.annotate({
+ inputData: JSON.stringify(messages),
+ outputData: JSON.stringify(build.fullStream),
+ metrics: {
+ inputTokens: usage?.inputTokens ?? 0,
+ outputTokens: usage?.outputTokens ?? 0,
+ totalTokens: usage?.totalTokens ?? 0,
+ cachedInputTokens: usage?.cachedInputTokens ?? 0,
+ },
+ metadata: {
+ request: build.request,
+ },
  });
- }
+ const endTimestamp = new Date().toISOString();
+ logRef.content += `=== LLM CONVERSATION END [${conversationId}] ===\n`;
+ logRef.content += `End Timestamp: ${endTimestamp}\n`;
+ logRef.content += `Total Steps: ${stepCount}\n`;
+ // Log final token usage summary using accumulated totals
+ const finalTotalTokens = totalInputTokens + totalOutputTokens;
+ logRef.content += `[TOTAL TOKEN USAGE] Input: ${totalInputTokens}, Output: ${totalOutputTokens}, Total: ${finalTotalTokens}`;
+ if (totalCachedTokens > 0) {
+ logRef.content += `, Cached: ${totalCachedTokens}`;
+ }
+ logRef.content += `\n`;
+ try {
+ // Create TokenRequestData object from accumulated step data (not AI SDK usage which is only final step)
+ const requestTokenData = {
+ requestId: conversationId.toString(),
+ inputTokens: totalInputTokens,
+ outputTokens: totalOutputTokens,
+ totalTokens: totalInputTokens + totalOutputTokens,
+ cachedInputTokens: totalCachedTokens,
+ model: model.modelId,
+ startTime: startTimestamp,
+ endTime: endTimestamp,
+ };
+ await clark.context.peer?.call.aiPushTokenUsage(requestTokenData);
+ }
+ catch (error) {
+ // Token tracking is non-critical - log error but don't fail the AI request
+ getLogger().warn("Failed to send token usage data", error instanceof Error ? error.message : String(error));
+ }
+ // Save the complete log using saveGeneratedArtifact
+ try {
+ const logArtifact = {
+ type: "file",
+ filePath: `llm-conversation-${conversationId}.log`,
+ content: logRef.content,
+ };
+ const stepId = `llm-conversation-${conversationId}`;
+ await services.appShell.saveGeneratedArtifact(logArtifact, stepId, runTimestamp);
+ getLogger().debug("LLM conversation log saved");
+ }
+ catch (error) {
+ getLogger().error("Failed to save LLM conversation log", {
+ error: {
+ kind: "SaveLogError",
+ message: error instanceof Error ? error.message : String(error),
+ stack: error instanceof Error ? error.stack : undefined,
+ },
+ });
+ }
+ const hasLocalDraft = await services.draftInterface.hasLocalDraftChanges();
+ if (hasLocalDraft) {
+ void transitionTo({
+ type: V3_AGENT_FINISHED,
+ });
+ }
+ else {
+ void transitionTo({
+ type: APP_RUNTIME_UPDATED_WITHOUT_EDITS,
+ });
+ }
+ });
  }
  }
  };
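For orientation, here is a minimal sketch of the tracing pattern the added lines rely on, assuming dd-trace's LLM Observability SDK (llmobs.trace and llmobs.annotate). The traceWorkflow actually shipped in util/ddog-llmobs.js is not shown in this diff, so the helper bodies below are illustrative assumptions, not the package's implementation.

// Sketch only: a plausible traceWorkflow built on dd-trace LLM Observability.
// Assumption: the real util/ddog-llmobs.js may differ; llmobs.trace and
// llmobs.annotate are documented dd-trace APIs, the rest is illustrative.
import ddTrace from "dd-trace";

const { llmobs } = ddTrace;

// Open a "workflow" span with the given name and close it when the callback settles.
export const traceWorkflow = (name, fn) => llmobs.trace({ kind: "workflow", name }, fn);

// Attach token metrics to the active span, mirroring the llmobs.annotate call added above.
export const annotateUsage = (usage) =>
  llmobs.annotate({
    metrics: {
      inputTokens: usage?.inputTokens ?? 0,
      outputTokens: usage?.outputTokens ?? 0,
      totalTokens: usage?.totalTokens ?? 0,
    },
  });

In the handler above, traceWorkflow("doLLMGenerating", ...) wraps the streaming loop, and the annotate call runs once build.usage resolves, so token metrics land on the workflow span instead of going through the removed ai-service/llmobs tracer.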