@databricks/appkit 0.1.5 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (127)
  1. package/AGENTS.md +57 -2
  2. package/CLAUDE.md +57 -2
  3. package/NOTICE.md +2 -0
  4. package/README.md +21 -15
  5. package/bin/appkit-lint.js +129 -0
  6. package/dist/analytics/analytics.d.ts.map +1 -1
  7. package/dist/analytics/analytics.js +33 -33
  8. package/dist/analytics/analytics.js.map +1 -1
  9. package/dist/analytics/query.js +8 -2
  10. package/dist/analytics/query.js.map +1 -1
  11. package/dist/app/index.d.ts +5 -1
  12. package/dist/app/index.d.ts.map +1 -1
  13. package/dist/app/index.js +41 -10
  14. package/dist/app/index.js.map +1 -1
  15. package/dist/appkit/package.js +1 -1
  16. package/dist/cache/index.d.ts.map +1 -1
  17. package/dist/cache/index.js +24 -3
  18. package/dist/cache/index.js.map +1 -1
  19. package/dist/cache/storage/persistent.js +12 -6
  20. package/dist/cache/storage/persistent.js.map +1 -1
  21. package/dist/connectors/lakebase/client.js +25 -14
  22. package/dist/connectors/lakebase/client.js.map +1 -1
  23. package/dist/connectors/sql-warehouse/client.js +68 -28
  24. package/dist/connectors/sql-warehouse/client.js.map +1 -1
  25. package/dist/context/service-context.js +13 -8
  26. package/dist/context/service-context.js.map +1 -1
  27. package/dist/errors/authentication.d.ts +38 -0
  28. package/dist/errors/authentication.d.ts.map +1 -0
  29. package/dist/errors/authentication.js +48 -0
  30. package/dist/errors/authentication.js.map +1 -0
  31. package/dist/errors/base.d.ts +58 -0
  32. package/dist/errors/base.d.ts.map +1 -0
  33. package/dist/errors/base.js +70 -0
  34. package/dist/errors/base.js.map +1 -0
  35. package/dist/errors/configuration.d.ts +38 -0
  36. package/dist/errors/configuration.d.ts.map +1 -0
  37. package/dist/errors/configuration.js +45 -0
  38. package/dist/errors/configuration.js.map +1 -0
  39. package/dist/errors/connection.d.ts +42 -0
  40. package/dist/errors/connection.d.ts.map +1 -0
  41. package/dist/errors/connection.js +54 -0
  42. package/dist/errors/connection.js.map +1 -0
  43. package/dist/errors/execution.d.ts +42 -0
  44. package/dist/errors/execution.d.ts.map +1 -0
  45. package/dist/errors/execution.js +51 -0
  46. package/dist/errors/execution.js.map +1 -0
  47. package/dist/errors/index.js +28 -0
  48. package/dist/errors/index.js.map +1 -0
  49. package/dist/errors/initialization.d.ts +34 -0
  50. package/dist/errors/initialization.d.ts.map +1 -0
  51. package/dist/errors/initialization.js +42 -0
  52. package/dist/errors/initialization.js.map +1 -0
  53. package/dist/errors/server.d.ts +38 -0
  54. package/dist/errors/server.d.ts.map +1 -0
  55. package/dist/errors/server.js +45 -0
  56. package/dist/errors/server.js.map +1 -0
  57. package/dist/errors/tunnel.d.ts +38 -0
  58. package/dist/errors/tunnel.d.ts.map +1 -0
  59. package/dist/errors/tunnel.js +51 -0
  60. package/dist/errors/tunnel.js.map +1 -0
  61. package/dist/errors/validation.d.ts +36 -0
  62. package/dist/errors/validation.d.ts.map +1 -0
  63. package/dist/errors/validation.js +45 -0
  64. package/dist/errors/validation.js.map +1 -0
  65. package/dist/index.d.ts +12 -3
  66. package/dist/index.js +18 -3
  67. package/dist/index.js.map +1 -0
  68. package/dist/logging/logger.js +179 -0
  69. package/dist/logging/logger.js.map +1 -0
  70. package/dist/logging/sampling.js +56 -0
  71. package/dist/logging/sampling.js.map +1 -0
  72. package/dist/logging/wide-event-emitter.js +108 -0
  73. package/dist/logging/wide-event-emitter.js.map +1 -0
  74. package/dist/logging/wide-event.js +167 -0
  75. package/dist/logging/wide-event.js.map +1 -0
  76. package/dist/plugin/dev-reader.d.ts.map +1 -1
  77. package/dist/plugin/dev-reader.js +8 -3
  78. package/dist/plugin/dev-reader.js.map +1 -1
  79. package/dist/plugin/interceptors/cache.js.map +1 -1
  80. package/dist/plugin/interceptors/retry.js +10 -2
  81. package/dist/plugin/interceptors/retry.js.map +1 -1
  82. package/dist/plugin/interceptors/telemetry.js +24 -9
  83. package/dist/plugin/interceptors/telemetry.js.map +1 -1
  84. package/dist/plugin/interceptors/timeout.js +4 -0
  85. package/dist/plugin/interceptors/timeout.js.map +1 -1
  86. package/dist/plugin/plugin.d.ts +1 -1
  87. package/dist/plugin/plugin.d.ts.map +1 -1
  88. package/dist/plugin/plugin.js +9 -4
  89. package/dist/plugin/plugin.js.map +1 -1
  90. package/dist/server/index.d.ts.map +1 -1
  91. package/dist/server/index.js +22 -17
  92. package/dist/server/index.js.map +1 -1
  93. package/dist/server/remote-tunnel/remote-tunnel-controller.js +4 -2
  94. package/dist/server/remote-tunnel/remote-tunnel-controller.js.map +1 -1
  95. package/dist/server/remote-tunnel/remote-tunnel-manager.js +10 -8
  96. package/dist/server/remote-tunnel/remote-tunnel-manager.js.map +1 -1
  97. package/dist/server/vite-dev-server.js +8 -3
  98. package/dist/server/vite-dev-server.js.map +1 -1
  99. package/dist/stream/arrow-stream-processor.js +13 -6
  100. package/dist/stream/arrow-stream-processor.js.map +1 -1
  101. package/dist/stream/buffers.js +5 -1
  102. package/dist/stream/buffers.js.map +1 -1
  103. package/dist/stream/stream-manager.d.ts.map +1 -1
  104. package/dist/stream/stream-manager.js +47 -36
  105. package/dist/stream/stream-manager.js.map +1 -1
  106. package/dist/stream/types.js.map +1 -1
  107. package/dist/telemetry/index.d.ts +2 -2
  108. package/dist/telemetry/index.js +2 -2
  109. package/dist/telemetry/instrumentations.js +14 -10
  110. package/dist/telemetry/instrumentations.js.map +1 -1
  111. package/dist/telemetry/telemetry-manager.js +8 -6
  112. package/dist/telemetry/telemetry-manager.js.map +1 -1
  113. package/dist/telemetry/trace-sampler.js +33 -0
  114. package/dist/telemetry/trace-sampler.js.map +1 -0
  115. package/dist/type-generator/index.js +4 -2
  116. package/dist/type-generator/index.js.map +1 -1
  117. package/dist/type-generator/query-registry.js +13 -3
  118. package/dist/type-generator/query-registry.js.map +1 -1
  119. package/dist/type-generator/vite-plugin.d.ts.map +1 -1
  120. package/dist/type-generator/vite-plugin.js +5 -3
  121. package/dist/type-generator/vite-plugin.js.map +1 -1
  122. package/dist/utils/env-validator.js +5 -1
  123. package/dist/utils/env-validator.js.map +1 -1
  124. package/dist/utils/path-exclusions.js +66 -0
  125. package/dist/utils/path-exclusions.js.map +1 -0
  126. package/llms.txt +57 -2
  127. package/package.json +4 -1
package/AGENTS.md CHANGED
@@ -431,9 +431,12 @@ WHERE workspace_id = :workspaceId
  HTTP endpoints exposed (mounted under `/api/analytics`):
 
  - `POST /api/analytics/query/:query_key`
- - `POST /api/analytics/users/me/query/:query_key`
  - `GET /api/analytics/arrow-result/:jobId`
- - `GET /api/analytics/users/me/arrow-result/:jobId`
+
+ **Query file naming convention determines execution context:**
+
+ - `config/queries/<query_key>.sql` - Executes as service principal (shared cache)
+ - `config/queries/<query_key>.obo.sql` - Executes as user (OBO = On-Behalf-Of, per-user cache)
 
  Formats:
 
@@ -668,6 +671,56 @@ export function SpendChart() {
  }
  ```
 
+ **Chart props reference (important):**
+
+ Charts are **self-contained ECharts components**. Configure via props, NOT children:
+
+ ```tsx
+ // ✅ Correct: use props for customization
+ <BarChart
+   queryKey="sales_by_region"
+   parameters={{}}
+   xKey="region" // X-axis field
+   yKey={["revenue", "expenses"]} // Y-axis field(s) - string or string[]
+   colors={['#40d1f5', '#4462c9']} // Custom colors
+   stacked // Stack bars (BarChart, AreaChart)
+   orientation="horizontal" // "vertical" (default) | "horizontal"
+   showLegend // Show legend
+   height={400} // Height in pixels (default: 300)
+ />
+
+ <LineChart
+   queryKey="trend_data"
+   parameters={{}}
+   xKey="date"
+   yKey="value"
+   smooth // Smooth curves (default: true)
+   showSymbol={false} // Hide data point markers
+ />
+ ```
+
+ **❌ CRITICAL: Charts do NOT accept Recharts children**
+
+ ```tsx
+ // ❌ WRONG - AppKit charts are NOT Recharts wrappers
+ import { BarChart } from "@databricks/appkit-ui/react";
+ import { Bar, XAxis, YAxis, CartesianGrid } from "recharts";
+
+ <BarChart queryKey="data" parameters={{}}>
+   <CartesianGrid /> // ❌ This will cause TypeScript errors
+   <XAxis dataKey="x" /> // ❌ Not supported
+   <Bar dataKey="y" /> // ❌ Not supported
+ </BarChart>
+
+ // ✅ CORRECT - use props instead
+ <BarChart
+   queryKey="data"
+   parameters={{}}
+   xKey="x"
+   yKey="y"
+ />
+ ```
+
  ### SQL helpers (`sql.*`)
 
  Use these to build typed parameters (they return marker objects: `{ __sql_type, value }`):
@@ -1169,6 +1222,7 @@ env:
  - `useMemo` wraps parameters objects
  - Loading/error/empty states are explicit
  - Charts use `format="auto"` unless you have a reason to force `"json"`/`"arrow"`
+ - Charts use props (`xKey`, `yKey`, `colors`) NOT children (they're ECharts-based, not Recharts)
  - If using tooltips: root is wrapped with `<TooltipProvider>`
 
  - **Never**
@@ -1176,4 +1230,5 @@ env:
  - Don't pass untyped raw params for annotated queries
  - Don't ignore `createApp()`'s promise
  - Don't invent UI components not listed in this file
+ - Don't pass Recharts children (`<Bar>`, `<XAxis>`, etc.) to AppKit chart components
 
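A minimal sketch of what the new `.sql` / `.obo.sql` convention looks like from the client side, assuming a hypothetical `spend_by_region` query key. Only the endpoint path, the request body fields (`parameters`, `format`), and the shared-vs-per-user split are taken from the diff above; the rest is illustrative.

```ts
// Hypothetical layout: the same query key in the two flavors described above.
//   config/queries/spend_by_region.sql      -> runs as the service principal, shared cache
//   config/queries/spend_by_region.obo.sql  -> runs as the calling user (OBO), per-user cache
//
// Both flavors are reached through the one remaining endpoint; the server decides
// the execution context from the file extension, not from the URL.
async function runSpendByRegion(): Promise<Response> {
  return fetch("/api/analytics/query/spend_by_region", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      parameters: {}, // sql.* typed parameters in a real app
      format: "JSON", // or "ARROW", matching the handler's `format` field
    }),
  });
}
// Response handling is elided; the handler streams results via executeStream().
```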
package/CLAUDE.md CHANGED
@@ -431,9 +431,12 @@ WHERE workspace_id = :workspaceId
  HTTP endpoints exposed (mounted under `/api/analytics`):
 
  - `POST /api/analytics/query/:query_key`
- - `POST /api/analytics/users/me/query/:query_key`
  - `GET /api/analytics/arrow-result/:jobId`
- - `GET /api/analytics/users/me/arrow-result/:jobId`
+
+ **Query file naming convention determines execution context:**
+
+ - `config/queries/<query_key>.sql` - Executes as service principal (shared cache)
+ - `config/queries/<query_key>.obo.sql` - Executes as user (OBO = On-Behalf-Of, per-user cache)
 
  Formats:
 
@@ -668,6 +671,56 @@ export function SpendChart() {
  }
  ```
 
+ **Chart props reference (important):**
+
+ Charts are **self-contained ECharts components**. Configure via props, NOT children:
+
+ ```tsx
+ // ✅ Correct: use props for customization
+ <BarChart
+   queryKey="sales_by_region"
+   parameters={{}}
+   xKey="region" // X-axis field
+   yKey={["revenue", "expenses"]} // Y-axis field(s) - string or string[]
+   colors={['#40d1f5', '#4462c9']} // Custom colors
+   stacked // Stack bars (BarChart, AreaChart)
+   orientation="horizontal" // "vertical" (default) | "horizontal"
+   showLegend // Show legend
+   height={400} // Height in pixels (default: 300)
+ />
+
+ <LineChart
+   queryKey="trend_data"
+   parameters={{}}
+   xKey="date"
+   yKey="value"
+   smooth // Smooth curves (default: true)
+   showSymbol={false} // Hide data point markers
+ />
+ ```
+
+ **❌ CRITICAL: Charts do NOT accept Recharts children**
+
+ ```tsx
+ // ❌ WRONG - AppKit charts are NOT Recharts wrappers
+ import { BarChart } from "@databricks/appkit-ui/react";
+ import { Bar, XAxis, YAxis, CartesianGrid } from "recharts";
+
+ <BarChart queryKey="data" parameters={{}}>
+   <CartesianGrid /> // ❌ This will cause TypeScript errors
+   <XAxis dataKey="x" /> // ❌ Not supported
+   <Bar dataKey="y" /> // ❌ Not supported
+ </BarChart>
+
+ // ✅ CORRECT - use props instead
+ <BarChart
+   queryKey="data"
+   parameters={{}}
+   xKey="x"
+   yKey="y"
+ />
+ ```
+
  ### SQL helpers (`sql.*`)
 
  Use these to build typed parameters (they return marker objects: `{ __sql_type, value }`):
@@ -1169,6 +1222,7 @@ env:
  - `useMemo` wraps parameters objects
  - Loading/error/empty states are explicit
  - Charts use `format="auto"` unless you have a reason to force `"json"`/`"arrow"`
+ - Charts use props (`xKey`, `yKey`, `colors`) NOT children (they're ECharts-based, not Recharts)
  - If using tooltips: root is wrapped with `<TooltipProvider>`
 
  - **Never**
@@ -1176,4 +1230,5 @@ env:
  - Don't pass untyped raw params for annotated queries
  - Don't ignore `createApp()`'s promise
  - Don't invent UI components not listed in this file
+ - Don't pass Recharts children (`<Bar>`, `<XAxis>`, etc.) to AppKit chart components
 
package/NOTICE.md CHANGED
@@ -6,6 +6,7 @@ This Software contains code from the following open source projects:
 
  | Name | Installed version | License | Code |
  | :--------------- | :---------------- | :----------- | :--------------------------------------------------- |
+ | [@ast-grep/napi](https://www.npmjs.com/package/@ast-grep/napi) | 0.37.0 | MIT | https://ast-grep.github.io |
  | [@hookform/resolvers](https://www.npmjs.com/package/@hookform/resolvers) | 5.2.2 | MIT | https://react-hook-form.com |
  | [@opentelemetry/api](https://www.npmjs.com/package/@opentelemetry/api) | 1.9.0 | Apache-2.0 | https://github.com/open-telemetry/opentelemetry-js/tree/main/api |
  | [@opentelemetry/api-logs](https://www.npmjs.com/package/@opentelemetry/api-logs) | 0.208.0 | Apache-2.0 | https://github.com/open-telemetry/opentelemetry-js/tree/main/experimental/packages/api-logs |
@@ -63,6 +64,7 @@ This Software contains code from the following open source projects:
  | [input-otp](https://www.npmjs.com/package/input-otp) | 1.4.2 | MIT | https://input-otp.rodz.dev/ |
  | [lucide-react](https://www.npmjs.com/package/lucide-react) | 0.554.0 | ISC | https://lucide.dev |
  | [next-themes](https://www.npmjs.com/package/next-themes) | 0.4.6 | MIT | https://github.com/pacocoursey/next-themes#readme |
+ | [obug](https://www.npmjs.com/package/obug) | 2.1.1 | MIT | https://github.com/sxzz/obug#readme |
  | [pg](https://www.npmjs.com/package/pg) | 8.16.3 | MIT | https://github.com/brianc/node-postgres |
  | [react-day-picker](https://www.npmjs.com/package/react-day-picker) | 9.12.0 | MIT | https://daypicker.dev |
  | [react-hook-form](https://www.npmjs.com/package/react-hook-form) | 7.68.0 | MIT | https://react-hook-form.com |
package/README.md CHANGED
@@ -1,7 +1,8 @@
  # AppKit
 
- > [!WARNING]
- > ## ⚠️ PREVIEW - NOT FOR PRODUCTION USE
+ Build Databricks Apps faster with our brand-new Node.js + React SDK. Built for humans and AI.
+
+ > [!WARNING] PREVIEW - NOT FOR PRODUCTION USE
  >
  > **This SDK is in preview and is subject to change without notice.**
  >
@@ -11,25 +12,30 @@
  > - 📝 **Use for development and testing only**
  >
 
- ## Contributing
+ ## Introduction
 
- See [CONTRIBUTING.md](CONTRIBUTING.md) for development setup and contribution guidelines.
+ AppKit is a TypeScript SDK for building production-ready Databricks applications with a plugin-based architecture. It provides opinionated defaults, built-in observability, and seamless integration with Databricks services.
 
- ## Documentation
+ AppKit simplifies building data applications on Databricks by providing:
+
+ - **Plugin architecture**: Modular design with built-in server and analytics plugins
+ - **Type safety**: End-to-end TypeScript with automatic query type generation
+ - **Production-ready features**: Built-in caching, telemetry, retry logic, and error handling
+ - **Developer experience**: Remote hot reload, file-based queries, optimized for AI-assisted development
+ - **Databricks native**: Seamless integration with SQL Warehouses, Unity Catalog, and other workspace resources
 
- The `docs/` directory contains the AppKit documentation site, built with Docusaurus.
+ ## Getting started
 
- **Working with docs:**
+ Follow the [Getting Started](https://databricks.github.io/appkit/docs/) guide to get started with AppKit.
 
- ```bash
- # From root
- pnpm docs:dev # Start dev server
- pnpm docs:build # Build docs
- pnpm docs:serve # Serve built docs
- ```
+ ## Documentation
 
- See [docs/README.md](./docs/README.md) for more details.
+ 📖 For full AppKit documentation, visit the [AppKit Documentation](https://databricks.github.io/appkit/) website.
 
  👉 For AI/code assistants:
  - Use [llms-compact.txt](./llms-compact.txt) for quick usage patterns.
- - See [llms.txt](./llms.txt) for full guidance and anti-patterns.
+ - See [llms.txt](./llms.txt) for full guidance and anti-patterns.
+
+ ## Contributing
+
+ See [CONTRIBUTING.md](CONTRIBUTING.md) for development setup and contribution guidelines.
package/bin/appkit-lint.js ADDED
@@ -0,0 +1,129 @@
+ #!/usr/bin/env node
+ /**
+  * AST-based linting using ast-grep.
+  * Catches patterns that ESLint/TypeScript miss or handle poorly.
+  * Usage: npx appkit-lint
+  */
+ import { parse, Lang } from "@ast-grep/napi";
+ import fs from "node:fs";
+ import path from "node:path";
+
+ const rules = [
+   {
+     id: "no-double-type-assertion",
+     pattern: "$X as unknown as $Y",
+     message:
+       "Avoid double type assertion (as unknown as). Use proper type guards or fix the source type.",
+   },
+   {
+     id: "no-as-any",
+     pattern: "$X as any",
+     message:
+       'Avoid "as any" type assertion. Use proper typing or unknown with type guards.',
+     includeTests: false, // acceptable in test mocks
+   },
+   {
+     id: "no-array-index-key",
+     pattern: "key={$IDX}",
+     message:
+       "Avoid using array index as React key. Use a stable unique identifier.",
+     filter: (code) => /key=\{(idx|index|i)\}/.test(code),
+   },
+   {
+     id: "no-parse-float-without-validation",
+     pattern: "parseFloat($X).toFixed($Y)",
+     message:
+       "parseFloat can return NaN. Validate input or use toNumber() helper from shared/types.ts.",
+   },
+ ];
+
+ function isTestFile(filePath) {
+   return (
+     /\.(test|spec)\.(ts|tsx)$/.test(filePath) || filePath.includes("/tests/")
+   );
+ }
+
+ function findTsFiles(dir, files = []) {
+   const entries = fs.readdirSync(dir, { withFileTypes: true });
+
+   for (const entry of entries) {
+     const fullPath = path.join(dir, entry.name);
+
+     if (entry.isDirectory()) {
+       if (["node_modules", "dist", "build", ".git"].includes(entry.name))
+         continue;
+       findTsFiles(fullPath, files);
+     } else if (entry.isFile() && /\.(ts|tsx)$/.test(entry.name)) {
+       files.push(fullPath);
+     }
+   }
+
+   return files;
+ }
+
+ function lintFile(filePath, rules) {
+   const violations = [];
+   const content = fs.readFileSync(filePath, "utf-8");
+   const lang = filePath.endsWith(".tsx") ? Lang.Tsx : Lang.TypeScript;
+   const testFile = isTestFile(filePath);
+
+   const ast = parse(lang, content);
+   const root = ast.root();
+
+   for (const rule of rules) {
+     // skip rules that don't apply to test files
+     if (testFile && rule.includeTests === false) continue;
+
+     const matches = root.findAll(rule.pattern);
+
+     for (const match of matches) {
+       const code = match.text();
+
+       if (rule.filter && !rule.filter(code)) continue;
+
+       const range = match.range();
+       violations.push({
+         file: filePath,
+         line: range.start.line + 1,
+         column: range.start.column + 1,
+         rule: rule.id,
+         message: rule.message,
+         code: code.length > 80 ? `${code.slice(0, 77)}...` : code,
+       });
+     }
+   }
+
+   return violations;
+ }
+
+ function main() {
+   const rootDir = process.cwd();
+   const files = findTsFiles(rootDir);
+
+   console.log(`Scanning ${files.length} TypeScript files...\n`);
+
+   const allViolations = [];
+
+   for (const file of files) {
+     const violations = lintFile(file, rules);
+     allViolations.push(...violations);
+   }
+
+   if (allViolations.length === 0) {
+     console.log("No ast-grep lint violations found.");
+     process.exit(0);
+   }
+
+   console.log(`Found ${allViolations.length} violation(s):\n`);
+
+   for (const v of allViolations) {
+     const relPath = path.relative(rootDir, v.file);
+     console.log(`${relPath}:${v.line}:${v.column}`);
+     console.log(` ${v.rule}: ${v.message}`);
+     console.log(` > ${v.code}\n`);
+   }
+
+   process.exit(1);
+ }
+
+ main();
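The script above relies entirely on `@ast-grep/napi` structural pattern matching. Below is a small standalone sketch of that mechanism, using the same `parse` / `findAll` / `range` calls the script makes, but on a hypothetical in-memory source string instead of files read from disk.

```ts
import { Lang, parse } from "@ast-grep/napi";

// Hypothetical input; appkit-lint reads real files with fs.readFileSync instead.
const source = [
  "const user = data as any;",
  "const id = value as unknown as string;",
].join("\n");

const root = parse(Lang.TypeScript, source).root();

// Same metavariable patterns as the shipped no-as-any / no-double-type-assertion rules.
for (const pattern of ["$X as any", "$X as unknown as $Y"]) {
  for (const match of root.findAll(pattern)) {
    const { start } = match.range();
    console.log(`${pattern} -> ${start.line + 1}:${start.column + 1} ${match.text()}`);
  }
}
```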
package/dist/analytics/analytics.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"analytics.d.ts","names":[],"sources":["../../src/analytics/analytics.ts"],"sourcesContent":[],"mappings":";;;;;;;;cAuBa,eAAA,SAAwB,MAAA;;;EAAxB,iBAAA,WAAgB,EAAA,MAAA;EAAA,UAAA,MAAA,EAKD,gBALC;UAKD,SAAA;UAMN,cAAA;aAWC,CAAA,MAAA,EAXD,gBAWC;cA6CN,CAAA,MAAA,EA7CM,UA6CN,CAAA,EAAA,IAAA;;;;;mBAqCZ,CAAA,GAAA,EArCI,OAAA,CAAQ,OAqCZ,EAAA,GAAA,EApCI,OAAA,CAAQ,QAoCZ,CAAA,EAnCA,OAmCA,CAAA,IAAA,CAAA;;;;;mBAkGA,CAAA,GAAA,EApGI,OAAA,CAAQ,OAoGZ,EAAA,GAAA,EAnGI,OAAA,CAAQ,QAmGZ,CAAA,EAlGA,OAkGA,CAAA,IAAA,CAAA;;;;;;;;AAwCL;;;;;;;;oCA3CiB,eAAe,sDACT,8BACV,cACR;;;;0CAyBgB,yCAER,cACR,QAAQ;cAIO;;;;;cAQP,WAAS,gBAAA,iBAAA"}
+ {"version":3,"file":"analytics.d.ts","names":[],"sources":["../../src/analytics/analytics.ts"],"sourcesContent":[],"mappings":";;;;;;;;cA0Ba,eAAA,SAAwB,MAAA;;;EAAxB,iBAAA,WAAgB,EAAA,MAAA;EAAA,UAAA,MAAA,EAKD,gBALC;UAKD,SAAA;UAMN,cAAA;aAWC,CAAA,MAAA,EAXD,gBAWC;cA0BN,CAAA,MAAA,EA1BM,UA0BN,CAAA,EAAA,IAAA;;;;;mBA2CZ,CAAA,GAAA,EA3CI,OAAA,CAAQ,OA2CZ,EAAA,GAAA,EA1CI,OAAA,CAAQ,QA0CZ,CAAA,EAzCA,OAyCA,CAAA,IAAA,CAAA;;;;;mBAkHA,CAAA,GAAA,EApHI,OAAA,CAAQ,OAoHZ,EAAA,GAAA,EAnHI,OAAA,CAAQ,QAmHZ,CAAA,EAlHA,OAkHA,CAAA,IAAA,CAAA;;;;;;;;AAwCL;;;;;;;;oCA3CiB,eAAe,sDACT,8BACV,cACR;;;;0CAyBgB,yCAER,cACR,QAAQ;cAIO;;;;;cAQP,WAAS,gBAAA,iBAAA"}
package/dist/analytics/analytics.js CHANGED
@@ -1,3 +1,4 @@
+ import { createLogger } from "../logging/logger.js";
  import { getCurrentUserId, getWarehouseId, getWorkspaceClient } from "../context/execution-context.js";
  import { init_context } from "../context/index.js";
  import { SQLWarehouseConnector } from "../connectors/sql-warehouse/client.js";
@@ -10,6 +11,7 @@ import { QueryProcessor } from "./query.js";
 
  //#region src/analytics/analytics.ts
  init_context();
+ const logger = createLogger("analytics");
  var AnalyticsPlugin = class extends Plugin {
  static {
  this.description = "Analytics plugin for data analysis";
@@ -42,22 +44,6 @@ var AnalyticsPlugin = class extends Plugin {
  await this._handleQueryRoute(req, res);
  }
  });
- this.route(router, {
- name: "arrowAsUser",
- method: "get",
- path: "/users/me/arrow-result/:jobId",
- handler: async (req, res) => {
- await this.asUser(req)._handleArrowRoute(req, res);
- }
- });
- this.route(router, {
- name: "queryAsUser",
- method: "post",
- path: "/users/me/query/:query_key",
- handler: async (req, res) => {
- await this.asUser(req)._handleQueryRoute(req, res);
- }
- });
  }
  /**
  * Handle Arrow data download requests.
@@ -67,15 +53,19 @@ var AnalyticsPlugin = class extends Plugin {
  try {
  const { jobId } = req.params;
  const workspaceClient = getWorkspaceClient();
- console.log(`Processing Arrow job request: ${jobId} for plugin: ${this.name}`);
+ logger.debug("Processing Arrow job request for jobId=%s", jobId);
+ logger.event(req)?.setComponent("analytics", "getArrowData").setContext("analytics", {
+ job_id: jobId,
+ plugin: this.name
+ });
  const result = await this.getArrowData(workspaceClient, jobId);
  res.setHeader("Content-Type", "application/octet-stream");
  res.setHeader("Content-Length", result.data.length.toString());
  res.setHeader("Cache-Control", "public, max-age=3600");
- console.log(`Sending Arrow buffer: ${result.data.length} bytes for job ${jobId}`);
+ logger.debug("Sending Arrow buffer: %d bytes for job %s", result.data.length, jobId);
  res.send(Buffer.from(result.data));
  } catch (error) {
- console.error(`Arrow job error for ${this.name}:`, error);
+ logger.error("Arrow job error: %O", error);
  res.status(404).json({
  error: error instanceof Error ? error.message : "Arrow job not found",
  plugin: this.name
@@ -89,23 +79,33 @@ var AnalyticsPlugin = class extends Plugin {
  async _handleQueryRoute(req, res) {
  const { query_key } = req.params;
  const { parameters, format = "JSON" } = req.body;
- const queryParameters = format === "ARROW" ? {
- formatParameters: {
- disposition: "EXTERNAL_LINKS",
- format: "ARROW_STREAM"
- },
- type: "arrow"
- } : { type: "result" };
- const userKey = getCurrentUserId();
+ logger.debug(req, "Executing query: %s (format=%s)", query_key, format);
+ logger.event(req)?.setComponent("analytics", "executeQuery").setContext("analytics", {
+ query_key,
+ format,
+ parameter_count: parameters ? Object.keys(parameters).length : 0,
+ plugin: this.name
+ });
  if (!query_key) {
  res.status(400).json({ error: "query_key is required" });
  return;
  }
- const query = await this.app.getAppQuery(query_key, req, this.devFileReader);
- if (!query) {
+ const queryResult = await this.app.getAppQuery(query_key, req, this.devFileReader);
+ if (!queryResult) {
  res.status(404).json({ error: "Query not found" });
  return;
  }
+ const { query, isAsUser } = queryResult;
+ const executor = isAsUser ? this.asUser(req) : this;
+ const userKey = getCurrentUserId();
+ const executorKey = isAsUser ? userKey : "global";
+ const queryParameters = format === "ARROW" ? {
+ formatParameters: {
+ disposition: "EXTERNAL_LINKS",
+ format: "ARROW_STREAM"
+ },
+ type: "arrow"
+ } : { type: "result" };
  const hashedQuery = this.queryProcessor.hashQuery(query);
  const streamExecutionSettings = { default: {
  ...queryDefaults,
@@ -117,18 +117,18 @@
  JSON.stringify(parameters),
  JSON.stringify(format),
  hashedQuery,
- userKey
+ executorKey
  ]
  }
  } };
- await this.executeStream(res, async (signal) => {
+ await executor.executeStream(res, async (signal) => {
  const processedParams = await this.queryProcessor.processQueryParams(query, parameters);
- const result = await this.query(query, processedParams, queryParameters.formatParameters, signal);
+ const result = await executor.query(query, processedParams, queryParameters.formatParameters, signal);
  return {
  type: queryParameters.type,
  ...result
  };
- }, streamExecutionSettings, userKey);
+ }, streamExecutionSettings, executorKey);
  }
  /**
  * Execute a SQL query using the current execution context.
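The practical effect of switching the cache and stream keys from `userKey` to `executorKey` is that shared (`.sql`) queries now produce a single cache entry for all callers, while `.obo.sql` queries stay partitioned per user. A sketch of the two resulting key shapes, with illustrative values plugged into the key layout built in the hunk above:

```ts
// Illustrative values; the array layout is the one assembled in _handleQueryRoute.
const query_key = "spend_by_region"; // hypothetical query key
const parameters = {}; // request parameters
const format = "JSON";
const hashedQuery = "<md5-of-the-query-text>"; // placeholder for queryProcessor.hashQuery(query)

// spend_by_region.sql -> executorKey === "global": one shared entry for every caller
const sharedKey = ["analytics:query", query_key, JSON.stringify(parameters), JSON.stringify(format), hashedQuery, "global"];

// spend_by_region.obo.sql -> executorKey === getCurrentUserId(): one entry per user
const perUserKey = ["analytics:query", query_key, JSON.stringify(parameters), JSON.stringify(format), hashedQuery, "<current-user-id>"];
```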
package/dist/analytics/analytics.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"analytics.js","names":[],"sources":["../../src/analytics/analytics.ts"],"sourcesContent":["import type { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport type {\n IAppRouter,\n PluginExecuteConfig,\n SQLTypeMarker,\n StreamExecutionSettings,\n} from \"shared\";\nimport { SQLWarehouseConnector } from \"../connectors\";\nimport {\n getCurrentUserId,\n getWarehouseId,\n getWorkspaceClient,\n} from \"../context\";\nimport type express from \"express\";\nimport { Plugin, toPlugin } from \"../plugin\";\nimport { queryDefaults } from \"./defaults\";\nimport { QueryProcessor } from \"./query\";\nimport type {\n AnalyticsQueryResponse,\n IAnalyticsConfig,\n IAnalyticsQueryRequest,\n} from \"./types\";\n\nexport class AnalyticsPlugin extends Plugin {\n name = \"analytics\";\n envVars = [];\n\n protected static description = \"Analytics plugin for data analysis\";\n protected declare config: IAnalyticsConfig;\n\n // analytics services\n private SQLClient: SQLWarehouseConnector;\n private queryProcessor: QueryProcessor;\n\n constructor(config: IAnalyticsConfig) {\n super(config);\n this.config = config;\n this.queryProcessor = new QueryProcessor();\n\n this.SQLClient = new SQLWarehouseConnector({\n timeout: config.timeout,\n telemetry: config.telemetry,\n });\n }\n\n injectRoutes(router: IAppRouter) {\n // Service principal endpoints\n this.route(router, {\n name: \"arrow\",\n method: \"get\",\n path: \"/arrow-result/:jobId\",\n handler: async (req: express.Request, res: express.Response) => {\n await this._handleArrowRoute(req, res);\n },\n });\n\n this.route<AnalyticsQueryResponse>(router, {\n name: \"query\",\n method: \"post\",\n path: \"/query/:query_key\",\n handler: async (req: express.Request, res: express.Response) => {\n await this._handleQueryRoute(req, res);\n },\n });\n\n // User context endpoints - use asUser(req) to execute with user's identity\n this.route(router, {\n name: \"arrowAsUser\",\n method: \"get\",\n path: \"/users/me/arrow-result/:jobId\",\n handler: async (req: express.Request, res: express.Response) => {\n await this.asUser(req)._handleArrowRoute(req, res);\n },\n });\n\n this.route<AnalyticsQueryResponse>(router, {\n name: \"queryAsUser\",\n method: \"post\",\n path: \"/users/me/query/:query_key\",\n handler: async (req: express.Request, res: express.Response) => {\n await this.asUser(req)._handleQueryRoute(req, res);\n },\n });\n }\n\n /**\n * Handle Arrow data download requests.\n * When called via asUser(req), uses the user's Databricks credentials.\n */\n async _handleArrowRoute(\n req: express.Request,\n res: express.Response,\n ): Promise<void> {\n try {\n const { jobId } = req.params;\n const workspaceClient = getWorkspaceClient();\n\n console.log(\n `Processing Arrow job request: ${jobId} for plugin: ${this.name}`,\n );\n\n const result = await this.getArrowData(workspaceClient, jobId);\n\n res.setHeader(\"Content-Type\", \"application/octet-stream\");\n res.setHeader(\"Content-Length\", result.data.length.toString());\n res.setHeader(\"Cache-Control\", \"public, max-age=3600\");\n\n console.log(\n `Sending Arrow buffer: ${result.data.length} bytes for job ${jobId}`,\n );\n res.send(Buffer.from(result.data));\n } catch (error) {\n console.error(`Arrow job error for ${this.name}:`, error);\n res.status(404).json({\n error: error instanceof Error ? 
error.message : \"Arrow job not found\",\n plugin: this.name,\n });\n }\n }\n\n /**\n * Handle SQL query execution requests.\n * When called via asUser(req), uses the user's Databricks credentials.\n */\n async _handleQueryRoute(\n req: express.Request,\n res: express.Response,\n ): Promise<void> {\n const { query_key } = req.params;\n const { parameters, format = \"JSON\" } = req.body as IAnalyticsQueryRequest;\n const queryParameters =\n format === \"ARROW\"\n ? {\n formatParameters: {\n disposition: \"EXTERNAL_LINKS\",\n format: \"ARROW_STREAM\",\n },\n type: \"arrow\",\n }\n : {\n type: \"result\",\n };\n\n // Get user key from current context (automatically includes user ID when in user context)\n const userKey = getCurrentUserId();\n\n if (!query_key) {\n res.status(400).json({ error: \"query_key is required\" });\n return;\n }\n\n const query = await this.app.getAppQuery(\n query_key,\n req,\n this.devFileReader,\n );\n\n if (!query) {\n res.status(404).json({ error: \"Query not found\" });\n return;\n }\n\n const hashedQuery = this.queryProcessor.hashQuery(query);\n\n const defaultConfig: PluginExecuteConfig = {\n ...queryDefaults,\n cache: {\n ...queryDefaults.cache,\n cacheKey: [\n \"analytics:query\",\n query_key,\n JSON.stringify(parameters),\n JSON.stringify(format),\n hashedQuery,\n userKey,\n ],\n },\n };\n\n const streamExecutionSettings: StreamExecutionSettings = {\n default: defaultConfig,\n };\n\n await this.executeStream(\n res,\n async (signal) => {\n const processedParams = await this.queryProcessor.processQueryParams(\n query,\n parameters,\n );\n\n const result = await this.query(\n query,\n processedParams,\n queryParameters.formatParameters,\n signal,\n );\n\n return { type: queryParameters.type, ...result };\n },\n streamExecutionSettings,\n userKey,\n );\n }\n\n /**\n * Execute a SQL query using the current execution context.\n *\n * When called directly: uses service principal credentials.\n * When called via asUser(req).query(...): uses user's credentials.\n *\n * @example\n * ```typescript\n * // Service principal execution\n * const result = await analytics.query(\"SELECT * FROM table\")\n *\n * // User context execution (in route handler)\n * const result = await this.asUser(req).query(\"SELECT * FROM table\")\n * ```\n */\n async query(\n query: string,\n parameters?: Record<string, SQLTypeMarker | null | undefined>,\n formatParameters?: Record<string, any>,\n signal?: AbortSignal,\n ): Promise<any> {\n const workspaceClient = getWorkspaceClient();\n const warehouseId = await getWarehouseId();\n\n const { statement, parameters: sqlParameters } =\n this.queryProcessor.convertToSQLParameters(query, parameters);\n\n const response = await this.SQLClient.executeStatement(\n workspaceClient,\n {\n statement,\n warehouse_id: warehouseId,\n parameters: sqlParameters,\n ...formatParameters,\n },\n signal,\n );\n\n return response.result;\n }\n\n /**\n * Get Arrow-formatted data for a completed query job.\n */\n protected async getArrowData(\n workspaceClient: WorkspaceClient,\n jobId: string,\n signal?: AbortSignal,\n ): Promise<ReturnType<typeof this.SQLClient.getArrowData>> {\n return await this.SQLClient.getArrowData(workspaceClient, jobId, signal);\n }\n\n async shutdown(): Promise<void> {\n this.streamManager.abortAll();\n }\n}\n\n/**\n * @internal\n */\nexport const analytics = toPlugin<\n typeof AnalyticsPlugin,\n IAnalyticsConfig,\n \"analytics\"\n>(AnalyticsPlugin, 
\"analytics\");\n"],"mappings":";;;;;;;;;;;cAYoB;AAWpB,IAAa,kBAAb,cAAqC,OAAO;;qBAIX;;CAO/B,YAAY,QAA0B;AACpC,QAAM,OAAO;cAXR;iBACG,EAAE;AAWV,OAAK,SAAS;AACd,OAAK,iBAAiB,IAAI,gBAAgB;AAE1C,OAAK,YAAY,IAAI,sBAAsB;GACzC,SAAS,OAAO;GAChB,WAAW,OAAO;GACnB,CAAC;;CAGJ,aAAa,QAAoB;AAE/B,OAAK,MAAM,QAAQ;GACjB,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAsB,QAA0B;AAC9D,UAAM,KAAK,kBAAkB,KAAK,IAAI;;GAEzC,CAAC;AAEF,OAAK,MAA8B,QAAQ;GACzC,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAsB,QAA0B;AAC9D,UAAM,KAAK,kBAAkB,KAAK,IAAI;;GAEzC,CAAC;AAGF,OAAK,MAAM,QAAQ;GACjB,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAsB,QAA0B;AAC9D,UAAM,KAAK,OAAO,IAAI,CAAC,kBAAkB,KAAK,IAAI;;GAErD,CAAC;AAEF,OAAK,MAA8B,QAAQ;GACzC,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAsB,QAA0B;AAC9D,UAAM,KAAK,OAAO,IAAI,CAAC,kBAAkB,KAAK,IAAI;;GAErD,CAAC;;;;;;CAOJ,MAAM,kBACJ,KACA,KACe;AACf,MAAI;GACF,MAAM,EAAE,UAAU,IAAI;GACtB,MAAM,kBAAkB,oBAAoB;AAE5C,WAAQ,IACN,iCAAiC,MAAM,eAAe,KAAK,OAC5D;GAED,MAAM,SAAS,MAAM,KAAK,aAAa,iBAAiB,MAAM;AAE9D,OAAI,UAAU,gBAAgB,2BAA2B;AACzD,OAAI,UAAU,kBAAkB,OAAO,KAAK,OAAO,UAAU,CAAC;AAC9D,OAAI,UAAU,iBAAiB,uBAAuB;AAEtD,WAAQ,IACN,yBAAyB,OAAO,KAAK,OAAO,iBAAiB,QAC9D;AACD,OAAI,KAAK,OAAO,KAAK,OAAO,KAAK,CAAC;WAC3B,OAAO;AACd,WAAQ,MAAM,uBAAuB,KAAK,KAAK,IAAI,MAAM;AACzD,OAAI,OAAO,IAAI,CAAC,KAAK;IACnB,OAAO,iBAAiB,QAAQ,MAAM,UAAU;IAChD,QAAQ,KAAK;IACd,CAAC;;;;;;;CAQN,MAAM,kBACJ,KACA,KACe;EACf,MAAM,EAAE,cAAc,IAAI;EAC1B,MAAM,EAAE,YAAY,SAAS,WAAW,IAAI;EAC5C,MAAM,kBACJ,WAAW,UACP;GACE,kBAAkB;IAChB,aAAa;IACb,QAAQ;IACT;GACD,MAAM;GACP,GACD,EACE,MAAM,UACP;EAGP,MAAM,UAAU,kBAAkB;AAElC,MAAI,CAAC,WAAW;AACd,OAAI,OAAO,IAAI,CAAC,KAAK,EAAE,OAAO,yBAAyB,CAAC;AACxD;;EAGF,MAAM,QAAQ,MAAM,KAAK,IAAI,YAC3B,WACA,KACA,KAAK,cACN;AAED,MAAI,CAAC,OAAO;AACV,OAAI,OAAO,IAAI,CAAC,KAAK,EAAE,OAAO,mBAAmB,CAAC;AAClD;;EAGF,MAAM,cAAc,KAAK,eAAe,UAAU,MAAM;EAiBxD,MAAM,0BAAmD,EACvD,SAhByC;GACzC,GAAG;GACH,OAAO;IACL,GAAG,cAAc;IACjB,UAAU;KACR;KACA;KACA,KAAK,UAAU,WAAW;KAC1B,KAAK,UAAU,OAAO;KACtB;KACA;KACD;IACF;GACF,EAIA;AAED,QAAM,KAAK,cACT,KACA,OAAO,WAAW;GAChB,MAAM,kBAAkB,MAAM,KAAK,eAAe,mBAChD,OACA,WACD;GAED,MAAM,SAAS,MAAM,KAAK,MACxB,OACA,iBACA,gBAAgB,kBAChB,OACD;AAED,UAAO;IAAE,MAAM,gBAAgB;IAAM,GAAG;IAAQ;KAElD,yBACA,QACD;;;;;;;;;;;;;;;;;CAkBH,MAAM,MACJ,OACA,YACA,kBACA,QACc;EACd,MAAM,kBAAkB,oBAAoB;EAC5C,MAAM,cAAc,MAAM,gBAAgB;EAE1C,MAAM,EAAE,WAAW,YAAY,kBAC7B,KAAK,eAAe,uBAAuB,OAAO,WAAW;AAa/D,UAXiB,MAAM,KAAK,UAAU,iBACpC,iBACA;GACE;GACA,cAAc;GACd,YAAY;GACZ,GAAG;GACJ,EACD,OACD,EAEe;;;;;CAMlB,MAAgB,aACd,iBACA,OACA,QACyD;AACzD,SAAO,MAAM,KAAK,UAAU,aAAa,iBAAiB,OAAO,OAAO;;CAG1E,MAAM,WAA0B;AAC9B,OAAK,cAAc,UAAU;;;;;;AAOjC,MAAa,YAAY,SAIvB,iBAAiB,YAAY"}
+ {"version":3,"file":"analytics.js","names":[],"sources":["../../src/analytics/analytics.ts"],"sourcesContent":["import type { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport type express from \"express\";\nimport type {\n IAppRouter,\n PluginExecuteConfig,\n SQLTypeMarker,\n StreamExecutionSettings,\n} from \"shared\";\nimport { SQLWarehouseConnector } from \"../connectors\";\nimport {\n getCurrentUserId,\n getWarehouseId,\n getWorkspaceClient,\n} from \"../context\";\nimport { createLogger } from \"../logging/logger\";\nimport { Plugin, toPlugin } from \"../plugin\";\nimport { queryDefaults } from \"./defaults\";\nimport { QueryProcessor } from \"./query\";\nimport type {\n AnalyticsQueryResponse,\n IAnalyticsConfig,\n IAnalyticsQueryRequest,\n} from \"./types\";\n\nconst logger = createLogger(\"analytics\");\n\nexport class AnalyticsPlugin extends Plugin {\n name = \"analytics\";\n envVars = [];\n\n protected static description = \"Analytics plugin for data analysis\";\n protected declare config: IAnalyticsConfig;\n\n // analytics services\n private SQLClient: SQLWarehouseConnector;\n private queryProcessor: QueryProcessor;\n\n constructor(config: IAnalyticsConfig) {\n super(config);\n this.config = config;\n this.queryProcessor = new QueryProcessor();\n\n this.SQLClient = new SQLWarehouseConnector({\n timeout: config.timeout,\n telemetry: config.telemetry,\n });\n }\n\n injectRoutes(router: IAppRouter) {\n // Service principal endpoints\n this.route(router, {\n name: \"arrow\",\n method: \"get\",\n path: \"/arrow-result/:jobId\",\n handler: async (req: express.Request, res: express.Response) => {\n await this._handleArrowRoute(req, res);\n },\n });\n\n this.route<AnalyticsQueryResponse>(router, {\n name: \"query\",\n method: \"post\",\n path: \"/query/:query_key\",\n handler: async (req: express.Request, res: express.Response) => {\n await this._handleQueryRoute(req, res);\n },\n });\n }\n\n /**\n * Handle Arrow data download requests.\n * When called via asUser(req), uses the user's Databricks credentials.\n */\n async _handleArrowRoute(\n req: express.Request,\n res: express.Response,\n ): Promise<void> {\n try {\n const { jobId } = req.params;\n const workspaceClient = getWorkspaceClient();\n\n logger.debug(\"Processing Arrow job request for jobId=%s\", jobId);\n\n const event = logger.event(req);\n event?.setComponent(\"analytics\", \"getArrowData\").setContext(\"analytics\", {\n job_id: jobId,\n plugin: this.name,\n });\n\n const result = await this.getArrowData(workspaceClient, jobId);\n\n res.setHeader(\"Content-Type\", \"application/octet-stream\");\n res.setHeader(\"Content-Length\", result.data.length.toString());\n res.setHeader(\"Cache-Control\", \"public, max-age=3600\");\n\n logger.debug(\n \"Sending Arrow buffer: %d bytes for job %s\",\n result.data.length,\n jobId,\n );\n res.send(Buffer.from(result.data));\n } catch (error) {\n logger.error(\"Arrow job error: %O\", error);\n res.status(404).json({\n error: error instanceof Error ? 
error.message : \"Arrow job not found\",\n plugin: this.name,\n });\n }\n }\n\n /**\n * Handle SQL query execution requests.\n * When called via asUser(req), uses the user's Databricks credentials.\n */\n async _handleQueryRoute(\n req: express.Request,\n res: express.Response,\n ): Promise<void> {\n const { query_key } = req.params;\n const { parameters, format = \"JSON\" } = req.body as IAnalyticsQueryRequest;\n\n // Request-scoped logging with WideEvent tracking\n logger.debug(req, \"Executing query: %s (format=%s)\", query_key, format);\n\n const event = logger.event(req);\n event?.setComponent(\"analytics\", \"executeQuery\").setContext(\"analytics\", {\n query_key,\n format,\n parameter_count: parameters ? Object.keys(parameters).length : 0,\n plugin: this.name,\n });\n\n if (!query_key) {\n res.status(400).json({ error: \"query_key is required\" });\n return;\n }\n\n const queryResult = await this.app.getAppQuery(\n query_key,\n req,\n this.devFileReader,\n );\n\n if (!queryResult) {\n res.status(404).json({ error: \"Query not found\" });\n return;\n }\n\n const { query, isAsUser } = queryResult;\n\n // get execution context - user-scoped if .obo.sql, otherwise service principal\n const executor = isAsUser ? this.asUser(req) : this;\n const userKey = getCurrentUserId();\n const executorKey = isAsUser ? userKey : \"global\";\n\n const queryParameters =\n format === \"ARROW\"\n ? {\n formatParameters: {\n disposition: \"EXTERNAL_LINKS\",\n format: \"ARROW_STREAM\",\n },\n type: \"arrow\",\n }\n : {\n type: \"result\",\n };\n\n const hashedQuery = this.queryProcessor.hashQuery(query);\n\n const defaultConfig: PluginExecuteConfig = {\n ...queryDefaults,\n cache: {\n ...queryDefaults.cache,\n cacheKey: [\n \"analytics:query\",\n query_key,\n JSON.stringify(parameters),\n JSON.stringify(format),\n hashedQuery,\n executorKey,\n ],\n },\n };\n\n const streamExecutionSettings: StreamExecutionSettings = {\n default: defaultConfig,\n };\n\n await executor.executeStream(\n res,\n async (signal) => {\n const processedParams = await this.queryProcessor.processQueryParams(\n query,\n parameters,\n );\n\n const result = await executor.query(\n query,\n processedParams,\n queryParameters.formatParameters,\n signal,\n );\n\n return { type: queryParameters.type, ...result };\n },\n streamExecutionSettings,\n executorKey,\n );\n }\n\n /**\n * Execute a SQL query using the current execution context.\n *\n * When called directly: uses service principal credentials.\n * When called via asUser(req).query(...): uses user's credentials.\n *\n * @example\n * ```typescript\n * // Service principal execution\n * const result = await analytics.query(\"SELECT * FROM table\")\n *\n * // User context execution (in route handler)\n * const result = await this.asUser(req).query(\"SELECT * FROM table\")\n * ```\n */\n async query(\n query: string,\n parameters?: Record<string, SQLTypeMarker | null | undefined>,\n formatParameters?: Record<string, any>,\n signal?: AbortSignal,\n ): Promise<any> {\n const workspaceClient = getWorkspaceClient();\n const warehouseId = await getWarehouseId();\n\n const { statement, parameters: sqlParameters } =\n this.queryProcessor.convertToSQLParameters(query, parameters);\n\n const response = await this.SQLClient.executeStatement(\n workspaceClient,\n {\n statement,\n warehouse_id: warehouseId,\n parameters: sqlParameters,\n ...formatParameters,\n },\n signal,\n );\n\n return response.result;\n }\n\n /**\n * Get Arrow-formatted data for a completed query job.\n */\n protected async 
getArrowData(\n workspaceClient: WorkspaceClient,\n jobId: string,\n signal?: AbortSignal,\n ): Promise<ReturnType<typeof this.SQLClient.getArrowData>> {\n return await this.SQLClient.getArrowData(workspaceClient, jobId, signal);\n }\n\n async shutdown(): Promise<void> {\n this.streamManager.abortAll();\n }\n}\n\n/**\n * @internal\n */\nexport const analytics = toPlugin<\n typeof AnalyticsPlugin,\n IAnalyticsConfig,\n \"analytics\"\n>(AnalyticsPlugin, \"analytics\");\n"],"mappings":";;;;;;;;;;;;cAaoB;AAWpB,MAAM,SAAS,aAAa,YAAY;AAExC,IAAa,kBAAb,cAAqC,OAAO;;qBAIX;;CAO/B,YAAY,QAA0B;AACpC,QAAM,OAAO;cAXR;iBACG,EAAE;AAWV,OAAK,SAAS;AACd,OAAK,iBAAiB,IAAI,gBAAgB;AAE1C,OAAK,YAAY,IAAI,sBAAsB;GACzC,SAAS,OAAO;GAChB,WAAW,OAAO;GACnB,CAAC;;CAGJ,aAAa,QAAoB;AAE/B,OAAK,MAAM,QAAQ;GACjB,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAsB,QAA0B;AAC9D,UAAM,KAAK,kBAAkB,KAAK,IAAI;;GAEzC,CAAC;AAEF,OAAK,MAA8B,QAAQ;GACzC,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAsB,QAA0B;AAC9D,UAAM,KAAK,kBAAkB,KAAK,IAAI;;GAEzC,CAAC;;;;;;CAOJ,MAAM,kBACJ,KACA,KACe;AACf,MAAI;GACF,MAAM,EAAE,UAAU,IAAI;GACtB,MAAM,kBAAkB,oBAAoB;AAE5C,UAAO,MAAM,6CAA6C,MAAM;AAGhE,GADc,OAAO,MAAM,IAAI,EACxB,aAAa,aAAa,eAAe,CAAC,WAAW,aAAa;IACvE,QAAQ;IACR,QAAQ,KAAK;IACd,CAAC;GAEF,MAAM,SAAS,MAAM,KAAK,aAAa,iBAAiB,MAAM;AAE9D,OAAI,UAAU,gBAAgB,2BAA2B;AACzD,OAAI,UAAU,kBAAkB,OAAO,KAAK,OAAO,UAAU,CAAC;AAC9D,OAAI,UAAU,iBAAiB,uBAAuB;AAEtD,UAAO,MACL,6CACA,OAAO,KAAK,QACZ,MACD;AACD,OAAI,KAAK,OAAO,KAAK,OAAO,KAAK,CAAC;WAC3B,OAAO;AACd,UAAO,MAAM,uBAAuB,MAAM;AAC1C,OAAI,OAAO,IAAI,CAAC,KAAK;IACnB,OAAO,iBAAiB,QAAQ,MAAM,UAAU;IAChD,QAAQ,KAAK;IACd,CAAC;;;;;;;CAQN,MAAM,kBACJ,KACA,KACe;EACf,MAAM,EAAE,cAAc,IAAI;EAC1B,MAAM,EAAE,YAAY,SAAS,WAAW,IAAI;AAG5C,SAAO,MAAM,KAAK,mCAAmC,WAAW,OAAO;AAGvE,EADc,OAAO,MAAM,IAAI,EACxB,aAAa,aAAa,eAAe,CAAC,WAAW,aAAa;GACvE;GACA;GACA,iBAAiB,aAAa,OAAO,KAAK,WAAW,CAAC,SAAS;GAC/D,QAAQ,KAAK;GACd,CAAC;AAEF,MAAI,CAAC,WAAW;AACd,OAAI,OAAO,IAAI,CAAC,KAAK,EAAE,OAAO,yBAAyB,CAAC;AACxD;;EAGF,MAAM,cAAc,MAAM,KAAK,IAAI,YACjC,WACA,KACA,KAAK,cACN;AAED,MAAI,CAAC,aAAa;AAChB,OAAI,OAAO,IAAI,CAAC,KAAK,EAAE,OAAO,mBAAmB,CAAC;AAClD;;EAGF,MAAM,EAAE,OAAO,aAAa;EAG5B,MAAM,WAAW,WAAW,KAAK,OAAO,IAAI,GAAG;EAC/C,MAAM,UAAU,kBAAkB;EAClC,MAAM,cAAc,WAAW,UAAU;EAEzC,MAAM,kBACJ,WAAW,UACP;GACE,kBAAkB;IAChB,aAAa;IACb,QAAQ;IACT;GACD,MAAM;GACP,GACD,EACE,MAAM,UACP;EAEP,MAAM,cAAc,KAAK,eAAe,UAAU,MAAM;EAiBxD,MAAM,0BAAmD,EACvD,SAhByC;GACzC,GAAG;GACH,OAAO;IACL,GAAG,cAAc;IACjB,UAAU;KACR;KACA;KACA,KAAK,UAAU,WAAW;KAC1B,KAAK,UAAU,OAAO;KACtB;KACA;KACD;IACF;GACF,EAIA;AAED,QAAM,SAAS,cACb,KACA,OAAO,WAAW;GAChB,MAAM,kBAAkB,MAAM,KAAK,eAAe,mBAChD,OACA,WACD;GAED,MAAM,SAAS,MAAM,SAAS,MAC5B,OACA,iBACA,gBAAgB,kBAChB,OACD;AAED,UAAO;IAAE,MAAM,gBAAgB;IAAM,GAAG;IAAQ;KAElD,yBACA,YACD;;;;;;;;;;;;;;;;;CAkBH,MAAM,MACJ,OACA,YACA,kBACA,QACc;EACd,MAAM,kBAAkB,oBAAoB;EAC5C,MAAM,cAAc,MAAM,gBAAgB;EAE1C,MAAM,EAAE,WAAW,YAAY,kBAC7B,KAAK,eAAe,uBAAuB,OAAO,WAAW;AAa/D,UAXiB,MAAM,KAAK,UAAU,iBACpC,iBACA;GACE;GACA,cAAc;GACd,YAAY;GACZ,GAAG;GACJ,EACD,OACD,EAEe;;;;;CAMlB,MAAgB,aACd,iBACA,OACA,QACyD;AACzD,SAAO,MAAM,KAAK,UAAU,aAAa,iBAAiB,OAAO,OAAO;;CAG1E,MAAM,WAA0B;AAC9B,OAAK,cAAc,UAAU;;;;;;AAOjC,MAAa,YAAY,SAIvB,iBAAiB,YAAY"}
package/dist/analytics/query.js CHANGED
@@ -1,10 +1,13 @@
  import { isSQLTypeMarker, sql } from "../shared/src/sql/helpers.js";
+ import { ValidationError } from "../errors/validation.js";
+ import { init_errors } from "../errors/index.js";
  import { getWorkspaceId } from "../context/execution-context.js";
  import { init_context } from "../context/index.js";
  import { createHash } from "node:crypto";
 
  //#region src/analytics/query.ts
  init_context();
+ init_errors();
  var QueryProcessor = class {
  async processQueryParams(query, parameters) {
  const processed = { ...parameters };
@@ -23,7 +26,10 @@ var QueryProcessor = class {
  if (parameters) {
  const queryParamMatches = query.matchAll(/:([a-zA-Z_]\w*)/g);
  const queryParams = new Set(Array.from(queryParamMatches, (m) => m[1]));
- for (const key of Object.keys(parameters)) if (!queryParams.has(key)) throw new Error(`Parameter "${key}" not found in query. Valid parameters: ${Array.from(queryParams).join(", ") || "none"}`);
+ for (const key of Object.keys(parameters)) if (!queryParams.has(key)) {
+ const validParams = Array.from(queryParams).join(", ") || "none";
+ throw ValidationError.invalidValue(key, parameters[key], `a parameter defined in the query (valid: ${validParams})`);
+ }
  for (const [key, value] of Object.entries(parameters)) {
  const parameter = this._createParameter(key, value);
  if (parameter) sqlParameters.push(parameter);
@@ -36,7 +42,7 @@
  }
  _createParameter(key, value) {
  if (value === null || value === void 0) return null;
- if (!isSQLTypeMarker(value)) throw new Error(`Parameter "${key}" must be a SQL type. Use sql.string(), sql.number(), sql.date(), sql.timestamp(), or sql.boolean().`);
+ if (!isSQLTypeMarker(value)) throw ValidationError.invalidValue(key, value, "SQL type (use sql.string(), sql.number(), sql.date(), sql.timestamp(), or sql.boolean())");
  return {
  name: key,
  value: value.value,
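For reference, the named-parameter extraction that feeds this stricter validation is a plain regex over the statement text. A standalone sketch follows; the regex and `Set` construction are copied from the hunk above, the query text is hypothetical:

```ts
const query = "SELECT * FROM sales WHERE region = :region AND day >= :startDate";

// Same extraction as QueryProcessor.convertToSQLParameters
const queryParams = new Set(Array.from(query.matchAll(/:([a-zA-Z_]\w*)/g), (m) => m[1]));
console.log([...queryParams]); // ["region", "startDate"]

// Any key in `parameters` outside this set, or any value that is not a sql.* marker
// object, now surfaces as ValidationError.invalidValue(...) instead of a plain Error.
```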
package/dist/analytics/query.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"query.js","names":["sqlHelpers"],"sources":["../../src/analytics/query.ts"],"sourcesContent":["import { createHash } from \"node:crypto\";\nimport type { sql } from \"@databricks/sdk-experimental\";\nimport { isSQLTypeMarker, type SQLTypeMarker, sql as sqlHelpers } from \"shared\";\nimport { getWorkspaceId } from \"../context\";\n\ntype SQLParameterValue = SQLTypeMarker | null | undefined;\n\nexport class QueryProcessor {\n async processQueryParams(\n query: string,\n parameters?: Record<string, SQLParameterValue>,\n ): Promise<Record<string, SQLParameterValue>> {\n const processed = { ...parameters };\n\n // extract all params from the query\n const paramMatches = query.matchAll(/:([a-zA-Z_]\\w*)/g);\n const queryParams = new Set(Array.from(paramMatches, (m) => m[1]));\n\n // auto-inject workspaceId if needed and not provided\n if (queryParams.has(\"workspaceId\") && !processed.workspaceId) {\n const workspaceId = await getWorkspaceId();\n if (workspaceId) {\n processed.workspaceId = sqlHelpers.string(workspaceId);\n }\n }\n\n return processed;\n }\n\n hashQuery(query: string): string {\n return createHash(\"md5\").update(query).digest(\"hex\");\n }\n\n convertToSQLParameters(\n query: string,\n parameters?: Record<string, SQLParameterValue>,\n ): { statement: string; parameters: sql.StatementParameterListItem[] } {\n const sqlParameters: sql.StatementParameterListItem[] = [];\n\n if (parameters) {\n // extract all params from the query\n const queryParamMatches = query.matchAll(/:([a-zA-Z_]\\w*)/g);\n const queryParams = new Set(Array.from(queryParamMatches, (m) => m[1]));\n\n // only allow parameters that exist in the query\n for (const key of Object.keys(parameters)) {\n if (!queryParams.has(key)) {\n throw new Error(\n `Parameter \"${key}\" not found in query. Valid parameters: ${\n Array.from(queryParams).join(\", \") || \"none\"\n }`,\n );\n }\n }\n\n // convert parameters to SQL parameters\n for (const [key, value] of Object.entries(parameters)) {\n const parameter = this._createParameter(key, value);\n if (parameter) {\n sqlParameters.push(parameter);\n }\n }\n }\n\n return { statement: query, parameters: sqlParameters };\n }\n\n private _createParameter(\n key: string,\n value: SQLParameterValue,\n ): sql.StatementParameterListItem | null {\n if (value === null || value === undefined) {\n return null;\n }\n\n if (!isSQLTypeMarker(value)) {\n throw new Error(\n `Parameter \"${key}\" must be a SQL type. 
Use sql.string(), sql.number(), sql.date(), sql.timestamp(), or sql.boolean().`,\n );\n }\n\n return {\n name: key,\n value: value.value,\n type: value.__sql_type,\n };\n }\n}\n"],"mappings":";;;;;;cAG4C;AAI5C,IAAa,iBAAb,MAA4B;CAC1B,MAAM,mBACJ,OACA,YAC4C;EAC5C,MAAM,YAAY,EAAE,GAAG,YAAY;EAGnC,MAAM,eAAe,MAAM,SAAS,mBAAmB;AAIvD,MAHoB,IAAI,IAAI,MAAM,KAAK,eAAe,MAAM,EAAE,GAAG,CAAC,CAGlD,IAAI,cAAc,IAAI,CAAC,UAAU,aAAa;GAC5D,MAAM,cAAc,MAAM,gBAAgB;AAC1C,OAAI,YACF,WAAU,cAAcA,IAAW,OAAO,YAAY;;AAI1D,SAAO;;CAGT,UAAU,OAAuB;AAC/B,SAAO,WAAW,MAAM,CAAC,OAAO,MAAM,CAAC,OAAO,MAAM;;CAGtD,uBACE,OACA,YACqE;EACrE,MAAM,gBAAkD,EAAE;AAE1D,MAAI,YAAY;GAEd,MAAM,oBAAoB,MAAM,SAAS,mBAAmB;GAC5D,MAAM,cAAc,IAAI,IAAI,MAAM,KAAK,oBAAoB,MAAM,EAAE,GAAG,CAAC;AAGvE,QAAK,MAAM,OAAO,OAAO,KAAK,WAAW,CACvC,KAAI,CAAC,YAAY,IAAI,IAAI,CACvB,OAAM,IAAI,MACR,cAAc,IAAI,0CAChB,MAAM,KAAK,YAAY,CAAC,KAAK,KAAK,IAAI,SAEzC;AAKL,QAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,WAAW,EAAE;IACrD,MAAM,YAAY,KAAK,iBAAiB,KAAK,MAAM;AACnD,QAAI,UACF,eAAc,KAAK,UAAU;;;AAKnC,SAAO;GAAE,WAAW;GAAO,YAAY;GAAe;;CAGxD,AAAQ,iBACN,KACA,OACuC;AACvC,MAAI,UAAU,QAAQ,UAAU,OAC9B,QAAO;AAGT,MAAI,CAAC,gBAAgB,MAAM,CACzB,OAAM,IAAI,MACR,cAAc,IAAI,sGACnB;AAGH,SAAO;GACL,MAAM;GACN,OAAO,MAAM;GACb,MAAM,MAAM;GACb"}
+ {"version":3,"file":"query.js","names":["sqlHelpers"],"sources":["../../src/analytics/query.ts"],"sourcesContent":["import { createHash } from \"node:crypto\";\nimport type { sql } from \"@databricks/sdk-experimental\";\nimport { isSQLTypeMarker, type SQLTypeMarker, sql as sqlHelpers } from \"shared\";\nimport { getWorkspaceId } from \"../context\";\nimport { ValidationError } from \"../errors\";\n\ntype SQLParameterValue = SQLTypeMarker | null | undefined;\n\nexport class QueryProcessor {\n async processQueryParams(\n query: string,\n parameters?: Record<string, SQLParameterValue>,\n ): Promise<Record<string, SQLParameterValue>> {\n const processed = { ...parameters };\n\n // extract all params from the query\n const paramMatches = query.matchAll(/:([a-zA-Z_]\\w*)/g);\n const queryParams = new Set(Array.from(paramMatches, (m) => m[1]));\n\n // auto-inject workspaceId if needed and not provided\n if (queryParams.has(\"workspaceId\") && !processed.workspaceId) {\n const workspaceId = await getWorkspaceId();\n if (workspaceId) {\n processed.workspaceId = sqlHelpers.string(workspaceId);\n }\n }\n\n return processed;\n }\n\n hashQuery(query: string): string {\n return createHash(\"md5\").update(query).digest(\"hex\");\n }\n\n convertToSQLParameters(\n query: string,\n parameters?: Record<string, SQLParameterValue>,\n ): { statement: string; parameters: sql.StatementParameterListItem[] } {\n const sqlParameters: sql.StatementParameterListItem[] = [];\n\n if (parameters) {\n // extract all params from the query\n const queryParamMatches = query.matchAll(/:([a-zA-Z_]\\w*)/g);\n const queryParams = new Set(Array.from(queryParamMatches, (m) => m[1]));\n\n // only allow parameters that exist in the query\n for (const key of Object.keys(parameters)) {\n if (!queryParams.has(key)) {\n const validParams = Array.from(queryParams).join(\", \") || \"none\";\n throw ValidationError.invalidValue(\n key,\n parameters[key],\n `a parameter defined in the query (valid: ${validParams})`,\n );\n }\n }\n\n // convert parameters to SQL parameters\n for (const [key, value] of Object.entries(parameters)) {\n const parameter = this._createParameter(key, value);\n if (parameter) {\n sqlParameters.push(parameter);\n }\n }\n }\n\n return { statement: query, parameters: sqlParameters };\n }\n\n private _createParameter(\n key: string,\n value: SQLParameterValue,\n ): sql.StatementParameterListItem | null {\n if (value === null || value === undefined) {\n return null;\n }\n\n if (!isSQLTypeMarker(value)) {\n throw ValidationError.invalidValue(\n key,\n value,\n \"SQL type (use sql.string(), sql.number(), sql.date(), sql.timestamp(), or sql.boolean())\",\n );\n }\n\n return {\n name: key,\n value: value.value,\n type: value.__sql_type,\n };\n 
}\n}\n"],"mappings":";;;;;;;;cAG4C;aACA;AAI5C,IAAa,iBAAb,MAA4B;CAC1B,MAAM,mBACJ,OACA,YAC4C;EAC5C,MAAM,YAAY,EAAE,GAAG,YAAY;EAGnC,MAAM,eAAe,MAAM,SAAS,mBAAmB;AAIvD,MAHoB,IAAI,IAAI,MAAM,KAAK,eAAe,MAAM,EAAE,GAAG,CAAC,CAGlD,IAAI,cAAc,IAAI,CAAC,UAAU,aAAa;GAC5D,MAAM,cAAc,MAAM,gBAAgB;AAC1C,OAAI,YACF,WAAU,cAAcA,IAAW,OAAO,YAAY;;AAI1D,SAAO;;CAGT,UAAU,OAAuB;AAC/B,SAAO,WAAW,MAAM,CAAC,OAAO,MAAM,CAAC,OAAO,MAAM;;CAGtD,uBACE,OACA,YACqE;EACrE,MAAM,gBAAkD,EAAE;AAE1D,MAAI,YAAY;GAEd,MAAM,oBAAoB,MAAM,SAAS,mBAAmB;GAC5D,MAAM,cAAc,IAAI,IAAI,MAAM,KAAK,oBAAoB,MAAM,EAAE,GAAG,CAAC;AAGvE,QAAK,MAAM,OAAO,OAAO,KAAK,WAAW,CACvC,KAAI,CAAC,YAAY,IAAI,IAAI,EAAE;IACzB,MAAM,cAAc,MAAM,KAAK,YAAY,CAAC,KAAK,KAAK,IAAI;AAC1D,UAAM,gBAAgB,aACpB,KACA,WAAW,MACX,4CAA4C,YAAY,GACzD;;AAKL,QAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,WAAW,EAAE;IACrD,MAAM,YAAY,KAAK,iBAAiB,KAAK,MAAM;AACnD,QAAI,UACF,eAAc,KAAK,UAAU;;;AAKnC,SAAO;GAAE,WAAW;GAAO,YAAY;GAAe;;CAGxD,AAAQ,iBACN,KACA,OACuC;AACvC,MAAI,UAAU,QAAQ,UAAU,OAC9B,QAAO;AAGT,MAAI,CAAC,gBAAgB,MAAM,CACzB,OAAM,gBAAgB,aACpB,KACA,OACA,2FACD;AAGH,SAAO;GACL,MAAM;GACN,OAAO,MAAM;GACb,MAAM,MAAM;GACb"}
package/dist/app/index.d.ts CHANGED
@@ -6,6 +6,10 @@ interface RequestLike {
  interface DevFileReader {
  readFile(filePath: string, req: RequestLike): Promise<string>;
  }
+ interface QueryResult {
+ query: string;
+ isAsUser: boolean;
+ }
  declare class AppManager {
  /**
  * Retrieves a query file by key from the queries directory
@@ -16,7 +20,7 @@ declare class AppManager {
  * @returns The query content as a string
  * @throws Error if query key is invalid or file not found
  */
- getAppQuery(queryKey: string, req?: RequestLike, devFileReader?: DevFileReader): Promise<string | null>;
+ getAppQuery(queryKey: string, req?: RequestLike, devFileReader?: DevFileReader): Promise<QueryResult | null>;
  }
  //#endregion
  export { AppManager };
package/dist/app/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","names":[],"sources":["../../src/app/index.ts"],"sourcesContent":[],"mappings":";UAGU,WAAA;EAAA,KAAA,CAAA,EACA,MADW,CAAA,MAAA,EAAA,GAAA,CAAA;EAAA,OAAA,EAEV,MAFU,CAAA,MAAA,EAAA,MAAA,GAAA,MAAA,EAAA,GAAA,SAAA,CAAA;;UAKX,aAAA,CAHC;EAAM,QAAA,CAAA,QAAA,EAAA,MAAA,EAAA,GAAA,EAIiB,WAJjB,CAAA,EAI+B,OAJ/B,CAAA,MAAA,CAAA;AAAA;AAGM,cAIV,UAAA,CAJU;;;;AAIvB;;;;;;sCAYU,6BACU,gBACf"}
+ {"version":3,"file":"index.d.ts","names":[],"sources":["../../src/app/index.ts"],"sourcesContent":[],"mappings":";UAMU,WAAA;EAAA,KAAA,CAAA,EACA,MADW,CAAA,MAAA,EAAA,GAAA,CAAA;EAAA,OAAA,EAEV,MAFU,CAAA,MAAA,EAAA,MAAA,GAAA,MAAA,EAAA,GAAA,SAAA,CAAA;;UAKX,aAAA,CAHC;EAAM,QAAA,CAAA,QAAA,EAAA,MAAA,EAAA,GAAA,EAIiB,WAJjB,CAAA,EAI+B,OAJ/B,CAAA,MAAA,CAAA;AAAA;UAOP,WAAA,CAJa;OACW,EAAA,MAAA;UAAc,EAAA,OAAA;;AAGtC,cAKG,UAAA,CALQ;EAKR;;;;;;;;;sCAYH,6BACU,gBACf,QAAQ"}