@hanzo/runtime 0.0.0-dev

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,58 @@
+ /*
+  * Copyright 2025 Daytona Platforms Inc.
+  * SPDX-License-Identifier: Apache-2.0
+  */
+
+ import { Chart, parseChart } from '../types/Charts'
+ import { ExecutionArtifacts } from '../types/ExecuteResponse'
+
+ /**
+  * Utility class for parsing artifacts from command output
+  */
+ export class ArtifactParser {
+   /**
+    * Parses artifacts from command output text
+    *
+    * @param output - Raw output from command execution
+    * @returns Parsed artifacts including stdout and charts
+    */
+   public static parseArtifacts(output: string): ExecutionArtifacts {
+     const charts: Chart[] = []
+     let stdout = output
+
+     // Split output by lines to find artifact markers
+     const lines = output.split('\n')
+     const artifactLines: string[] = []
+
+     for (const line of lines) {
+       // Look for the artifact marker pattern
+       if (line.startsWith('dtn_artifact_k39fd2:')) {
+         artifactLines.push(line)
+
+         try {
+           const artifactJson = line.substring('dtn_artifact_k39fd2:'.length).trim()
+           const artifactData = JSON.parse(artifactJson)
+
+           if (artifactData.type === 'chart' && artifactData.value) {
+             const chartData = artifactData.value
+             charts.push(parseChart(chartData))
+           }
+         } catch (error) {
+           // Skip invalid artifacts
+           console.warn('Failed to parse artifact:', error)
+         }
+       }
+     }
+
+     // Remove artifact lines from stdout along with their following newlines
+     for (const line of artifactLines) {
+       stdout = stdout.replace(line + '\n', '')
+       stdout = stdout.replace(line, '')
+     }
+
+     return {
+       stdout,
+       charts: charts.length > 0 ? charts : undefined,
+     }
+   }
+ }
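For readers skimming the diff: parseArtifacts scans each output line for the dtn_artifact_k39fd2: marker, parses the JSON payload that follows it, collects any chart artifacts, and strips the marker lines from the stdout it returns. A minimal usage sketch (the import path and the chart payload shape are illustrative assumptions, not taken from the package):

import { ArtifactParser } from './ArtifactParser' // hypothetical path for the file above

// Raw command output mixing one embedded chart artifact line with plain stdout
const raw = [
  'dtn_artifact_k39fd2:' + JSON.stringify({ type: 'chart', value: { type: 'line', title: 'loss' } }),
  'training finished',
].join('\n')

const artifacts = ArtifactParser.parseArtifacts(raw)
console.log(artifacts.stdout)         // 'training finished', the marker line has been removed
console.log(artifacts.charts?.length) // 1, provided parseChart accepts the assumed payload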
@@ -0,0 +1,25 @@
+ /*
+  * Copyright 2025 Daytona Platforms Inc.
+  * SPDX-License-Identifier: Apache-2.0
+  */
+
+ import * as _path from 'path'
+
+ export function prefixRelativePath(prefix: string, path?: string): string {
+   let result = prefix
+
+   if (path) {
+     path = path.trim()
+     if (path === '~') {
+       result = prefix
+     } else if (path.startsWith('~/')) {
+       result = _path.join(prefix, path.slice(2))
+     } else if (_path.isAbsolute(path)) {
+       result = path
+     } else {
+       result = _path.join(prefix, path)
+     }
+   }
+
+   return result
+ }
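prefixRelativePath resolves a user-supplied path against a prefix: an empty or missing path and a bare ~ both map to the prefix itself, ~/... is joined under the prefix, absolute paths pass through unchanged, and anything else is treated as relative to the prefix. A quick sketch of the expected results, assuming POSIX separators and a made-up prefix (the module path is hypothetical):

import { prefixRelativePath } from './path' // hypothetical module path

prefixRelativePath('/home/daytona')                 // '/home/daytona'
prefixRelativePath('/home/daytona', '~')            // '/home/daytona'
prefixRelativePath('/home/daytona', '~/projects')   // '/home/daytona/projects'
prefixRelativePath('/home/daytona', '/tmp/out')     // '/tmp/out'
prefixRelativePath('/home/daytona', 'src/index.ts') // '/home/daytona/src/index.ts'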
@@ -0,0 +1,89 @@
+ /*
+  * Copyright 2025 Daytona Platforms Inc.
+  * SPDX-License-Identifier: Apache-2.0
+  */
+
+ /**
+  * Process a streaming response from a URL. Stream will terminate if the server-side stream
+  * ends or if the shouldTerminate function returns True.
+  *
+  * @param getStream - A function that returns a promise of an AxiosResponse with .data being the stream
+  * @param onChunk - A function to process each chunk of the response
+  * @param shouldTerminate - A function to check if the response should be terminated
+  * @param chunkTimeout - The timeout for each chunk
+  * @param requireConsecutiveTermination - Whether to require two consecutive termination signals
+  * to terminate the stream.
+  */
+ export async function processStreamingResponse(
+   getStream: () => Promise<any>, // can return AxiosResponse with .data being the stream
+   onChunk: (chunk: string) => void,
+   shouldTerminate: () => Promise<boolean>,
+   chunkTimeout = 2000,
+   requireConsecutiveTermination = true,
+ ): Promise<void> {
+   const response = await getStream()
+   const stream = response.data
+
+   let nextChunkPromise: Promise<Buffer | null> | null = null
+   let exitCheckStreak = 0
+   let terminated = false
+
+   const readNext = (): Promise<Buffer | null> => {
+     return new Promise((resolve) => {
+       const onData = (data: Buffer) => {
+         cleanup()
+         resolve(data)
+       }
+       const cleanup = () => {
+         stream.off('data', onData)
+       }
+       stream.once('data', onData)
+     })
+   }
+
+   const terminationPromise = new Promise<void>((resolve, reject) => {
+     stream.on('end', () => {
+       terminated = true
+       resolve()
+     })
+     stream.on('close', () => {
+       terminated = true
+       resolve()
+     })
+     stream.on('error', (err: Error) => {
+       terminated = true
+       reject(err)
+     })
+   })
+
+   const processLoop = async () => {
+     while (!terminated) {
+       if (!nextChunkPromise) {
+         nextChunkPromise = readNext()
+       }
+
+       const timeoutPromise = new Promise<null>((resolve) => setTimeout(() => resolve(null), chunkTimeout))
+       const result = await Promise.race([nextChunkPromise, timeoutPromise])
+
+       if (result instanceof Buffer) {
+         onChunk(result.toString('utf8'))
+         nextChunkPromise = null
+         exitCheckStreak = 0
+       } else {
+         const shouldEnd = await shouldTerminate()
+         if (shouldEnd) {
+           exitCheckStreak += 1
+           if (!requireConsecutiveTermination || exitCheckStreak > 1) {
+             break
+           }
+         } else {
+           exitCheckStreak = 0
+         }
+       }
+     }
+     stream.destroy()
+     stream.removeAllListeners()
+   }
+
+   await Promise.race([processLoop(), terminationPromise])
+ }
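processStreamingResponse races each pending read against a chunkTimeout timer; only when a read times out does it poll shouldTerminate, and with requireConsecutiveTermination enabled it stops only after two consecutive positive checks, so a single transient signal does not cut off a slow but still-live stream. A caller sketch, assuming an axios request with responseType: 'stream' inside an async context (the URL, module path, and finished flag are illustrative):

import axios from 'axios'
import { processStreamingResponse } from './stream' // hypothetical module path

let finished = false // would be flipped by whatever tracks the remote command's lifecycle

await processStreamingResponse(
  () => axios.get('https://example.invalid/logs', { responseType: 'stream' }),
  (chunk) => process.stdout.write(chunk), // handle each decoded chunk
  async () => finished,                   // consulted only when a chunk read times out
  2000,                                   // per-chunk timeout in milliseconds
  true,                                   // require two consecutive positive termination checks
)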
package/tsconfig.json ADDED
@@ -0,0 +1,16 @@
+ {
+   "extends": "../../tsconfig.base.json",
+   "files": [],
+   "include": [],
+   "references": [
+     {
+       "path": "./tsconfig.lib.json"
+     },
+     {
+       "path": "./tsconfig.spec.json"
+     }
+   ],
+   "compilerOptions": {
+     "esModuleInterop": true
+   }
+ }
@@ -0,0 +1,16 @@
+ {
+   "extends": "./tsconfig.json",
+   "compilerOptions": {
+     "outDir": "../../dist/out-tsc",
+     "module": "NodeNext",
+     "moduleResolution": "nodenext",
+     "types": ["node"],
+     "experimentalDecorators": true,
+     "emitDecoratorMetadata": true,
+     "target": "es2022",
+     "declaration": true,
+     "resolveJsonModule": true
+   },
+   "include": ["src/**/*.ts"],
+   "exclude": ["jest.config.ts", "src/**/*.spec.ts", "src/**/*.test.ts"]
+ }
@@ -0,0 +1,12 @@
+ {
+   "extends": "./tsconfig.json",
+   "compilerOptions": {
+     "outDir": "../../dist/out-tsc",
+     "module": "commonjs",
+     "moduleResolution": "node10",
+     "types": ["jest", "node"],
+     "declaration": true,
+     "resolveJsonModule": true
+   },
+   "include": ["jest.config.ts", "src/**/*.test.ts", "src/**/*.spec.ts", "src/**/*.d.ts"]
+ }
package/typedoc.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "name": "Daytona TypeScript SDK",
+   "includeVersion": true,
+   "readme": "none",
+   "cleanOutputDir": false,
+
+   "$schema": "https://typedoc-plugin-markdown.org/schema.json",
+   "entryPoints": ["./src/*.ts", "./src/errors/*.ts", "./src/types/*.ts"],
+   "exclude": ["src/index.ts"],
+   "out": "../../apps/docs/src/content/docs/typescript-sdk",
+   "excludePrivate": true,
+   "excludeProtected": true,
+   "excludeExternals": true,
+   "excludeNotDocumented": false,
+   "plugin": ["typedoc-plugin-markdown", "./hooks/typedoc-custom.mjs", "typedoc-plugin-merge-modules"],
+   "theme": "markdown",
+   "sort": ["static-first", "alphabetical"],
+   "groupOrder": ["Classes", "Enums", "*"],
+   "disableSources": true,
+
+   "membersWithOwnFile": [],
+   "flattenOutputFiles": true,
+
+   "hidePageHeader": true,
+   "hideBreadcrumbs": true,
+   "hidePageTitle": true,
+   "hideGroupHeadings": true,
+   "useCodeBlocks": true,
+   "expandObjects": true,
+   "expandParameters": true,
+
+   "mergeModulesMergeMode": "module"
+ }