@aippy/runtime 0.2.6 → 0.2.7-dev.1

This diff shows the content of publicly released package versions as published to a supported public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
Files changed (44)
  1. package/README.md +34 -0
  2. package/dist/ai/config/bridge.d.ts +23 -0
  3. package/dist/ai/config/helper.d.ts +43 -0
  4. package/dist/ai/config/index.d.ts +11 -0
  5. package/dist/ai/config/parser.d.ts +60 -0
  6. package/dist/ai/config/types.d.ts +80 -0
  7. package/dist/ai/errors.d.ts +25 -0
  8. package/dist/ai/index.d.ts +46 -0
  9. package/dist/ai/index.js +25 -0
  10. package/dist/ai/openai/index.d.ts +11 -0
  11. package/dist/ai/openai/provider.d.ts +51 -0
  12. package/dist/ai/shared/config.d.ts +54 -0
  13. package/dist/ai/shared/fetch.d.ts +28 -0
  14. package/dist/ai/shared/index.d.ts +6 -0
  15. package/dist/ai/ui/config.d.ts +25 -0
  16. package/dist/ai/ui/endpoints.d.ts +12 -0
  17. package/dist/ai/ui/index.d.ts +13 -0
  18. package/dist/ai/ui/types.d.ts +21 -0
  19. package/dist/ai.d.ts +2 -0
  20. package/dist/bridge-D2d8hnO_.js +395 -0
  21. package/dist/container-message-DGrno17o.js +31 -0
  22. package/dist/core/container-message.d.ts +81 -0
  23. package/dist/core/index.d.ts +1 -0
  24. package/dist/core/index.js +21 -18
  25. package/dist/core/native-bridge.d.ts +9 -0
  26. package/dist/device/index.js +121 -129
  27. package/dist/device/sensors.d.ts +5 -1
  28. package/dist/errors-CDEBaBxB.js +26 -0
  29. package/dist/helper-CRGxnlsu.js +261 -0
  30. package/dist/index/index.js +113 -52
  31. package/dist/index.d.ts +2 -0
  32. package/dist/native-bridge-BnvipFJc.js +6 -0
  33. package/dist/{runtime-DjBdOttl.js → runtime-CmoG3v2m.js} +55 -76
  34. package/dist/user/api.d.ts +9 -0
  35. package/dist/user/bridge.d.ts +162 -0
  36. package/dist/user/config.d.ts +21 -0
  37. package/dist/user/hooks.d.ts +74 -0
  38. package/dist/user/index.d.ts +12 -0
  39. package/dist/user/index.js +38 -0
  40. package/dist/user/types.d.ts +113 -0
  41. package/dist/user/userSessionInfo.d.ts +6 -0
  42. package/dist/user.d.ts +2 -0
  43. package/dist/userSessionInfo-SbuNZ7JR.js +229 -0
  44. package/package.json +50 -36
package/README.md CHANGED
@@ -87,12 +87,46 @@ button.onclick = async () => {
  };
  ```

+ ### AI (Backend Proxy Adapter)
+
+ ```typescript
+ // Default base URL: https://api.aippy.dev/api/aisdk/v1
+
+ import { streamText, experimental_generateImage as generateImage } from 'ai';
+ import { aippy } from '@aippy/runtime/ai';
+
+ // Create provider (reads from env vars automatically)
+ const provider = aippy();
+
+ // Or override with config
+ // const provider = aippy({ baseUrl: '...', userToken: '...' });
+
+ // Streaming text generation (uses Vercel AI SDK)
+ const result = await streamText({
+   model: provider('gpt'),
+   prompt: 'Write a haiku about TypeScript.',
+ });
+
+ for await (const chunk of result.textStream) {
+   console.log(chunk);
+ }
+
+ // Image generation
+ const image = await generateImage({
+   model: provider.image('dall-e-3'),
+   prompt: 'A sunset over mountains',
+ });
+
+ console.log(image.image?.base64);
+ ```
+
  ## Packages

  - `@aippy/runtime/core` - Core types and configuration
  - `@aippy/runtime/device` - Device APIs (camera, geolocation, sensors, file system)
  - `@aippy/runtime/utils` - Platform detection, performance monitoring, PWA utilities
  - `@aippy/runtime/audio` - iOS-compatible Web Audio API wrapper
+ - `@aippy/runtime/ai` - AI SDK adapter wrapping `ai` package, routes through backend proxy

  ## Publishing

package/dist/ai/config/bridge.d.ts ADDED
@@ -0,0 +1,23 @@
+ import { AIConfig } from './types';
+ /**
+  * Sends AI configuration to app container via postMessage
+  *
+  * Supports two scenarios:
+  * - iOS WebView: Uses `webkit.messageHandlers.aippyListener.postMessage()`
+  * - Iframe: Uses `window.parent.postMessage()`
+  *
+  * This is a one-way communication - we send the config but don't listen for updates
+  * because AI config is static (loaded at startup), not reactive like tweaks.
+  *
+  * @param config - The AI configuration to send
+  *
+  * @example
+  * ```ts
+  * import { sendAIConfigToContainer, loadAIConfig } from '@aippy/runtime/ai';
+  * import aiConfigJson from '@/config/aiConfig.json';
+  *
+  * const config = loadAIConfig(aiConfigJson);
+  * sendAIConfigToContainer(config);
+  * ```
+  */
+ export declare function sendAIConfigToContainer(config: AIConfig): void;
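The bridge above is one-way, covering the two scenarios named in its JSDoc (iOS WebView script handler vs. parent iframe). For orientation, here is a minimal sketch of how such a postMessage bridge can be wired; the message envelope `{ type: 'aippy:aiConfig', payload }` and the `window as any` cast are illustrative assumptions, not the package's actual internals.

```typescript
import type { AIConfig } from '@aippy/runtime/ai';

// Illustrative message envelope -- the real field names are not shown in this diff.
interface AIConfigMessage {
  type: 'aippy:aiConfig';
  payload: AIConfig;
}

function sendAIConfigToContainerSketch(config: AIConfig): void {
  const message: AIConfigMessage = { type: 'aippy:aiConfig', payload: config };

  // Scenario 1: iOS WKWebView exposes script message handlers on
  // window.webkit.messageHandlers.<name>.
  const webkit = (window as any).webkit;
  if (webkit?.messageHandlers?.aippyListener) {
    webkit.messageHandlers.aippyListener.postMessage(message);
    return;
  }

  // Scenario 2: running inside an iframe -- post to the parent window.
  if (window.parent !== window) {
    window.parent.postMessage(message, '*');
  }
}
```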
package/dist/ai/config/helper.d.ts ADDED
@@ -0,0 +1,43 @@
+ /**
+  * AI Configuration helper functions
+  *
+  * Provides convenient utilities for loading and accessing AI configuration
+  * from aiConfig.json files generated by LLMs.
+  */
+ /**
+  * Type for the values object returned by createAIConfig
+  * Provides direct property access to configuration values
+  */
+ export interface AIConfigValues {
+     readonly [key: string]: unknown;
+ }
+ /**
+  * Creates an AI configuration helper that loads, validates, and provides
+  * convenient access to configuration values.
+  *
+  * This function:
+  * 1. Loads and validates the configuration
+  * 2. Sends it to the app container (for aippy app editing)
+  * 3. Returns an object with direct property access to config values
+  *
+  * **Recommended usage**: Call this once at app initialization and export
+  * the result for use throughout your app.
+  *
+  * @param configJson - The AI configuration JSON (usually from `import aiConfigJson from '@/config/aiConfig.json'`)
+  * @returns An object with direct property access to configuration values
+  *
+  * @example
+  * ```ts
+  * // app.tsx or main.tsx
+  * import aiConfigJson from '@/config/aiConfig.json';
+  * import { createAIConfig } from '@aippy/runtime/ai';
+  *
+  * export const aiConfig = createAIConfig(aiConfigJson);
+  *
+  * // In your AI code - direct property access
+  * import { aiConfig } from './app';
+  * const model = aiConfig.model; // string | undefined
+  * const temperature = aiConfig.temperature; // number | undefined
+  * ```
+  */
+ export declare function createAIConfig(configJson: unknown): AIConfigValues;
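The three steps listed in the JSDoc (load/validate, send to container, expose values) can be composed from the exported primitives roughly as below. This is a sketch under the assumption that each config item's `value` field is what gets surfaced; it is not the package's actual implementation.

```typescript
import {
  loadAIConfig,
  sendAIConfigToContainer,
  type AIConfigValues,
} from '@aippy/runtime/ai';

function createAIConfigSketch(configJson: unknown): AIConfigValues {
  // 1. Load and validate (throws AIConfigValidationError on bad input).
  const config = loadAIConfig(configJson);

  // 2. Notify the app container so the aippy editor can show the config.
  sendAIConfigToContainer(config);

  // 3. Flatten items to their current `value` for direct property access.
  const values: Record<string, unknown> = {};
  for (const [key, item] of Object.entries(config)) {
    values[key] = item.value;
  }
  return values;
}
```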
package/dist/ai/config/index.d.ts ADDED
@@ -0,0 +1,11 @@
+ /**
+  * AI Configuration module
+  *
+  * Provides types, validation, and utilities for working with aiConfig.json
+  * files generated by LLMs when creating AI-powered applications.
+  */
+ export type { AIConfig, AIConfigValue, AIConfigItem, NumberAIConfigItem, BooleanAIConfigItem, TextAIConfigItem, EnumAIConfigItem, AIConfigError, } from './types';
+ export type { AIConfigValues } from './helper';
+ export { AIConfigValidationError, validateAIConfig, parseAIConfig, loadAIConfig, getAIConfigValue, } from './parser';
+ export { sendAIConfigToContainer } from './bridge';
+ export { createAIConfig } from './helper';
package/dist/ai/config/parser.d.ts ADDED
@@ -0,0 +1,60 @@
+ import { AIConfig, AIConfigError } from './types';
+ /**
+  * Custom error class for AI configuration validation failures
+  */
+ export declare class AIConfigValidationError extends Error {
+     readonly errors: AIConfigError[];
+     constructor(message: string, errors: AIConfigError[]);
+ }
+ /**
+  * Validates an AI configuration object
+  *
+  * @param config - The configuration object to validate
+  * @throws {AIConfigValidationError} If validation fails
+  */
+ export declare function validateAIConfig(config: unknown): asserts config is AIConfig;
+ /**
+  * Parses and validates AI configuration from a JSON string
+  *
+  * @param json - JSON string to parse
+  * @returns Validated AIConfig object
+  * @throws {AIConfigValidationError} If JSON is invalid or validation fails
+  *
+  * @example
+  * ```ts
+  * const json = '{"model": {"index": 0, "name": "Model", ...}}';
+  * const config = parseAIConfig(json);
+  * ```
+  */
+ export declare function parseAIConfig(json: string): AIConfig;
+ /**
+  * Loads and validates AI configuration from a configuration object
+  *
+  * Typically used with: `import aiConfig from '@/config/aiConfig.json'`
+  *
+  * @param config - Configuration object (usually from JSON import)
+  * @returns Validated AIConfig object
+  * @throws {AIConfigValidationError} If validation fails
+  *
+  * @example
+  * ```ts
+  * import aiConfigJson from '@/config/aiConfig.json';
+  * const config = loadAIConfig(aiConfigJson);
+  * ```
+  */
+ export declare function loadAIConfig(config: unknown): AIConfig;
+ /**
+  * Gets a configuration value by key
+  *
+  * @param config - The AIConfig object
+  * @param key - Configuration key to retrieve
+  * @returns The value of the configuration item
+  * @throws {Error} If the key is not found
+  *
+  * @example
+  * ```ts
+  * const model = getAIConfigValue<string>(config, 'model');
+  * const temperature = getAIConfigValue<number>(config, 'temperature');
+  * ```
+  */
+ export declare function getAIConfigValue<T = unknown>(config: AIConfig, key: string): T;
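Since `loadAIConfig` and `parseAIConfig` throw `AIConfigValidationError` with a structured `errors` array, a call site will typically want to report each failing key. A small usage sketch based only on the declarations above (the logging style is illustrative):

```typescript
import {
  loadAIConfig,
  AIConfigValidationError,
  type AIConfig,
} from '@aippy/runtime/ai';

function tryLoadConfig(configJson: unknown): AIConfig | null {
  try {
    return loadAIConfig(configJson);
  } catch (err) {
    if (err instanceof AIConfigValidationError) {
      // Each entry names the offending key and a human-readable message.
      for (const { key, message } of err.errors) {
        console.error(`aiConfig.${key}: ${message}`);
      }
      return null;
    }
    throw err; // not a validation problem -- rethrow
  }
}
```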
package/dist/ai/config/types.d.ts ADDED
@@ -0,0 +1,80 @@
+ /**
+  * AI Configuration types for aiConfig.json
+  *
+  * These types define the structure of the AI configuration file that LLMs
+  * generate when creating AI-powered applications.
+  */
+ /**
+  * Base interface for all AI configuration items
+  */
+ export interface AIConfigItem {
+     /** Display order (lower index = higher priority) */
+     index: number;
+     /** User-friendly display name */
+     name: string;
+     /** Description of the parameter */
+     description: string;
+     /** Optional group name for organizing related parameters */
+     group?: string;
+     /** Type of the configuration item */
+     type: 'number' | 'boolean' | 'text' | 'enum';
+     /** Current value */
+     value: number | string | boolean;
+ }
+ /**
+  * Number type configuration item
+  */
+ export interface NumberAIConfigItem extends AIConfigItem {
+     type: 'number';
+     /** Minimum allowed value */
+     min?: number;
+     /** Maximum allowed value */
+     max?: number;
+     /** Step size for adjustments */
+     step?: number;
+     value: number;
+ }
+ /**
+  * Boolean type configuration item
+  */
+ export interface BooleanAIConfigItem extends AIConfigItem {
+     type: 'boolean';
+     value: boolean;
+ }
+ /**
+  * Text type configuration item
+  */
+ export interface TextAIConfigItem extends AIConfigItem {
+     type: 'text';
+     value: string;
+ }
+ /**
+  * Enum type configuration item
+  * Used for parameters with a fixed set of valid values (e.g., model names, image sizes)
+  */
+ export interface EnumAIConfigItem extends AIConfigItem {
+     type: 'enum';
+     /** Array of valid option values */
+     options: string[];
+     value: string;
+ }
+ /**
+  * Union type of all possible AI configuration item types
+  */
+ export type AIConfigValue = NumberAIConfigItem | BooleanAIConfigItem | TextAIConfigItem | EnumAIConfigItem;
+ /**
+  * AI Configuration object structure
+  * Maps configuration keys to their respective configuration items
+  */
+ export interface AIConfig {
+     [key: string]: AIConfigValue;
+ }
+ /**
+  * Validation error information
+  */
+ export interface AIConfigError {
+     /** Configuration key that has the error */
+     key: string;
+     /** Error message describing the issue */
+     message: string;
+ }
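For concreteness, here is a small object that satisfies the `AIConfig` shape declared above; the specific keys and values are made up for illustration.

```typescript
import type { AIConfig } from '@aippy/runtime/ai';

// Hypothetical aiConfig.json contents, typed against the interfaces above.
const exampleAIConfig: AIConfig = {
  model: {
    index: 0,
    name: 'Model',
    description: 'Which chat model to use',
    type: 'enum',
    options: ['gpt-5-nano', 'gpt-5'],
    value: 'gpt-5-nano',
  },
  temperature: {
    index: 1,
    name: 'Temperature',
    description: 'Sampling temperature',
    group: 'Generation',
    type: 'number',
    min: 0,
    max: 2,
    step: 0.1,
    value: 0.7,
  },
  streaming: {
    index: 2,
    name: 'Streaming',
    description: 'Stream tokens as they are generated',
    type: 'boolean',
    value: true,
  },
};
```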
package/dist/ai/errors.d.ts ADDED
@@ -0,0 +1,25 @@
+ import { AISDKError } from 'ai';
+ /**
+  * Normalizes a backend error response into an AISDKError.
+  *
+  * If the backend error response includes container message data (e.g., `appMessage` field),
+  * it will be automatically sent to the app container for user-facing UI (e.g., payment dialog,
+  * error popup, success notification).
+  */
+ export declare function normalizeError(response: Response, body?: unknown): AISDKError;
+ /**
+  * Creates an error for missing user token.
+  */
+ export declare function missingTokenError(): AISDKError;
+ /**
+  * Creates an error for aborted requests.
+  */
+ export declare function abortedError(): AISDKError;
+ /**
+  * Creates an error for network failures.
+  */
+ export declare function networkError(cause?: unknown): AISDKError;
+ /**
+  * Creates an error for parse failures.
+  */
+ export declare function parseError(message: string): AISDKError;
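These helpers map transport failures onto `AISDKError`. A minimal sketch of a call site wrapping a raw request (the URL and request shape here are placeholders, not the package's internals):

```typescript
import { normalizeError, networkError, parseError } from '@aippy/runtime/ai';

async function postJson(url: string, payload: unknown): Promise<unknown> {
  let response: Response;
  try {
    response = await fetch(url, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(payload),
    });
  } catch (cause) {
    // Offline, DNS failure, CORS rejection, etc.
    throw networkError(cause);
  }

  if (!response.ok) {
    // Turn the backend error payload into an AISDKError
    // (normalizeError also forwards any container message, per its JSDoc).
    const body = await response.json().catch(() => undefined);
    throw normalizeError(response, body);
  }

  try {
    return await response.json();
  } catch {
    throw parseError('Response body was not valid JSON');
  }
}
```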
package/dist/ai/index.d.ts ADDED
@@ -0,0 +1,46 @@
+ /**
+  * @aippy/runtime/ai - AI module for Aippy Runtime SDK
+  *
+  * This module provides two integration paths with Vercel AI SDK:
+  *
+  * ## Path 1: OpenAI-Compatible Provider (AI SDK Core)
+  *
+  * Use `aippyAIProvider()` provider with AI SDK Core functions:
+  * - generateText / streamText
+  * - generateObject / streamObject
+  * - embed / embedMany
+  * - generateImage
+  *
+  * ```ts
+  * import { streamText } from 'ai';
+  * import { aippyAIProvider } from '@aippy/runtime/ai';
+  *
+  * const provider = aippyAIProvider();
+  * const result = await streamText({
+  *   model: provider('gpt-5'),
+  *   prompt: 'Hello!',
+  * });
+  * ```
+  *
+  * ## Path 2: Data Stream Protocol (AI SDK UI / @ai-sdk/react)
+  *
+  * Use config builders with @ai-sdk/react hooks:
+  * - useChat → aippyChatConfig()
+  *
+  * ```tsx
+  * import { useChat } from '@ai-sdk/react';
+  * import { aippyChatConfig } from '@aippy/runtime/ai';
+  *
+  * const { messages, handleSubmit } = useChat(aippyChatConfig());
+  * ```
+  *
+  * @module
+  */
+ export { aippyAIProvider, type AippyProvider } from './openai';
+ export type { AippyOpenAIConfig } from './openai';
+ /** @deprecated Use aippyAIProvider() instead */
+ export { aippyAIProvider as aippy } from './openai';
+ export { aippyChatConfig, type AippyUseChatOptions, type AippyUIConfig, type AippyChatConfig, UI_CHAT_ENDPOINT, } from './ui';
+ export { DEFAULT_BASE_URL, DEFAULT_UI_BASE_URL, DEFAULT_CHAT_MODEL, DEFAULT_CHAT_SYSTEM, } from './shared';
+ export { normalizeError, missingTokenError, abortedError, networkError, parseError, } from './errors';
+ export { type AIConfig, type AIConfigValue, type AIConfigItem, type NumberAIConfigItem, type BooleanAIConfigItem, type TextAIConfigItem, type EnumAIConfigItem, type AIConfigError, type AIConfigValues, AIConfigValidationError, validateAIConfig, parseAIConfig, loadAIConfig, getAIConfigValue, sendAIConfigToContainer, createAIConfig, } from './config';
package/dist/ai/index.js ADDED
@@ -0,0 +1,25 @@
+ import { A as i, D as A, d as n, e, c as E, U as C, f as p, a as g, a as t, b as I, j as T, i as _, l as f, m as U, g as d, n as D, h as L, p as l, s as m, v as F } from "../helper-CRGxnlsu.js";
+ import "react";
+ import "../bridge-D2d8hnO_.js";
+ export {
+   i as AIConfigValidationError,
+   A as DEFAULT_BASE_URL,
+   n as DEFAULT_CHAT_MODEL,
+   e as DEFAULT_CHAT_SYSTEM,
+   E as DEFAULT_UI_BASE_URL,
+   C as UI_CHAT_ENDPOINT,
+   p as abortedError,
+   g as aippy,
+   t as aippyAIProvider,
+   I as aippyChatConfig,
+   T as createAIConfig,
+   _ as getAIConfigValue,
+   f as loadAIConfig,
+   U as missingTokenError,
+   d as networkError,
+   D as normalizeError,
+   L as parseAIConfig,
+   l as parseError,
+   m as sendAIConfigToContainer,
+   F as validateAIConfig
+ };
package/dist/ai/openai/index.d.ts ADDED
@@ -0,0 +1,11 @@
+ /**
+  * OpenAI-compatible module for Aippy AI SDK.
+  *
+  * Provides the `aippyAIProvider()` provider factory for use with AI SDK Core functions:
+  * - generateText / streamText
+  * - generateObject / streamObject
+  * - embed / embedMany
+  * - generateImage
+  */
+ export { aippyAIProvider, type AippyProvider } from './provider';
+ export type { AippyOpenAIConfig } from '../shared/config';
package/dist/ai/openai/provider.d.ts ADDED
@@ -0,0 +1,51 @@
+ import { OpenAICompatibleProvider } from '@ai-sdk/openai-compatible';
+ import { AippyOpenAIConfig } from '../shared';
+ /**
+  * The Aippy provider type - an OpenAI-compatible provider.
+  */
+ export type AippyProvider = OpenAICompatibleProvider<string, string, string, string>;
+ /**
+  * Creates an Aippy AI provider that routes requests through the backend proxy.
+  *
+  * This provider is for use with AI SDK Core functions:
+  * - `generateText` / `streamText` - Text generation
+  * - `generateObject` / `streamObject` - Structured object generation
+  * - `embed` / `embedMany` - Text embeddings
+  * - `generateImage` - Image generation
+  *
+  * @param config - Optional configuration to override defaults
+  * @returns An AI SDK Core compatible provider
+  *
+  * @example
+  * ```ts
+  * import { generateText, streamText, embed, generateImage } from 'ai';
+  * import { aippyAIProvider } from '@aippy/runtime/ai';
+  *
+  * const provider = aippyAIProvider();
+  *
+  * // Text generation
+  * const text = await generateText({
+  *   model: provider('gpt-5-nano'),
+  *   prompt: 'Hello!',
+  * });
+  *
+  * // Streaming
+  * const stream = await streamText({
+  *   model: provider('gpt-5-nano'),
+  *   prompt: 'Tell me a story.',
+  * });
+  *
+  * // Embeddings
+  * const embedding = await embed({
+  *   model: provider.embeddingModel('text-embedding-3-small'),
+  *   value: 'Hello world',
+  * });
+  *
+  * // Image generation
+  * const image = await generateImage({
+  *   model: provider.imageModel('gpt-image-1-mini'),
+  *   prompt: 'A cat',
+  * });
+  * ```
+  */
+ export declare function aippyAIProvider(config?: AippyOpenAIConfig): AippyProvider;
package/dist/ai/shared/config.d.ts ADDED
@@ -0,0 +1,54 @@
+ /**
+  * Shared configuration for Aippy AI SDK.
+  * Used by both OpenAI-compatible (Core) and Data Stream Protocol (UI) paths.
+  */
+ /** Default backend base URL for OpenAI-compatible endpoints */
+ export declare const DEFAULT_BASE_URL: string;
+ /** Default backend base URL for UI (Data Stream Protocol) endpoints */
+ export declare const DEFAULT_UI_BASE_URL: string;
+ /**
+  * Configuration for OpenAI-compatible provider (AI SDK Core).
+  */
+ export interface AippyOpenAIConfig {
+     /** Backend proxy URL for OpenAI-compatible endpoints (defaults to DEFAULT_BASE_URL) */
+     baseUrl?: string;
+ }
+ /**
+  * Configuration for Data Stream Protocol endpoints (AI SDK UI).
+  */
+ export interface AippyUIConfig {
+     /** Backend proxy URL for UI endpoints (defaults to DEFAULT_UI_BASE_URL) */
+     baseUrl?: string;
+ }
+ /**
+  * Configuration for useChat hook with model and system prompt.
+  */
+ export interface AippyChatConfig extends AippyUIConfig {
+     /**
+      * Optional API endpoint override for useChat.
+      * Useful for local dev proxies (e.g. Vite) to avoid cross-origin streaming.
+      *
+      * If not provided, we use `${baseUrl}/chat`.
+      */
+     api?: string;
+     /** Model identifier (defaults to 'gpt-5-nano') */
+     model?: string;
+     /** System prompt for the conversation (defaults to empty string) */
+     system?: string;
+ }
+ /** Default model for chat */
+ export declare const DEFAULT_CHAT_MODEL = "gpt-5-nano";
+ /** Default model for image generation */
+ export declare const DEFAULT_IMAGE_MODEL = "gpt-image-1-mini";
+ /** Default model for embeddings */
+ export declare const DEFAULT_EMBEDDING_MODEL = "text-embedding-3-small";
+ /** Default system prompt for chat */
+ export declare const DEFAULT_CHAT_SYSTEM = "";
+ /**
+  * Resolves OpenAI-compatible configuration with defaults.
+  */
+ export declare function resolveOpenAIConfig(config?: AippyOpenAIConfig): Required<AippyOpenAIConfig>;
+ /**
+  * Resolves UI (Data Stream Protocol) configuration with defaults.
+  */
+ export declare function resolveUIConfig(config?: AippyUIConfig): Required<AippyUIConfig>;
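The resolvers declared here just fill in defaults. A minimal sketch of what `resolveUIConfig` might look like, grounded only in the declared signature and the documented default base URL (the actual implementation is not shown in this diff):

```typescript
import {
  DEFAULT_UI_BASE_URL,
  type AippyUIConfig,
} from '@aippy/runtime/ai';

// Sketch: return a fully-populated config, falling back to the default base URL.
function resolveUIConfigSketch(config: AippyUIConfig = {}): Required<AippyUIConfig> {
  return {
    baseUrl: config.baseUrl ?? DEFAULT_UI_BASE_URL,
  };
}
```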
package/dist/ai/shared/fetch.d.ts ADDED
@@ -0,0 +1,28 @@
+ /**
+  * Shared fetch utilities for Aippy AI SDK.
+  * Used by both OpenAI-compatible (Core) and Data Stream Protocol (UI) paths.
+  */
+ /**
+  * Joins a base URL with a path segment.
+  * Uses native URL API for reliable URL resolution.
+  *
+  * @param baseUrl - The base URL (e.g., 'https://api.aippy.dev/api/aisdk/v1/ui/')
+  * @param path - The path segment to append (e.g., '/chat' or 'chat')
+  * @returns The joined URL string
+  *
+  * @example
+  * joinUrl('https://api.aippy.dev/api/aisdk/v1/ui/', '/chat')
+  * // => 'https://api.aippy.dev/api/aisdk/v1/ui/chat'
+  */
+ export declare function joinUrl(baseUrl: string, path: string): string;
+ /**
+  * Creates a fetch wrapper that injects Authorization header with a fresh token.
+  * Token is fetched asynchronously on every request to ensure it's always current.
+  *
+  * @returns A fetch function compatible with globalThis.fetch signature
+  *
+  * @example
+  * const authFetch = createAuthFetch();
+  * const response = await authFetch('https://api.example.com/endpoint', { method: 'POST' });
+  */
+ export declare function createAuthFetch(): typeof globalThis.fetch;
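For intuition, a wrapper of this kind can be written as follows. The token source (`getUserToken()`) is a hypothetical placeholder; how the package actually obtains the user token is not visible in this diff.

```typescript
// Hypothetical token source -- stands in for whatever the runtime uses.
declare function getUserToken(): Promise<string>;

// Sketch of a fetch wrapper that injects a fresh Authorization header per call.
function createAuthFetchSketch(): typeof globalThis.fetch {
  return async (input, init) => {
    const token = await getUserToken(); // fetched on every request
    const headers = new Headers(init?.headers);
    headers.set('Authorization', `Bearer ${token}`);
    return fetch(input, { ...init, headers });
  };
}
```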
package/dist/ai/shared/index.d.ts ADDED
@@ -0,0 +1,6 @@
+ /**
+  * Shared utilities for Aippy AI SDK.
+  * Re-exports config and fetch utilities used by both protocol paths.
+  */
+ export { DEFAULT_BASE_URL, DEFAULT_UI_BASE_URL, DEFAULT_CHAT_MODEL, DEFAULT_CHAT_SYSTEM, resolveOpenAIConfig, resolveUIConfig, type AippyOpenAIConfig, type AippyUIConfig, type AippyChatConfig, } from './config';
+ export { joinUrl, createAuthFetch } from './fetch';
package/dist/ai/ui/config.d.ts ADDED
@@ -0,0 +1,25 @@
+ import { AippyChatConfig } from '../shared';
+ import { AippyUseChatOptions } from './types';
+ /**
+  * Creates configuration for the useChat hook.
+  *
+  * @param config - Optional configuration to override defaults
+  * @returns Promise resolving to configuration object to spread into useChat()
+  *
+  * @example
+  * ```tsx
+  * import { useChat } from '@ai-sdk/react';
+  * import { aippyChatConfig } from '@aippy/runtime/ai';
+  *
+  * const chatConfig = await aippyChatConfig();
+  * const { messages, input, handleSubmit } = useChat(chatConfig);
+  *
+  * // With custom model and system prompt
+  * const config = await aippyChatConfig({
+  *   model: 'claude-3',
+  *   system: 'You are a helpful assistant.',
+  * });
+  * const { messages } = useChat(config);
+  * ```
+  */
+ export declare function aippyChatConfig(config?: AippyChatConfig): AippyUseChatOptions;
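The `api` override documented on `AippyChatConfig` exists to keep the stream same-origin behind a local dev proxy. A sketch of such a Vite proxy, assuming the UI base URL shown in the `joinUrl` example above; the `/aippy-ai` prefix and the rewrite path are made up for illustration.

```typescript
// vite.config.ts -- illustrative dev proxy, not part of the package.
import { defineConfig } from 'vite';

export default defineConfig({
  server: {
    proxy: {
      '/aippy-ai': {
        target: 'https://api.aippy.dev',
        changeOrigin: true,
        // '/aippy-ai/chat' -> '/api/aisdk/v1/ui/chat' on the backend
        rewrite: (path) => path.replace(/^\/aippy-ai/, '/api/aisdk/v1/ui'),
      },
    },
  },
});
```

The hook config would then pass the same-origin path, e.g. `aippyChatConfig({ api: '/aippy-ai/chat' })`, so streaming responses are not subject to cross-origin restrictions during development.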
package/dist/ai/ui/endpoints.d.ts ADDED
@@ -0,0 +1,12 @@
+ /**
+  * UI endpoint path constants for Data Stream Protocol.
+  *
+  * These endpoints are served by the Aippy backend and return
+  * responses in AI SDK Data Stream Protocol format.
+  *
+  * Required response headers from backend:
+  * - Content-Type: text/event-stream
+  * - x-vercel-ai-ui-message-stream: v1
+  */
+ /** Endpoint for useChat hook */
+ export declare const UI_CHAT_ENDPOINT = "/chat";
package/dist/ai/ui/index.d.ts ADDED
@@ -0,0 +1,13 @@
+ /**
+  * AI SDK UI module for Aippy AI SDK.
+  *
+  * Provides configuration builders for @ai-sdk/react hooks:
+  * - useChat → aippyChatConfig()
+  *
+  * These hooks communicate with the Aippy backend using
+  * AI SDK Data Stream Protocol (SSE with x-vercel-ai-ui-message-stream: v1).
+  */
+ export { aippyChatConfig } from './config';
+ export type { AippyUseChatOptions } from './types';
+ export { UI_CHAT_ENDPOINT } from './endpoints';
+ export type { AippyUIConfig, AippyChatConfig } from '../shared/config';
package/dist/ai/ui/types.d.ts ADDED
@@ -0,0 +1,21 @@
+ import { ChatTransport, UIMessage } from 'ai';
+ /**
+  * Body parameters for useChat requests (internal use).
+  */
+ interface UseChatBody {
+     /** Model identifier (e.g., 'gpt-5', 'claude-3') */
+     model: string;
+     /** System prompt for the conversation */
+     system: string;
+ }
+ /**
+  * Configuration options for useChat hook with Aippy transport.
+  *
+  * This is a minimal wrapper around ChatTransport for type safety.
+  * Users can also directly use ChatTransport from 'ai' package.
+  */
+ export interface AippyUseChatOptions<UI_MESSAGE extends UIMessage = UIMessage> {
+     /** Transport for custom API communication */
+     transport: ChatTransport<UI_MESSAGE>;
+ }
+ export type { UseChatBody };
package/dist/ai.d.ts ADDED
@@ -0,0 +1,2 @@
+ export * from './ai/index'
+ export {}