opencode-koji 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,68 @@
1
+ # OpenCode Koji Plugin
2
+
3
+ Auto-discovers models from [Koji](https://github.com/danielcherubini/koji) local AI server and provides OpenCode with model configuration.
4
+
5
+ ## Features
6
+
7
+ - **Auto-detection**: Finds koji running on default ports (11434, 8080)
8
+ - **Model Discovery**: Queries `/koji/v1/opencode/models` for rich model metadata
9
+ - **Configuration Enhancement**: Adds model metadata (context limits, name, etc.)
10
+ - **Graceful Fallback**: Works even if koji is offline
11
+
12
+ ## Installation
13
+
14
+ Add to your `opencode.json`:
15
+
16
+ ```json
17
+ {
18
+ "$schema": "https://opencode.ai/config.json",
19
+ "plugin": ["opencode-koji"]
20
+ }
21
+ ```
22
+
23
+ Or install via npm:
24
+
25
+ ```bash
26
+ npm install opencode-koji
27
+ ```
28
+
29
+ ## Usage
30
+
31
+ Simply install the plugin - it will auto-detect koji and discover models.
32
+
33
+ ### Manual Configuration
34
+
35
+ If you want to use a custom koji instance:
36
+
37
+ ```json
38
+ {
39
+ "provider": {
40
+ "koji": {
41
+ "npm": "@ai-sdk/openai-compatible",
42
+ "name": "Koji (local)",
43
+ "options": {
44
+ "baseURL": "http://localhost:11434/v1"
45
+ }
46
+ }
47
+ }
48
+ }
49
+ ```
50
+
51
+ The plugin will still enhance this with auto-discovered models, merging with any manually configured ones.
52
+
53
+ ## How It Works
54
+
55
+ 1. On opencode startup, the `config` hook is called
56
+ 2. Plugin checks for existing `koji` provider or auto-detects on default ports
57
+ 3. Queries `GET /koji/v1/opencode/models` from koji
58
+ 4. Merges discovered models into opencode's configuration
59
+ 5. Models appear in `/models` list automatically
60
+
61
+ ## Requirements
62
+
63
+ - Koji running with `koji serve`
64
+ - OpenCode with plugin support
65
+
66
+ ## License
67
+
68
+ MIT
package/package.json ADDED
@@ -0,0 +1,39 @@
1
+ {
2
+ "$schema": "https://json.schemastore.org/package.json",
3
+ "name": "opencode-koji",
4
+ "version": "0.1.0",
5
+ "description": "OpenCode plugin for koji local AI server auto-discovery and model configuration",
6
+ "type": "module",
7
+ "main": "./src/index.ts",
8
+ "exports": {
9
+ ".": "./src/index.ts"
10
+ },
11
+ "files": [
12
+ "src"
13
+ ],
14
+ "scripts": {
15
+ "typecheck": "tsc --noEmit",
16
+ "test": "vitest",
17
+ "test:run": "vitest run",
18
+ "lint": "eslint src --ext .ts",
19
+ "lint:fix": "eslint src --ext .ts --fix"
20
+ },
21
+ "keywords": [
22
+ "opencode",
23
+ "koji",
24
+ "plugin",
25
+ "local-llm",
26
+ "openai-compatible"
27
+ ],
28
+ "author": "",
29
+ "license": "MIT",
30
+ "dependencies": {
31
+ "@opencode-ai/plugin": "^1.0.166"
32
+ },
33
+ "devDependencies": {
34
+ "@types/node": "^22.0.0",
35
+ "@vitest/coverage-v8": "^4.0.0",
36
+ "typescript": "^5.9.0",
37
+ "vitest": "^4.0.0"
38
+ }
39
+ }
package/src/index.ts ADDED
@@ -0,0 +1 @@
1
+ export { KojiPlugin } from './plugin'
@@ -0,0 +1,97 @@
1
+ import type { PluginInput } from '@opencode-ai/plugin'
2
+ import { normalizeBaseURL, checkKojiHealth, discoverKojiModels, autoDetectKoji, formatModelName, parseModelCapabilities } from '../utils/koji-api'
3
+
4
// NOTE(review): unused in this module — URL resolution happens via
// normalizeBaseURL/autoDetectKoji in utils/koji-api. Also uses 127.0.0.1
// while utils/koji-api's default uses localhost; confirm which is intended.
const DEFAULT_KOJI_URL = "http://127.0.0.1:11434"
5
+
6
/**
 * Minimal user-notification surface consumed by the config hook.
 * Methods are async so a real implementation can await UI delivery;
 * the current factory (createToastNotifier) provides no-op stubs.
 */
interface ToastNotifier {
  info(message: string): Promise<void>
  warning(message: string): Promise<void>
  error(message: string): Promise<void>
}
11
+
12
+ function createToastNotifier(client: PluginInput['client']): ToastNotifier {
13
+ return {
14
+ async info(_message: string) {
15
+ // Stub - can be enhanced later with actual toasts
16
+ },
17
+ async warning(_message: string) {
18
+ },
19
+ async error(_message: string) {
20
+ },
21
+ }
22
+ }
23
+
24
+ export function createConfigHook(client: PluginInput['client']) {
25
+ const toastNotifier = createToastNotifier(client)
26
+
27
+ return async (config: any) => {
28
+ if (!config || typeof config !== 'object') {
29
+ return
30
+ }
31
+
32
+ let kojiProvider = config.provider?.koji
33
+ let baseURL: string
34
+
35
+ if (kojiProvider?.options?.baseURL) {
36
+ baseURL = normalizeBaseURL(kojiProvider.options.baseURL)
37
+ } else {
38
+ const detectedURL = await autoDetectKoji()
39
+ if (!detectedURL) {
40
+ console.log('[opencode-koji] Koji not detected on default ports (11434, 8080)')
41
+ return
42
+ }
43
+ baseURL = detectedURL
44
+
45
+ if (!config.provider) {
46
+ config.provider = {}
47
+ }
48
+
49
+ config.provider.koji = {
50
+ npm: "@ai-sdk/openai-compatible",
51
+ name: "Koji (local)",
52
+ options: {
53
+ baseURL: `${baseURL}/v1`,
54
+ },
55
+ models: {},
56
+ }
57
+ kojiProvider = config.provider.koji
58
+ }
59
+
60
+ const isHealthy = await checkKojiHealth(baseURL)
61
+ if (!isHealthy) {
62
+ console.warn(`[opencode-koji] Koji appears offline at ${baseURL}`)
63
+ return
64
+ }
65
+
66
+ const models = await discoverKojiModels(baseURL)
67
+ if (models.length === 0) {
68
+ console.warn('[opencode-koji] No models discovered - ensure koji serve is running')
69
+ return
70
+ }
71
+
72
+ const existingModels = kojiProvider.models || {}
73
+ const discoveredModels: Record<string, any> = {}
74
+
75
+ for (const model of models) {
76
+ // model.id is already the API id (lowercased HF name), no sanitization needed
77
+ const modelKey = model.id
78
+
79
+ if (!existingModels[modelKey] && !existingModels[model.id]) {
80
+ const modelConfig = parseModelCapabilities(model)
81
+ modelConfig.name = model.name || modelKey
82
+
83
+ discoveredModels[modelKey] = modelConfig
84
+ }
85
+ }
86
+
87
+ if (Object.keys(discoveredModels).length > 0) {
88
+ config.provider.koji.models = {
89
+ ...existingModels,
90
+ ...discoveredModels,
91
+ }
92
+ console.log(`[opencode-koji] Discovered ${Object.keys(discoveredModels).length} models`)
93
+ }
94
+
95
+ await toastNotifier.info(`Loaded ${models.length} models from koji`)
96
+ }
97
+ }
@@ -0,0 +1,19 @@
1
+ import type { Plugin, PluginInput } from '@opencode-ai/plugin'
2
+ import { createConfigHook } from './config-hook'
3
+
4
+ export const KojiPlugin: Plugin = async (input: PluginInput) => {
5
+ console.log('[opencode-koji] Koji plugin initializing')
6
+
7
+ const { client } = input
8
+
9
+ if (!client || typeof client !== 'object') {
10
+ console.error('[opencode-koji] Invalid client provided to plugin')
11
+ return {
12
+ config: async () => {},
13
+ }
14
+ }
15
+
16
+ return {
17
+ config: createConfigHook(client),
18
+ }
19
+ }
@@ -0,0 +1,40 @@
1
/**
 * A model entry as consumed from koji's `/koji/v1/opencode/models`
 * endpoint. Only `id` and `name` are required; everything else is
 * optional metadata used to enrich the opencode model config.
 */
export interface KojiModel {
  id: string
  name: string
  model?: string
  backend?: string
  // Raw context window size; used as a fallback when `limit` is absent.
  context_length?: number
  // Explicit token limits; takes precedence over context_length.
  limit?: {
    context: number
    output: number
  }
  modalities?: {
    input: string[]
    output: string[]
  }
  quant?: string
  gpu_layers?: number
}

/** Envelope shape of the koji models endpoint response. */
export interface KojiModelsResponse {
  models: KojiModel[]
}

/** Shape of the `koji` entry under opencode's `provider` configuration. */
export interface KojiProviderConfig {
  npm?: string
  name?: string
  options?: {
    baseURL?: string
    apiKey?: string
  }
  models?: Record<string, any>
}

/** A model after discovery/normalization, ready to merge into config. */
export interface DiscoveredModel {
  id: string
  key: string
  name: string
  config: Record<string, any>
}

/** Lifecycle states for a model load. */
export type LoadingStatus = 'not_loaded' | 'loading' | 'loaded' | 'error'
@@ -0,0 +1,98 @@
1
+ import type { KojiModel, KojiModelsResponse } from '../types'
2
+
3
+ const DEFAULT_KOJI_URL = "http://localhost:11434"
4
+ const KOJI_OPENCODE_MODELS_ENDPOINT = "/koji/v1/opencode/models"
5
+ const KOJI_V1_MODELS_ENDPOINT = "/v1/models"
6
+
7
+ export function normalizeBaseURL(baseURL: string = DEFAULT_KOJI_URL): string {
8
+ let normalized = baseURL.replace(/\/+$/, '')
9
+ if (normalized.endsWith('/v1')) {
10
+ normalized = normalized.slice(0, -3)
11
+ }
12
+ return normalized
13
+ }
14
+
15
+ export function buildAPIURL(baseURL: string, endpoint: string = KOJI_OPENCODE_MODELS_ENDPOINT): string {
16
+ const normalized = normalizeBaseURL(baseURL)
17
+ return `${normalized}${endpoint}`
18
+ }
19
+
20
+ export async function checkKojiHealth(baseURL: string = DEFAULT_KOJI_URL): Promise<boolean> {
21
+ try {
22
+ const url = buildAPIURL(baseURL, KOJI_OPENCODE_MODELS_ENDPOINT)
23
+ const response = await fetch(url, {
24
+ method: "GET",
25
+ signal: AbortSignal.timeout(3000),
26
+ })
27
+ return response.ok
28
+ } catch {
29
+ return false
30
+ }
31
+ }
32
+
33
+ export async function discoverKojiModels(baseURL: string = DEFAULT_KOJI_URL): Promise<KojiModel[]> {
34
+ try {
35
+ const url = buildAPIURL(baseURL, KOJI_OPENCODE_MODELS_ENDPOINT)
36
+ const response = await fetch(url, {
37
+ method: "GET",
38
+ headers: {
39
+ "Content-Type": "application/json",
40
+ },
41
+ signal: AbortSignal.timeout(5000),
42
+ })
43
+
44
+ if (!response.ok) {
45
+ console.warn(`[opencode-koji] Koji returned ${response.status}: ${response.statusText}`)
46
+ return []
47
+ }
48
+
49
+ const data = (await response.json()) as { models: KojiModel[] }
50
+ return data.models ?? []
51
+ } catch (error) {
52
+ console.warn(`[opencode-koji] Failed to discover models: ${error instanceof Error ? error.message : String(error)}`)
53
+ return []
54
+ }
55
+ }
56
+
57
+ export async function autoDetectKoji(): Promise<string | null> {
58
+ const ports = [11434, 8080]
59
+ for (const port of ports) {
60
+ const baseURL = `http://127.0.0.1:${port}`
61
+ const isHealthy = await checkKojiHealth(baseURL)
62
+ if (isHealthy) {
63
+ console.log(`[opencode-koji] Auto-detected koji at ${baseURL}`)
64
+ return baseURL
65
+ }
66
+ }
67
+ return null
68
+ }
69
+
70
+ export function formatModelName(model: KojiModel): string {
71
+ if (model.name && model.name !== model.id) {
72
+ return model.name
73
+ }
74
+ const parts = model.id.split('/')
75
+ const modelName = parts[parts.length - 1] ?? model.id
76
+ return modelName.replace(/[-_]/g, ' ').replace(/\s+/g, ' ').trim()
77
+ }
78
+
79
+ export function parseModelCapabilities(model: KojiModel): Record<string, any> {
80
+ const config: Record<string, any> = {
81
+ id: model.id,
82
+ }
83
+
84
+ if (model.limit) {
85
+ config.limit = model.limit
86
+ } else if (model.context_length) {
87
+ config.limit = {
88
+ context: model.context_length,
89
+ output: model.context_length,
90
+ }
91
+ }
92
+
93
+ if (model.modalities) {
94
+ config.modalities = model.modalities
95
+ }
96
+
97
+ return config
98
+ }