stable-harness 0.0.15 → 0.0.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "stable-harness",
3
- "version": "0.0.15",
3
+ "version": "0.0.16",
4
4
  "type": "module",
5
5
  "description": "Stable application runtime and operator control plane for agent workspaces.",
6
6
  "license": "Apache-2.0",
@@ -1 +1 @@
1
// Removed hunk (v0.0.15): protocol server lifecycle for stable-harness.
// Starts, detects, and stops the OpenAI-compatible HTTP API and the
// LangGraph-compatible API according to the workspace runtime policy.
import { execFile } from "node:child_process";
import { promisify } from "node:util";
import { createOpenAiCompatibleHttpServer } from "@stable-harness/protocols";
import { startOfficialLangGraphServer } from "./langgraph-official.js";

const DEFAULT_HOST = "127.0.0.1";
const execFileAsync = promisify(execFile);

/**
 * Start every protocol server enabled by the runtime policy, then block until
 * SIGINT/SIGTERM. A server whose port is already served by a live
 * stable-harness instance is reported and skipped instead of failing.
 */
export async function serveProtocol(runtime, overrides) {
  const servers = createConfiguredServers(runtime, overrides);
  const cleanups = [];
  let started = 0;
  for (const entry of servers) {
    if (entry.kind === "http") {
      const bound = await listen(entry);
      if (!bound) {
        process.stdout.write(`stable-harness ${entry.protocol} API already running on http://${entry.host}:${entry.port}/v1\n`);
        continue;
      }
      cleanups.push(() => closeHttpServer(entry.server));
      started += 1;
      // The OS may have picked an ephemeral port; report the actual one.
      const address = entry.server.address();
      const actualPort = typeof address === "object" && address ? address.port : entry.port;
      process.stdout.write(`stable-harness ${entry.protocol} API listening on http://${entry.host}:${actualPort}/v1\n`);
    } else {
      const handle = await startLangGraphServer(runtime, entry);
      if (!handle) {
        process.stdout.write(`stable-harness ${entry.protocol} API already running on http://${entry.config.host}:${entry.config.port}\n`);
        continue;
      }
      cleanups.push(handle.cleanup);
      started += 1;
      process.stdout.write(`stable-harness ${entry.protocol} API listening on ${handle.url}\n`);
    }
  }
  if (started !== 0) {
    await waitForShutdown(cleanups);
  }
}

// Keep the process alive until SIGINT/SIGTERM, then run all cleanups and exit 0.
// The promise intentionally never resolves; only the signal handlers exit.
async function waitForShutdown(cleanups) {
  const keepAlive = setInterval(() => {}, 864e5);
  await new Promise(() => {
    const shutdown = () => {
      clearInterval(keepAlive);
      Promise.allSettled(cleanups.map((cleanup) => cleanup())).finally(() => process.exit(0));
    };
    process.once("SIGINT", shutdown);
    process.once("SIGTERM", shutdown);
  });
}

/**
 * Send SIGTERM to the stable-harness processes listening on the configured
 * protocol ports and report, per target, what was stopped.
 */
export async function stopProtocol(workspace, overrides) {
  const targets = createConfiguredServers({ getRuntimePolicy: () => workspace.runtime }, overrides).map((entry) =>
    entry.kind === "http"
      ? { protocol: entry.protocol, host: entry.host, port: entry.port }
      : { protocol: entry.protocol, host: entry.config.host, port: entry.config.port },
  );
  const scans = await Promise.all(
    targets.map(async (target) => ({ target, pids: await stableHarnessListenerPids(target.port) })),
  );
  // De-duplicate before signalling: one process may serve several targets.
  for (const pid of [...new Set(scans.flatMap((scan) => scan.pids))]) {
    process.kill(pid, "SIGTERM");
  }
  for (const { target, pids } of scans) {
    if (pids.length !== 0) {
      process.stdout.write(`stable-harness ${target.protocol} API stopped on ${target.host}:${target.port} pid=${pids.join(",")}\n`);
    } else {
      process.stdout.write(`stable-harness ${target.protocol} API not running on ${target.host}:${target.port}\n`);
    }
  }
}

// Build the list of server descriptors enabled by the runtime policy.
function createConfiguredServers(runtime, overrides) {
  const protocols = readRecord(runtime.getRuntimePolicy().protocols) ?? {};
  const openAiConfig = protocolConfig(protocols, "openaiCompatible", "openai-compatible", "openai") ?? {};
  const langGraphConfig = protocolConfig(protocols, "langgraph") ?? {};
  return [
    ...(enabled(openAiConfig) ? [openAiServer(runtime, openAiConfig, overrides)] : []),
    ...(enabled(langGraphConfig) ? [langGraphServer(langGraphConfig)] : []),
  ];
}

// Descriptor for the OpenAI-compatible HTTP server. CLI overrides win over
// config values; config falls back to 127.0.0.1:8642.
function openAiServer(runtime, config, overrides) {
  const configuredHost = configString(config.host) ?? DEFAULT_HOST;
  const port = overrides.port ?? configNumber(config.port) ?? 8642;
  const host = overrides.host ?? configuredHost;
  const bearerToken = configString(config.bearerToken) ?? configString(config.apiKey) ?? overrides.apiKey;
  return {
    kind: "http",
    protocol: "openai-compatible",
    server: createOpenAiCompatibleHttpServer(runtime, { bearerToken }),
    host,
    port,
    ...(bearerToken ? { bearerToken } : {}),
  };
}

// Descriptor for the LangGraph-compatible server (defaults: 127.0.0.1:2024,
// 10 workers). Optional fields are only included when configured.
function langGraphServer(config) {
  const host = configString(config.host) ?? DEFAULT_HOST;
  const port = configNumber(config.port) ?? 2024;
  const exposeAgents = configStringArray(config.exposeAgents);
  return {
    kind: "langgraph",
    protocol: "langgraph-compatible",
    config: {
      host,
      port,
      nWorkers: configNumber(config.nWorkers) ?? 10,
      ...(exposeAgents ? { exposeAgents } : {}),
      ...(config.env !== undefined ? { env: config.env } : {}),
      ...(config.envFile !== undefined ? { envFile: config.envFile } : {}),
    },
  };
}

// Accept only an array of strings; trim entries and drop blank ones.
function configStringArray(value) {
  if (Array.isArray(value) && value.every((item) => typeof item === "string")) {
    return value.filter((item) => item.trim()).map((item) => item.trim());
  }
  return undefined;
}

// Return the first key in `keys` whose value is a plain-object record.
function protocolConfig(record, ...keys) {
  for (const key of keys) {
    const candidate = readRecord(record[key]);
    if (candidate) {
      return candidate;
    }
  }
  return undefined;
}

// A protocol is enabled unless explicitly set to `enabled: false`.
function enabled(config) {
  return config.enabled !== false;
}

// Non-blank string config value, with `${env:NAME}` / `${env:NAME:-default}`
// expansion from the process environment.
function configString(value) {
  if (typeof value !== "string" || !value.trim()) {
    return undefined;
  }
  const envRef = value.match(/^\$\{env:([A-Za-z_][A-Za-z0-9_]*)(?::-(.*))?\}$/u);
  return envRef ? process.env[envRef[1]] ?? envRef[2] : value;
}

// Numeric config value: accept finite numbers and non-blank numeric strings.
function configNumber(value) {
  if (typeof value === "number" && Number.isFinite(value)) {
    return value;
  }
  if (typeof value === "string" && value.trim()) {
    return Number(value);
  }
  return undefined;
}

// Narrow an unknown value to a plain-object record (not null, not an array).
function readRecord(value) {
  if (typeof value !== "object" || value === null || Array.isArray(value)) {
    return undefined;
  }
  return value;
}

/**
 * Bind the HTTP server. Returns true on success, false when the port is held
 * by a live stable-harness OpenAI-compatible server; rethrows other failures
 * (wrapping EADDRINUSE from a foreign process in an actionable error).
 */
async function listen(entry) {
  try {
    await new Promise((resolve, reject) => {
      entry.server.once("error", reject);
      entry.server.listen(entry.port, entry.host, () => {
        entry.server.off("error", reject);
        resolve();
      });
    });
    return true;
  } catch (error) {
    if (isAddressInUse(error) && (await isOpenAiServerAlreadyRunning(entry))) {
      return false;
    }
    throw portConflictError(error, entry.protocol, entry.host, entry.port);
  }
}

// Probe /v1/capabilities to see whether the port is ours already.
async function isOpenAiServerAlreadyRunning(entry) {
  const body = await fetchJson(`http://${entry.host}:${entry.port}/v1/capabilities`, {
    ...(entry.bearerToken ? { authorization: `Bearer ${entry.bearerToken}` } : {}),
  });
  return body?.object === "stable_harness.capabilities";
}

// Start the LangGraph server unless one is already answering; returns
// undefined when an existing instance is detected (before or after EADDRINUSE).
async function startLangGraphServer(runtime, entry) {
  if (await isLangGraphServerAlreadyRunning(entry)) {
    return undefined;
  }
  try {
    return await startOfficialLangGraphServer(runtime, entry.config);
  } catch (error) {
    if (isAddressInUse(error) && (await isLangGraphServerAlreadyRunning(entry))) {
      return undefined;
    }
    throw portConflictError(error, entry.protocol, entry.config.host, entry.config.port);
  }
}

// Probe the LangGraph health endpoint.
async function isLangGraphServerAlreadyRunning(entry) {
  const body = await fetchJson(`http://${entry.config.host}:${entry.config.port}/ok`);
  return body?.ok === true;
}

// Best-effort JSON GET: undefined on any network, HTTP, or parse failure.
async function fetchJson(url, headers = {}) {
  try {
    const response = await fetch(url, { headers });
    if (!response.ok) {
      return undefined;
    }
    return await response.json();
  } catch {
    return undefined;
  }
}

// Detect EADDRINUSE via the error code or its string form.
function isAddressInUse(error) {
  return readErrorCode(error) === "EADDRINUSE" || String(error).includes("EADDRINUSE");
}

function readErrorCode(error) {
  return typeof error === "object" && error !== null && "code" in error ? error.code : undefined;
}

// Turn an EADDRINUSE into an actionable message; pass other errors through.
function portConflictError(error, protocol, host, port) {
  if (!isAddressInUse(error)) {
    return error;
  }
  return new Error(
    [
      `stable-harness ${protocol} port is already in use: ${host}:${port}.`,
      `Use --port <port>, update config/runtime/workspace.yaml, or stop the process currently listening on ${host}:${port}.`,
    ].join("\n"),
  );
}

/**
 * PIDs of processes listening on `port` whose command line looks like a
 * `stable-harness ... start` invocation. Best-effort: relies on lsof/ps.
 */
async function stableHarnessListenerPids(port) {
  const pids = await listenerPids(port);
  const matches = await Promise.all(
    pids.map(async (pid) => {
      const command = await processCommand(pid);
      return isStableHarnessStartCommand(command) ? pid : undefined;
    }),
  );
  return matches.filter((pid) => typeof pid === "number");
}

// All PIDs with a TCP listener on `port`, via lsof; [] when lsof fails.
async function listenerPids(port) {
  try {
    const { stdout } = await execFileAsync("lsof", [`-tiTCP:${port}`, "-sTCP:LISTEN"]);
    return stdout
      .split(/\s+/u)
      .map((token) => Number(token))
      .filter((pid) => Number.isInteger(pid) && pid > 0);
  } catch {
    return [];
  }
}

// Full command line of a PID via ps; "" when ps fails.
async function processCommand(pid) {
  try {
    const { stdout } = await execFileAsync("ps", ["-p", String(pid), "-o", "command="]);
    return stdout.trim();
  } catch {
    return "";
  }
}

/**
 * True when a command line invokes the stable-harness CLI (directly or via
 * node) with a `start` subcommand after the executable token.
 */
export function isStableHarnessStartCommand(command) {
  const tokens = splitCommandLine(command);
  const commandIndex = stableHarnessCommandIndex(tokens);
  return commandIndex >= 0 && tokens.slice(commandIndex + 1).includes("start");
}

// Minimal shell-like tokenizer: whitespace splits, single/double quotes group.
function splitCommandLine(command) {
  const tokens = [];
  let quote;
  let current = "";
  for (const ch of command) {
    if ((ch === '"' || ch === "'") && quote === undefined) {
      quote = ch;
    } else if (ch === quote) {
      quote = undefined;
    } else if (/\s/u.test(ch) && quote === undefined) {
      if (current) {
        tokens.push(current);
        current = "";
      }
    } else {
      current += ch;
    }
  }
  if (current) {
    tokens.push(current);
  }
  return tokens;
}

// Index of the stable-harness executable token: 0 for a direct invocation,
// 1 for `node <cli>`, -1 otherwise.
function stableHarnessCommandIndex(tokens) {
  if (isStableHarnessExecutableToken(tokens[0] ?? "")) {
    return 0;
  }
  const viaNode =
    isNodeExecutableToken(tokens[0] ?? "") &&
    (isStableHarnessExecutableToken(tokens[1] ?? "") || isStableHarnessScriptToken(tokens[1] ?? ""));
  return viaNode ? 1 : -1;
}

function isNodeExecutableToken(token) {
  const basename = token.split(/[\\/]/u).at(-1);
  return basename === "node" || basename === "nodejs";
}

// Recognize the CLI dist entry points inside a stable-harness checkout.
function isStableHarnessScriptToken(token) {
  const normalized = token.replaceAll("\\", "/");
  return (
    (normalized.includes("/stable-harness/") && normalized.endsWith("/packages/cli/dist/src/cli.js")) ||
    (normalized.includes("/stable-harness/") && normalized.endsWith("/dist/cli.js"))
  );
}

function isStableHarnessExecutableToken(token) {
  const basename = token.split(/[\\/]/u).at(-1);
  return basename === "stable-harness" || basename === "botbotgo";
}

// Promise wrapper around http.Server#close.
async function closeHttpServer(server) {
  await new Promise((resolve, reject) => {
    server.close((error) => {
      if (error) {
        reject(error);
      } else {
        resolve();
      }
    });
  });
}
1
// Added hunk (v0.0.16): protocol server lifecycle for stable-harness.
// Same behavior as the previous release, plus command-line matching guards
// (control/shell-metacharacter rejection and path-traversal rejection) when
// identifying stable-harness processes to stop.
import { execFile } from "node:child_process";
import { promisify } from "node:util";
import { createOpenAiCompatibleHttpServer } from "@stable-harness/protocols";
import { startOfficialLangGraphServer } from "./langgraph-official.js";

const DEFAULT_HOST = "127.0.0.1";
const execFileAsync = promisify(execFile);

/**
 * Start every protocol server enabled by the runtime policy, then block until
 * SIGINT/SIGTERM. A server whose port is already served by a live
 * stable-harness instance is reported and skipped instead of failing.
 */
export async function serveProtocol(runtime, overrides) {
  const servers = createConfiguredServers(runtime, overrides);
  const cleanups = [];
  let started = 0;
  for (const entry of servers) {
    if (entry.kind === "http") {
      const bound = await listen(entry);
      if (!bound) {
        process.stdout.write(`stable-harness ${entry.protocol} API already running on http://${entry.host}:${entry.port}/v1\n`);
        continue;
      }
      cleanups.push(() => closeHttpServer(entry.server));
      started += 1;
      // The OS may have picked an ephemeral port; report the actual one.
      const address = entry.server.address();
      const actualPort = typeof address === "object" && address ? address.port : entry.port;
      process.stdout.write(`stable-harness ${entry.protocol} API listening on http://${entry.host}:${actualPort}/v1\n`);
    } else {
      const handle = await startLangGraphServer(runtime, entry);
      if (!handle) {
        process.stdout.write(`stable-harness ${entry.protocol} API already running on http://${entry.config.host}:${entry.config.port}\n`);
        continue;
      }
      cleanups.push(handle.cleanup);
      started += 1;
      process.stdout.write(`stable-harness ${entry.protocol} API listening on ${handle.url}\n`);
    }
  }
  if (started !== 0) {
    await waitForShutdown(cleanups);
  }
}

// Keep the process alive until SIGINT/SIGTERM, then run all cleanups and exit 0.
// The promise intentionally never resolves; only the signal handlers exit.
async function waitForShutdown(cleanups) {
  const keepAlive = setInterval(() => {}, 864e5);
  await new Promise(() => {
    const shutdown = () => {
      clearInterval(keepAlive);
      Promise.allSettled(cleanups.map((cleanup) => cleanup())).finally(() => process.exit(0));
    };
    process.once("SIGINT", shutdown);
    process.once("SIGTERM", shutdown);
  });
}

/**
 * Send SIGTERM to the stable-harness processes listening on the configured
 * protocol ports and report, per target, what was stopped.
 */
export async function stopProtocol(workspace, overrides) {
  const targets = createConfiguredServers({ getRuntimePolicy: () => workspace.runtime }, overrides).map((entry) =>
    entry.kind === "http"
      ? { protocol: entry.protocol, host: entry.host, port: entry.port }
      : { protocol: entry.protocol, host: entry.config.host, port: entry.config.port },
  );
  const scans = await Promise.all(
    targets.map(async (target) => ({ target, pids: await stableHarnessListenerPids(target.port) })),
  );
  // De-duplicate before signalling: one process may serve several targets.
  for (const pid of [...new Set(scans.flatMap((scan) => scan.pids))]) {
    process.kill(pid, "SIGTERM");
  }
  for (const { target, pids } of scans) {
    if (pids.length !== 0) {
      process.stdout.write(`stable-harness ${target.protocol} API stopped on ${target.host}:${target.port} pid=${pids.join(",")}\n`);
    } else {
      process.stdout.write(`stable-harness ${target.protocol} API not running on ${target.host}:${target.port}\n`);
    }
  }
}

// Build the list of server descriptors enabled by the runtime policy.
function createConfiguredServers(runtime, overrides) {
  const protocols = readRecord(runtime.getRuntimePolicy().protocols) ?? {};
  const openAiConfig = protocolConfig(protocols, "openaiCompatible", "openai-compatible", "openai") ?? {};
  const langGraphConfig = protocolConfig(protocols, "langgraph") ?? {};
  return [
    ...(enabled(openAiConfig) ? [openAiServer(runtime, openAiConfig, overrides)] : []),
    ...(enabled(langGraphConfig) ? [langGraphServer(langGraphConfig)] : []),
  ];
}

// Descriptor for the OpenAI-compatible HTTP server. CLI overrides win over
// config values; config falls back to 127.0.0.1:8642.
function openAiServer(runtime, config, overrides) {
  const configuredHost = configString(config.host) ?? DEFAULT_HOST;
  const port = overrides.port ?? configNumber(config.port) ?? 8642;
  const host = overrides.host ?? configuredHost;
  const bearerToken = configString(config.bearerToken) ?? configString(config.apiKey) ?? overrides.apiKey;
  return {
    kind: "http",
    protocol: "openai-compatible",
    server: createOpenAiCompatibleHttpServer(runtime, { bearerToken }),
    host,
    port,
    ...(bearerToken ? { bearerToken } : {}),
  };
}

// Descriptor for the LangGraph-compatible server (defaults: 127.0.0.1:2024,
// 10 workers). Optional fields are only included when configured.
function langGraphServer(config) {
  const host = configString(config.host) ?? DEFAULT_HOST;
  const port = configNumber(config.port) ?? 2024;
  const exposeAgents = configStringArray(config.exposeAgents);
  return {
    kind: "langgraph",
    protocol: "langgraph-compatible",
    config: {
      host,
      port,
      nWorkers: configNumber(config.nWorkers) ?? 10,
      ...(exposeAgents ? { exposeAgents } : {}),
      ...(config.env !== undefined ? { env: config.env } : {}),
      ...(config.envFile !== undefined ? { envFile: config.envFile } : {}),
    },
  };
}

// Accept only an array of strings; trim entries and drop blank ones.
function configStringArray(value) {
  if (Array.isArray(value) && value.every((item) => typeof item === "string")) {
    return value.filter((item) => item.trim()).map((item) => item.trim());
  }
  return undefined;
}

// Return the first key in `keys` whose value is a plain-object record.
function protocolConfig(record, ...keys) {
  for (const key of keys) {
    const candidate = readRecord(record[key]);
    if (candidate) {
      return candidate;
    }
  }
  return undefined;
}

// A protocol is enabled unless explicitly set to `enabled: false`.
function enabled(config) {
  return config.enabled !== false;
}

// Non-blank string config value, with `${env:NAME}` / `${env:NAME:-default}`
// expansion from the process environment.
function configString(value) {
  if (typeof value !== "string" || !value.trim()) {
    return undefined;
  }
  const envRef = value.match(/^\$\{env:([A-Za-z_][A-Za-z0-9_]*)(?::-(.*))?\}$/u);
  return envRef ? process.env[envRef[1]] ?? envRef[2] : value;
}

// Numeric config value: accept finite numbers and non-blank numeric strings.
function configNumber(value) {
  if (typeof value === "number" && Number.isFinite(value)) {
    return value;
  }
  if (typeof value === "string" && value.trim()) {
    return Number(value);
  }
  return undefined;
}

// Narrow an unknown value to a plain-object record (not null, not an array).
function readRecord(value) {
  if (typeof value !== "object" || value === null || Array.isArray(value)) {
    return undefined;
  }
  return value;
}

/**
 * Bind the HTTP server. Returns true on success, false when the port is held
 * by a live stable-harness OpenAI-compatible server; rethrows other failures
 * (wrapping EADDRINUSE from a foreign process in an actionable error).
 */
async function listen(entry) {
  try {
    await new Promise((resolve, reject) => {
      entry.server.once("error", reject);
      entry.server.listen(entry.port, entry.host, () => {
        entry.server.off("error", reject);
        resolve();
      });
    });
    return true;
  } catch (error) {
    if (isAddressInUse(error) && (await isOpenAiServerAlreadyRunning(entry))) {
      return false;
    }
    throw portConflictError(error, entry.protocol, entry.host, entry.port);
  }
}

// Probe /v1/capabilities to see whether the port is ours already.
async function isOpenAiServerAlreadyRunning(entry) {
  const body = await fetchJson(`http://${entry.host}:${entry.port}/v1/capabilities`, {
    ...(entry.bearerToken ? { authorization: `Bearer ${entry.bearerToken}` } : {}),
  });
  return body?.object === "stable_harness.capabilities";
}

// Start the LangGraph server unless one is already answering; returns
// undefined when an existing instance is detected (before or after EADDRINUSE).
async function startLangGraphServer(runtime, entry) {
  if (await isLangGraphServerAlreadyRunning(entry)) {
    return undefined;
  }
  try {
    return await startOfficialLangGraphServer(runtime, entry.config);
  } catch (error) {
    if (isAddressInUse(error) && (await isLangGraphServerAlreadyRunning(entry))) {
      return undefined;
    }
    throw portConflictError(error, entry.protocol, entry.config.host, entry.config.port);
  }
}

// Probe the LangGraph health endpoint.
async function isLangGraphServerAlreadyRunning(entry) {
  const body = await fetchJson(`http://${entry.config.host}:${entry.config.port}/ok`);
  return body?.ok === true;
}

// Best-effort JSON GET: undefined on any network, HTTP, or parse failure.
async function fetchJson(url, headers = {}) {
  try {
    const response = await fetch(url, { headers });
    if (!response.ok) {
      return undefined;
    }
    return await response.json();
  } catch {
    return undefined;
  }
}

// Detect EADDRINUSE via the error code or its string form.
function isAddressInUse(error) {
  return readErrorCode(error) === "EADDRINUSE" || String(error).includes("EADDRINUSE");
}

function readErrorCode(error) {
  return typeof error === "object" && error !== null && "code" in error ? error.code : undefined;
}

// Turn an EADDRINUSE into an actionable message; pass other errors through.
function portConflictError(error, protocol, host, port) {
  if (!isAddressInUse(error)) {
    return error;
  }
  return new Error(
    [
      `stable-harness ${protocol} port is already in use: ${host}:${port}.`,
      `Use --port <port>, update config/runtime/workspace.yaml, or stop the process currently listening on ${host}:${port}.`,
    ].join("\n"),
  );
}

/**
 * PIDs of processes listening on `port` whose command line looks like a
 * `stable-harness ... start` invocation. Best-effort: relies on lsof/ps.
 */
async function stableHarnessListenerPids(port) {
  const pids = await listenerPids(port);
  const matches = await Promise.all(
    pids.map(async (pid) => {
      const command = await processCommand(pid);
      return isStableHarnessStartCommand(command) ? pid : undefined;
    }),
  );
  return matches.filter((pid) => typeof pid === "number");
}

// All PIDs with a TCP listener on `port`, via lsof; [] when lsof fails.
async function listenerPids(port) {
  try {
    const { stdout } = await execFileAsync("lsof", [`-tiTCP:${port}`, "-sTCP:LISTEN"]);
    return stdout
      .split(/\s+/u)
      .map((token) => Number(token))
      .filter((pid) => Number.isInteger(pid) && pid > 0);
  } catch {
    return [];
  }
}

// Full command line of a PID via ps; "" when ps fails.
async function processCommand(pid) {
  try {
    const { stdout } = await execFileAsync("ps", ["-p", String(pid), "-o", "command="]);
    return stdout.trim();
  } catch {
    return "";
  }
}

/**
 * True when a command line invokes the stable-harness CLI (directly or via
 * node) with a `start` subcommand after the executable token. Command lines
 * containing control characters or shell metacharacters are rejected outright.
 */
export function isStableHarnessStartCommand(command) {
  if (hasUnsafeCommandCharacters(command)) {
    return false;
  }
  const tokens = splitCommandLine(command);
  const commandIndex = stableHarnessCommandIndex(tokens);
  return commandIndex >= 0 && tokens.slice(commandIndex + 1).includes("start");
}

// Reject control characters, DEL, and shell metacharacters (; | ` & < >).
function hasUnsafeCommandCharacters(command) {
  return /[\u0000-\u001F\u007F;|`&<>]/u.test(command);
}

// Minimal shell-like tokenizer: whitespace splits, single/double quotes group.
function splitCommandLine(command) {
  const tokens = [];
  let quote;
  let current = "";
  for (const ch of command) {
    if ((ch === '"' || ch === "'") && quote === undefined) {
      quote = ch;
    } else if (ch === quote) {
      quote = undefined;
    } else if (/\s/u.test(ch) && quote === undefined) {
      if (current) {
        tokens.push(current);
        current = "";
      }
    } else {
      current += ch;
    }
  }
  if (current) {
    tokens.push(current);
  }
  return tokens;
}

// Index of the stable-harness executable token: 0 for a direct invocation,
// 1 for `node <cli>`, -1 otherwise.
function stableHarnessCommandIndex(tokens) {
  if (isStableHarnessExecutableToken(tokens[0] ?? "")) {
    return 0;
  }
  const viaNode =
    isNodeExecutableToken(tokens[0] ?? "") &&
    (isStableHarnessExecutableToken(tokens[1] ?? "") || isStableHarnessScriptToken(tokens[1] ?? ""));
  return viaNode ? 1 : -1;
}

function isNodeExecutableToken(token) {
  const basename = token.split(/[\\/]/u).at(-1);
  return basename === "node" || basename === "nodejs";
}

// Recognize the CLI dist entry points inside a stable-harness checkout;
// paths with "." or ".." segments are rejected to block traversal spoofing.
function isStableHarnessScriptToken(token) {
  if (hasTraversalSegment(token)) {
    return false;
  }
  const normalized = token.replaceAll("\\", "/");
  return (
    (normalized.includes("/stable-harness/") && normalized.endsWith("/packages/cli/dist/src/cli.js")) ||
    (normalized.includes("/stable-harness/") && normalized.endsWith("/dist/cli.js"))
  );
}

// Executable basename match, with the same traversal-segment guard.
function isStableHarnessExecutableToken(token) {
  if (hasTraversalSegment(token)) {
    return false;
  }
  const basename = token.split(/[\\/]/u).at(-1);
  return basename === "stable-harness" || basename === "botbotgo";
}

// True when any path segment is "." or ".." (either separator style).
function hasTraversalSegment(token) {
  return token.split(/[\\/]/u).some((segment) => segment === "." || segment === "..");
}

// Promise wrapper around http.Server#close.
async function closeHttpServer(server) {
  await new Promise((resolve, reject) => {
    server.close((error) => {
      if (error) {
        reject(error);
      } else {
        resolve();
      }
    });
  });
}