promptlayer 1.0.21 → 1.0.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/esm/index.js CHANGED
@@ -1,3 +1,3 @@
- var Ie=Object.defineProperty,qe=Object.defineProperties;var Le=Object.getOwnPropertyDescriptors;var Z=Object.getOwnPropertySymbols;var We=Object.prototype.hasOwnProperty,xe=Object.prototype.propertyIsEnumerable;var ee=(r,e)=>{if(e=Symbol[r])return e;throw Error("Symbol."+r+" is not defined")};var te=(r,e,t)=>e in r?Ie(r,e,{enumerable:!0,configurable:!0,writable:!0,value:t}):r[e]=t,g=(r,e)=>{for(var t in e||(e={}))We.call(e,t)&&te(r,t,e[t]);if(Z)for(var t of Z(e))xe.call(e,t)&&te(r,t,e[t]);return r},k=(r,e)=>qe(r,Le(e));var C=(r=>typeof require!="undefined"?require:typeof Proxy!="undefined"?new Proxy(r,{get:(e,t)=>(typeof require!="undefined"?require:e)[t]}):r)(function(r){if(typeof require!="undefined")return require.apply(this,arguments);throw Error('Dynamic require of "'+r+'" is not supported')});var l=(r,e,t)=>new Promise((o,n)=>{var s=i=>{try{c(t.next(i))}catch(p){n(p)}},a=i=>{try{c(t.throw(i))}catch(p){n(p)}},c=i=>i.done?o(i.value):Promise.resolve(i.value).then(s,a);c((t=t.apply(r,e)).next())}),O=function(r,e){this[0]=r,this[1]=e},U=(r,e,t)=>{var o=(a,c,i,p)=>{try{var u=t[a](c),f=(c=u.value)instanceof O,h=u.done;Promise.resolve(f?c[0]:c).then(d=>f?o(a==="return"?a:"next",c[1]?{done:d.done,value:d.value}:d,i,p):i({value:d,done:h})).catch(d=>o("throw",d,i,p))}catch(d){p(d)}},n=a=>s[a]=c=>new Promise((i,p)=>o(a,c,i,p)),s={};return t=t.apply(r,e),s[Symbol.asyncIterator]=()=>s,n("next"),n("throw"),n("return"),s};var J=(r,e,t)=>(e=r[ee("asyncIterator")])?e.call(r):(r=r[ee("iterator")](),e={},t=(o,n)=>(n=r[o])&&(e[o]=s=>new Promise((a,c,i)=>(s=n.call(r,s),i=s.done,Promise.resolve(s.value).then(p=>a({value:p,done:i}),c)))),t("next"),t("return"),e);import $e from"ably";var w=process.env.URL_API_PROMPTLAYER||"https://api.promptlayer.com",re=(r,e)=>l(void 0,null,function*(){return e.request_response[Symbol.asyncIterator]!==void 0?Ke(r,e.request_response,e):yield oe(r,e)}),oe=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/track-request`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(e)}),o=yield t.json();if(t.status!==200&&T(o,"WARNING: While logging your request, PromptLayer experienced the following error:"),o&&e.return_pl_id)return[e.request_response,o.request_id]}catch(t){console.warn(`WARNING: While logging your request PromptLayer had the following error: ${t}`)}return e.request_response}),ne=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/library-track-metadata`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(k(g({},e),{api_key:r}))}),o=yield t.json();if(t.status!==200)return T(o,"WARNING: While logging metadata to your request, PromptLayer experienced the following error"),!1}catch(t){return console.warn(`WARNING: While logging metadata to your request, PromptLayer experienced the following error: ${t}`),!1}return!0}),se=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/library-track-score`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(k(g({},e),{api_key:r}))}),o=yield t.json();if(t.status!==200)return T(o,"WARNING: While scoring your request, PromptLayer experienced the following error"),!1}catch(t){return console.warn(`WARNING: While scoring your request, PromptLayer experienced the following error: ${t}`),!1}return!0}),ae=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/library-track-prompt`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(k(g({},e),{api_key:r}))}),o=yield t.json();if(t.status!==200)return 
T(o,"WARNING: While associating your request with a prompt template, PromptLayer experienced the following error"),!1}catch(t){return console.warn(`WARNING: While associating your request with a prompt template, PromptLayer experienced the following error: ${t}`),!1}return!0}),ie=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/track-group`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(k(g({},e),{api_key:r}))}),o=yield t.json();if(t.status!==200)return T(o,"WARNING: While associating your request with a group, PromptLayer experienced the following error"),!1}catch(t){return console.warn(`WARNING: While associating your request with a group, PromptLayer experienced the following error: ${t}`),!1}return!0}),ce=r=>l(void 0,null,function*(){try{let e=yield fetch(`${w}/create-group`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({api_key:r})}),t=yield e.json();return e.status!==200?(T(t,"WARNING: While creating a group PromptLayer had the following error"),!1):t.id}catch(e){return console.warn(`WARNING: While creating a group PromptLayer had the following error: ${e}`),!1}}),pe=(r,e,t)=>l(void 0,null,function*(){try{let o=new URL(`${w}/prompt-templates/${e}`),n=yield fetch(o,{method:"POST",headers:{"Content-Type":"application/json","X-API-KEY":r},body:JSON.stringify(t)}),s=yield n.json();return n.status!==200?(T(s,"WARNING: While fetching a prompt template PromptLayer had the following error"),null):(s.warning&&console.warn(`WARNING: While tracking your prompt PromptLayer had the following error: ${s.warning}`),s)}catch(o){return console.warn(`WARNING: While fetching a prompt template PromptLayer had the following error: ${o}`),null}}),le=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/rest/prompt-templates`,{method:"POST",headers:{"Content-Type":"application/json","X-API-KEY":r},body:JSON.stringify({prompt_template:g({},e),prompt_version:g({},e),release_labels:e.release_labels?e.release_labels:void 0})}),o=yield t.json();return t.status===400&&T(o,"WARNING: While publishing a prompt template PromptLayer had the following error"),o}catch(t){console.warn(`WARNING: While publishing a prompt template PromptLayer had the following error: ${t}`)}}),ue=(r,e)=>l(void 0,null,function*(){var t;try{let o=new URL(`${w}/prompt-templates`);Object.entries(e||{}).forEach(([a,c])=>o.searchParams.append(a,c.toString()));let n=yield fetch(o,{headers:{"Content-Type":"application/json","X-API-KEY":r}}),s=yield n.json();return n.status!==200?(T(s,"WARNING: While fetching all prompt templates PromptLayer had the following error"),null):(t=s.items)!=null?t:[]}catch(o){return console.warn(`WARNING: While fetching all prompt templates PromptLayer had the following error: ${o}`),null}}),me=i=>l(void 0,[i],function*({workflow_name:r,input_variables:e,metadata:t={},workflow_label_name:o=null,workflow_version_number:n=null,return_all_outputs:s=!1,api_key:a,timeout:c=12e4}){let p={input_variables:e,metadata:t,workflow_label_name:o,workflow_version_number:n,return_all_outputs:s},u={"X-API-KEY":a,"Content-Type":"application/json"};try{let f=yield fetch(`${w}/workflows/${encodeURIComponent(r)}/run`,{method:"POST",headers:u,body:JSON.stringify(p)});if(f.status!==201)return{success:!1,message:`Failed to run workflow: ${(yield f.json().catch(()=>({}))).error||f.statusText}`};let h=yield f.json();h.warning&&console.warn(`WARNING: ${h.warning}`);let d=h.workflow_version_execution_id;if(!d)return console.log("No execution ID returned from 
workflow run"),{success:!1,message:"Failed to run workflow"};let S=`workflow_updates:${d}`,P=(yield(yield fetch(`${w}/ws-token-request-library?capability=${S}`,{method:"POST",headers:u})).json()).token_details.token,y=new $e.Realtime({token:P});try{let m=yield ve(y,S,c);return y.close(),m}finally{y.close()}}catch(f){throw console.error(`Failed to run workflow: ${f instanceof Error?f.message:f}`),f}});function ve(r,e,t){return l(this,null,function*(){let o=r.channels.get(e);return new Promise((n,s)=>l(this,null,function*(){let a=null,c=p=>{p.name==="SET_WORKFLOW_COMPLETE"&&(a=JSON.parse(p.data).final_output,clearTimeout(i),o.unsubscribe("SET_WORKFLOW_COMPLETE",c),n(a))},i=setTimeout(()=>{o.unsubscribe("SET_WORKFLOW_COMPLETE",c),s(new Error("Workflow execution did not complete properly (timeout)"))},t);try{yield o.subscribe("SET_WORKFLOW_COMPLETE",c)}catch(p){clearTimeout(i),s(p)}}))})}var W=r=>{var c,i,p,u,f,h,d,S,b;let e=null,t,o={id:"",choices:[],created:Date.now(),model:"",object:"chat.completion"},n=r.at(-1);if(!n)return o;let s;for(let _ of r){if(_.choices.length===0)continue;let P=_.choices[0].delta;P.content&&(e=`${e||""}${P.content||""}`),P.function_call&&(t={name:`${t?t.name:""}${P.function_call.name||""}`,arguments:`${t?t.arguments:""}${P.function_call.arguments||""}`});let y=(c=P.tool_calls)==null?void 0:c[0];if(y){s=s||[];let m=s.at(-1);if(!m||y.id){s.push({id:y.id||"",type:y.type||"function",function:{name:((i=y.function)==null?void 0:i.name)||"",arguments:((p=y.function)==null?void 0:p.arguments)||""}});continue}m.function.name=`${m.function.name}${((u=y.function)==null?void 0:u.name)||""}`,m.function.arguments=`${m.function.arguments}${((f=y.function)==null?void 0:f.arguments)||""}`}}let a=r[0].choices.at(0);return o.choices.push({finish_reason:(h=a==null?void 0:a.finish_reason)!=null?h:"stop",index:(d=a==null?void 0:a.index)!=null?d:0,logprobs:(S=a==null?void 0:a.logprobs)!=null?S:null,message:{role:"assistant",content:e,function_call:t||void 0,tool_calls:s||void 0,refusal:(b=a==null?void 0:a.delta.refusal)!=null?b:null}}),o.id=n.id,o.model=n.model,o.created=n.created,o.system_fingerprint=n.system_fingerprint,o.usage=n.usage,o},D=r=>{let e={id:"",model:"",content:[],role:"assistant",type:"message",stop_reason:"stop_sequence",stop_sequence:null,usage:{input_tokens:0,output_tokens:0}};if(!r.at(-1))return e;let o="";for(let n of r)switch(n.type){case"message_start":{e=g({},n.message);break}case"content_block_delta":n.delta.type==="text_delta"&&(o=`${o}${n.delta.text}`);case"message_delta":"usage"in n&&(e.usage.output_tokens=n.usage.output_tokens),"stop_reason"in n.delta&&(e.stop_reason=n.delta.stop_reason);default:break}return e.content.push({type:"text",text:o}),e},Ge=(r,e="openai.chat.completions.create")=>{if("completion"in r[0])return r.reduce((t,o)=>k(g({},o),{completion:`${t.completion}${o.completion}`}),{});if(e==="anthropic.messages.create")return D(r);if("text"in r[0].choices[0]){let t="";for(let n of r)t=`${t}${n.choices[0].text}`;let o=structuredClone(r.at(-1));return o.choices[0].text=t,o}if("delta"in r[0].choices[0]){let t=W(r);return t.choices[0]=g(g({},t.choices[0]),t.choices[0].message),t}return""};function Ke(r,e,t){return U(this,null,function*(){let o=[];try{for(var a=J(e),c,i,p;c=!(i=yield new O(a.next())).done;c=!1){let u=i.value;yield t.return_pl_id?[u,null]:u,o.push(u)}}catch(i){p=[i]}finally{try{c&&(i=a.return)&&(yield new O(i.call(a)))}finally{if(p)throw p[0]}}let n=Ge(o,t.function_name),s=yield new 
O(oe(r,k(g({},t),{request_response:n,request_end_time:new Date().toISOString()})));if(s&&t.return_pl_id){let u=s[1];yield[o.at(-1),u]}})}var T=(r,e)=>{try{console.warn(`${e}: ${r.message}`)}catch(t){console.warn(`${e}: ${r}`)}},fe=r=>l(void 0,null,function*(){try{let e=yield fetch(`${w}/track-request`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(r)});return e.status!==200&&T(e,"WARNING: While logging your request, PromptLayer experienced the following error:"),e.json()}catch(e){console.warn(`WARNING: While logging your request PromptLayer had the following error: ${e}`)}return{}}),Y=r=>{let e={id:"",choices:[{finish_reason:"stop",index:0,text:"",logprobs:null}],created:Date.now(),model:"",object:"text_completion"},t=r.at(-1);if(!t)return e;let o="";for(let n of r)n.choices.length>0&&n.choices[0].text&&(o=`${o}${n.choices[0].text}`);return e.choices[0].text=o,e.id=t.id,e.created=t.created,e.model=t.model,e.system_fingerprint=t.system_fingerprint,e.usage=t.usage,e},de=r=>{let e={completion:"",id:"",model:"",stop_reason:"",type:"completion"},t=r.at(-1);if(!t)return e;let o="";for(let n of r)o=`${o}${n.completion}`;return e.completion=o,e.id=t.id,e.model=t.model,e.stop_reason=t.stop_reason,e};function he(r,e,t){return U(this,null,function*(){let o={request_id:null,raw_response:null,prompt_blueprint:null},n=[];try{for(var c=J(r),i,p,u;i=!(p=yield new O(c.next())).done;i=!1){let f=p.value;n.push(f),o.raw_response=f,yield o}}catch(p){u=[p]}finally{try{i&&(p=c.return)&&(yield new O(p.call(c)))}finally{if(u)throw u[0]}}let s=t(n),a=yield new O(e({request_response:s}));o.request_id=a.request_id,o.prompt_blueprint=a.prompt_blueprint,yield o})}var je=(r,e)=>l(void 0,null,function*(){return r.chat.completions.create(e)}),Me=(r,e)=>l(void 0,null,function*(){return r.completions.create(e)}),ye={chat:je,completion:Me},ge=(r,e)=>l(void 0,null,function*(){let t=C("openai").default,o=new t({baseURL:e.baseURL}),n=ye[r.prompt_template.type];return n(o,e)}),_e=(r,e)=>l(void 0,null,function*(){let t=C("openai").AzureOpenAI,o=new t({endpoint:e.baseURL});e==null||delete e.baseURL;let n=ye[r.prompt_template.type];return n(o,e)}),Ue=(r,e)=>l(void 0,null,function*(){return r.messages.create(e)}),Je=(r,e)=>l(void 0,null,function*(){return r.completions.create(e)}),De={chat:Ue,completion:Je},we=(r,e)=>l(void 0,null,function*(){let t=C("@anthropic-ai/sdk").default,o=new t({baseURL:e.baseURL}),n=De[r.prompt_template.type];return n(o,e)}),Pe=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/log-request`,{method:"POST",headers:{"X-API-KEY":r,"Content-Type":"application/json"},body:JSON.stringify(e)});return t.status!==201?(T(t,"WARNING: While logging your request PromptLayer had the following error"),null):t.json()}catch(t){return console.warn(`WARNING: While tracking your prompt PromptLayer had the following error: ${t}`),null}});var x=class{constructor(e){this.create=()=>ce(this.apiKey);this.apiKey=e}};import*as Te from"@opentelemetry/api";import{SimpleSpanProcessor as Ye}from"@opentelemetry/sdk-trace-base";import{NodeTracerProvider as Fe}from"@opentelemetry/sdk-trace-node";import{SpanKind as I,SpanStatusCode as F}from"@opentelemetry/api";import{ExportResultCode as $}from"@opentelemetry/core";var z=class{constructor(e,t){this.apiKey=t||process.env.PROMPTLAYER_API_KEY,this.enableTracing=e,this.url=`${w}/spans-bulk`}attributesToObject(e){return 
e?Object.fromEntries(Object.entries(e)):{}}spanKindToString(e){return{[I.INTERNAL]:"SpanKind.INTERNAL",[I.SERVER]:"SpanKind.SERVER",[I.CLIENT]:"SpanKind.CLIENT",[I.PRODUCER]:"SpanKind.PRODUCER",[I.CONSUMER]:"SpanKind.CONSUMER"}[e]||"SpanKind.INTERNAL"}statusCodeToString(e){return{[F.ERROR]:"StatusCode.ERROR",[F.OK]:"StatusCode.OK",[F.UNSET]:"StatusCode.UNSET"}[e]||"StatusCode.UNSET"}toNanoseconds(e){return(BigInt(e[0])*BigInt(1e9)+BigInt(e[1])).toString()}export(e){if(!this.enableTracing)return Promise.resolve($.SUCCESS);let t=e.map(o=>{var n;return{name:o.name,context:{trace_id:o.spanContext().traceId,span_id:o.spanContext().spanId,trace_state:((n=o.spanContext().traceState)==null?void 0:n.serialize())||""},kind:this.spanKindToString(o.kind),parent_id:o.parentSpanId||null,start_time:this.toNanoseconds(o.startTime),end_time:this.toNanoseconds(o.endTime),status:{status_code:this.statusCodeToString(o.status.code),description:o.status.message},attributes:this.attributesToObject(o.attributes),events:o.events.map(s=>({name:s.name,timestamp:this.toNanoseconds(s.time),attributes:this.attributesToObject(s.attributes)})),links:o.links.map(s=>({context:s.context,attributes:this.attributesToObject(s.attributes)})),resource:{attributes:k(g({},o.resource.attributes),{"service.name":"prompt-layer-js"}),schema_url:""}}});return fetch(this.url,{method:"POST",headers:{"Content-Type":"application/json","X-API-KEY":this.apiKey||""},body:JSON.stringify({spans:t})}).then(o=>o.ok?$.SUCCESS:(console.error(`Error exporting spans
+ var Ie=Object.defineProperty,qe=Object.defineProperties;var Le=Object.getOwnPropertyDescriptors;var Z=Object.getOwnPropertySymbols;var We=Object.prototype.hasOwnProperty,xe=Object.prototype.propertyIsEnumerable;var ee=(r,e)=>{if(e=Symbol[r])return e;throw Error("Symbol."+r+" is not defined")};var te=(r,e,t)=>e in r?Ie(r,e,{enumerable:!0,configurable:!0,writable:!0,value:t}):r[e]=t,g=(r,e)=>{for(var t in e||(e={}))We.call(e,t)&&te(r,t,e[t]);if(Z)for(var t of Z(e))xe.call(e,t)&&te(r,t,e[t]);return r},k=(r,e)=>qe(r,Le(e));var C=(r=>typeof require!="undefined"?require:typeof Proxy!="undefined"?new Proxy(r,{get:(e,t)=>(typeof require!="undefined"?require:e)[t]}):r)(function(r){if(typeof require!="undefined")return require.apply(this,arguments);throw Error('Dynamic require of "'+r+'" is not supported')});var l=(r,e,t)=>new Promise((o,n)=>{var s=i=>{try{c(t.next(i))}catch(p){n(p)}},a=i=>{try{c(t.throw(i))}catch(p){n(p)}},c=i=>i.done?o(i.value):Promise.resolve(i.value).then(s,a);c((t=t.apply(r,e)).next())}),O=function(r,e){this[0]=r,this[1]=e},U=(r,e,t)=>{var o=(a,c,i,p)=>{try{var u=t[a](c),f=(c=u.value)instanceof O,h=u.done;Promise.resolve(f?c[0]:c).then(d=>f?o(a==="return"?a:"next",c[1]?{done:d.done,value:d.value}:d,i,p):i({value:d,done:h})).catch(d=>o("throw",d,i,p))}catch(d){p(d)}},n=a=>s[a]=c=>new Promise((i,p)=>o(a,c,i,p)),s={};return t=t.apply(r,e),s[Symbol.asyncIterator]=()=>s,n("next"),n("throw"),n("return"),s};var J=(r,e,t)=>(e=r[ee("asyncIterator")])?e.call(r):(r=r[ee("iterator")](),e={},t=(o,n)=>(n=r[o])&&(e[o]=s=>new Promise((a,c,i)=>(s=n.call(r,s),i=s.done,Promise.resolve(s.value).then(p=>a({value:p,done:i}),c)))),t("next"),t("return"),e);import $e from"ably";var w=process.env.URL_API_PROMPTLAYER||"https://api.promptlayer.com",re=(r,e)=>l(void 0,null,function*(){return e.request_response[Symbol.asyncIterator]!==void 0?Ke(r,e.request_response,e):yield oe(r,e)}),oe=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/track-request`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(e)}),o=yield t.json();if(t.status!==200&&T(o,"WARNING: While logging your request, PromptLayer experienced the following error:"),o&&e.return_pl_id)return[e.request_response,o.request_id]}catch(t){console.warn(`WARNING: While logging your request PromptLayer had the following error: ${t}`)}return e.request_response}),ne=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/library-track-metadata`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(k(g({},e),{api_key:r}))}),o=yield t.json();if(t.status!==200)return T(o,"WARNING: While logging metadata to your request, PromptLayer experienced the following error"),!1}catch(t){return console.warn(`WARNING: While logging metadata to your request, PromptLayer experienced the following error: ${t}`),!1}return!0}),se=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/library-track-score`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(k(g({},e),{api_key:r}))}),o=yield t.json();if(t.status!==200)return T(o,"WARNING: While scoring your request, PromptLayer experienced the following error"),!1}catch(t){return console.warn(`WARNING: While scoring your request, PromptLayer experienced the following error: ${t}`),!1}return!0}),ae=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/library-track-prompt`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(k(g({},e),{api_key:r}))}),o=yield t.json();if(t.status!==200)return 
T(o,"WARNING: While associating your request with a prompt template, PromptLayer experienced the following error"),!1}catch(t){return console.warn(`WARNING: While associating your request with a prompt template, PromptLayer experienced the following error: ${t}`),!1}return!0}),ie=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/track-group`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(k(g({},e),{api_key:r}))}),o=yield t.json();if(t.status!==200)return T(o,"WARNING: While associating your request with a group, PromptLayer experienced the following error"),!1}catch(t){return console.warn(`WARNING: While associating your request with a group, PromptLayer experienced the following error: ${t}`),!1}return!0}),ce=r=>l(void 0,null,function*(){try{let e=yield fetch(`${w}/create-group`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({api_key:r})}),t=yield e.json();return e.status!==200?(T(t,"WARNING: While creating a group PromptLayer had the following error"),!1):t.id}catch(e){return console.warn(`WARNING: While creating a group PromptLayer had the following error: ${e}`),!1}}),pe=(r,e,t)=>l(void 0,null,function*(){try{let o=new URL(`${w}/prompt-templates/${e}`),n=yield fetch(o,{method:"POST",headers:{"Content-Type":"application/json","X-API-KEY":r},body:JSON.stringify(t)}),s=yield n.json();return n.status!==200?(T(s,"WARNING: While fetching a prompt template PromptLayer had the following error"),null):(s.warning&&console.warn(`WARNING: While fetching your prompt PromptLayer had the following error: ${s.warning}`),s)}catch(o){return console.warn(`WARNING: While fetching a prompt template PromptLayer had the following error: ${o}`),null}}),le=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/rest/prompt-templates`,{method:"POST",headers:{"Content-Type":"application/json","X-API-KEY":r},body:JSON.stringify({prompt_template:g({},e),prompt_version:g({},e),release_labels:e.release_labels?e.release_labels:void 0})}),o=yield t.json();return t.status===400&&T(o,"WARNING: While publishing a prompt template PromptLayer had the following error"),o}catch(t){console.warn(`WARNING: While publishing a prompt template PromptLayer had the following error: ${t}`)}}),ue=(r,e)=>l(void 0,null,function*(){var t;try{let o=new URL(`${w}/prompt-templates`);Object.entries(e||{}).forEach(([a,c])=>o.searchParams.append(a,c.toString()));let n=yield fetch(o,{headers:{"Content-Type":"application/json","X-API-KEY":r}}),s=yield n.json();return n.status!==200?(T(s,"WARNING: While fetching all prompt templates PromptLayer had the following error"),null):(t=s.items)!=null?t:[]}catch(o){return console.warn(`WARNING: While fetching all prompt templates PromptLayer had the following error: ${o}`),null}}),me=i=>l(void 0,[i],function*({workflow_name:r,input_variables:e,metadata:t={},workflow_label_name:o=null,workflow_version_number:n=null,return_all_outputs:s=!1,api_key:a,timeout:c=36e5}){let p={input_variables:e,metadata:t,workflow_label_name:o,workflow_version_number:n,return_all_outputs:s},u={"X-API-KEY":a,"Content-Type":"application/json"};try{let f=yield fetch(`${w}/workflows/${encodeURIComponent(r)}/run`,{method:"POST",headers:u,body:JSON.stringify(p)});if(f.status!==201)return{success:!1,message:`Failed to run workflow: ${(yield f.json().catch(()=>({}))).error||f.statusText}`};let h=yield f.json();h.warning&&console.warn(`WARNING: ${h.warning}`);let d=h.workflow_version_execution_id;if(!d)return console.log("No execution ID returned from 
workflow run"),{success:!1,message:"Failed to run workflow"};let S=`workflow_updates:${d}`,P=(yield(yield fetch(`${w}/ws-token-request-library?capability=${S}`,{method:"POST",headers:u})).json()).token_details.token,y=new $e.Realtime({token:P});try{let m=yield ve(y,S,c);return y.close(),m}finally{y.close()}}catch(f){throw console.error(`Failed to run workflow: ${f instanceof Error?f.message:f}`),f}});function ve(r,e,t){return l(this,null,function*(){let o=r.channels.get(e);return new Promise((n,s)=>l(this,null,function*(){let a=null,c=p=>{p.name==="SET_WORKFLOW_COMPLETE"&&(a=JSON.parse(p.data).final_output,clearTimeout(i),o.unsubscribe("SET_WORKFLOW_COMPLETE",c),n(a))},i=setTimeout(()=>{o.unsubscribe("SET_WORKFLOW_COMPLETE",c),s(new Error("Workflow execution did not complete properly (timeout)"))},t);try{yield o.subscribe("SET_WORKFLOW_COMPLETE",c)}catch(p){clearTimeout(i),s(p)}}))})}var W=r=>{var c,i,p,u,f,h,d,S,b;let e=null,t,o={id:"",choices:[],created:Date.now(),model:"",object:"chat.completion"},n=r.at(-1);if(!n)return o;let s;for(let _ of r){if(_.choices.length===0)continue;let P=_.choices[0].delta;P.content&&(e=`${e||""}${P.content||""}`),P.function_call&&(t={name:`${t?t.name:""}${P.function_call.name||""}`,arguments:`${t?t.arguments:""}${P.function_call.arguments||""}`});let y=(c=P.tool_calls)==null?void 0:c[0];if(y){s=s||[];let m=s.at(-1);if(!m||y.id){s.push({id:y.id||"",type:y.type||"function",function:{name:((i=y.function)==null?void 0:i.name)||"",arguments:((p=y.function)==null?void 0:p.arguments)||""}});continue}m.function.name=`${m.function.name}${((u=y.function)==null?void 0:u.name)||""}`,m.function.arguments=`${m.function.arguments}${((f=y.function)==null?void 0:f.arguments)||""}`}}let a=r[0].choices.at(0);return o.choices.push({finish_reason:(h=a==null?void 0:a.finish_reason)!=null?h:"stop",index:(d=a==null?void 0:a.index)!=null?d:0,logprobs:(S=a==null?void 0:a.logprobs)!=null?S:null,message:{role:"assistant",content:e,function_call:t||void 0,tool_calls:s||void 0,refusal:(b=a==null?void 0:a.delta.refusal)!=null?b:null}}),o.id=n.id,o.model=n.model,o.created=n.created,o.system_fingerprint=n.system_fingerprint,o.usage=n.usage,o},D=r=>{let e={id:"",model:"",content:[],role:"assistant",type:"message",stop_reason:"stop_sequence",stop_sequence:null,usage:{input_tokens:0,output_tokens:0}};if(!r.at(-1))return e;let o="";for(let n of r)switch(n.type){case"message_start":{e=g({},n.message);break}case"content_block_delta":n.delta.type==="text_delta"&&(o=`${o}${n.delta.text}`);case"message_delta":"usage"in n&&(e.usage.output_tokens=n.usage.output_tokens),"stop_reason"in n.delta&&(e.stop_reason=n.delta.stop_reason);default:break}return e.content.push({type:"text",text:o}),e},Ge=(r,e="openai.chat.completions.create")=>{if("completion"in r[0])return r.reduce((t,o)=>k(g({},o),{completion:`${t.completion}${o.completion}`}),{});if(e==="anthropic.messages.create")return D(r);if("text"in r[0].choices[0]){let t="";for(let n of r)t=`${t}${n.choices[0].text}`;let o=structuredClone(r.at(-1));return o.choices[0].text=t,o}if("delta"in r[0].choices[0]){let t=W(r);return t.choices[0]=g(g({},t.choices[0]),t.choices[0].message),t}return""};function Ke(r,e,t){return U(this,null,function*(){let o=[];try{for(var a=J(e),c,i,p;c=!(i=yield new O(a.next())).done;c=!1){let u=i.value;yield t.return_pl_id?[u,null]:u,o.push(u)}}catch(i){p=[i]}finally{try{c&&(i=a.return)&&(yield new O(i.call(a)))}finally{if(p)throw p[0]}}let n=Ge(o,t.function_name),s=yield new 
O(oe(r,k(g({},t),{request_response:n,request_end_time:new Date().toISOString()})));if(s&&t.return_pl_id){let u=s[1];yield[o.at(-1),u]}})}var T=(r,e)=>{try{console.warn(`${e}: ${r.message}`)}catch(t){console.warn(`${e}: ${r}`)}},fe=r=>l(void 0,null,function*(){try{let e=yield fetch(`${w}/track-request`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(r)});return e.status!==200&&T(e,"WARNING: While logging your request, PromptLayer experienced the following error:"),e.json()}catch(e){console.warn(`WARNING: While logging your request PromptLayer had the following error: ${e}`)}return{}}),Y=r=>{let e={id:"",choices:[{finish_reason:"stop",index:0,text:"",logprobs:null}],created:Date.now(),model:"",object:"text_completion"},t=r.at(-1);if(!t)return e;let o="";for(let n of r)n.choices.length>0&&n.choices[0].text&&(o=`${o}${n.choices[0].text}`);return e.choices[0].text=o,e.id=t.id,e.created=t.created,e.model=t.model,e.system_fingerprint=t.system_fingerprint,e.usage=t.usage,e},de=r=>{let e={completion:"",id:"",model:"",stop_reason:"",type:"completion"},t=r.at(-1);if(!t)return e;let o="";for(let n of r)o=`${o}${n.completion}`;return e.completion=o,e.id=t.id,e.model=t.model,e.stop_reason=t.stop_reason,e};function he(r,e,t){return U(this,null,function*(){let o={request_id:null,raw_response:null,prompt_blueprint:null},n=[];try{for(var c=J(r),i,p,u;i=!(p=yield new O(c.next())).done;i=!1){let f=p.value;n.push(f),o.raw_response=f,yield o}}catch(p){u=[p]}finally{try{i&&(p=c.return)&&(yield new O(p.call(c)))}finally{if(u)throw u[0]}}let s=t(n),a=yield new O(e({request_response:s}));o.request_id=a.request_id,o.prompt_blueprint=a.prompt_blueprint,yield o})}var je=(r,e)=>l(void 0,null,function*(){return r.chat.completions.create(e)}),Me=(r,e)=>l(void 0,null,function*(){return r.completions.create(e)}),ye={chat:je,completion:Me},ge=(r,e)=>l(void 0,null,function*(){let t=C("openai").default,o=new t({baseURL:e.baseURL}),n=ye[r.prompt_template.type];return n(o,e)}),_e=(r,e)=>l(void 0,null,function*(){let t=C("openai").AzureOpenAI,o=new t({endpoint:e.baseURL});e==null||delete e.baseURL;let n=ye[r.prompt_template.type];return n(o,e)}),Ue=(r,e)=>l(void 0,null,function*(){return r.messages.create(e)}),Je=(r,e)=>l(void 0,null,function*(){return r.completions.create(e)}),De={chat:Ue,completion:Je},we=(r,e)=>l(void 0,null,function*(){let t=C("@anthropic-ai/sdk").default,o=new t({baseURL:e.baseURL}),n=De[r.prompt_template.type];return n(o,e)}),Pe=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/log-request`,{method:"POST",headers:{"X-API-KEY":r,"Content-Type":"application/json"},body:JSON.stringify(e)});return t.status!==201?(T(t,"WARNING: While logging your request PromptLayer had the following error"),null):t.json()}catch(t){return console.warn(`WARNING: While tracking your prompt PromptLayer had the following error: ${t}`),null}});var x=class{constructor(e){this.create=()=>ce(this.apiKey);this.apiKey=e}};import*as Te from"@opentelemetry/api";import{SimpleSpanProcessor as Ye}from"@opentelemetry/sdk-trace-base";import{NodeTracerProvider as Fe}from"@opentelemetry/sdk-trace-node";import{SpanKind as I,SpanStatusCode as F}from"@opentelemetry/api";import{ExportResultCode as $}from"@opentelemetry/core";var z=class{constructor(e,t){this.apiKey=t||process.env.PROMPTLAYER_API_KEY,this.enableTracing=e,this.url=`${w}/spans-bulk`}attributesToObject(e){return 
e?Object.fromEntries(Object.entries(e)):{}}spanKindToString(e){return{[I.INTERNAL]:"SpanKind.INTERNAL",[I.SERVER]:"SpanKind.SERVER",[I.CLIENT]:"SpanKind.CLIENT",[I.PRODUCER]:"SpanKind.PRODUCER",[I.CONSUMER]:"SpanKind.CONSUMER"}[e]||"SpanKind.INTERNAL"}statusCodeToString(e){return{[F.ERROR]:"StatusCode.ERROR",[F.OK]:"StatusCode.OK",[F.UNSET]:"StatusCode.UNSET"}[e]||"StatusCode.UNSET"}toNanoseconds(e){return(BigInt(e[0])*BigInt(1e9)+BigInt(e[1])).toString()}export(e){if(!this.enableTracing)return Promise.resolve($.SUCCESS);let t=e.map(o=>{var n;return{name:o.name,context:{trace_id:o.spanContext().traceId,span_id:o.spanContext().spanId,trace_state:((n=o.spanContext().traceState)==null?void 0:n.serialize())||""},kind:this.spanKindToString(o.kind),parent_id:o.parentSpanId||null,start_time:this.toNanoseconds(o.startTime),end_time:this.toNanoseconds(o.endTime),status:{status_code:this.statusCodeToString(o.status.code),description:o.status.message},attributes:this.attributesToObject(o.attributes),events:o.events.map(s=>({name:s.name,timestamp:this.toNanoseconds(s.time),attributes:this.attributesToObject(s.attributes)})),links:o.links.map(s=>({context:s.context,attributes:this.attributesToObject(s.attributes)})),resource:{attributes:k(g({},o.resource.attributes),{"service.name":"prompt-layer-js"}),schema_url:""}}});return fetch(this.url,{method:"POST",headers:{"Content-Type":"application/json","X-API-KEY":this.apiKey||""},body:JSON.stringify({spans:t})}).then(o=>o.ok?$.SUCCESS:(console.error(`Error exporting spans
  HTTP error! status: ${o.status}`),$.FAILED)).catch(o=>(console.error("Error exporting spans:",o),$.FAILED))}shutdown(){return Promise.resolve()}},Re=z;var E=(r="promptlayer-tracer")=>Te.trace.getTracer(r),Se=(r,e)=>{let t=new Fe,o=new Re(r,e),n=new Ye(o);t.addSpanProcessor(n),t.register()};var ze=E(),B=(r,e,t="",o="openai")=>{let n={construct:(s,a)=>{let c=Reflect.construct(s,a);return Object.defineProperties(c,{function_name:{value:t,writable:!0},provider:{value:o}}),new Proxy(c,n)},get:(s,a,c)=>{let i=s[a],p=`${Reflect.get(s,"function_name")}.${a.toString()}`;return typeof i=="object"?(Object.defineProperties(i,{function_name:{value:p,writable:!0},provider:{value:o}}),new Proxy(i,n)):typeof i=="function"?(...u)=>{var b,_,P,y;let f=new Date().toISOString(),h=Reflect.get(s,"provider"),d=(b=u[0])==null?void 0:b.return_pl_id,S=(_=u[0])==null?void 0:_.pl_tags;return(P=u[0])==null||delete P.return_pl_id,(y=u[0])==null||delete y.pl_tags,ze.startActiveSpan(`${h}.${p}`,m=>l(void 0,null,function*(){try{m.setAttribute("function_input",JSON.stringify(u));let R=Reflect.apply(i,s,u),j=m.spanContext().spanId;return R instanceof Promise?new Promise((A,q)=>{R.then(N=>l(void 0,null,function*(){let L=yield re(r,{api_key:r,provider_type:h,function_name:p,request_start_time:f,request_end_time:new Date().toISOString(),request_response:N,kwargs:u[0],return_pl_id:d,tags:S,span_id:j});m.setAttribute("function_output",JSON.stringify(L)),m.setAttribute("response_status","success"),m.end(),A(L)})).catch(N=>{m.recordException(N),m.setAttribute("response_status","error"),m.end(),q(N)})}):(m.setAttribute("function_output",JSON.stringify(R)),m.setAttribute("response_status","success"),m.end(),R)}catch(R){throw m.recordException(R),m.setAttribute("response_status","error"),m.end(),R}}))}:Reflect.get(s,a,c)}};return new Proxy(e,n)};import*as v from"@opentelemetry/api";var ke=(r,e,t)=>function(...o){let n=E(),s=a=>{try{t&&Object.entries(t).forEach(([i,p])=>{a.setAttribute(i,p)}),a.setAttribute("function_input",JSON.stringify(o));let c=e(...o);return c instanceof Promise?c.then(i=>(a.setAttribute("function_output",JSON.stringify(i)),a.setStatus({code:v.SpanStatusCode.OK}),i)).catch(i=>{throw be(a,i,o),i}).finally(()=>a.end()):(a.setAttribute("function_output",JSON.stringify(c)),a.setStatus({code:v.SpanStatusCode.OK}),a.end(),c)}catch(c){throw be(a,c,o),c}};return n.startActiveSpan(r,s)},be=(r,e,t)=>{r.setAttribute("function_input",JSON.stringify(t)),r.setStatus({code:v.SpanStatusCode.ERROR,message:e instanceof Error?e.message:"Unknown error"}),r.end()};var G=class{constructor(e){this.get=(e,t)=>pe(this.apiKey,e,t);this.publish=e=>le(this.apiKey,e);this.all=e=>ue(this.apiKey,e);this.apiKey=e}};var Be=(r,e)=>{if(!(e.metadata instanceof Object))throw new Error("Please provide a dictionary of metadata.");for(let[t,o]of Object.entries(e.metadata))if(typeof t!="string"||typeof o!="string")throw new Error("Please provide a dictionary of metadata with key value pair of strings.");return ne(r,e)},Xe=(r,e)=>{if(typeof e.score!="number")throw new Error("Score must be a number");if(e.score<0||e.score>100)throw new Error("Score must be a number between 0 and 100.");return se(r,e)},He=(r,e)=>{if(!(e.prompt_input_variables instanceof Object))throw new Error("Prompt template input variable dictionary not provided.");return ae(r,e)},Ve=(r,e)=>ie(r,e),K=class{constructor(e){this.group=e=>Ve(this.apiKey,e);this.metadata=e=>Be(this.apiKey,e);this.prompt=e=>He(this.apiKey,e);this.score=e=>Xe(this.apiKey,e);this.apiKey=e}};import*as Ae 
from"@opentelemetry/api";var Qe={openai:{chat:{function_name:"openai.chat.completions.create",stream_function:W},completion:{function_name:"openai.completions.create",stream_function:Y}},anthropic:{chat:{function_name:"anthropic.messages.create",stream_function:D},completion:{function_name:"anthropic.completions.create",stream_function:de}},"openai.azure":{chat:{function_name:"openai.AzureOpenAI.chat.completions.create",stream_function:W},completion:{function_name:"openai.AzureOpenAI.completions.create",stream_function:Y}}},Ze={openai:ge,anthropic:we,"openai.azure":_e},et=r=>{if(!r||typeof r!="object"||Array.isArray(r))return!1;let e=["status","value","error_message","raw_error_message","is_output_node"];return Object.values(r).every(o=>typeof o!="object"||o===null?!1:e.every(n=>n in o))},Oe=class{constructor({apiKey:e=process.env.PROMPTLAYER_API_KEY,enableTracing:t=!1}={}){if(e===void 0)throw new Error("PromptLayer API key not provided. Please set the PROMPTLAYER_API_KEY environment variable or pass the api_key parameter.");this.apiKey=e,this.enableTracing=t,this.templates=new G(e),this.group=new x(e),this.track=new K(e),this.wrapWithSpan=ke,t&&Se(t,e)}get Anthropic(){try{let e=C("@anthropic-ai/sdk").default;return B(this.apiKey,e,"anthropic","anthropic")}catch(e){console.error("To use the Anthropic module, you must install the @anthropic-ai/sdk package.")}}get OpenAI(){try{let e=C("openai").default;return B(this.apiKey,e,"openai","openai")}catch(e){console.error("To use the OpenAI module, you must install the @openai/api package.")}}run(u){return l(this,arguments,function*({promptName:e,promptVersion:t,promptReleaseLabel:o,inputVariables:n,tags:s,metadata:a,groupId:c,modelParameterOverrides:i,stream:p=!1}){return E().startActiveSpan("PromptLayer Run",h=>l(this,null,function*(){try{let d={promptName:e,promptVersion:t,promptReleaseLabel:o,inputVariables:n,tags:s,metadata:a,groupId:c,modelParameterOverrides:i,stream:p};h.setAttribute("function_input",JSON.stringify(d));let S=n,b={label:o,version:t,metadata_filters:a};n&&(b.input_variables=n);let _=yield this.templates.get(e,b);if(!_)throw new Error("Prompt not found");let P=_.prompt_template;if(!_.llm_kwargs)throw new Error(`Prompt '${e}' does not have any LLM kwargs associated with it.`);let y=_.metadata;if(!y)throw new Error(`Prompt '${e}' does not have any metadata associated with it.`);let m=y.model;if(!m)throw new Error(`Prompt '${e}' does not have a model parameters associated with it.`);let R=m.provider,j=new Date().toISOString(),A=g(g({},_.llm_kwargs),i||{}),q=Qe[R][P.type],N=q.function_name,L=q.stream_function,Ne=Ze[R],X=_.provider_base_url;X&&(A.baseURL=X.url),A.stream=p,p&&["openai","openai.azure"].includes(R)&&(A.stream_options={include_usage:!0});let M=yield Ne(_,A),H=Ee=>{let Ce=new Date().toISOString();return fe(g({function_name:N,provider_type:R,args:[],kwargs:A,tags:s,request_start_time:j,request_end_time:Ce,api_key:this.apiKey,metadata:a,prompt_id:_.id,prompt_version:_.version,prompt_input_variables:S,group_id:c,return_prompt_blueprint:!0,span_id:h.spanContext().spanId},Ee))};if(p)return he(M,H,L);let V=yield H({request_response:M}),Q={request_id:V.request_id,raw_response:M,prompt_blueprint:V.prompt_blueprint};return h.setAttribute("function_output",JSON.stringify(Q)),Q}catch(d){throw h.setStatus({code:Ae.SpanStatusCode.ERROR,message:d instanceof Error?d.message:"Unknown error"}),d}finally{h.end()}}))})}runWorkflow(c){return 
l(this,arguments,function*({workflowName:e,inputVariables:t={},metadata:o={},workflowLabelName:n=null,workflowVersion:s=null,returnAllOutputs:a=!1}){try{let i=yield me({workflow_name:e,input_variables:t,metadata:o,workflow_label_name:n,workflow_version_number:s,return_all_outputs:a,api_key:this.apiKey});if(!a&&et(i)){let u=Object.values(i).filter(h=>h.is_output_node===!0);if(u.length===0)throw new Error(JSON.stringify(i,null,2));if(!u.some(h=>h.status==="SUCCESS"))throw new Error(JSON.stringify(i,null,2))}return i}catch(i){throw i instanceof Error?(console.error("Error running workflow:",i.message),new Error(`Error running workflow: ${i.message}`)):(console.error("Unknown error running workflow:",i),new Error("Unknown error running workflow"))}})}logRequest(e){return l(this,null,function*(){return Pe(this.apiKey,e)})}};export{Oe as PromptLayer};
  //# sourceMappingURL=index.js.map
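Note: the bundle is re-emitted wholesale between versions, so the functional edits are easy to miss inside the minified lines above. Below is a hand-de-minified sketch of the two changes that are readily visible between the 1.0.21 and 1.0.23 sides (identifiers follow the TypeScript embedded in the source map; the snippet is illustrative only and not part of the package):

// Sketch of the visible 1.0.21 -> 1.0.23 changes in dist/esm/index.js.
// 1) runWorkflowRequest: the default workflow-completion timeout grows from 2 minutes to 1 hour.
//    1.0.21 bundle: `timeout:c=12e4`   1.0.23 bundle: `timeout:c=36e5`
const OLD_DEFAULT_TIMEOUT_MS = 12e4; // 120_000 ms = 2 minutes
const NEW_DEFAULT_TIMEOUT_MS = 36e5; // 3_600_000 ms = 1 hour
console.log(`${OLD_DEFAULT_TIMEOUT_MS / 60_000} min -> ${NEW_DEFAULT_TIMEOUT_MS / 60_000} min`); // "2 min -> 60 min"

// 2) getPromptTemplate: when the API response carries a `warning` field, the logged
//    message now says "fetching" instead of "tracking":
//    1.0.21: "WARNING: While tracking your prompt PromptLayer had the following error: ..."
//    1.0.23: "WARNING: While fetching your prompt PromptLayer had the following error: ..."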
package/dist/esm/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/utils.ts","../../src/groups.ts","../../src/tracing.ts","../../src/span-exporter.ts","../../src/promptlayer.ts","../../src/span-wrapper.ts","../../src/templates.ts","../../src/track.ts","../../src/index.ts"],"sourcesContent":["import {\n GetPromptTemplateParams,\n GetPromptTemplateResponse,\n ListPromptTemplatesResponse,\n LogRequest,\n Pagination,\n PublishPromptTemplate,\n PublishPromptTemplateResponse,\n RequestLog,\n RunWorkflowRequestParams,\n TrackGroup,\n TrackMetadata,\n TrackPrompt,\n TrackRequest,\n TrackScore,\n WorkflowResponse,\n} from \"@/types\";\nimport type TypeAnthropic from \"@anthropic-ai/sdk\";\nimport {\n Completion as AnthropicCompletion,\n Message,\n MessageStreamEvent,\n} from \"@anthropic-ai/sdk/resources\";\nimport Ably from \"ably\";\nimport type TypeOpenAI from \"openai\";\nimport {\n ChatCompletion,\n ChatCompletionChunk,\n Completion,\n} from \"openai/resources\";\n\nexport const URL_API_PROMPTLAYER =\n process.env.URL_API_PROMPTLAYER || \"https://api.promptlayer.com\";\n\nconst promptlayerApiHandler = async <Item>(\n apiKey: string,\n body: TrackRequest & {\n request_response: AsyncIterable<Item> | any;\n }\n) => {\n const isGenerator = body.request_response[Symbol.asyncIterator] !== undefined;\n if (isGenerator) {\n return proxyGenerator(apiKey, body.request_response, body);\n }\n return await promptLayerApiRequest(apiKey, body);\n};\n\nconst promptLayerApiRequest = async (apiKey: string, body: TrackRequest) => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/track-request`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify(body),\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While logging your request, PromptLayer experienced the following error:\"\n );\n }\n if (data && body.return_pl_id) {\n return [body.request_response, data.request_id];\n }\n } catch (e) {\n console.warn(\n `WARNING: While logging your request PromptLayer had the following error: ${e}`\n );\n }\n return body.request_response;\n};\n\nconst promptLayerTrackMetadata = async (\n apiKey: string,\n body: TrackMetadata\n): Promise<boolean> => {\n try {\n const response = await fetch(\n `${URL_API_PROMPTLAYER}/library-track-metadata`,\n {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n ...body,\n api_key: apiKey,\n }),\n }\n );\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While logging metadata to your request, PromptLayer experienced the following error\"\n );\n return false;\n }\n } catch (e) {\n console.warn(\n `WARNING: While logging metadata to your request, PromptLayer experienced the following error: ${e}`\n );\n return false;\n }\n return true;\n};\n\nconst promptLayerTrackScore = async (\n apiKey: string,\n body: TrackScore\n): Promise<boolean> => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/library-track-score`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n ...body,\n api_key: apiKey,\n }),\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While scoring your request, PromptLayer experienced the following error\"\n );\n return false;\n }\n } catch (e) {\n console.warn(\n `WARNING: While scoring your request, PromptLayer experienced 
the following error: ${e}`\n );\n return false;\n }\n return true;\n};\n\nconst promptLayerTrackPrompt = async (\n apiKey: string,\n body: TrackPrompt\n): Promise<boolean> => {\n try {\n const response = await fetch(\n `${URL_API_PROMPTLAYER}/library-track-prompt`,\n {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n ...body,\n api_key: apiKey,\n }),\n }\n );\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While associating your request with a prompt template, PromptLayer experienced the following error\"\n );\n return false;\n }\n } catch (e) {\n console.warn(\n `WARNING: While associating your request with a prompt template, PromptLayer experienced the following error: ${e}`\n );\n return false;\n }\n return true;\n};\n\nconst promptLayerTrackGroup = async (\n apiKey: string,\n body: TrackGroup\n): Promise<boolean> => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/track-group`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n ...body,\n api_key: apiKey,\n }),\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While associating your request with a group, PromptLayer experienced the following error\"\n );\n return false;\n }\n } catch (e) {\n console.warn(\n `WARNING: While associating your request with a group, PromptLayer experienced the following error: ${e}`\n );\n return false;\n }\n return true;\n};\n\nconst promptLayerCreateGroup = async (\n apiKey: string\n): Promise<number | boolean> => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/create-group`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n api_key: apiKey,\n }),\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While creating a group PromptLayer had the following error\"\n );\n return false;\n }\n return data.id;\n } catch (e) {\n console.warn(\n `WARNING: While creating a group PromptLayer had the following error: ${e}`\n );\n return false;\n }\n};\n\nconst getPromptTemplate = async (\n apiKey: string,\n promptName: string,\n params?: Partial<GetPromptTemplateParams>\n) => {\n try {\n const url = new URL(\n `${URL_API_PROMPTLAYER}/prompt-templates/${promptName}`\n );\n const response = await fetch(url, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n \"X-API-KEY\": apiKey,\n },\n body: JSON.stringify(params),\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While fetching a prompt template PromptLayer had the following error\"\n );\n return null;\n }\n if (data.warning) {\n console.warn(\n `WARNING: While tracking your prompt PromptLayer had the following error: ${data.warning}`\n );\n }\n return data as Promise<GetPromptTemplateResponse>;\n } catch (e) {\n console.warn(\n `WARNING: While fetching a prompt template PromptLayer had the following error: ${e}`\n );\n return null;\n }\n};\n\nconst publishPromptTemplate = async (\n apiKey: string,\n body: PublishPromptTemplate\n) => {\n try {\n const response = await fetch(\n `${URL_API_PROMPTLAYER}/rest/prompt-templates`,\n {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n \"X-API-KEY\": apiKey,\n },\n body: JSON.stringify({\n prompt_template: 
{ ...body },\n prompt_version: { ...body },\n release_labels: body.release_labels ? body.release_labels : undefined,\n }),\n }\n );\n const data = await response.json();\n if (response.status === 400) {\n warnOnBadResponse(\n data,\n \"WARNING: While publishing a prompt template PromptLayer had the following error\"\n );\n }\n return data as Promise<PublishPromptTemplateResponse>;\n } catch (e) {\n console.warn(\n `WARNING: While publishing a prompt template PromptLayer had the following error: ${e}`\n );\n }\n};\n\nconst getAllPromptTemplates = async (\n apiKey: string,\n params?: Partial<Pagination>\n) => {\n try {\n const url = new URL(`${URL_API_PROMPTLAYER}/prompt-templates`);\n Object.entries(params || {}).forEach(([key, value]) =>\n url.searchParams.append(key, value.toString())\n );\n const response = await fetch(url, {\n headers: {\n \"Content-Type\": \"application/json\",\n \"X-API-KEY\": apiKey,\n },\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While fetching all prompt templates PromptLayer had the following error\"\n );\n return null;\n }\n return (data.items ?? []) as Promise<Array<ListPromptTemplatesResponse>>;\n } catch (e) {\n console.warn(\n `WARNING: While fetching all prompt templates PromptLayer had the following error: ${e}`\n );\n return null;\n }\n};\n\nexport const runWorkflowRequest = async ({\n workflow_name,\n input_variables,\n metadata = {},\n workflow_label_name = null,\n workflow_version_number = null,\n return_all_outputs = false,\n api_key,\n timeout = 120000, // Default timeout is 2 minutes in milliseconds\n}: RunWorkflowRequestParams): Promise<WorkflowResponse> => {\n const payload = {\n input_variables,\n metadata,\n workflow_label_name,\n workflow_version_number,\n return_all_outputs,\n };\n\n const headers = {\n \"X-API-KEY\": api_key,\n \"Content-Type\": \"application/json\",\n };\n\n try {\n // Start the workflow by making a POST request\n const response = await fetch(\n `${URL_API_PROMPTLAYER}/workflows/${encodeURIComponent(\n workflow_name\n )}/run`,\n {\n method: \"POST\",\n headers: headers,\n body: JSON.stringify(payload),\n }\n );\n\n if (response.status !== 201) {\n const errorData = await response.json().catch(() => ({}));\n return {\n success: false,\n message: `Failed to run workflow: ${\n errorData.error || response.statusText\n }`,\n };\n }\n\n const result = await response.json();\n if (result.warning) {\n console.warn(`WARNING: ${result.warning}`);\n }\n const execution_id = result.workflow_version_execution_id;\n if (!execution_id) {\n console.log(\"No execution ID returned from workflow run\");\n return { success: false, message: \"Failed to run workflow\" };\n }\n\n const channel_name = `workflow_updates:${execution_id}`;\n\n // Request a token to subscribe to the channel\n const ws_response = await fetch(\n `${URL_API_PROMPTLAYER}/ws-token-request-library?capability=${channel_name}`,\n {\n method: \"POST\",\n headers: headers,\n }\n );\n\n const ws_token_response = await ws_response.json();\n\n const ably_token = ws_token_response.token_details.token;\n\n // Initialize Ably client using the Promise-based client\n const ably = new Ably.Realtime({ token: ably_token });\n\n try {\n // Wait for the workflow to complete and get the final output\n const final_output = await waitForWorkflowCompletion(\n ably,\n channel_name,\n timeout\n );\n ably.close();\n return final_output;\n } finally {\n // Ensure the Ably client is closed in all cases\n ably.close();\n }\n } 
catch (error) {\n console.error(\n `Failed to run workflow: ${\n error instanceof Error ? error.message : error\n }`\n );\n throw error;\n }\n};\n\nasync function waitForWorkflowCompletion(\n ably: Ably.Realtime,\n channel_name: string,\n timeout: number\n): Promise<any> {\n const channel = ably.channels.get(channel_name);\n\n return new Promise(async (resolve, reject) => {\n let results: any = null;\n\n const messageListener = (message: Ably.Message) => {\n if (message.name === \"SET_WORKFLOW_COMPLETE\") {\n const message_data = JSON.parse(message.data as string);\n results = message_data.final_output;\n clearTimeout(timer);\n channel.unsubscribe(\"SET_WORKFLOW_COMPLETE\", messageListener);\n resolve(results);\n }\n };\n\n // Set up a timeout to reject the promise if no message is received in time\n const timer = setTimeout(() => {\n channel.unsubscribe(\"SET_WORKFLOW_COMPLETE\", messageListener);\n reject(new Error(\"Workflow execution did not complete properly (timeout)\"));\n }, timeout);\n\n try {\n // Subscribe to the channel to receive updates\n await channel.subscribe(\"SET_WORKFLOW_COMPLETE\", messageListener);\n } catch (err) {\n clearTimeout(timer);\n reject(err);\n }\n });\n}\n\nconst openaiStreamChat = (results: ChatCompletionChunk[]): ChatCompletion => {\n let content: ChatCompletion.Choice[\"message\"][\"content\"] = null;\n let functionCall: ChatCompletion.Choice[\"message\"][\"function_call\"] =\n undefined;\n const response: ChatCompletion = {\n id: \"\",\n choices: [],\n created: Date.now(),\n model: \"\",\n object: \"chat.completion\",\n };\n const lastResult = results.at(-1);\n if (!lastResult) return response;\n let toolCalls: ChatCompletion.Choice[\"message\"][\"tool_calls\"] = undefined;\n for (const result of results) {\n if (result.choices.length === 0) continue;\n const delta = result.choices[0].delta;\n\n if (delta.content) {\n content = `${content || \"\"}${delta.content || \"\"}`;\n }\n if (delta.function_call) {\n functionCall = {\n name: `${functionCall ? functionCall.name : \"\"}${\n delta.function_call.name || \"\"\n }`,\n arguments: `${functionCall ? functionCall.arguments : \"\"}${\n delta.function_call.arguments || \"\"\n }`,\n };\n }\n const toolCall = delta.tool_calls?.[0];\n if (toolCall) {\n toolCalls = toolCalls || [];\n const lastToolCall = toolCalls.at(-1);\n if (!lastToolCall || toolCall.id) {\n toolCalls.push({\n id: toolCall.id || \"\",\n type: toolCall.type || \"function\",\n function: {\n name: toolCall.function?.name || \"\",\n arguments: toolCall.function?.arguments || \"\",\n },\n });\n continue;\n }\n lastToolCall.function.name = `${lastToolCall.function.name}${\n toolCall.function?.name || \"\"\n }`;\n lastToolCall.function.arguments = `${lastToolCall.function.arguments}${\n toolCall.function?.arguments || \"\"\n }`;\n }\n }\n const firstChoice = results[0].choices.at(0);\n response.choices.push({\n finish_reason: firstChoice?.finish_reason ?? \"stop\",\n index: firstChoice?.index ?? 0,\n logprobs: firstChoice?.logprobs ?? null,\n message: {\n role: \"assistant\",\n content,\n function_call: functionCall ? functionCall : undefined,\n tool_calls: toolCalls ? toolCalls : undefined,\n refusal: firstChoice?.delta.refusal ?? 
null,\n },\n });\n response.id = lastResult.id;\n response.model = lastResult.model;\n response.created = lastResult.created;\n response.system_fingerprint = lastResult.system_fingerprint;\n response.usage = lastResult.usage;\n return response;\n};\n\nconst anthropicStreamMessage = (results: MessageStreamEvent[]): Message => {\n let response: Message = {\n id: \"\",\n model: \"\",\n content: [],\n role: \"assistant\",\n type: \"message\",\n stop_reason: \"stop_sequence\",\n stop_sequence: null,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n };\n const lastResult = results.at(-1);\n if (!lastResult) return response;\n let content = \"\";\n for (const result of results) {\n switch (result.type) {\n case \"message_start\": {\n response = {\n ...result.message,\n };\n break;\n }\n case \"content_block_delta\": {\n if (result.delta.type === \"text_delta\")\n content = `${content}${result.delta.text}`;\n }\n case \"message_delta\": {\n if (\"usage\" in result)\n response.usage.output_tokens = result.usage.output_tokens;\n if (\"stop_reason\" in result.delta)\n response.stop_reason = result.delta.stop_reason;\n }\n default: {\n break;\n }\n }\n }\n response.content.push({\n type: \"text\",\n text: content,\n });\n return response;\n};\n\nconst cleaned_result = (\n results: any[],\n function_name = \"openai.chat.completions.create\"\n) => {\n if (\"completion\" in results[0]) {\n return results.reduce(\n (prev, current) => ({\n ...current,\n completion: `${prev.completion}${current.completion}`,\n }),\n {}\n );\n }\n\n if (function_name === \"anthropic.messages.create\")\n return anthropicStreamMessage(results);\n\n if (\"text\" in results[0].choices[0]) {\n let response = \"\";\n for (const result of results) {\n response = `${response}${result.choices[0].text}`;\n }\n const final_result = structuredClone(results.at(-1));\n final_result.choices[0].text = response;\n return final_result;\n }\n\n if (\"delta\" in results[0].choices[0]) {\n const response = openaiStreamChat(results);\n response.choices[0] = {\n ...response.choices[0],\n ...response.choices[0].message,\n };\n return response;\n }\n\n return \"\";\n};\n\nasync function* proxyGenerator<Item>(\n apiKey: string,\n generator: AsyncIterable<Item>,\n body: TrackRequest\n) {\n const results = [];\n for await (const value of generator) {\n yield body.return_pl_id ? 
[value, null] : value;\n results.push(value);\n }\n const request_response = cleaned_result(results, body.function_name);\n const response = await promptLayerApiRequest(apiKey, {\n ...body,\n request_response,\n request_end_time: new Date().toISOString(),\n });\n if (response) {\n if (body.return_pl_id) {\n const request_id = (response as any)[1];\n const lastResult = results.at(-1);\n yield [lastResult, request_id];\n }\n }\n}\n\nconst warnOnBadResponse = (request_response: any, main_message: string) => {\n try {\n console.warn(`${main_message}: ${request_response.message}`);\n } catch (e) {\n console.warn(`${main_message}: ${request_response}`);\n }\n};\n\nconst trackRequest = async (body: TrackRequest) => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/track-request`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify(body),\n });\n if (response.status !== 200)\n warnOnBadResponse(\n response,\n \"WARNING: While logging your request, PromptLayer experienced the following error:\"\n );\n return response.json();\n } catch (e) {\n console.warn(\n `WARNING: While logging your request PromptLayer had the following error: ${e}`\n );\n }\n return {};\n};\n\nconst openaiStreamCompletion = (results: Completion[]) => {\n const response: Completion = {\n id: \"\",\n choices: [\n {\n finish_reason: \"stop\",\n index: 0,\n text: \"\",\n logprobs: null,\n },\n ],\n created: Date.now(),\n model: \"\",\n object: \"text_completion\",\n };\n const lastResult = results.at(-1);\n if (!lastResult) return response;\n let text = \"\";\n for (const result of results) {\n if (result.choices.length > 0 && result.choices[0].text) {\n text = `${text}${result.choices[0].text}`;\n }\n }\n response.choices[0].text = text;\n response.id = lastResult.id;\n response.created = lastResult.created;\n response.model = lastResult.model;\n response.system_fingerprint = lastResult.system_fingerprint;\n response.usage = lastResult.usage;\n return response;\n};\n\nconst anthropicStreamCompletion = (results: AnthropicCompletion[]) => {\n const response: AnthropicCompletion = {\n completion: \"\",\n id: \"\",\n model: \"\",\n stop_reason: \"\",\n type: \"completion\",\n };\n const lastResult = results.at(-1);\n if (!lastResult) return response;\n let completion = \"\";\n for (const result of results) {\n completion = `${completion}${result.completion}`;\n }\n response.completion = completion;\n response.id = lastResult.id;\n response.model = lastResult.model;\n response.stop_reason = lastResult.stop_reason;\n return response;\n};\n\nasync function* streamResponse<Item>(\n generator: AsyncIterable<Item>,\n afterStream: (body: object) => any,\n mapResults: any\n) {\n const data: {\n request_id: number | null;\n raw_response: any;\n prompt_blueprint: any;\n } = {\n request_id: null,\n raw_response: null,\n prompt_blueprint: null,\n };\n const results = [];\n for await (const result of generator) {\n results.push(result);\n data.raw_response = result;\n yield data;\n }\n const request_response = mapResults(results);\n const response = await afterStream({ request_response });\n data.request_id = response.request_id;\n data.prompt_blueprint = response.prompt_blueprint;\n yield data;\n}\n\nconst openaiChatRequest = async (client: TypeOpenAI, kwargs: any) => {\n return client.chat.completions.create(kwargs);\n};\n\nconst openaiCompletionsRequest = async (client: TypeOpenAI, kwargs: any) => {\n return client.completions.create(kwargs);\n};\n\nconst 
MAP_TYPE_TO_OPENAI_FUNCTION = {\n chat: openaiChatRequest,\n completion: openaiCompletionsRequest,\n};\n\nconst openaiRequest = async (\n promptBlueprint: GetPromptTemplateResponse,\n kwargs: any\n) => {\n const OpenAI = require(\"openai\").default;\n const client = new OpenAI({\n baseURL: kwargs.baseURL,\n });\n const requestToMake =\n MAP_TYPE_TO_OPENAI_FUNCTION[promptBlueprint.prompt_template.type];\n return requestToMake(client, kwargs);\n};\n\nconst azureOpenAIRequest = async (\n promptBlueprint: GetPromptTemplateResponse,\n kwargs: any\n) => {\n const OpenAI = require(\"openai\").AzureOpenAI;\n const client = new OpenAI({\n endpoint: kwargs.baseURL,\n });\n delete kwargs?.baseURL;\n const requestToMake =\n MAP_TYPE_TO_OPENAI_FUNCTION[promptBlueprint.prompt_template.type];\n return requestToMake(client, kwargs);\n};\n\nconst anthropicChatRequest = async (client: TypeAnthropic, kwargs: any) => {\n return client.messages.create(kwargs);\n};\n\nconst anthropicCompletionsRequest = async (\n client: TypeAnthropic,\n kwargs: any\n) => {\n return client.completions.create(kwargs);\n};\n\nconst MAP_TYPE_TO_ANTHROPIC_FUNCTION = {\n chat: anthropicChatRequest,\n completion: anthropicCompletionsRequest,\n};\n\nconst anthropicRequest = async (\n promptBlueprint: GetPromptTemplateResponse,\n kwargs: any\n) => {\n const Anthropic = require(\"@anthropic-ai/sdk\").default;\n const client = new Anthropic({\n baseURL: kwargs.baseURL,\n });\n const requestToMake =\n MAP_TYPE_TO_ANTHROPIC_FUNCTION[promptBlueprint.prompt_template.type];\n return requestToMake(client, kwargs);\n};\n\nconst utilLogRequest = async (\n apiKey: string,\n body: LogRequest\n): Promise<RequestLog | null> => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/log-request`, {\n method: \"POST\",\n headers: {\n \"X-API-KEY\": apiKey,\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify(body),\n });\n if (response.status !== 201) {\n warnOnBadResponse(\n response,\n \"WARNING: While logging your request PromptLayer had the following error\"\n );\n return null;\n }\n return response.json();\n } catch (e) {\n console.warn(\n `WARNING: While tracking your prompt PromptLayer had the following error: ${e}`\n );\n return null;\n }\n};\n\nexport {\n anthropicRequest,\n anthropicStreamCompletion,\n anthropicStreamMessage,\n azureOpenAIRequest,\n getAllPromptTemplates,\n getPromptTemplate,\n openaiRequest,\n openaiStreamChat,\n openaiStreamCompletion,\n promptlayerApiHandler,\n promptLayerApiRequest,\n promptLayerCreateGroup,\n promptLayerTrackGroup,\n promptLayerTrackMetadata,\n promptLayerTrackPrompt,\n promptLayerTrackScore,\n publishPromptTemplate,\n streamResponse,\n trackRequest,\n utilLogRequest,\n};\n","import { promptLayerCreateGroup } from \"@/utils\";\n\nexport class GroupManager {\n apiKey: string;\n\n constructor(apiKey: string) {\n this.apiKey = apiKey;\n }\n\n create = () => promptLayerCreateGroup(this.apiKey);\n}\n","import * as opentelemetry from '@opentelemetry/api';\nimport {SimpleSpanProcessor} from '@opentelemetry/sdk-trace-base';\nimport {NodeTracerProvider} from '@opentelemetry/sdk-trace-node';\nimport PromptLayerSpanExporter from '@/span-exporter';\n\nexport const getTracer = (name: string = 'promptlayer-tracer') => {\n return opentelemetry.trace.getTracer(name);\n}\n\nexport const setupTracing = (enableTracing: boolean, apiKey?: string) => {\n const provider = new NodeTracerProvider();\n const exporter = new PromptLayerSpanExporter(enableTracing, apiKey);\n const processor = new 
SimpleSpanProcessor(exporter);\n provider.addSpanProcessor(processor);\n provider.register();\n}\n","import {Attributes, SpanKind, SpanStatusCode} from '@opentelemetry/api';\nimport {ReadableSpan, SpanExporter} from '@opentelemetry/sdk-trace-base';\nimport {ExportResultCode} from '@opentelemetry/core';\nimport {URL_API_PROMPTLAYER} from '@/utils';\n\nclass PromptLayerSpanExporter implements SpanExporter {\n private apiKey: string | undefined;\n private enableTracing: boolean;\n private url: string;\n\n constructor(enableTracing: boolean, apiKey?: string) {\n this.apiKey = apiKey || process.env.PROMPTLAYER_API_KEY;\n this.enableTracing = enableTracing;\n this.url = `${URL_API_PROMPTLAYER}/spans-bulk`;\n }\n\n private attributesToObject(attributes: Attributes | undefined): Record<string, any> {\n if (!attributes) return {};\n return Object.fromEntries(Object.entries(attributes));\n }\n\n private spanKindToString(kind: SpanKind): string {\n const kindMap: Record<SpanKind, string> = {\n [SpanKind.INTERNAL]: 'SpanKind.INTERNAL',\n [SpanKind.SERVER]: 'SpanKind.SERVER',\n [SpanKind.CLIENT]: 'SpanKind.CLIENT',\n [SpanKind.PRODUCER]: 'SpanKind.PRODUCER',\n [SpanKind.CONSUMER]: 'SpanKind.CONSUMER',\n };\n return kindMap[kind] || 'SpanKind.INTERNAL';\n }\n\n private statusCodeToString(code: SpanStatusCode): string {\n const statusMap: Record<SpanStatusCode, string> = {\n [SpanStatusCode.ERROR]: 'StatusCode.ERROR',\n [SpanStatusCode.OK]: 'StatusCode.OK',\n [SpanStatusCode.UNSET]: 'StatusCode.UNSET',\n };\n return statusMap[code] || 'StatusCode.UNSET';\n }\n\n private toNanoseconds(time: [number, number]): string {\n return (BigInt(time[0]) * BigInt(1e9) + BigInt(time[1])).toString();\n };\n\n export(spans: ReadableSpan[]): Promise<ExportResultCode> {\n if (!this.enableTracing) {\n return Promise.resolve(ExportResultCode.SUCCESS);\n }\n\n const requestData = spans.map(span => ({\n name: span.name,\n context: {\n trace_id: span.spanContext().traceId,\n span_id: span.spanContext().spanId,\n trace_state: span.spanContext().traceState?.serialize() || '',\n },\n kind: this.spanKindToString(span.kind),\n parent_id: span.parentSpanId || null,\n start_time: this.toNanoseconds(span.startTime),\n end_time: this.toNanoseconds(span.endTime),\n status: {\n status_code: this.statusCodeToString(span.status.code),\n description: span.status.message,\n },\n attributes: this.attributesToObject(span.attributes),\n events: span.events.map(event => ({\n name: event.name,\n timestamp: this.toNanoseconds(event.time),\n attributes: this.attributesToObject(event.attributes),\n })),\n links: span.links.map(link => ({\n context: link.context,\n attributes: this.attributesToObject(link.attributes),\n })),\n resource: {\n attributes: {\n ...span.resource.attributes,\n \"service.name\": \"prompt-layer-js\",\n },\n schema_url: '',\n },\n }));\n\n return fetch(this.url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'X-API-KEY': this.apiKey || '',\n },\n body: JSON.stringify({\n spans: requestData,\n }),\n })\n .then(response => {\n if (!response.ok) {\n console.error(`Error exporting spans\\nHTTP error! 
status: ${response.status}`);\n return ExportResultCode.FAILED;\n }\n return ExportResultCode.SUCCESS;\n })\n .catch((error) => {\n console.error('Error exporting spans:', error);\n return ExportResultCode.FAILED;\n });\n }\n\n shutdown(): Promise<void> {\n return Promise.resolve();\n }\n}\n\nexport default PromptLayerSpanExporter;\n","import {getTracer} from \"@/tracing\";\nimport {promptlayerApiHandler} from \"@/utils\";\n\nconst tracer = getTracer();\n\nexport const promptLayerBase = (\n apiKey: string,\n llm: object,\n function_name = \"\",\n provider = \"openai\"\n) => {\n const handler: ProxyHandler<any> = {\n construct: (target, args) => {\n const newTarget = Reflect.construct(target, args);\n Object.defineProperties(newTarget, {\n function_name: {\n value: function_name,\n writable: true,\n },\n provider: {\n value: provider,\n },\n });\n return new Proxy(newTarget, handler);\n },\n get: (target, prop, receiver) => {\n const value = target[prop];\n const function_name = `${Reflect.get(\n target,\n \"function_name\"\n )}.${prop.toString()}`;\n\n if (typeof value === \"object\") {\n Object.defineProperties(value, {\n function_name: {\n value: function_name,\n writable: true,\n },\n provider: {\n value: provider,\n },\n });\n return new Proxy(value, handler);\n }\n\n if (typeof value === \"function\") {\n return (...args: any[]) => {\n const request_start_time = new Date().toISOString();\n const provider_type = Reflect.get(target, \"provider\");\n const return_pl_id = args[0]?.return_pl_id;\n const pl_tags = args[0]?.pl_tags;\n delete args[0]?.return_pl_id;\n delete args[0]?.pl_tags;\n\n return tracer.startActiveSpan(`${provider_type}.${function_name}`, async (span: any) => {\n try {\n span.setAttribute('function_input', JSON.stringify(args));\n const response = Reflect.apply(value, target, args);\n const spanId = span.spanContext().spanId;\n\n if (response instanceof Promise) {\n return new Promise((resolve, reject) => {\n response\n .then(async (request_response) => {\n const response = await promptlayerApiHandler(apiKey, {\n api_key: apiKey,\n provider_type,\n function_name,\n request_start_time,\n request_end_time: new Date().toISOString(),\n request_response,\n kwargs: args[0],\n return_pl_id,\n tags: pl_tags,\n span_id: spanId,\n });\n\n span.setAttribute('function_output', JSON.stringify(response));\n span.setAttribute('response_status', 'success');\n span.end();\n resolve(response);\n })\n .catch((error) => {\n span.recordException(error);\n span.setAttribute('response_status', 'error');\n span.end();\n reject(error);\n });\n });\n }\n\n span.setAttribute('function_output', JSON.stringify(response));\n span.setAttribute('response_status', 'success');\n span.end();\n return response;\n } catch (error) {\n span.recordException(error);\n span.setAttribute('response_status', 'error');\n span.end();\n throw error;\n }\n });\n };\n }\n\n return Reflect.get(target, prop, receiver);\n },\n };\n\n return new Proxy(llm, handler);\n};\n","import * as opentelemetry from '@opentelemetry/api';\nimport { getTracer } from '@/tracing';\n\nexport const wrapWithSpan = (functionName: string, func: Function, attributes?: Record<string, any>) => {\n return function (...args: any[]) {\n const tracer = getTracer();\n\n const wrapperFunction = (span: opentelemetry.Span) => {\n try {\n if (attributes) {\n Object.entries(attributes).forEach(([key, value]) => {\n span.setAttribute(key, value);\n });\n }\n\n span.setAttribute('function_input', JSON.stringify(args));\n const result = func(...args);\n\n if 
(result instanceof Promise) {\n return result.then((resolvedResult) => {\n span.setAttribute('function_output', JSON.stringify(resolvedResult));\n span.setStatus({ code: opentelemetry.SpanStatusCode.OK });\n return resolvedResult;\n }).catch((error) => {\n handleError(span, error, args);\n throw error;\n }).finally(() => span.end());\n } else {\n span.setAttribute('function_output', JSON.stringify(result));\n span.setStatus({ code: opentelemetry.SpanStatusCode.OK });\n span.end();\n return result;\n }\n } catch (error) {\n handleError(span, error, args);\n throw error;\n }\n };\n\n return tracer.startActiveSpan(functionName, wrapperFunction);\n };\n};\n\nconst handleError = (span: opentelemetry.Span, error: any, args: any[]) => {\n span.setAttribute('function_input', JSON.stringify(args));\n span.setStatus({\n code: opentelemetry.SpanStatusCode.ERROR,\n message: error instanceof Error ? error.message : 'Unknown error',\n });\n span.end();\n}\n","import {\n GetPromptTemplateParams,\n Pagination,\n PublishPromptTemplate,\n} from \"@/types\";\nimport {\n getAllPromptTemplates,\n getPromptTemplate,\n publishPromptTemplate,\n} from \"@/utils\";\n\nexport class TemplateManager {\n apiKey: string;\n\n constructor(apiKey: string) {\n this.apiKey = apiKey;\n }\n\n get = (promptName: string, params?: Partial<GetPromptTemplateParams>) =>\n getPromptTemplate(this.apiKey, promptName, params);\n\n publish = (body: PublishPromptTemplate) =>\n publishPromptTemplate(this.apiKey, body);\n\n all = (params?: Pagination) => getAllPromptTemplates(this.apiKey, params);\n}\n","import { TrackGroup, TrackMetadata, TrackPrompt, TrackScore } from \"@/types\";\nimport {\n promptLayerTrackGroup,\n promptLayerTrackMetadata,\n promptLayerTrackPrompt,\n promptLayerTrackScore,\n} from \"@/utils\";\n\nconst metadata = (apiKey: string, body: TrackMetadata): Promise<boolean> => {\n if (!(body.metadata instanceof Object)) {\n throw new Error(\"Please provide a dictionary of metadata.\");\n }\n for (const [key, value] of Object.entries(body.metadata)) {\n if (typeof key !== \"string\" || typeof value !== \"string\") {\n throw new Error(\n \"Please provide a dictionary of metadata with key value pair of strings.\"\n );\n }\n }\n return promptLayerTrackMetadata(apiKey, body);\n};\n\nconst score = (apiKey: string, body: TrackScore): Promise<boolean> => {\n if (typeof body.score !== \"number\") {\n throw new Error(\"Score must be a number\");\n }\n if (body.score < 0 || body.score > 100) {\n throw new Error(\"Score must be a number between 0 and 100.\");\n }\n return promptLayerTrackScore(apiKey, body);\n};\n\nconst prompt = (apiKey: string, body: TrackPrompt): Promise<boolean> => {\n if (!(body.prompt_input_variables instanceof Object)) {\n throw new Error(\"Prompt template input variable dictionary not provided.\");\n }\n return promptLayerTrackPrompt(apiKey, body);\n};\n\nconst group = (apiKey: string, body: TrackGroup) =>\n promptLayerTrackGroup(apiKey, body);\n\nexport class TrackManager {\n apiKey: string;\n\n constructor(apiKey: string) {\n this.apiKey = apiKey;\n }\n\n group = (body: TrackGroup) => group(this.apiKey, body);\n\n metadata = (body: TrackMetadata) => metadata(this.apiKey, body);\n\n prompt = (body: TrackPrompt) => prompt(this.apiKey, body);\n\n score = (body: TrackScore) => score(this.apiKey, body);\n}\n","import { GroupManager } from \"@/groups\";\nimport { promptLayerBase } from \"@/promptlayer\";\nimport { wrapWithSpan } from \"@/span-wrapper\";\nimport { TemplateManager } from \"@/templates\";\nimport { 
getTracer, setupTracing } from \"@/tracing\";\nimport { TrackManager } from \"@/track\";\nimport { GetPromptTemplateParams, LogRequest, RunRequest, WorkflowRequest, WorkflowResponse } from \"@/types\";\nimport {\n anthropicRequest,\n anthropicStreamCompletion,\n anthropicStreamMessage,\n azureOpenAIRequest,\n openaiRequest,\n openaiStreamChat,\n openaiStreamCompletion,\n runWorkflowRequest,\n streamResponse,\n trackRequest,\n utilLogRequest,\n} from \"@/utils\";\nimport * as opentelemetry from \"@opentelemetry/api\";\n\nconst MAP_PROVIDER_TO_FUNCTION_NAME = {\n openai: {\n chat: {\n function_name: \"openai.chat.completions.create\",\n stream_function: openaiStreamChat,\n },\n completion: {\n function_name: \"openai.completions.create\",\n stream_function: openaiStreamCompletion,\n },\n },\n anthropic: {\n chat: {\n function_name: \"anthropic.messages.create\",\n stream_function: anthropicStreamMessage,\n },\n completion: {\n function_name: \"anthropic.completions.create\",\n stream_function: anthropicStreamCompletion,\n },\n },\n \"openai.azure\": {\n chat: {\n function_name: \"openai.AzureOpenAI.chat.completions.create\",\n stream_function: openaiStreamChat,\n },\n completion: {\n function_name: \"openai.AzureOpenAI.completions.create\",\n stream_function: openaiStreamCompletion,\n },\n },\n};\n\nconst MAP_PROVIDER_TO_FUNCTION: Record<string, any> = {\n openai: openaiRequest,\n anthropic: anthropicRequest,\n \"openai.azure\": azureOpenAIRequest,\n};\n\nexport interface ClientOptions {\n apiKey?: string;\n enableTracing?: boolean;\n workspaceId?: number;\n}\n\nconst isWorkflowResultsDict = (obj: any): boolean => {\n if (!obj || typeof obj !== \"object\" || Array.isArray(obj)) {\n return false;\n }\n\n const REQUIRED_KEYS = [\n \"status\",\n \"value\",\n \"error_message\",\n \"raw_error_message\",\n \"is_output_node\",\n ];\n const values = Object.values(obj);\n\n return values.every((val) => {\n if (typeof val !== \"object\" || val === null) return false;\n return REQUIRED_KEYS.every((key) => key in val);\n });\n}\n\nexport class PromptLayer {\n apiKey: string;\n templates: TemplateManager;\n group: GroupManager;\n track: TrackManager;\n enableTracing: boolean;\n wrapWithSpan: typeof wrapWithSpan;\n\n constructor({\n apiKey = process.env.PROMPTLAYER_API_KEY,\n enableTracing = false,\n }: ClientOptions = {}) {\n if (apiKey === undefined) {\n throw new Error(\n \"PromptLayer API key not provided. 
Please set the PROMPTLAYER_API_KEY environment variable or pass the api_key parameter.\"\n );\n }\n\n this.apiKey = apiKey;\n this.enableTracing = enableTracing;\n this.templates = new TemplateManager(apiKey);\n this.group = new GroupManager(apiKey);\n this.track = new TrackManager(apiKey);\n this.wrapWithSpan = wrapWithSpan;\n\n if (enableTracing) {\n setupTracing(enableTracing, apiKey);\n }\n }\n\n get Anthropic() {\n try {\n const module = require(\"@anthropic-ai/sdk\").default;\n return promptLayerBase(this.apiKey, module, \"anthropic\", \"anthropic\");\n } catch (e) {\n console.error(\n \"To use the Anthropic module, you must install the @anthropic-ai/sdk package.\"\n );\n }\n }\n\n get OpenAI() {\n try {\n const module = require(\"openai\").default;\n return promptLayerBase(this.apiKey, module, \"openai\", \"openai\");\n } catch (e) {\n console.error(\n \"To use the OpenAI module, you must install the @openai/api package.\"\n );\n }\n }\n\n async run({\n promptName,\n promptVersion,\n promptReleaseLabel,\n inputVariables,\n tags,\n metadata,\n groupId,\n modelParameterOverrides,\n stream = false,\n }: RunRequest) {\n const tracer = getTracer();\n\n return tracer.startActiveSpan(\"PromptLayer Run\", async (span) => {\n try {\n const functionInput = {\n promptName,\n promptVersion,\n promptReleaseLabel,\n inputVariables,\n tags,\n metadata,\n groupId,\n modelParameterOverrides,\n stream,\n };\n span.setAttribute(\"function_input\", JSON.stringify(functionInput));\n\n const prompt_input_variables = inputVariables;\n const templateGetParams: GetPromptTemplateParams = {\n label: promptReleaseLabel,\n version: promptVersion,\n metadata_filters: metadata,\n };\n if (inputVariables) templateGetParams.input_variables = inputVariables;\n\n const promptBlueprint = await this.templates.get(\n promptName,\n templateGetParams\n );\n\n if (!promptBlueprint) throw new Error(\"Prompt not found\");\n\n const promptTemplate = promptBlueprint.prompt_template;\n if (!promptBlueprint.llm_kwargs) {\n throw new Error(\n `Prompt '${promptName}' does not have any LLM kwargs associated with it.`\n );\n }\n\n const promptBlueprintMetadata = promptBlueprint.metadata;\n if (!promptBlueprintMetadata) {\n throw new Error(\n `Prompt '${promptName}' does not have any metadata associated with it.`\n );\n }\n\n const promptBlueprintModel = promptBlueprintMetadata.model;\n if (!promptBlueprintModel) {\n throw new Error(\n `Prompt '${promptName}' does not have a model parameters associated with it.`\n );\n }\n\n const provider_type = promptBlueprintModel.provider;\n\n const request_start_time = new Date().toISOString();\n const kwargs = {\n ...promptBlueprint.llm_kwargs,\n ...(modelParameterOverrides || {}),\n };\n const config =\n MAP_PROVIDER_TO_FUNCTION_NAME[\n provider_type as keyof typeof MAP_PROVIDER_TO_FUNCTION_NAME\n ][promptTemplate.type];\n const function_name = config.function_name;\n\n const stream_function = config.stream_function;\n const request_function = MAP_PROVIDER_TO_FUNCTION[provider_type];\n const provider_base_url = promptBlueprint.provider_base_url;\n if (provider_base_url) {\n kwargs[\"baseURL\"] = provider_base_url.url;\n }\n kwargs[\"stream\"] = stream;\n if (stream && [\"openai\", \"openai.azure\"].includes(provider_type)) {\n kwargs[\"stream_options\"] = { include_usage: true };\n }\n\n const response = await request_function(promptBlueprint, kwargs);\n\n const _trackRequest = (body: object) => {\n const request_end_time = new Date().toISOString();\n return trackRequest({\n function_name,\n 
provider_type,\n args: [],\n kwargs,\n tags,\n request_start_time,\n request_end_time,\n api_key: this.apiKey,\n metadata,\n prompt_id: promptBlueprint.id,\n prompt_version: promptBlueprint.version,\n prompt_input_variables,\n group_id: groupId,\n return_prompt_blueprint: true,\n span_id: span.spanContext().spanId,\n ...body,\n });\n };\n\n if (stream)\n return streamResponse(response, _trackRequest, stream_function);\n const requestLog = await _trackRequest({ request_response: response });\n\n const functionOutput = {\n request_id: requestLog.request_id,\n raw_response: response,\n prompt_blueprint: requestLog.prompt_blueprint,\n };\n span.setAttribute(\"function_output\", JSON.stringify(functionOutput));\n\n return functionOutput;\n } catch (error) {\n span.setStatus({\n code: opentelemetry.SpanStatusCode.ERROR,\n message: error instanceof Error ? error.message : \"Unknown error\",\n });\n throw error;\n } finally {\n span.end();\n }\n });\n }\n\n async runWorkflow({\n workflowName,\n inputVariables = {},\n metadata = {},\n workflowLabelName = null,\n workflowVersion = null, // This is the version number, not the version ID\n returnAllOutputs = false,\n }: WorkflowRequest): Promise<WorkflowResponse> {\n try {\n const result = await runWorkflowRequest({\n workflow_name: workflowName,\n input_variables: inputVariables,\n metadata,\n workflow_label_name: workflowLabelName,\n workflow_version_number: workflowVersion,\n return_all_outputs: returnAllOutputs,\n api_key: this.apiKey,\n });\n\n if (!returnAllOutputs) {\n if (isWorkflowResultsDict(result)) {\n const nodeValues = Object.values(result);\n\n const outputNodes = nodeValues.filter(\n (node: any) => node.is_output_node === true\n );\n\n if (outputNodes.length === 0) {\n throw new Error(JSON.stringify(result, null, 2));\n }\n\n const anyOutputSuccess = outputNodes.some(\n (node: any) => node.status === \"SUCCESS\"\n );\n if (!anyOutputSuccess) {\n throw new Error(JSON.stringify(result, null, 2));\n }\n }\n }\n\n return result;\n } catch (error) {\n if (error instanceof Error) {\n console.error(\"Error running workflow:\", error.message);\n throw new Error(`Error running workflow: ${error.message}`);\n } else {\n console.error(\"Unknown error running workflow:\", error);\n throw new Error(\"Unknown error running workflow\");\n }\n }\n }\n\n async logRequest(body: LogRequest) {\n return utilLogRequest(this.apiKey, body);\n 
}\n}\n"],"mappings":"yoDAuBA,OAAOA,OAAU,OAQV,IAAMC,EACX,QAAQ,IAAI,qBAAuB,8BAE/BC,GAAwB,CAC5BC,EACAC,IAGGC,EAAA,wBAEH,OADoBD,EAAK,iBAAiB,OAAO,aAAa,IAAM,OAE3DE,GAAeH,EAAQC,EAAK,iBAAkBA,CAAI,EAEpD,MAAMG,GAAsBJ,EAAQC,CAAI,CACjD,GAEMG,GAAwB,CAAOJ,EAAgBC,IAAuBC,EAAA,wBAC1E,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,iBAAkB,CACnE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUG,CAAI,CAC3B,CAAC,EACKK,EAAO,MAAMD,EAAS,KAAK,EAOjC,GANIA,EAAS,SAAW,KACtBE,EACED,EACA,mFACF,EAEEA,GAAQL,EAAK,aACf,MAAO,CAACA,EAAK,iBAAkBK,EAAK,UAAU,CAElD,OAASE,EAAG,CACV,QAAQ,KACN,4EAA4EA,CAAC,EAC/E,CACF,CACA,OAAOP,EAAK,gBACd,GAEMQ,GAA2B,CAC/BT,EACAC,IACqBC,EAAA,wBACrB,GAAI,CACF,IAAMG,EAAW,MAAM,MACrB,GAAGP,CAAmB,0BACtB,CACE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUY,EAAAC,EAAA,GAChBV,GADgB,CAEnB,QAASD,CACX,EAAC,CACH,CACF,EACMM,EAAO,MAAMD,EAAS,KAAK,EACjC,GAAIA,EAAS,SAAW,IACtB,OAAAE,EACED,EACA,8FACF,EACO,EAEX,OAASE,EAAG,CACV,eAAQ,KACN,iGAAiGA,CAAC,EACpG,EACO,EACT,CACA,MAAO,EACT,GAEMI,GAAwB,CAC5BZ,EACAC,IACqBC,EAAA,wBACrB,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,uBAAwB,CACzE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUY,EAAAC,EAAA,GAChBV,GADgB,CAEnB,QAASD,CACX,EAAC,CACH,CAAC,EACKM,EAAO,MAAMD,EAAS,KAAK,EACjC,GAAIA,EAAS,SAAW,IACtB,OAAAE,EACED,EACA,kFACF,EACO,EAEX,OAASE,EAAG,CACV,eAAQ,KACN,qFAAqFA,CAAC,EACxF,EACO,EACT,CACA,MAAO,EACT,GAEMK,GAAyB,CAC7Bb,EACAC,IACqBC,EAAA,wBACrB,GAAI,CACF,IAAMG,EAAW,MAAM,MACrB,GAAGP,CAAmB,wBACtB,CACE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUY,EAAAC,EAAA,GAChBV,GADgB,CAEnB,QAASD,CACX,EAAC,CACH,CACF,EACMM,EAAO,MAAMD,EAAS,KAAK,EACjC,GAAIA,EAAS,SAAW,IACtB,OAAAE,EACED,EACA,6GACF,EACO,EAEX,OAASE,EAAG,CACV,eAAQ,KACN,gHAAgHA,CAAC,EACnH,EACO,EACT,CACA,MAAO,EACT,GAEMM,GAAwB,CAC5Bd,EACAC,IACqBC,EAAA,wBACrB,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,eAAgB,CACjE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUY,EAAAC,EAAA,GAChBV,GADgB,CAEnB,QAASD,CACX,EAAC,CACH,CAAC,EACKM,EAAO,MAAMD,EAAS,KAAK,EACjC,GAAIA,EAAS,SAAW,IACtB,OAAAE,EACED,EACA,mGACF,EACO,EAEX,OAASE,EAAG,CACV,eAAQ,KACN,sGAAsGA,CAAC,EACzG,EACO,EACT,CACA,MAAO,EACT,GAEMO,GACJf,GAC8BE,EAAA,wBAC9B,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,gBAAiB,CAClE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAU,CACnB,QAASE,CACX,CAAC,CACH,CAAC,EACKM,EAAO,MAAMD,EAAS,KAAK,EACjC,OAAIA,EAAS,SAAW,KACtBE,EACED,EACA,qEACF,EACO,IAEFA,EAAK,EACd,OAAS,EAAG,CACV,eAAQ,KACN,wEAAwE,CAAC,EAC3E,EACO,EACT,CACF,GAEMU,GAAoB,CACxBhB,EACAiB,EACAC,IACGhB,EAAA,wBACH,GAAI,CACF,IAAMiB,EAAM,IAAI,IACd,GAAGrB,CAAmB,qBAAqBmB,CAAU,EACvD,EACMZ,EAAW,MAAM,MAAMc,EAAK,CAChC,OAAQ,OACR,QAAS,CACP,eAAgB,mBAChB,YAAanB,CACf,EACA,KAAM,KAAK,UAAUkB,CAAM,CAC7B,CAAC,EACKZ,EAAO,MAAMD,EAAS,KAAK,EACjC,OAAIA,EAAS,SAAW,KACtBE,EACED,EACA,+EACF,EACO,OAELA,EAAK,SACP,QAAQ,KACN,4EAA4EA,EAAK,OAAO,EAC1F,EAEKA,EACT,OAASE,EAAG,CACV,eAAQ,KACN,kFAAkFA,CAAC,EACrF,EACO,IACT,CACF,GAEMY,GAAwB,CAC5BpB,EACAC,IACGC,EAAA,wBACH,GAAI,CACF,IAAMG,EAAW,MAAM,MACrB,GAAGP,CAAmB,yBACtB,CACE,OAAQ,OACR,QAAS,CACP,eAAgB,mBAChB,YAAaE,CACf,EACA,KAAM,KAAK,UAAU,CACnB,gBAAiBW,EAAA,GAAKV,GACtB,eAAgBU,EAAA,GAAKV,GACrB,eAAgBA,EAAK,eAAiBA,EAAK,eAAiB,MAC9D,CAAC,CACH,CACF,EACMK,EAAO,MAAMD,EAAS,KAAK,EACjC,OAAIA,EAAS,SAAW,KACtBE,EACED,EACA,iFACF,EAEKA,CACT,OAASE,EAAG,CACV,QAAQ,KACN,oFAAoFA,CAAC,EACvF,CACF,CACF,GAEMa,GAAwB,CAC5BrB,EACAkB,IACGhB,EAAA,wBA3TL,IAAAoB,EA4TE,GAAI,CACF,IAAMH,EAAM,IAAI,IAAI,GAAGrB,CAAmB,mBAAmB,EAC7D,OAAO,QAAQoB,GAAU,CAAC,CAAC,EAAE,QAAQ,CAAC,CAACK,EAAKC,CAAK,IAC/CL,EAAI,aAAa,OAAOI,EAAKC,EAAM,SAAS,CAAC,CAC/C,EACA,IAAMnB,EAAW,MAAM,MAAMc,EAAK,CAChC,QAAS,CACP,eAAgB,mBAChB,YAAanB,CACf,CACF,CAAC,EACKM,EAAO,
MAAMD,EAAS,KAAK,EACjC,OAAIA,EAAS,SAAW,KACtBE,EACED,EACA,kFACF,EACO,OAEDgB,EAAAhB,EAAK,QAAL,KAAAgB,EAAc,CAAC,CACzB,OAASd,EAAG,CACV,eAAQ,KACN,qFAAqFA,CAAC,EACxF,EACO,IACT,CACF,GAEaiB,GAA4BC,GASkBxB,EAAA,QATlBwB,GASkB,UATlB,CACvC,cAAAC,EACA,gBAAAC,EACA,SAAAC,EAAW,CAAC,EACZ,oBAAAC,EAAsB,KACtB,wBAAAC,EAA0B,KAC1B,mBAAAC,EAAqB,GACrB,QAAAC,EACA,QAAAC,EAAU,IACZ,EAA2D,CACzD,IAAMC,EAAU,CACd,gBAAAP,EACA,SAAAC,EACA,oBAAAC,EACA,wBAAAC,EACA,mBAAAC,CACF,EAEMI,EAAU,CACd,YAAaH,EACb,eAAgB,kBAClB,EAEA,GAAI,CAEF,IAAM5B,EAAW,MAAM,MACrB,GAAGP,CAAmB,cAAc,mBAClC6B,CACF,CAAC,OACD,CACE,OAAQ,OACR,QAASS,EACT,KAAM,KAAK,UAAUD,CAAO,CAC9B,CACF,EAEA,GAAI9B,EAAS,SAAW,IAEtB,MAAO,CACL,QAAS,GACT,QAAS,4BAHO,MAAMA,EAAS,KAAK,EAAE,MAAM,KAAO,CAAC,EAAE,GAI1C,OAASA,EAAS,UAC9B,EACF,EAGF,IAAMgC,EAAS,MAAMhC,EAAS,KAAK,EAC/BgC,EAAO,SACT,QAAQ,KAAK,YAAYA,EAAO,OAAO,EAAE,EAE3C,IAAMC,EAAeD,EAAO,8BAC5B,GAAI,CAACC,EACH,eAAQ,IAAI,4CAA4C,EACjD,CAAE,QAAS,GAAO,QAAS,wBAAyB,EAG7D,IAAMC,EAAe,oBAAoBD,CAAY,GAa/CE,GAFoB,MARN,MAAM,MACxB,GAAG1C,CAAmB,wCAAwCyC,CAAY,GAC1E,CACE,OAAQ,OACR,QAASH,CACX,CACF,GAE4C,KAAK,GAEZ,cAAc,MAG7CK,EAAO,IAAIC,GAAK,SAAS,CAAE,MAAOF,CAAW,CAAC,EAEpD,GAAI,CAEF,IAAMG,EAAe,MAAMC,GACzBH,EACAF,EACAL,CACF,EACA,OAAAO,EAAK,MAAM,EACJE,CACT,QAAE,CAEAF,EAAK,MAAM,CACb,CACF,OAASI,EAAO,CACd,cAAQ,MACN,2BACEA,aAAiB,MAAQA,EAAM,QAAUA,CAC3C,EACF,EACMA,CACR,CACF,GAEA,SAAeD,GACbH,EACAF,EACAL,EACc,QAAAhC,EAAA,sBACd,IAAM4C,EAAUL,EAAK,SAAS,IAAIF,CAAY,EAE9C,OAAO,IAAI,QAAQ,CAAOQ,EAASC,IAAW9C,EAAA,sBAC5C,IAAI+C,EAAe,KAEbC,EAAmBC,GAA0B,CAC7CA,EAAQ,OAAS,0BAEnBF,EADqB,KAAK,MAAME,EAAQ,IAAc,EAC/B,aACvB,aAAaC,CAAK,EAClBN,EAAQ,YAAY,wBAAyBI,CAAe,EAC5DH,EAAQE,CAAO,EAEnB,EAGMG,EAAQ,WAAW,IAAM,CAC7BN,EAAQ,YAAY,wBAAyBI,CAAe,EAC5DF,EAAO,IAAI,MAAM,wDAAwD,CAAC,CAC5E,EAAGd,CAAO,EAEV,GAAI,CAEF,MAAMY,EAAQ,UAAU,wBAAyBI,CAAe,CAClE,OAASG,EAAK,CACZ,aAAaD,CAAK,EAClBJ,EAAOK,CAAG,CACZ,CACF,EAAC,CACH,GAEA,IAAMC,EAAoBL,GAAmD,CA7d7E,IAAA3B,EAAAiC,EAAAC,EAAAC,EAAAC,EAAAC,EAAAC,EAAAC,EAAAC,EA8dE,IAAIC,EAAuD,KACvDC,EAEE3D,EAA2B,CAC/B,GAAI,GACJ,QAAS,CAAC,EACV,QAAS,KAAK,IAAI,EAClB,MAAO,GACP,OAAQ,iBACV,EACM4D,EAAahB,EAAQ,GAAG,EAAE,EAChC,GAAI,CAACgB,EAAY,OAAO5D,EACxB,IAAI6D,EACJ,QAAW7B,KAAUY,EAAS,CAC5B,GAAIZ,EAAO,QAAQ,SAAW,EAAG,SACjC,IAAM8B,EAAQ9B,EAAO,QAAQ,CAAC,EAAE,MAE5B8B,EAAM,UACRJ,EAAU,GAAGA,GAAW,EAAE,GAAGI,EAAM,SAAW,EAAE,IAE9CA,EAAM,gBACRH,EAAe,CACb,KAAM,GAAGA,EAAeA,EAAa,KAAO,EAAE,GAC5CG,EAAM,cAAc,MAAQ,EAC9B,GACA,UAAW,GAAGH,EAAeA,EAAa,UAAY,EAAE,GACtDG,EAAM,cAAc,WAAa,EACnC,EACF,GAEF,IAAMC,GAAW9C,EAAA6C,EAAM,aAAN,YAAA7C,EAAmB,GACpC,GAAI8C,EAAU,CACZF,EAAYA,GAAa,CAAC,EAC1B,IAAMG,EAAeH,EAAU,GAAG,EAAE,EACpC,GAAI,CAACG,GAAgBD,EAAS,GAAI,CAChCF,EAAU,KAAK,CACb,GAAIE,EAAS,IAAM,GACnB,KAAMA,EAAS,MAAQ,WACvB,SAAU,CACR,OAAMb,EAAAa,EAAS,WAAT,YAAAb,EAAmB,OAAQ,GACjC,YAAWC,EAAAY,EAAS,WAAT,YAAAZ,EAAmB,YAAa,EAC7C,CACF,CAAC,EACD,QACF,CACAa,EAAa,SAAS,KAAO,GAAGA,EAAa,SAAS,IAAI,KACxDZ,EAAAW,EAAS,WAAT,YAAAX,EAAmB,OAAQ,EAC7B,GACAY,EAAa,SAAS,UAAY,GAAGA,EAAa,SAAS,SAAS,KAClEX,EAAAU,EAAS,WAAT,YAAAV,EAAmB,YAAa,EAClC,EACF,CACF,CACA,IAAMY,EAAcrB,EAAQ,CAAC,EAAE,QAAQ,GAAG,CAAC,EAC3C,OAAA5C,EAAS,QAAQ,KAAK,CACpB,eAAesD,EAAAW,GAAA,YAAAA,EAAa,gBAAb,KAAAX,EAA8B,OAC7C,OAAOC,EAAAU,GAAA,YAAAA,EAAa,QAAb,KAAAV,EAAsB,EAC7B,UAAUC,EAAAS,GAAA,YAAAA,EAAa,WAAb,KAAAT,EAAyB,KACnC,QAAS,CACP,KAAM,YACN,QAAAE,EACA,cAAeC,GAA8B,OAC7C,WAAYE,GAAwB,OACpC,SAASJ,EAAAQ,GAAA,YAAAA,EAAa,MAAM,UAAnB,KAAAR,EAA8B,IACzC,CACF,CAAC,EACDzD,EAAS,GAAK4D,EAAW,GACzB5D,EAAS,MAAQ4D,EAAW,MAC5B5D,EAAS,QAAU4D,EAAW,QAC9B5D,EAAS,mBAAqB4D,EAAW,mBACzC5D,EAAS,MAAQ4D,EAAW,MACrB5D,CACT,EAEMkE,EAA0BtB,GAA2C,CACzE,IAAI5C,EAAoB,CACtB,GAAI,GACJ,MAAO,GACP,QAAS,CAAC,EACV,KAAM,YACN,KAAM
,UACN,YAAa,gBACb,cAAe,KACf,MAAO,CACL,aAAc,EACd,cAAe,CACjB,CACF,EAEA,GAAI,CADe4C,EAAQ,GAAG,EAAE,EACf,OAAO5C,EACxB,IAAI0D,EAAU,GACd,QAAW1B,KAAUY,EACnB,OAAQZ,EAAO,KAAM,CACnB,IAAK,gBAAiB,CACpBhC,EAAWM,EAAA,GACN0B,EAAO,SAEZ,KACF,CACA,IAAK,sBACCA,EAAO,MAAM,OAAS,eACxB0B,EAAU,GAAGA,CAAO,GAAG1B,EAAO,MAAM,IAAI,IAE5C,IAAK,gBACC,UAAWA,IACbhC,EAAS,MAAM,cAAgBgC,EAAO,MAAM,eAC1C,gBAAiBA,EAAO,QAC1BhC,EAAS,YAAcgC,EAAO,MAAM,aAExC,QACE,KAEJ,CAEF,OAAAhC,EAAS,QAAQ,KAAK,CACpB,KAAM,OACN,KAAM0D,CACR,CAAC,EACM1D,CACT,EAEMmE,GAAiB,CACrBvB,EACAwB,EAAgB,mCACb,CACH,GAAI,eAAgBxB,EAAQ,CAAC,EAC3B,OAAOA,EAAQ,OACb,CAACyB,EAAMC,IAAajE,EAAAC,EAAA,GACfgE,GADe,CAElB,WAAY,GAAGD,EAAK,UAAU,GAAGC,EAAQ,UAAU,EACrD,GACA,CAAC,CACH,EAGF,GAAIF,IAAkB,4BACpB,OAAOF,EAAuBtB,CAAO,EAEvC,GAAI,SAAUA,EAAQ,CAAC,EAAE,QAAQ,CAAC,EAAG,CACnC,IAAI5C,EAAW,GACf,QAAWgC,KAAUY,EACnB5C,EAAW,GAAGA,CAAQ,GAAGgC,EAAO,QAAQ,CAAC,EAAE,IAAI,GAEjD,IAAMuC,EAAe,gBAAgB3B,EAAQ,GAAG,EAAE,CAAC,EACnD,OAAA2B,EAAa,QAAQ,CAAC,EAAE,KAAOvE,EACxBuE,CACT,CAEA,GAAI,UAAW3B,EAAQ,CAAC,EAAE,QAAQ,CAAC,EAAG,CACpC,IAAM5C,EAAWiD,EAAiBL,CAAO,EACzC,OAAA5C,EAAS,QAAQ,CAAC,EAAIM,IAAA,GACjBN,EAAS,QAAQ,CAAC,GAClBA,EAAS,QAAQ,CAAC,EAAE,SAElBA,CACT,CAEA,MAAO,EACT,EAEA,SAAgBF,GACdH,EACA6E,EACA5E,EACA,QAAA6E,EAAA,sBACA,IAAM7B,EAAU,CAAC,EACjB,YAAA8B,EAAAC,EAA0BH,GAA1BI,EAAAC,EAAArC,EAAAoC,EAAA,EAAAC,EAAA,UAAAC,EAAAJ,EAAA,cAAAE,EAAA,GACE,CADS,IAAMzD,EAAjB0D,EAAA,MACE,MAAMjF,EAAK,aAAe,CAACuB,EAAO,IAAI,EAAIA,EAC1CyB,EAAQ,KAAKzB,CAAK,SAFpB0D,EApoBF,CAooBErC,EAAA,CAAAqC,UAAA,KAAAD,IAAAC,EAAAH,EAAA,oBAAAI,EAAAD,EAAA,KAAAH,YAAA,IAAAlC,EAAA,MAAAA,EAAA,IAIA,IAAMuC,EAAmBZ,GAAevB,EAAShD,EAAK,aAAa,EAC7DI,EAAW,UAAA8E,EAAM/E,GAAsBJ,EAAQU,EAAAC,EAAA,GAChDV,GADgD,CAEnD,iBAAAmF,EACA,iBAAkB,IAAI,KAAK,EAAE,YAAY,CAC3C,EAAC,GACD,GAAI/E,GACEJ,EAAK,aAAc,CACrB,IAAMoF,EAAchF,EAAiB,CAAC,EAEtC,KAAM,CADa4C,EAAQ,GAAG,EAAE,EACboC,CAAU,CAC/B,CAEJ,GAEA,IAAM9E,EAAoB,CAAC6E,EAAuBE,IAAyB,CACzE,GAAI,CACF,QAAQ,KAAK,GAAGA,CAAY,KAAKF,EAAiB,OAAO,EAAE,CAC7D,OAAS5E,EAAG,CACV,QAAQ,KAAK,GAAG8E,CAAY,KAAKF,CAAgB,EAAE,CACrD,CACF,EAEMG,GAAsBtF,GAAuBC,EAAA,wBACjD,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,iBAAkB,CACnE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUG,CAAI,CAC3B,CAAC,EACD,OAAII,EAAS,SAAW,KACtBE,EACEF,EACA,mFACF,EACKA,EAAS,KAAK,CACvB,OAAS,EAAG,CACV,QAAQ,KACN,4EAA4E,CAAC,EAC/E,CACF,CACA,MAAO,CAAC,CACV,GAEMmF,EAA0BvC,GAA0B,CACxD,IAAM5C,EAAuB,CAC3B,GAAI,GACJ,QAAS,CACP,CACE,cAAe,OACf,MAAO,EACP,KAAM,GACN,SAAU,IACZ,CACF,EACA,QAAS,KAAK,IAAI,EAClB,MAAO,GACP,OAAQ,iBACV,EACM4D,EAAahB,EAAQ,GAAG,EAAE,EAChC,GAAI,CAACgB,EAAY,OAAO5D,EACxB,IAAIoF,EAAO,GACX,QAAWpD,KAAUY,EACfZ,EAAO,QAAQ,OAAS,GAAKA,EAAO,QAAQ,CAAC,EAAE,OACjDoD,EAAO,GAAGA,CAAI,GAAGpD,EAAO,QAAQ,CAAC,EAAE,IAAI,IAG3C,OAAAhC,EAAS,QAAQ,CAAC,EAAE,KAAOoF,EAC3BpF,EAAS,GAAK4D,EAAW,GACzB5D,EAAS,QAAU4D,EAAW,QAC9B5D,EAAS,MAAQ4D,EAAW,MAC5B5D,EAAS,mBAAqB4D,EAAW,mBACzC5D,EAAS,MAAQ4D,EAAW,MACrB5D,CACT,EAEMqF,GAA6BzC,GAAmC,CACpE,IAAM5C,EAAgC,CACpC,WAAY,GACZ,GAAI,GACJ,MAAO,GACP,YAAa,GACb,KAAM,YACR,EACM4D,EAAahB,EAAQ,GAAG,EAAE,EAChC,GAAI,CAACgB,EAAY,OAAO5D,EACxB,IAAIsF,EAAa,GACjB,QAAWtD,KAAUY,EACnB0C,EAAa,GAAGA,CAAU,GAAGtD,EAAO,UAAU,GAEhD,OAAAhC,EAAS,WAAasF,EACtBtF,EAAS,GAAK4D,EAAW,GACzB5D,EAAS,MAAQ4D,EAAW,MAC5B5D,EAAS,YAAc4D,EAAW,YAC3B5D,CACT,EAEA,SAAgBuF,GACdf,EACAgB,EACAC,EACA,QAAAhB,EAAA,sBACA,IAAMxE,EAIF,CACF,WAAY,KACZ,aAAc,KACd,iBAAkB,IACpB,EACM2C,EAAU,CAAC,EACjB,YAAA8B,EAAAC,EAA2BH,GAA3BI,EAAAC,EAAArC,EAAAoC,EAAA,EAAAC,EAAA,UAAAC,EAAAJ,EAAA,cAAAE,EAAA,GACE,CADS,IAAM5C,EAAjB6C,EAAA,MACEjC,EAAQ,KAAKZ,CAAM,EACnB/B,EAAK,aAAe+B,EACpB,MAAM/B,SAHR4E,EA1vBF,CA0vBErC,EAAA,CAAAqC,UAAA,KAAAD,IAAAC,EAAAH,EAAA,oB
AAAI,EAAAD,EAAA,KAAAH,YAAA,IAAAlC,EAAA,MAAAA,EAAA,IAKA,IAAMuC,EAAmBU,EAAW7C,CAAO,EACrC5C,EAAW,UAAA8E,EAAMU,EAAY,CAAE,iBAAAT,CAAiB,CAAC,GACvD9E,EAAK,WAAaD,EAAS,WAC3BC,EAAK,iBAAmBD,EAAS,iBACjC,MAAMC,CACR,GAEA,IAAMyF,GAAoB,CAAOC,EAAoBC,IAAgB/F,EAAA,wBACnE,OAAO8F,EAAO,KAAK,YAAY,OAAOC,CAAM,CAC9C,GAEMC,GAA2B,CAAOF,EAAoBC,IAAgB/F,EAAA,wBAC1E,OAAO8F,EAAO,YAAY,OAAOC,CAAM,CACzC,GAEME,GAA8B,CAClC,KAAMJ,GACN,WAAYG,EACd,EAEME,GAAgB,CACpBC,EACAJ,IACG/F,EAAA,wBACH,IAAMoG,EAAS,EAAQ,QAAQ,EAAE,QAC3BN,EAAS,IAAIM,EAAO,CACxB,QAASL,EAAO,OAClB,CAAC,EACKM,EACJJ,GAA4BE,EAAgB,gBAAgB,IAAI,EAClE,OAAOE,EAAcP,EAAQC,CAAM,CACrC,GAEMO,GAAqB,CACzBH,EACAJ,IACG/F,EAAA,wBACH,IAAMoG,EAAS,EAAQ,QAAQ,EAAE,YAC3BN,EAAS,IAAIM,EAAO,CACxB,SAAUL,EAAO,OACnB,CAAC,EACDA,GAAA,aAAAA,EAAe,QACf,IAAMM,EACJJ,GAA4BE,EAAgB,gBAAgB,IAAI,EAClE,OAAOE,EAAcP,EAAQC,CAAM,CACrC,GAEMQ,GAAuB,CAAOT,EAAuBC,IAAgB/F,EAAA,wBACzE,OAAO8F,EAAO,SAAS,OAAOC,CAAM,CACtC,GAEMS,GAA8B,CAClCV,EACAC,IACG/F,EAAA,wBACH,OAAO8F,EAAO,YAAY,OAAOC,CAAM,CACzC,GAEMU,GAAiC,CACrC,KAAMF,GACN,WAAYC,EACd,EAEME,GAAmB,CACvBP,EACAJ,IACG/F,EAAA,wBACH,IAAM2G,EAAY,EAAQ,mBAAmB,EAAE,QACzCb,EAAS,IAAIa,EAAU,CAC3B,QAASZ,EAAO,OAClB,CAAC,EACKM,EACJI,GAA+BN,EAAgB,gBAAgB,IAAI,EACrE,OAAOE,EAAcP,EAAQC,CAAM,CACrC,GAEMa,GAAiB,CACrB9G,EACAC,IAC+BC,EAAA,wBAC/B,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,eAAgB,CACjE,OAAQ,OACR,QAAS,CACP,YAAaE,EACb,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUC,CAAI,CAC3B,CAAC,EACD,OAAII,EAAS,SAAW,KACtBE,EACEF,EACA,yEACF,EACO,MAEFA,EAAS,KAAK,CACvB,OAASG,EAAG,CACV,eAAQ,KACN,4EAA4EA,CAAC,EAC/E,EACO,IACT,CACF,GCp2BO,IAAMuG,EAAN,KAAmB,CAGxB,YAAYC,EAAgB,CAI5B,YAAS,IAAMC,GAAuB,KAAK,MAAM,EAH/C,KAAK,OAASD,CAChB,CAGF,ECVA,UAAYE,OAAmB,qBAC/B,OAAQ,uBAAAC,OAA0B,gCAClC,OAAQ,sBAAAC,OAAyB,gCCFjC,OAAoB,YAAAC,EAAU,kBAAAC,MAAqB,qBAEnD,OAAQ,oBAAAC,MAAuB,sBAG/B,IAAMC,EAAN,KAAsD,CAKpD,YAAYC,EAAwBC,EAAiB,CACnD,KAAK,OAASA,GAAU,QAAQ,IAAI,oBACpC,KAAK,cAAgBD,EACrB,KAAK,IAAM,GAAGE,CAAmB,aACnC,CAEQ,mBAAmBC,EAAyD,CAClF,OAAKA,EACE,OAAO,YAAY,OAAO,QAAQA,CAAU,CAAC,EAD5B,CAAC,CAE3B,CAEQ,iBAAiBC,EAAwB,CAQ/C,MAP0C,CACxC,CAACC,EAAS,QAAQ,EAAG,oBACrB,CAACA,EAAS,MAAM,EAAG,kBACnB,CAACA,EAAS,MAAM,EAAG,kBACnB,CAACA,EAAS,QAAQ,EAAG,oBACrB,CAACA,EAAS,QAAQ,EAAG,mBACvB,EACeD,CAAI,GAAK,mBAC1B,CAEQ,mBAAmBE,EAA8B,CAMvD,MALkD,CAChD,CAACC,EAAe,KAAK,EAAG,mBACxB,CAACA,EAAe,EAAE,EAAG,gBACrB,CAACA,EAAe,KAAK,EAAG,kBAC1B,EACiBD,CAAI,GAAK,kBAC5B,CAEQ,cAAcE,EAAgC,CACpD,OAAQ,OAAOA,EAAK,CAAC,CAAC,EAAI,OAAO,GAAG,EAAI,OAAOA,EAAK,CAAC,CAAC,GAAG,SAAS,CACpE,CAEA,OAAOC,EAAkD,CACvD,GAAI,CAAC,KAAK,cACR,OAAO,QAAQ,QAAQC,EAAiB,OAAO,EAGjD,IAAMC,EAAcF,EAAM,IAAIG,GAAK,CAlDvC,IAAAC,EAkD2C,OACrC,KAAMD,EAAK,KACX,QAAS,CACP,SAAUA,EAAK,YAAY,EAAE,QAC7B,QAASA,EAAK,YAAY,EAAE,OAC5B,cAAaC,EAAAD,EAAK,YAAY,EAAE,aAAnB,YAAAC,EAA+B,cAAe,EAC7D,EACA,KAAM,KAAK,iBAAiBD,EAAK,IAAI,EACrC,UAAWA,EAAK,cAAgB,KAChC,WAAY,KAAK,cAAcA,EAAK,SAAS,EAC7C,SAAU,KAAK,cAAcA,EAAK,OAAO,EACzC,OAAQ,CACN,YAAa,KAAK,mBAAmBA,EAAK,OAAO,IAAI,EACrD,YAAaA,EAAK,OAAO,OAC3B,EACA,WAAY,KAAK,mBAAmBA,EAAK,UAAU,EACnD,OAAQA,EAAK,OAAO,IAAIE,IAAU,CAChC,KAAMA,EAAM,KACZ,UAAW,KAAK,cAAcA,EAAM,IAAI,EACxC,WAAY,KAAK,mBAAmBA,EAAM,UAAU,CACtD,EAAE,EACF,MAAOF,EAAK,MAAM,IAAIG,IAAS,CAC7B,QAASA,EAAK,QACd,WAAY,KAAK,mBAAmBA,EAAK,UAAU,CACrD,EAAE,EACF,SAAU,CACR,WAAYC,EAAAC,EAAA,GACPL,EAAK,SAAS,YADP,CAEV,eAAgB,iBAClB,GACA,WAAY,EACd,CACF,EAAE,EAEF,OAAO,MAAM,KAAK,IAAK,CACrB,OAAQ,OACR,QAAS,CACP,eAAgB,mBAChB,YAAa,KAAK,QAAU,EAC9B,EACA,KAAM,KAAK,UAAU,CACnB,MAAOD,CACT,CAAC,CACH,CAAC,EACE,KAAKO,GACCA,EAAS,GAIPR,EAAiB,SAHtB,QAAQ,MAAM;AAAA,sBAA8CQ,EAAS,MAAM,EAAE,EACtER,EAAiB,OAG3B,EACA,MAAOS,IACN,QAAQ,MAAM,yBAA0BA,CAAK,EACtCT,EAAiB,OACzB,CACL,CAEA,UAA0B,CACxB,
OAAO,QAAQ,QAAQ,CACzB,CACF,EAEOU,GAAQrB,ED3GR,IAAMsB,EAAY,CAACC,EAAe,uBAClB,SAAM,UAAUA,CAAI,EAG9BC,GAAe,CAACC,EAAwBC,IAAoB,CACvE,IAAMC,EAAW,IAAIC,GACfC,EAAW,IAAIC,GAAwBL,EAAeC,CAAM,EAC5DK,EAAY,IAAIC,GAAoBH,CAAQ,EAClDF,EAAS,iBAAiBI,CAAS,EACnCJ,EAAS,SAAS,CACpB,EEZA,IAAMM,GAASC,EAAU,EAEZC,EAAkB,CAC7BC,EACAC,EACAC,EAAgB,GAChBC,EAAW,WACR,CACH,IAAMC,EAA6B,CACjC,UAAW,CAACC,EAAQC,IAAS,CAC3B,IAAMC,EAAY,QAAQ,UAAUF,EAAQC,CAAI,EAChD,cAAO,iBAAiBC,EAAW,CACjC,cAAe,CACb,MAAOL,EACP,SAAU,EACZ,EACA,SAAU,CACR,MAAOC,CACT,CACF,CAAC,EACM,IAAI,MAAMI,EAAWH,CAAO,CACrC,EACA,IAAK,CAACC,EAAQG,EAAMC,IAAa,CAC/B,IAAMC,EAAQL,EAAOG,CAAI,EACnBN,EAAgB,GAAG,QAAQ,IAC/BG,EACA,eACF,CAAC,IAAIG,EAAK,SAAS,CAAC,GAEpB,OAAI,OAAOE,GAAU,UACnB,OAAO,iBAAiBA,EAAO,CAC7B,cAAe,CACb,MAAOR,EACP,SAAU,EACZ,EACA,SAAU,CACR,MAAOC,CACT,CACF,CAAC,EACM,IAAI,MAAMO,EAAON,CAAO,GAG7B,OAAOM,GAAU,WACZ,IAAIJ,IAAgB,CA9CnC,IAAAK,EAAAC,EAAAC,EAAAC,EA+CU,IAAMC,EAAqB,IAAI,KAAK,EAAE,YAAY,EAC5CC,EAAgB,QAAQ,IAAIX,EAAQ,UAAU,EAC9CY,GAAeN,EAAAL,EAAK,CAAC,IAAN,YAAAK,EAAS,aACxBO,GAAUN,EAAAN,EAAK,CAAC,IAAN,YAAAM,EAAS,QACzB,OAAAC,EAAOP,EAAK,CAAC,IAAb,aAAAO,EAAgB,cAChBC,EAAOR,EAAK,CAAC,IAAb,aAAAQ,EAAgB,QAETjB,GAAO,gBAAgB,GAAGmB,CAAa,IAAId,CAAa,GAAWiB,GAAcC,EAAA,wBACtF,GAAI,CACFD,EAAK,aAAa,iBAAkB,KAAK,UAAUb,CAAI,CAAC,EACxD,IAAMe,EAAW,QAAQ,MAAMX,EAAOL,EAAQC,CAAI,EAC5CgB,EAASH,EAAK,YAAY,EAAE,OAElC,OAAIE,aAAoB,QACf,IAAI,QAAQ,CAACE,EAASC,IAAW,CACtCH,EACG,KAAYI,GAAqBL,EAAA,wBAChC,IAAMC,EAAW,MAAMK,GAAsB1B,EAAQ,CACnD,QAASA,EACT,cAAAgB,EACA,cAAAd,EACA,mBAAAa,EACA,iBAAkB,IAAI,KAAK,EAAE,YAAY,EACzC,iBAAAU,EACA,OAAQnB,EAAK,CAAC,EACd,aAAAW,EACA,KAAMC,EACN,QAASI,CACX,CAAC,EAEDH,EAAK,aAAa,kBAAmB,KAAK,UAAUE,CAAQ,CAAC,EAC7DF,EAAK,aAAa,kBAAmB,SAAS,EAC9CA,EAAK,IAAI,EACTI,EAAQF,CAAQ,CAClB,EAAC,EACA,MAAOM,GAAU,CAChBR,EAAK,gBAAgBQ,CAAK,EAC1BR,EAAK,aAAa,kBAAmB,OAAO,EAC5CA,EAAK,IAAI,EACTK,EAAOG,CAAK,CACd,CAAC,CACL,CAAC,GAGHR,EAAK,aAAa,kBAAmB,KAAK,UAAUE,CAAQ,CAAC,EAC7DF,EAAK,aAAa,kBAAmB,SAAS,EAC9CA,EAAK,IAAI,EACFE,EACT,OAASM,EAAO,CACd,MAAAR,EAAK,gBAAgBQ,CAAK,EAC1BR,EAAK,aAAa,kBAAmB,OAAO,EAC5CA,EAAK,IAAI,EACHQ,CACR,CACF,EAAC,CACH,EAGK,QAAQ,IAAItB,EAAQG,EAAMC,CAAQ,CAC3C,CACF,EAEA,OAAO,IAAI,MAAMR,EAAKG,CAAO,CAC/B,EC9GA,UAAYwB,MAAmB,qBAGxB,IAAMC,GAAe,CAACC,EAAsBC,EAAgBC,IAC1D,YAAaC,EAAa,CAC/B,IAAMC,EAASC,EAAU,EAEnBC,EAAmBC,GAA6B,CACpD,GAAI,CACEL,GACF,OAAO,QAAQA,CAAU,EAAE,QAAQ,CAAC,CAACM,EAAKC,CAAK,IAAM,CACnDF,EAAK,aAAaC,EAAKC,CAAK,CAC9B,CAAC,EAGHF,EAAK,aAAa,iBAAkB,KAAK,UAAUJ,CAAI,CAAC,EACxD,IAAMO,EAAST,EAAK,GAAGE,CAAI,EAE3B,OAAIO,aAAkB,QACbA,EAAO,KAAMC,IAClBJ,EAAK,aAAa,kBAAmB,KAAK,UAAUI,CAAc,CAAC,EACnEJ,EAAK,UAAU,CAAE,KAAoB,iBAAe,EAAG,CAAC,EACjDI,EACR,EAAE,MAAOC,GAAU,CAClB,MAAAC,GAAYN,EAAMK,EAAOT,CAAI,EACvBS,CACR,CAAC,EAAE,QAAQ,IAAML,EAAK,IAAI,CAAC,GAE3BA,EAAK,aAAa,kBAAmB,KAAK,UAAUG,CAAM,CAAC,EAC3DH,EAAK,UAAU,CAAE,KAAoB,iBAAe,EAAG,CAAC,EACxDA,EAAK,IAAI,EACFG,EAEX,OAASE,EAAO,CACd,MAAAC,GAAYN,EAAMK,EAAOT,CAAI,EACvBS,CACR,CACF,EAEA,OAAOR,EAAO,gBAAgBJ,EAAcM,CAAe,CAC7D,EAGIO,GAAc,CAACN,EAA0BK,EAAYT,IAAgB,CACzEI,EAAK,aAAa,iBAAkB,KAAK,UAAUJ,CAAI,CAAC,EACxDI,EAAK,UAAU,CACb,KAAoB,iBAAe,MACnC,QAASK,aAAiB,MAAQA,EAAM,QAAU,eACpD,CAAC,EACDL,EAAK,IAAI,CACX,ECvCO,IAAMO,EAAN,KAAsB,CAG3B,YAAYC,EAAgB,CAI5B,SAAM,CAACC,EAAoBC,IACzBC,GAAkB,KAAK,OAAQF,EAAYC,CAAM,EAEnD,aAAWE,GACTC,GAAsB,KAAK,OAAQD,CAAI,EAEzC,SAAOF,GAAwBI,GAAsB,KAAK,OAAQJ,CAAM,EATtE,KAAK,OAASF,CAChB,CASF,ECjBA,IAAMO,GAAW,CAACC,EAAgBC,IAA0C,CAC1E,GAAI,EAAEA,EAAK,oBAAoB,QAC7B,MAAM,IAAI,MAAM,0CAA0C,EAE5D,OAAW,CAACC,EAAKC,CAAK,IAAK,OAAO,QAAQF,EAAK,QAAQ,EACrD,GAAI,OAAOC,GAAQ,UAAY,OAAOC,GAAU,SAC9C,MAAM,IAAI,MACR,yEACF,EAGJ,OAAOC,GAAyBJ,EAAQC,CAAI,CAC9C,EAEMI,GAAQ,CAACL,
EAAgBC,IAAuC,CACpE,GAAI,OAAOA,EAAK,OAAU,SACxB,MAAM,IAAI,MAAM,wBAAwB,EAE1C,GAAIA,EAAK,MAAQ,GAAKA,EAAK,MAAQ,IACjC,MAAM,IAAI,MAAM,2CAA2C,EAE7D,OAAOK,GAAsBN,EAAQC,CAAI,CAC3C,EAEMM,GAAS,CAACP,EAAgBC,IAAwC,CACtE,GAAI,EAAEA,EAAK,kCAAkC,QAC3C,MAAM,IAAI,MAAM,yDAAyD,EAE3E,OAAOO,GAAuBR,EAAQC,CAAI,CAC5C,EAEMQ,GAAQ,CAACT,EAAgBC,IAC7BS,GAAsBV,EAAQC,CAAI,EAEvBU,EAAN,KAAmB,CAGxB,YAAYX,EAAgB,CAI5B,WAASC,GAAqBQ,GAAM,KAAK,OAAQR,CAAI,EAErD,cAAYA,GAAwBF,GAAS,KAAK,OAAQE,CAAI,EAE9D,YAAUA,GAAsBM,GAAO,KAAK,OAAQN,CAAI,EAExD,WAASA,GAAqBI,GAAM,KAAK,OAAQJ,CAAI,EATnD,KAAK,OAASD,CAChB,CASF,ECpCA,UAAYY,OAAmB,qBAE/B,IAAMC,GAAgC,CACpC,OAAQ,CACN,KAAM,CACJ,cAAe,iCACf,gBAAiBC,CACnB,EACA,WAAY,CACV,cAAe,4BACf,gBAAiBC,CACnB,CACF,EACA,UAAW,CACT,KAAM,CACJ,cAAe,4BACf,gBAAiBC,CACnB,EACA,WAAY,CACV,cAAe,+BACf,gBAAiBC,EACnB,CACF,EACA,eAAgB,CACd,KAAM,CACJ,cAAe,6CACf,gBAAiBH,CACnB,EACA,WAAY,CACV,cAAe,wCACf,gBAAiBC,CACnB,CACF,CACF,EAEMG,GAAgD,CACpD,OAAQC,GACR,UAAWC,GACX,eAAgBC,EAClB,EAQMC,GAAyBC,GAAsB,CACnD,GAAI,CAACA,GAAO,OAAOA,GAAQ,UAAY,MAAM,QAAQA,CAAG,EACtD,MAAO,GAGT,IAAMC,EAAgB,CACpB,SACA,QACA,gBACA,oBACA,gBACF,EAGA,OAFe,OAAO,OAAOD,CAAG,EAElB,MAAOE,GACf,OAAOA,GAAQ,UAAYA,IAAQ,KAAa,GAC7CD,EAAc,MAAOE,GAAQA,KAAOD,CAAG,CAC/C,CACH,EAEaE,GAAN,KAAkB,CAQvB,YAAY,CACV,OAAAC,EAAS,QAAQ,IAAI,oBACrB,cAAAC,EAAgB,EAClB,EAAmB,CAAC,EAAG,CACrB,GAAID,IAAW,OACb,MAAM,IAAI,MACR,0HACF,EAGF,KAAK,OAASA,EACd,KAAK,cAAgBC,EACrB,KAAK,UAAY,IAAIC,EAAgBF,CAAM,EAC3C,KAAK,MAAQ,IAAIG,EAAaH,CAAM,EACpC,KAAK,MAAQ,IAAII,EAAaJ,CAAM,EACpC,KAAK,aAAeK,GAEhBJ,GACFK,GAAaL,EAAeD,CAAM,CAEtC,CAEA,IAAI,WAAY,CACd,GAAI,CACF,IAAMO,EAAS,EAAQ,mBAAmB,EAAE,QAC5C,OAAOC,EAAgB,KAAK,OAAQD,EAAQ,YAAa,WAAW,CACtE,OAAS,EAAG,CACV,QAAQ,MACN,8EACF,CACF,CACF,CAEA,IAAI,QAAS,CACX,GAAI,CACF,IAAMA,EAAS,EAAQ,QAAQ,EAAE,QACjC,OAAOC,EAAgB,KAAK,OAAQD,EAAQ,SAAU,QAAQ,CAChE,OAAS,EAAG,CACV,QAAQ,MACN,qEACF,CACF,CACF,CAEM,IAAIE,EAUK,QAAAC,EAAA,yBAVL,CACR,WAAAC,EACA,cAAAC,EACA,mBAAAC,EACA,eAAAC,EACA,KAAAC,EACA,SAAAC,EACA,QAAAC,EACA,wBAAAC,EACA,OAAAC,EAAS,EACX,EAAe,CAGb,OAFeC,EAAU,EAEX,gBAAgB,kBAA0BC,GAASX,EAAA,sBAC/D,GAAI,CACF,IAAMY,EAAgB,CACpB,WAAAX,EACA,cAAAC,EACA,mBAAAC,EACA,eAAAC,EACA,KAAAC,EACA,SAAAC,EACA,QAAAC,EACA,wBAAAC,EACA,OAAAC,CACF,EACAE,EAAK,aAAa,iBAAkB,KAAK,UAAUC,CAAa,CAAC,EAEjE,IAAMC,EAAyBT,EACzBU,EAA6C,CACjD,MAAOX,EACP,QAASD,EACT,iBAAkBI,CACpB,EACIF,IAAgBU,EAAkB,gBAAkBV,GAExD,IAAMW,EAAkB,MAAM,KAAK,UAAU,IAC3Cd,EACAa,CACF,EAEA,GAAI,CAACC,EAAiB,MAAM,IAAI,MAAM,kBAAkB,EAExD,IAAMC,EAAiBD,EAAgB,gBACvC,GAAI,CAACA,EAAgB,WACnB,MAAM,IAAI,MACR,WAAWd,CAAU,oDACvB,EAGF,IAAMgB,EAA0BF,EAAgB,SAChD,GAAI,CAACE,EACH,MAAM,IAAI,MACR,WAAWhB,CAAU,kDACvB,EAGF,IAAMiB,EAAuBD,EAAwB,MACrD,GAAI,CAACC,EACH,MAAM,IAAI,MACR,WAAWjB,CAAU,wDACvB,EAGF,IAAMkB,EAAgBD,EAAqB,SAErCE,EAAqB,IAAI,KAAK,EAAE,YAAY,EAC5CC,EAASC,IAAA,GACVP,EAAgB,YACfP,GAA2B,CAAC,GAE5Be,EACJhD,GACE4C,CACF,EAAEH,EAAe,IAAI,EACjBQ,EAAgBD,EAAO,cAEvBE,EAAkBF,EAAO,gBACzBG,GAAmB9C,GAAyBuC,CAAa,EACzDQ,EAAoBZ,EAAgB,kBACtCY,IACFN,EAAO,QAAaM,EAAkB,KAExCN,EAAO,OAAYZ,EACfA,GAAU,CAAC,SAAU,cAAc,EAAE,SAASU,CAAa,IAC7DE,EAAO,eAAoB,CAAE,cAAe,EAAK,GAGnD,IAAMO,EAAW,MAAMF,GAAiBX,EAAiBM,CAAM,EAEzDQ,EAAiBC,IAAiB,CACtC,IAAMC,GAAmB,IAAI,KAAK,EAAE,YAAY,EAChD,OAAOC,GAAaV,EAAA,CAClB,cAAAE,EACA,cAAAL,EACA,KAAM,CAAC,EACP,OAAAE,EACA,KAAAhB,EACA,mBAAAe,EACA,iBAAAW,GACA,QAAS,KAAK,OACd,SAAAzB,EACA,UAAWS,EAAgB,GAC3B,eAAgBA,EAAgB,QAChC,uBAAAF,EACA,SAAUN,EACV,wBAAyB,GACzB,QAASI,EAAK,YAAY,EAAE,QACzBmB,GACJ,CACH,EAEA,GAAIrB,EACF,OAAOwB,GAAeL,EAAUC,EAAeJ,CAAe,EAChE,IAAMS,EAAa,MAAML,EAAc,CAAE,iBAAkBD,CAAS,CAAC,EAE/DO,EAAiB,CACrB,WAAYD,EAAW,WACvB,aAAcN,EACd,iBAAkBM,EAAW,gBAC/B,EACA,OAAAvB,EAAK,aAAa,kBAAmB,KAAK,UAAUw
B,CAAc,CAAC,EAE5DA,CACT,OAASC,EAAO,CACd,MAAAzB,EAAK,UAAU,CACb,KAAoB,kBAAe,MACnC,QAASyB,aAAiB,MAAQA,EAAM,QAAU,eACpD,CAAC,EACKA,CACR,QAAE,CACAzB,EAAK,IAAI,CACX,CACF,EAAC,CACH,GAEM,YAAYZ,EAO6B,QAAAC,EAAA,yBAP7B,CAChB,aAAAqC,EACA,eAAAjC,EAAiB,CAAC,EAClB,SAAAE,EAAW,CAAC,EACZ,kBAAAgC,EAAoB,KACpB,gBAAAC,EAAkB,KAClB,iBAAAC,EAAmB,EACrB,EAA+C,CAC7C,GAAI,CACF,IAAMC,EAAS,MAAMC,GAAmB,CACtC,cAAeL,EACf,gBAAiBjC,EACjB,SAAAE,EACA,oBAAqBgC,EACrB,wBAAyBC,EACzB,mBAAoBC,EACpB,QAAS,KAAK,MAChB,CAAC,EAED,GAAI,CAACA,GACCxD,GAAsByD,CAAM,EAAG,CAGjC,IAAME,EAFa,OAAO,OAAOF,CAAM,EAER,OAC5BG,GAAcA,EAAK,iBAAmB,EACzC,EAEA,GAAID,EAAY,SAAW,EACzB,MAAM,IAAI,MAAM,KAAK,UAAUF,EAAQ,KAAM,CAAC,CAAC,EAMjD,GAAI,CAHqBE,EAAY,KAClCC,GAAcA,EAAK,SAAW,SACjC,EAEE,MAAM,IAAI,MAAM,KAAK,UAAUH,EAAQ,KAAM,CAAC,CAAC,CAEnD,CAGF,OAAOA,CACT,OAASL,EAAO,CACd,MAAIA,aAAiB,OACnB,QAAQ,MAAM,0BAA2BA,EAAM,OAAO,EAChD,IAAI,MAAM,2BAA2BA,EAAM,OAAO,EAAE,IAE1D,QAAQ,MAAM,kCAAmCA,CAAK,EAChD,IAAI,MAAM,gCAAgC,EAEpD,CACF,GAEM,WAAWN,EAAkB,QAAA9B,EAAA,sBACjC,OAAO6C,GAAe,KAAK,OAAQf,CAAI,CACzC,GACF","names":["Ably","URL_API_PROMPTLAYER","promptlayerApiHandler","apiKey","body","__async","proxyGenerator","promptLayerApiRequest","response","data","warnOnBadResponse","e","promptLayerTrackMetadata","__spreadProps","__spreadValues","promptLayerTrackScore","promptLayerTrackPrompt","promptLayerTrackGroup","promptLayerCreateGroup","getPromptTemplate","promptName","params","url","publishPromptTemplate","getAllPromptTemplates","_a","key","value","runWorkflowRequest","_0","workflow_name","input_variables","metadata","workflow_label_name","workflow_version_number","return_all_outputs","api_key","timeout","payload","headers","result","execution_id","channel_name","ably_token","ably","Ably","final_output","waitForWorkflowCompletion","error","channel","resolve","reject","results","messageListener","message","timer","err","openaiStreamChat","_b","_c","_d","_e","_f","_g","_h","_i","content","functionCall","lastResult","toolCalls","delta","toolCall","lastToolCall","firstChoice","anthropicStreamMessage","cleaned_result","function_name","prev","current","final_result","generator","__asyncGenerator","iter","__forAwait","more","temp","__await","request_response","request_id","main_message","trackRequest","openaiStreamCompletion","text","anthropicStreamCompletion","completion","streamResponse","afterStream","mapResults","openaiChatRequest","client","kwargs","openaiCompletionsRequest","MAP_TYPE_TO_OPENAI_FUNCTION","openaiRequest","promptBlueprint","OpenAI","requestToMake","azureOpenAIRequest","anthropicChatRequest","anthropicCompletionsRequest","MAP_TYPE_TO_ANTHROPIC_FUNCTION","anthropicRequest","Anthropic","utilLogRequest","GroupManager","apiKey","promptLayerCreateGroup","opentelemetry","SimpleSpanProcessor","NodeTracerProvider","SpanKind","SpanStatusCode","ExportResultCode","PromptLayerSpanExporter","enableTracing","apiKey","URL_API_PROMPTLAYER","attributes","kind","SpanKind","code","SpanStatusCode","time","spans","ExportResultCode","requestData","span","_a","event","link","__spreadProps","__spreadValues","response","error","span_exporter_default","getTracer","name","setupTracing","enableTracing","apiKey","provider","NodeTracerProvider","exporter","span_exporter_default","processor","SimpleSpanProcessor","tracer","getTracer","promptLayerBase","apiKey","llm","function_name","provider","handler","target","args","newTarget","prop","receiver","value","_a","_b","_c","_d","request_start_time","provider_type","return_pl_id","pl_tags","span","__async","response","spanId","resolve","reject","request_response
","promptlayerApiHandler","error","opentelemetry","wrapWithSpan","functionName","func","attributes","args","tracer","getTracer","wrapperFunction","span","key","value","result","resolvedResult","error","handleError","TemplateManager","apiKey","promptName","params","getPromptTemplate","body","publishPromptTemplate","getAllPromptTemplates","metadata","apiKey","body","key","value","promptLayerTrackMetadata","score","promptLayerTrackScore","prompt","promptLayerTrackPrompt","group","promptLayerTrackGroup","TrackManager","opentelemetry","MAP_PROVIDER_TO_FUNCTION_NAME","openaiStreamChat","openaiStreamCompletion","anthropicStreamMessage","anthropicStreamCompletion","MAP_PROVIDER_TO_FUNCTION","openaiRequest","anthropicRequest","azureOpenAIRequest","isWorkflowResultsDict","obj","REQUIRED_KEYS","val","key","PromptLayer","apiKey","enableTracing","TemplateManager","GroupManager","TrackManager","wrapWithSpan","setupTracing","module","promptLayerBase","_0","__async","promptName","promptVersion","promptReleaseLabel","inputVariables","tags","metadata","groupId","modelParameterOverrides","stream","getTracer","span","functionInput","prompt_input_variables","templateGetParams","promptBlueprint","promptTemplate","promptBlueprintMetadata","promptBlueprintModel","provider_type","request_start_time","kwargs","__spreadValues","config","function_name","stream_function","request_function","provider_base_url","response","_trackRequest","body","request_end_time","trackRequest","streamResponse","requestLog","functionOutput","error","workflowName","workflowLabelName","workflowVersion","returnAllOutputs","result","runWorkflowRequest","outputNodes","node","utilLogRequest"]}
1
+ {"version":3,"sources":["../../src/utils.ts","../../src/groups.ts","../../src/tracing.ts","../../src/span-exporter.ts","../../src/promptlayer.ts","../../src/span-wrapper.ts","../../src/templates.ts","../../src/track.ts","../../src/index.ts"],"sourcesContent":["import {\n GetPromptTemplateParams,\n GetPromptTemplateResponse,\n ListPromptTemplatesResponse,\n LogRequest,\n Pagination,\n PublishPromptTemplate,\n PublishPromptTemplateResponse,\n RequestLog,\n RunWorkflowRequestParams,\n TrackGroup,\n TrackMetadata,\n TrackPrompt,\n TrackRequest,\n TrackScore,\n WorkflowResponse,\n} from \"@/types\";\nimport type TypeAnthropic from \"@anthropic-ai/sdk\";\nimport {\n Completion as AnthropicCompletion,\n Message,\n MessageStreamEvent,\n} from \"@anthropic-ai/sdk/resources\";\nimport Ably from \"ably\";\nimport type TypeOpenAI from \"openai\";\nimport {\n ChatCompletion,\n ChatCompletionChunk,\n Completion,\n} from \"openai/resources\";\n\nexport const URL_API_PROMPTLAYER =\n process.env.URL_API_PROMPTLAYER || \"https://api.promptlayer.com\";\n\nconst promptlayerApiHandler = async <Item>(\n apiKey: string,\n body: TrackRequest & {\n request_response: AsyncIterable<Item> | any;\n }\n) => {\n const isGenerator = body.request_response[Symbol.asyncIterator] !== undefined;\n if (isGenerator) {\n return proxyGenerator(apiKey, body.request_response, body);\n }\n return await promptLayerApiRequest(apiKey, body);\n};\n\nconst promptLayerApiRequest = async (apiKey: string, body: TrackRequest) => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/track-request`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify(body),\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While logging your request, PromptLayer experienced the following error:\"\n );\n }\n if (data && body.return_pl_id) {\n return [body.request_response, data.request_id];\n }\n } catch (e) {\n console.warn(\n `WARNING: While logging your request PromptLayer had the following error: ${e}`\n );\n }\n return body.request_response;\n};\n\nconst promptLayerTrackMetadata = async (\n apiKey: string,\n body: TrackMetadata\n): Promise<boolean> => {\n try {\n const response = await fetch(\n `${URL_API_PROMPTLAYER}/library-track-metadata`,\n {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n ...body,\n api_key: apiKey,\n }),\n }\n );\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While logging metadata to your request, PromptLayer experienced the following error\"\n );\n return false;\n }\n } catch (e) {\n console.warn(\n `WARNING: While logging metadata to your request, PromptLayer experienced the following error: ${e}`\n );\n return false;\n }\n return true;\n};\n\nconst promptLayerTrackScore = async (\n apiKey: string,\n body: TrackScore\n): Promise<boolean> => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/library-track-score`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n ...body,\n api_key: apiKey,\n }),\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While scoring your request, PromptLayer experienced the following error\"\n );\n return false;\n }\n } catch (e) {\n console.warn(\n `WARNING: While scoring your request, PromptLayer experienced 
the following error: ${e}`\n );\n return false;\n }\n return true;\n};\n\nconst promptLayerTrackPrompt = async (\n apiKey: string,\n body: TrackPrompt\n): Promise<boolean> => {\n try {\n const response = await fetch(\n `${URL_API_PROMPTLAYER}/library-track-prompt`,\n {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n ...body,\n api_key: apiKey,\n }),\n }\n );\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While associating your request with a prompt template, PromptLayer experienced the following error\"\n );\n return false;\n }\n } catch (e) {\n console.warn(\n `WARNING: While associating your request with a prompt template, PromptLayer experienced the following error: ${e}`\n );\n return false;\n }\n return true;\n};\n\nconst promptLayerTrackGroup = async (\n apiKey: string,\n body: TrackGroup\n): Promise<boolean> => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/track-group`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n ...body,\n api_key: apiKey,\n }),\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While associating your request with a group, PromptLayer experienced the following error\"\n );\n return false;\n }\n } catch (e) {\n console.warn(\n `WARNING: While associating your request with a group, PromptLayer experienced the following error: ${e}`\n );\n return false;\n }\n return true;\n};\n\nconst promptLayerCreateGroup = async (\n apiKey: string\n): Promise<number | boolean> => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/create-group`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n api_key: apiKey,\n }),\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While creating a group PromptLayer had the following error\"\n );\n return false;\n }\n return data.id;\n } catch (e) {\n console.warn(\n `WARNING: While creating a group PromptLayer had the following error: ${e}`\n );\n return false;\n }\n};\n\nconst getPromptTemplate = async (\n apiKey: string,\n promptName: string,\n params?: Partial<GetPromptTemplateParams>\n) => {\n try {\n const url = new URL(\n `${URL_API_PROMPTLAYER}/prompt-templates/${promptName}`\n );\n const response = await fetch(url, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n \"X-API-KEY\": apiKey,\n },\n body: JSON.stringify(params),\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While fetching a prompt template PromptLayer had the following error\"\n );\n return null;\n }\n if (data.warning) {\n console.warn(\n `WARNING: While fetching your prompt PromptLayer had the following error: ${data.warning}`\n );\n }\n return data as Promise<GetPromptTemplateResponse>;\n } catch (e) {\n console.warn(\n `WARNING: While fetching a prompt template PromptLayer had the following error: ${e}`\n );\n return null;\n }\n};\n\nconst publishPromptTemplate = async (\n apiKey: string,\n body: PublishPromptTemplate\n) => {\n try {\n const response = await fetch(\n `${URL_API_PROMPTLAYER}/rest/prompt-templates`,\n {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n \"X-API-KEY\": apiKey,\n },\n body: JSON.stringify({\n prompt_template: 
{ ...body },\n prompt_version: { ...body },\n release_labels: body.release_labels ? body.release_labels : undefined,\n }),\n }\n );\n const data = await response.json();\n if (response.status === 400) {\n warnOnBadResponse(\n data,\n \"WARNING: While publishing a prompt template PromptLayer had the following error\"\n );\n }\n return data as Promise<PublishPromptTemplateResponse>;\n } catch (e) {\n console.warn(\n `WARNING: While publishing a prompt template PromptLayer had the following error: ${e}`\n );\n }\n};\n\nconst getAllPromptTemplates = async (\n apiKey: string,\n params?: Partial<Pagination>\n) => {\n try {\n const url = new URL(`${URL_API_PROMPTLAYER}/prompt-templates`);\n Object.entries(params || {}).forEach(([key, value]) =>\n url.searchParams.append(key, value.toString())\n );\n const response = await fetch(url, {\n headers: {\n \"Content-Type\": \"application/json\",\n \"X-API-KEY\": apiKey,\n },\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While fetching all prompt templates PromptLayer had the following error\"\n );\n return null;\n }\n return (data.items ?? []) as Promise<Array<ListPromptTemplatesResponse>>;\n } catch (e) {\n console.warn(\n `WARNING: While fetching all prompt templates PromptLayer had the following error: ${e}`\n );\n return null;\n }\n};\n\nexport const runWorkflowRequest = async ({\n workflow_name,\n input_variables,\n metadata = {},\n workflow_label_name = null,\n workflow_version_number = null,\n return_all_outputs = false,\n api_key,\n timeout = 3600000, // Default timeout is 1 hour in milliseconds\n}: RunWorkflowRequestParams): Promise<WorkflowResponse> => {\n const payload = {\n input_variables,\n metadata,\n workflow_label_name,\n workflow_version_number,\n return_all_outputs,\n };\n\n const headers = {\n \"X-API-KEY\": api_key,\n \"Content-Type\": \"application/json\",\n };\n\n try {\n // Start the workflow by making a POST request\n const response = await fetch(\n `${URL_API_PROMPTLAYER}/workflows/${encodeURIComponent(\n workflow_name\n )}/run`,\n {\n method: \"POST\",\n headers: headers,\n body: JSON.stringify(payload),\n }\n );\n\n if (response.status !== 201) {\n const errorData = await response.json().catch(() => ({}));\n return {\n success: false,\n message: `Failed to run workflow: ${\n errorData.error || response.statusText\n }`,\n };\n }\n\n const result = await response.json();\n if (result.warning) {\n console.warn(`WARNING: ${result.warning}`);\n }\n const execution_id = result.workflow_version_execution_id;\n if (!execution_id) {\n console.log(\"No execution ID returned from workflow run\");\n return { success: false, message: \"Failed to run workflow\" };\n }\n\n const channel_name = `workflow_updates:${execution_id}`;\n\n // Request a token to subscribe to the channel\n const ws_response = await fetch(\n `${URL_API_PROMPTLAYER}/ws-token-request-library?capability=${channel_name}`,\n {\n method: \"POST\",\n headers: headers,\n }\n );\n\n const ws_token_response = await ws_response.json();\n\n const ably_token = ws_token_response.token_details.token;\n\n // Initialize Ably client using the Promise-based client\n const ably = new Ably.Realtime({ token: ably_token });\n\n try {\n // Wait for the workflow to complete and get the final output\n const final_output = await waitForWorkflowCompletion(\n ably,\n channel_name,\n timeout\n );\n ably.close();\n return final_output;\n } finally {\n // Ensure the Ably client is closed in all cases\n ably.close();\n }\n } 
catch (error) {\n console.error(\n `Failed to run workflow: ${\n error instanceof Error ? error.message : error\n }`\n );\n throw error;\n }\n};\n\nasync function waitForWorkflowCompletion(\n ably: Ably.Realtime,\n channel_name: string,\n timeout: number\n): Promise<any> {\n const channel = ably.channels.get(channel_name);\n\n return new Promise(async (resolve, reject) => {\n let results: any = null;\n\n const messageListener = (message: Ably.Message) => {\n if (message.name === \"SET_WORKFLOW_COMPLETE\") {\n const message_data = JSON.parse(message.data as string);\n results = message_data.final_output;\n clearTimeout(timer);\n channel.unsubscribe(\"SET_WORKFLOW_COMPLETE\", messageListener);\n resolve(results);\n }\n };\n\n // Set up a timeout to reject the promise if no message is received in time\n const timer = setTimeout(() => {\n channel.unsubscribe(\"SET_WORKFLOW_COMPLETE\", messageListener);\n reject(new Error(\"Workflow execution did not complete properly (timeout)\"));\n }, timeout);\n\n try {\n // Subscribe to the channel to receive updates\n await channel.subscribe(\"SET_WORKFLOW_COMPLETE\", messageListener);\n } catch (err) {\n clearTimeout(timer);\n reject(err);\n }\n });\n}\n\nconst openaiStreamChat = (results: ChatCompletionChunk[]): ChatCompletion => {\n let content: ChatCompletion.Choice[\"message\"][\"content\"] = null;\n let functionCall: ChatCompletion.Choice[\"message\"][\"function_call\"] =\n undefined;\n const response: ChatCompletion = {\n id: \"\",\n choices: [],\n created: Date.now(),\n model: \"\",\n object: \"chat.completion\",\n };\n const lastResult = results.at(-1);\n if (!lastResult) return response;\n let toolCalls: ChatCompletion.Choice[\"message\"][\"tool_calls\"] = undefined;\n for (const result of results) {\n if (result.choices.length === 0) continue;\n const delta = result.choices[0].delta;\n\n if (delta.content) {\n content = `${content || \"\"}${delta.content || \"\"}`;\n }\n if (delta.function_call) {\n functionCall = {\n name: `${functionCall ? functionCall.name : \"\"}${\n delta.function_call.name || \"\"\n }`,\n arguments: `${functionCall ? functionCall.arguments : \"\"}${\n delta.function_call.arguments || \"\"\n }`,\n };\n }\n const toolCall = delta.tool_calls?.[0];\n if (toolCall) {\n toolCalls = toolCalls || [];\n const lastToolCall = toolCalls.at(-1);\n if (!lastToolCall || toolCall.id) {\n toolCalls.push({\n id: toolCall.id || \"\",\n type: toolCall.type || \"function\",\n function: {\n name: toolCall.function?.name || \"\",\n arguments: toolCall.function?.arguments || \"\",\n },\n });\n continue;\n }\n lastToolCall.function.name = `${lastToolCall.function.name}${\n toolCall.function?.name || \"\"\n }`;\n lastToolCall.function.arguments = `${lastToolCall.function.arguments}${\n toolCall.function?.arguments || \"\"\n }`;\n }\n }\n const firstChoice = results[0].choices.at(0);\n response.choices.push({\n finish_reason: firstChoice?.finish_reason ?? \"stop\",\n index: firstChoice?.index ?? 0,\n logprobs: firstChoice?.logprobs ?? null,\n message: {\n role: \"assistant\",\n content,\n function_call: functionCall ? functionCall : undefined,\n tool_calls: toolCalls ? toolCalls : undefined,\n refusal: firstChoice?.delta.refusal ?? 
null,\n },\n });\n response.id = lastResult.id;\n response.model = lastResult.model;\n response.created = lastResult.created;\n response.system_fingerprint = lastResult.system_fingerprint;\n response.usage = lastResult.usage;\n return response;\n};\n\nconst anthropicStreamMessage = (results: MessageStreamEvent[]): Message => {\n let response: Message = {\n id: \"\",\n model: \"\",\n content: [],\n role: \"assistant\",\n type: \"message\",\n stop_reason: \"stop_sequence\",\n stop_sequence: null,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n };\n const lastResult = results.at(-1);\n if (!lastResult) return response;\n let content = \"\";\n for (const result of results) {\n switch (result.type) {\n case \"message_start\": {\n response = {\n ...result.message,\n };\n break;\n }\n case \"content_block_delta\": {\n if (result.delta.type === \"text_delta\")\n content = `${content}${result.delta.text}`;\n }\n case \"message_delta\": {\n if (\"usage\" in result)\n response.usage.output_tokens = result.usage.output_tokens;\n if (\"stop_reason\" in result.delta)\n response.stop_reason = result.delta.stop_reason;\n }\n default: {\n break;\n }\n }\n }\n response.content.push({\n type: \"text\",\n text: content,\n });\n return response;\n};\n\nconst cleaned_result = (\n results: any[],\n function_name = \"openai.chat.completions.create\"\n) => {\n if (\"completion\" in results[0]) {\n return results.reduce(\n (prev, current) => ({\n ...current,\n completion: `${prev.completion}${current.completion}`,\n }),\n {}\n );\n }\n\n if (function_name === \"anthropic.messages.create\")\n return anthropicStreamMessage(results);\n\n if (\"text\" in results[0].choices[0]) {\n let response = \"\";\n for (const result of results) {\n response = `${response}${result.choices[0].text}`;\n }\n const final_result = structuredClone(results.at(-1));\n final_result.choices[0].text = response;\n return final_result;\n }\n\n if (\"delta\" in results[0].choices[0]) {\n const response = openaiStreamChat(results);\n response.choices[0] = {\n ...response.choices[0],\n ...response.choices[0].message,\n };\n return response;\n }\n\n return \"\";\n};\n\nasync function* proxyGenerator<Item>(\n apiKey: string,\n generator: AsyncIterable<Item>,\n body: TrackRequest\n) {\n const results = [];\n for await (const value of generator) {\n yield body.return_pl_id ? 
[value, null] : value;\n results.push(value);\n }\n const request_response = cleaned_result(results, body.function_name);\n const response = await promptLayerApiRequest(apiKey, {\n ...body,\n request_response,\n request_end_time: new Date().toISOString(),\n });\n if (response) {\n if (body.return_pl_id) {\n const request_id = (response as any)[1];\n const lastResult = results.at(-1);\n yield [lastResult, request_id];\n }\n }\n}\n\nconst warnOnBadResponse = (request_response: any, main_message: string) => {\n try {\n console.warn(`${main_message}: ${request_response.message}`);\n } catch (e) {\n console.warn(`${main_message}: ${request_response}`);\n }\n};\n\nconst trackRequest = async (body: TrackRequest) => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/track-request`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify(body),\n });\n if (response.status !== 200)\n warnOnBadResponse(\n response,\n \"WARNING: While logging your request, PromptLayer experienced the following error:\"\n );\n return response.json();\n } catch (e) {\n console.warn(\n `WARNING: While logging your request PromptLayer had the following error: ${e}`\n );\n }\n return {};\n};\n\nconst openaiStreamCompletion = (results: Completion[]) => {\n const response: Completion = {\n id: \"\",\n choices: [\n {\n finish_reason: \"stop\",\n index: 0,\n text: \"\",\n logprobs: null,\n },\n ],\n created: Date.now(),\n model: \"\",\n object: \"text_completion\",\n };\n const lastResult = results.at(-1);\n if (!lastResult) return response;\n let text = \"\";\n for (const result of results) {\n if (result.choices.length > 0 && result.choices[0].text) {\n text = `${text}${result.choices[0].text}`;\n }\n }\n response.choices[0].text = text;\n response.id = lastResult.id;\n response.created = lastResult.created;\n response.model = lastResult.model;\n response.system_fingerprint = lastResult.system_fingerprint;\n response.usage = lastResult.usage;\n return response;\n};\n\nconst anthropicStreamCompletion = (results: AnthropicCompletion[]) => {\n const response: AnthropicCompletion = {\n completion: \"\",\n id: \"\",\n model: \"\",\n stop_reason: \"\",\n type: \"completion\",\n };\n const lastResult = results.at(-1);\n if (!lastResult) return response;\n let completion = \"\";\n for (const result of results) {\n completion = `${completion}${result.completion}`;\n }\n response.completion = completion;\n response.id = lastResult.id;\n response.model = lastResult.model;\n response.stop_reason = lastResult.stop_reason;\n return response;\n};\n\nasync function* streamResponse<Item>(\n generator: AsyncIterable<Item>,\n afterStream: (body: object) => any,\n mapResults: any\n) {\n const data: {\n request_id: number | null;\n raw_response: any;\n prompt_blueprint: any;\n } = {\n request_id: null,\n raw_response: null,\n prompt_blueprint: null,\n };\n const results = [];\n for await (const result of generator) {\n results.push(result);\n data.raw_response = result;\n yield data;\n }\n const request_response = mapResults(results);\n const response = await afterStream({ request_response });\n data.request_id = response.request_id;\n data.prompt_blueprint = response.prompt_blueprint;\n yield data;\n}\n\nconst openaiChatRequest = async (client: TypeOpenAI, kwargs: any) => {\n return client.chat.completions.create(kwargs);\n};\n\nconst openaiCompletionsRequest = async (client: TypeOpenAI, kwargs: any) => {\n return client.completions.create(kwargs);\n};\n\nconst 
MAP_TYPE_TO_OPENAI_FUNCTION = {\n chat: openaiChatRequest,\n completion: openaiCompletionsRequest,\n};\n\nconst openaiRequest = async (\n promptBlueprint: GetPromptTemplateResponse,\n kwargs: any\n) => {\n const OpenAI = require(\"openai\").default;\n const client = new OpenAI({\n baseURL: kwargs.baseURL,\n });\n const requestToMake =\n MAP_TYPE_TO_OPENAI_FUNCTION[promptBlueprint.prompt_template.type];\n return requestToMake(client, kwargs);\n};\n\nconst azureOpenAIRequest = async (\n promptBlueprint: GetPromptTemplateResponse,\n kwargs: any\n) => {\n const OpenAI = require(\"openai\").AzureOpenAI;\n const client = new OpenAI({\n endpoint: kwargs.baseURL,\n });\n delete kwargs?.baseURL;\n const requestToMake =\n MAP_TYPE_TO_OPENAI_FUNCTION[promptBlueprint.prompt_template.type];\n return requestToMake(client, kwargs);\n};\n\nconst anthropicChatRequest = async (client: TypeAnthropic, kwargs: any) => {\n return client.messages.create(kwargs);\n};\n\nconst anthropicCompletionsRequest = async (\n client: TypeAnthropic,\n kwargs: any\n) => {\n return client.completions.create(kwargs);\n};\n\nconst MAP_TYPE_TO_ANTHROPIC_FUNCTION = {\n chat: anthropicChatRequest,\n completion: anthropicCompletionsRequest,\n};\n\nconst anthropicRequest = async (\n promptBlueprint: GetPromptTemplateResponse,\n kwargs: any\n) => {\n const Anthropic = require(\"@anthropic-ai/sdk\").default;\n const client = new Anthropic({\n baseURL: kwargs.baseURL,\n });\n const requestToMake =\n MAP_TYPE_TO_ANTHROPIC_FUNCTION[promptBlueprint.prompt_template.type];\n return requestToMake(client, kwargs);\n};\n\nconst utilLogRequest = async (\n apiKey: string,\n body: LogRequest\n): Promise<RequestLog | null> => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/log-request`, {\n method: \"POST\",\n headers: {\n \"X-API-KEY\": apiKey,\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify(body),\n });\n if (response.status !== 201) {\n warnOnBadResponse(\n response,\n \"WARNING: While logging your request PromptLayer had the following error\"\n );\n return null;\n }\n return response.json();\n } catch (e) {\n console.warn(\n `WARNING: While tracking your prompt PromptLayer had the following error: ${e}`\n );\n return null;\n }\n};\n\nexport {\n anthropicRequest,\n anthropicStreamCompletion,\n anthropicStreamMessage,\n azureOpenAIRequest,\n getAllPromptTemplates,\n getPromptTemplate,\n openaiRequest,\n openaiStreamChat,\n openaiStreamCompletion,\n promptlayerApiHandler,\n promptLayerApiRequest,\n promptLayerCreateGroup,\n promptLayerTrackGroup,\n promptLayerTrackMetadata,\n promptLayerTrackPrompt,\n promptLayerTrackScore,\n publishPromptTemplate,\n streamResponse,\n trackRequest,\n utilLogRequest,\n};\n","import { promptLayerCreateGroup } from \"@/utils\";\n\nexport class GroupManager {\n apiKey: string;\n\n constructor(apiKey: string) {\n this.apiKey = apiKey;\n }\n\n create = () => promptLayerCreateGroup(this.apiKey);\n}\n","import * as opentelemetry from '@opentelemetry/api';\nimport {SimpleSpanProcessor} from '@opentelemetry/sdk-trace-base';\nimport {NodeTracerProvider} from '@opentelemetry/sdk-trace-node';\nimport PromptLayerSpanExporter from '@/span-exporter';\n\nexport const getTracer = (name: string = 'promptlayer-tracer') => {\n return opentelemetry.trace.getTracer(name);\n}\n\nexport const setupTracing = (enableTracing: boolean, apiKey?: string) => {\n const provider = new NodeTracerProvider();\n const exporter = new PromptLayerSpanExporter(enableTracing, apiKey);\n const processor = new 
SimpleSpanProcessor(exporter);\n provider.addSpanProcessor(processor);\n provider.register();\n}\n","import {Attributes, SpanKind, SpanStatusCode} from '@opentelemetry/api';\nimport {ReadableSpan, SpanExporter} from '@opentelemetry/sdk-trace-base';\nimport {ExportResultCode} from '@opentelemetry/core';\nimport {URL_API_PROMPTLAYER} from '@/utils';\n\nclass PromptLayerSpanExporter implements SpanExporter {\n private apiKey: string | undefined;\n private enableTracing: boolean;\n private url: string;\n\n constructor(enableTracing: boolean, apiKey?: string) {\n this.apiKey = apiKey || process.env.PROMPTLAYER_API_KEY;\n this.enableTracing = enableTracing;\n this.url = `${URL_API_PROMPTLAYER}/spans-bulk`;\n }\n\n private attributesToObject(attributes: Attributes | undefined): Record<string, any> {\n if (!attributes) return {};\n return Object.fromEntries(Object.entries(attributes));\n }\n\n private spanKindToString(kind: SpanKind): string {\n const kindMap: Record<SpanKind, string> = {\n [SpanKind.INTERNAL]: 'SpanKind.INTERNAL',\n [SpanKind.SERVER]: 'SpanKind.SERVER',\n [SpanKind.CLIENT]: 'SpanKind.CLIENT',\n [SpanKind.PRODUCER]: 'SpanKind.PRODUCER',\n [SpanKind.CONSUMER]: 'SpanKind.CONSUMER',\n };\n return kindMap[kind] || 'SpanKind.INTERNAL';\n }\n\n private statusCodeToString(code: SpanStatusCode): string {\n const statusMap: Record<SpanStatusCode, string> = {\n [SpanStatusCode.ERROR]: 'StatusCode.ERROR',\n [SpanStatusCode.OK]: 'StatusCode.OK',\n [SpanStatusCode.UNSET]: 'StatusCode.UNSET',\n };\n return statusMap[code] || 'StatusCode.UNSET';\n }\n\n private toNanoseconds(time: [number, number]): string {\n return (BigInt(time[0]) * BigInt(1e9) + BigInt(time[1])).toString();\n };\n\n export(spans: ReadableSpan[]): Promise<ExportResultCode> {\n if (!this.enableTracing) {\n return Promise.resolve(ExportResultCode.SUCCESS);\n }\n\n const requestData = spans.map(span => ({\n name: span.name,\n context: {\n trace_id: span.spanContext().traceId,\n span_id: span.spanContext().spanId,\n trace_state: span.spanContext().traceState?.serialize() || '',\n },\n kind: this.spanKindToString(span.kind),\n parent_id: span.parentSpanId || null,\n start_time: this.toNanoseconds(span.startTime),\n end_time: this.toNanoseconds(span.endTime),\n status: {\n status_code: this.statusCodeToString(span.status.code),\n description: span.status.message,\n },\n attributes: this.attributesToObject(span.attributes),\n events: span.events.map(event => ({\n name: event.name,\n timestamp: this.toNanoseconds(event.time),\n attributes: this.attributesToObject(event.attributes),\n })),\n links: span.links.map(link => ({\n context: link.context,\n attributes: this.attributesToObject(link.attributes),\n })),\n resource: {\n attributes: {\n ...span.resource.attributes,\n \"service.name\": \"prompt-layer-js\",\n },\n schema_url: '',\n },\n }));\n\n return fetch(this.url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'X-API-KEY': this.apiKey || '',\n },\n body: JSON.stringify({\n spans: requestData,\n }),\n })\n .then(response => {\n if (!response.ok) {\n console.error(`Error exporting spans\\nHTTP error! 
status: ${response.status}`);\n return ExportResultCode.FAILED;\n }\n return ExportResultCode.SUCCESS;\n })\n .catch((error) => {\n console.error('Error exporting spans:', error);\n return ExportResultCode.FAILED;\n });\n }\n\n shutdown(): Promise<void> {\n return Promise.resolve();\n }\n}\n\nexport default PromptLayerSpanExporter;\n","import {getTracer} from \"@/tracing\";\nimport {promptlayerApiHandler} from \"@/utils\";\n\nconst tracer = getTracer();\n\nexport const promptLayerBase = (\n apiKey: string,\n llm: object,\n function_name = \"\",\n provider = \"openai\"\n) => {\n const handler: ProxyHandler<any> = {\n construct: (target, args) => {\n const newTarget = Reflect.construct(target, args);\n Object.defineProperties(newTarget, {\n function_name: {\n value: function_name,\n writable: true,\n },\n provider: {\n value: provider,\n },\n });\n return new Proxy(newTarget, handler);\n },\n get: (target, prop, receiver) => {\n const value = target[prop];\n const function_name = `${Reflect.get(\n target,\n \"function_name\"\n )}.${prop.toString()}`;\n\n if (typeof value === \"object\") {\n Object.defineProperties(value, {\n function_name: {\n value: function_name,\n writable: true,\n },\n provider: {\n value: provider,\n },\n });\n return new Proxy(value, handler);\n }\n\n if (typeof value === \"function\") {\n return (...args: any[]) => {\n const request_start_time = new Date().toISOString();\n const provider_type = Reflect.get(target, \"provider\");\n const return_pl_id = args[0]?.return_pl_id;\n const pl_tags = args[0]?.pl_tags;\n delete args[0]?.return_pl_id;\n delete args[0]?.pl_tags;\n\n return tracer.startActiveSpan(`${provider_type}.${function_name}`, async (span: any) => {\n try {\n span.setAttribute('function_input', JSON.stringify(args));\n const response = Reflect.apply(value, target, args);\n const spanId = span.spanContext().spanId;\n\n if (response instanceof Promise) {\n return new Promise((resolve, reject) => {\n response\n .then(async (request_response) => {\n const response = await promptlayerApiHandler(apiKey, {\n api_key: apiKey,\n provider_type,\n function_name,\n request_start_time,\n request_end_time: new Date().toISOString(),\n request_response,\n kwargs: args[0],\n return_pl_id,\n tags: pl_tags,\n span_id: spanId,\n });\n\n span.setAttribute('function_output', JSON.stringify(response));\n span.setAttribute('response_status', 'success');\n span.end();\n resolve(response);\n })\n .catch((error) => {\n span.recordException(error);\n span.setAttribute('response_status', 'error');\n span.end();\n reject(error);\n });\n });\n }\n\n span.setAttribute('function_output', JSON.stringify(response));\n span.setAttribute('response_status', 'success');\n span.end();\n return response;\n } catch (error) {\n span.recordException(error);\n span.setAttribute('response_status', 'error');\n span.end();\n throw error;\n }\n });\n };\n }\n\n return Reflect.get(target, prop, receiver);\n },\n };\n\n return new Proxy(llm, handler);\n};\n","import * as opentelemetry from '@opentelemetry/api';\nimport { getTracer } from '@/tracing';\n\nexport const wrapWithSpan = (functionName: string, func: Function, attributes?: Record<string, any>) => {\n return function (...args: any[]) {\n const tracer = getTracer();\n\n const wrapperFunction = (span: opentelemetry.Span) => {\n try {\n if (attributes) {\n Object.entries(attributes).forEach(([key, value]) => {\n span.setAttribute(key, value);\n });\n }\n\n span.setAttribute('function_input', JSON.stringify(args));\n const result = func(...args);\n\n if 
(result instanceof Promise) {\n return result.then((resolvedResult) => {\n span.setAttribute('function_output', JSON.stringify(resolvedResult));\n span.setStatus({ code: opentelemetry.SpanStatusCode.OK });\n return resolvedResult;\n }).catch((error) => {\n handleError(span, error, args);\n throw error;\n }).finally(() => span.end());\n } else {\n span.setAttribute('function_output', JSON.stringify(result));\n span.setStatus({ code: opentelemetry.SpanStatusCode.OK });\n span.end();\n return result;\n }\n } catch (error) {\n handleError(span, error, args);\n throw error;\n }\n };\n\n return tracer.startActiveSpan(functionName, wrapperFunction);\n };\n};\n\nconst handleError = (span: opentelemetry.Span, error: any, args: any[]) => {\n span.setAttribute('function_input', JSON.stringify(args));\n span.setStatus({\n code: opentelemetry.SpanStatusCode.ERROR,\n message: error instanceof Error ? error.message : 'Unknown error',\n });\n span.end();\n}\n","import {\n GetPromptTemplateParams,\n Pagination,\n PublishPromptTemplate,\n} from \"@/types\";\nimport {\n getAllPromptTemplates,\n getPromptTemplate,\n publishPromptTemplate,\n} from \"@/utils\";\n\nexport class TemplateManager {\n apiKey: string;\n\n constructor(apiKey: string) {\n this.apiKey = apiKey;\n }\n\n get = (promptName: string, params?: Partial<GetPromptTemplateParams>) =>\n getPromptTemplate(this.apiKey, promptName, params);\n\n publish = (body: PublishPromptTemplate) =>\n publishPromptTemplate(this.apiKey, body);\n\n all = (params?: Pagination) => getAllPromptTemplates(this.apiKey, params);\n}\n","import { TrackGroup, TrackMetadata, TrackPrompt, TrackScore } from \"@/types\";\nimport {\n promptLayerTrackGroup,\n promptLayerTrackMetadata,\n promptLayerTrackPrompt,\n promptLayerTrackScore,\n} from \"@/utils\";\n\nconst metadata = (apiKey: string, body: TrackMetadata): Promise<boolean> => {\n if (!(body.metadata instanceof Object)) {\n throw new Error(\"Please provide a dictionary of metadata.\");\n }\n for (const [key, value] of Object.entries(body.metadata)) {\n if (typeof key !== \"string\" || typeof value !== \"string\") {\n throw new Error(\n \"Please provide a dictionary of metadata with key value pair of strings.\"\n );\n }\n }\n return promptLayerTrackMetadata(apiKey, body);\n};\n\nconst score = (apiKey: string, body: TrackScore): Promise<boolean> => {\n if (typeof body.score !== \"number\") {\n throw new Error(\"Score must be a number\");\n }\n if (body.score < 0 || body.score > 100) {\n throw new Error(\"Score must be a number between 0 and 100.\");\n }\n return promptLayerTrackScore(apiKey, body);\n};\n\nconst prompt = (apiKey: string, body: TrackPrompt): Promise<boolean> => {\n if (!(body.prompt_input_variables instanceof Object)) {\n throw new Error(\"Prompt template input variable dictionary not provided.\");\n }\n return promptLayerTrackPrompt(apiKey, body);\n};\n\nconst group = (apiKey: string, body: TrackGroup) =>\n promptLayerTrackGroup(apiKey, body);\n\nexport class TrackManager {\n apiKey: string;\n\n constructor(apiKey: string) {\n this.apiKey = apiKey;\n }\n\n group = (body: TrackGroup) => group(this.apiKey, body);\n\n metadata = (body: TrackMetadata) => metadata(this.apiKey, body);\n\n prompt = (body: TrackPrompt) => prompt(this.apiKey, body);\n\n score = (body: TrackScore) => score(this.apiKey, body);\n}\n","import { GroupManager } from \"@/groups\";\nimport { promptLayerBase } from \"@/promptlayer\";\nimport { wrapWithSpan } from \"@/span-wrapper\";\nimport { TemplateManager } from \"@/templates\";\nimport { 
getTracer, setupTracing } from \"@/tracing\";\nimport { TrackManager } from \"@/track\";\nimport { GetPromptTemplateParams, LogRequest, RunRequest, WorkflowRequest, WorkflowResponse } from \"@/types\";\nimport {\n anthropicRequest,\n anthropicStreamCompletion,\n anthropicStreamMessage,\n azureOpenAIRequest,\n openaiRequest,\n openaiStreamChat,\n openaiStreamCompletion,\n runWorkflowRequest,\n streamResponse,\n trackRequest,\n utilLogRequest,\n} from \"@/utils\";\nimport * as opentelemetry from \"@opentelemetry/api\";\n\nconst MAP_PROVIDER_TO_FUNCTION_NAME = {\n openai: {\n chat: {\n function_name: \"openai.chat.completions.create\",\n stream_function: openaiStreamChat,\n },\n completion: {\n function_name: \"openai.completions.create\",\n stream_function: openaiStreamCompletion,\n },\n },\n anthropic: {\n chat: {\n function_name: \"anthropic.messages.create\",\n stream_function: anthropicStreamMessage,\n },\n completion: {\n function_name: \"anthropic.completions.create\",\n stream_function: anthropicStreamCompletion,\n },\n },\n \"openai.azure\": {\n chat: {\n function_name: \"openai.AzureOpenAI.chat.completions.create\",\n stream_function: openaiStreamChat,\n },\n completion: {\n function_name: \"openai.AzureOpenAI.completions.create\",\n stream_function: openaiStreamCompletion,\n },\n },\n};\n\nconst MAP_PROVIDER_TO_FUNCTION: Record<string, any> = {\n openai: openaiRequest,\n anthropic: anthropicRequest,\n \"openai.azure\": azureOpenAIRequest,\n};\n\nexport interface ClientOptions {\n apiKey?: string;\n enableTracing?: boolean;\n workspaceId?: number;\n}\n\nconst isWorkflowResultsDict = (obj: any): boolean => {\n if (!obj || typeof obj !== \"object\" || Array.isArray(obj)) {\n return false;\n }\n\n const REQUIRED_KEYS = [\n \"status\",\n \"value\",\n \"error_message\",\n \"raw_error_message\",\n \"is_output_node\",\n ];\n const values = Object.values(obj);\n\n return values.every((val) => {\n if (typeof val !== \"object\" || val === null) return false;\n return REQUIRED_KEYS.every((key) => key in val);\n });\n}\n\nexport class PromptLayer {\n apiKey: string;\n templates: TemplateManager;\n group: GroupManager;\n track: TrackManager;\n enableTracing: boolean;\n wrapWithSpan: typeof wrapWithSpan;\n\n constructor({\n apiKey = process.env.PROMPTLAYER_API_KEY,\n enableTracing = false,\n }: ClientOptions = {}) {\n if (apiKey === undefined) {\n throw new Error(\n \"PromptLayer API key not provided. 
Please set the PROMPTLAYER_API_KEY environment variable or pass the api_key parameter.\"\n );\n }\n\n this.apiKey = apiKey;\n this.enableTracing = enableTracing;\n this.templates = new TemplateManager(apiKey);\n this.group = new GroupManager(apiKey);\n this.track = new TrackManager(apiKey);\n this.wrapWithSpan = wrapWithSpan;\n\n if (enableTracing) {\n setupTracing(enableTracing, apiKey);\n }\n }\n\n get Anthropic() {\n try {\n const module = require(\"@anthropic-ai/sdk\").default;\n return promptLayerBase(this.apiKey, module, \"anthropic\", \"anthropic\");\n } catch (e) {\n console.error(\n \"To use the Anthropic module, you must install the @anthropic-ai/sdk package.\"\n );\n }\n }\n\n get OpenAI() {\n try {\n const module = require(\"openai\").default;\n return promptLayerBase(this.apiKey, module, \"openai\", \"openai\");\n } catch (e) {\n console.error(\n \"To use the OpenAI module, you must install the @openai/api package.\"\n );\n }\n }\n\n async run({\n promptName,\n promptVersion,\n promptReleaseLabel,\n inputVariables,\n tags,\n metadata,\n groupId,\n modelParameterOverrides,\n stream = false,\n }: RunRequest) {\n const tracer = getTracer();\n\n return tracer.startActiveSpan(\"PromptLayer Run\", async (span) => {\n try {\n const functionInput = {\n promptName,\n promptVersion,\n promptReleaseLabel,\n inputVariables,\n tags,\n metadata,\n groupId,\n modelParameterOverrides,\n stream,\n };\n span.setAttribute(\"function_input\", JSON.stringify(functionInput));\n\n const prompt_input_variables = inputVariables;\n const templateGetParams: GetPromptTemplateParams = {\n label: promptReleaseLabel,\n version: promptVersion,\n metadata_filters: metadata,\n };\n if (inputVariables) templateGetParams.input_variables = inputVariables;\n\n const promptBlueprint = await this.templates.get(\n promptName,\n templateGetParams\n );\n\n if (!promptBlueprint) throw new Error(\"Prompt not found\");\n\n const promptTemplate = promptBlueprint.prompt_template;\n if (!promptBlueprint.llm_kwargs) {\n throw new Error(\n `Prompt '${promptName}' does not have any LLM kwargs associated with it.`\n );\n }\n\n const promptBlueprintMetadata = promptBlueprint.metadata;\n if (!promptBlueprintMetadata) {\n throw new Error(\n `Prompt '${promptName}' does not have any metadata associated with it.`\n );\n }\n\n const promptBlueprintModel = promptBlueprintMetadata.model;\n if (!promptBlueprintModel) {\n throw new Error(\n `Prompt '${promptName}' does not have a model parameters associated with it.`\n );\n }\n\n const provider_type = promptBlueprintModel.provider;\n\n const request_start_time = new Date().toISOString();\n const kwargs = {\n ...promptBlueprint.llm_kwargs,\n ...(modelParameterOverrides || {}),\n };\n const config =\n MAP_PROVIDER_TO_FUNCTION_NAME[\n provider_type as keyof typeof MAP_PROVIDER_TO_FUNCTION_NAME\n ][promptTemplate.type];\n const function_name = config.function_name;\n\n const stream_function = config.stream_function;\n const request_function = MAP_PROVIDER_TO_FUNCTION[provider_type];\n const provider_base_url = promptBlueprint.provider_base_url;\n if (provider_base_url) {\n kwargs[\"baseURL\"] = provider_base_url.url;\n }\n kwargs[\"stream\"] = stream;\n if (stream && [\"openai\", \"openai.azure\"].includes(provider_type)) {\n kwargs[\"stream_options\"] = { include_usage: true };\n }\n\n const response = await request_function(promptBlueprint, kwargs);\n\n const _trackRequest = (body: object) => {\n const request_end_time = new Date().toISOString();\n return trackRequest({\n function_name,\n 
provider_type,\n args: [],\n kwargs,\n tags,\n request_start_time,\n request_end_time,\n api_key: this.apiKey,\n metadata,\n prompt_id: promptBlueprint.id,\n prompt_version: promptBlueprint.version,\n prompt_input_variables,\n group_id: groupId,\n return_prompt_blueprint: true,\n span_id: span.spanContext().spanId,\n ...body,\n });\n };\n\n if (stream)\n return streamResponse(response, _trackRequest, stream_function);\n const requestLog = await _trackRequest({ request_response: response });\n\n const functionOutput = {\n request_id: requestLog.request_id,\n raw_response: response,\n prompt_blueprint: requestLog.prompt_blueprint,\n };\n span.setAttribute(\"function_output\", JSON.stringify(functionOutput));\n\n return functionOutput;\n } catch (error) {\n span.setStatus({\n code: opentelemetry.SpanStatusCode.ERROR,\n message: error instanceof Error ? error.message : \"Unknown error\",\n });\n throw error;\n } finally {\n span.end();\n }\n });\n }\n\n async runWorkflow({\n workflowName,\n inputVariables = {},\n metadata = {},\n workflowLabelName = null,\n workflowVersion = null, // This is the version number, not the version ID\n returnAllOutputs = false,\n }: WorkflowRequest): Promise<WorkflowResponse> {\n try {\n const result = await runWorkflowRequest({\n workflow_name: workflowName,\n input_variables: inputVariables,\n metadata,\n workflow_label_name: workflowLabelName,\n workflow_version_number: workflowVersion,\n return_all_outputs: returnAllOutputs,\n api_key: this.apiKey,\n });\n\n if (!returnAllOutputs) {\n if (isWorkflowResultsDict(result)) {\n const nodeValues = Object.values(result);\n\n const outputNodes = nodeValues.filter(\n (node: any) => node.is_output_node === true\n );\n\n if (outputNodes.length === 0) {\n throw new Error(JSON.stringify(result, null, 2));\n }\n\n const anyOutputSuccess = outputNodes.some(\n (node: any) => node.status === \"SUCCESS\"\n );\n if (!anyOutputSuccess) {\n throw new Error(JSON.stringify(result, null, 2));\n }\n }\n }\n\n return result;\n } catch (error) {\n if (error instanceof Error) {\n console.error(\"Error running workflow:\", error.message);\n throw new Error(`Error running workflow: ${error.message}`);\n } else {\n console.error(\"Unknown error running workflow:\", error);\n throw new Error(\"Unknown error running workflow\");\n }\n }\n }\n\n async logRequest(body: LogRequest) {\n return utilLogRequest(this.apiKey, body);\n 
}\n}\n"],"mappings":"yoDAuBA,OAAOA,OAAU,OAQV,IAAMC,EACX,QAAQ,IAAI,qBAAuB,8BAE/BC,GAAwB,CAC5BC,EACAC,IAGGC,EAAA,wBAEH,OADoBD,EAAK,iBAAiB,OAAO,aAAa,IAAM,OAE3DE,GAAeH,EAAQC,EAAK,iBAAkBA,CAAI,EAEpD,MAAMG,GAAsBJ,EAAQC,CAAI,CACjD,GAEMG,GAAwB,CAAOJ,EAAgBC,IAAuBC,EAAA,wBAC1E,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,iBAAkB,CACnE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUG,CAAI,CAC3B,CAAC,EACKK,EAAO,MAAMD,EAAS,KAAK,EAOjC,GANIA,EAAS,SAAW,KACtBE,EACED,EACA,mFACF,EAEEA,GAAQL,EAAK,aACf,MAAO,CAACA,EAAK,iBAAkBK,EAAK,UAAU,CAElD,OAASE,EAAG,CACV,QAAQ,KACN,4EAA4EA,CAAC,EAC/E,CACF,CACA,OAAOP,EAAK,gBACd,GAEMQ,GAA2B,CAC/BT,EACAC,IACqBC,EAAA,wBACrB,GAAI,CACF,IAAMG,EAAW,MAAM,MACrB,GAAGP,CAAmB,0BACtB,CACE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUY,EAAAC,EAAA,GAChBV,GADgB,CAEnB,QAASD,CACX,EAAC,CACH,CACF,EACMM,EAAO,MAAMD,EAAS,KAAK,EACjC,GAAIA,EAAS,SAAW,IACtB,OAAAE,EACED,EACA,8FACF,EACO,EAEX,OAASE,EAAG,CACV,eAAQ,KACN,iGAAiGA,CAAC,EACpG,EACO,EACT,CACA,MAAO,EACT,GAEMI,GAAwB,CAC5BZ,EACAC,IACqBC,EAAA,wBACrB,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,uBAAwB,CACzE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUY,EAAAC,EAAA,GAChBV,GADgB,CAEnB,QAASD,CACX,EAAC,CACH,CAAC,EACKM,EAAO,MAAMD,EAAS,KAAK,EACjC,GAAIA,EAAS,SAAW,IACtB,OAAAE,EACED,EACA,kFACF,EACO,EAEX,OAASE,EAAG,CACV,eAAQ,KACN,qFAAqFA,CAAC,EACxF,EACO,EACT,CACA,MAAO,EACT,GAEMK,GAAyB,CAC7Bb,EACAC,IACqBC,EAAA,wBACrB,GAAI,CACF,IAAMG,EAAW,MAAM,MACrB,GAAGP,CAAmB,wBACtB,CACE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUY,EAAAC,EAAA,GAChBV,GADgB,CAEnB,QAASD,CACX,EAAC,CACH,CACF,EACMM,EAAO,MAAMD,EAAS,KAAK,EACjC,GAAIA,EAAS,SAAW,IACtB,OAAAE,EACED,EACA,6GACF,EACO,EAEX,OAASE,EAAG,CACV,eAAQ,KACN,gHAAgHA,CAAC,EACnH,EACO,EACT,CACA,MAAO,EACT,GAEMM,GAAwB,CAC5Bd,EACAC,IACqBC,EAAA,wBACrB,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,eAAgB,CACjE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUY,EAAAC,EAAA,GAChBV,GADgB,CAEnB,QAASD,CACX,EAAC,CACH,CAAC,EACKM,EAAO,MAAMD,EAAS,KAAK,EACjC,GAAIA,EAAS,SAAW,IACtB,OAAAE,EACED,EACA,mGACF,EACO,EAEX,OAASE,EAAG,CACV,eAAQ,KACN,sGAAsGA,CAAC,EACzG,EACO,EACT,CACA,MAAO,EACT,GAEMO,GACJf,GAC8BE,EAAA,wBAC9B,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,gBAAiB,CAClE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAU,CACnB,QAASE,CACX,CAAC,CACH,CAAC,EACKM,EAAO,MAAMD,EAAS,KAAK,EACjC,OAAIA,EAAS,SAAW,KACtBE,EACED,EACA,qEACF,EACO,IAEFA,EAAK,EACd,OAAS,EAAG,CACV,eAAQ,KACN,wEAAwE,CAAC,EAC3E,EACO,EACT,CACF,GAEMU,GAAoB,CACxBhB,EACAiB,EACAC,IACGhB,EAAA,wBACH,GAAI,CACF,IAAMiB,EAAM,IAAI,IACd,GAAGrB,CAAmB,qBAAqBmB,CAAU,EACvD,EACMZ,EAAW,MAAM,MAAMc,EAAK,CAChC,OAAQ,OACR,QAAS,CACP,eAAgB,mBAChB,YAAanB,CACf,EACA,KAAM,KAAK,UAAUkB,CAAM,CAC7B,CAAC,EACKZ,EAAO,MAAMD,EAAS,KAAK,EACjC,OAAIA,EAAS,SAAW,KACtBE,EACED,EACA,+EACF,EACO,OAELA,EAAK,SACP,QAAQ,KACN,4EAA4EA,EAAK,OAAO,EAC1F,EAEKA,EACT,OAASE,EAAG,CACV,eAAQ,KACN,kFAAkFA,CAAC,EACrF,EACO,IACT,CACF,GAEMY,GAAwB,CAC5BpB,EACAC,IACGC,EAAA,wBACH,GAAI,CACF,IAAMG,EAAW,MAAM,MACrB,GAAGP,CAAmB,yBACtB,CACE,OAAQ,OACR,QAAS,CACP,eAAgB,mBAChB,YAAaE,CACf,EACA,KAAM,KAAK,UAAU,CACnB,gBAAiBW,EAAA,GAAKV,GACtB,eAAgBU,EAAA,GAAKV,GACrB,eAAgBA,EAAK,eAAiBA,EAAK,eAAiB,MAC9D,CAAC,CACH,CACF,EACMK,EAAO,MAAMD,EAAS,KAAK,EACjC,OAAIA,EAAS,SAAW,KACtBE,EACED,EACA,iFACF,EAEKA,CACT,OAASE,EAAG,CACV,QAAQ,KACN,oFAAoFA,CAAC,EACvF,CACF,CACF,GAEMa,GAAwB,CAC5BrB,EACAkB,IACGhB,EAAA,wBA3TL,IAAAoB,EA4TE,GAAI,CACF,IAAMH,EAAM,IAAI,IAAI,GAAGrB,CAAmB,mBAAmB,EAC7D,OAAO,QAAQoB,GAAU,CAAC,CAAC,EAAE,QAAQ,CAAC,CAACK,EAAKC,CAAK,IAC/CL,EAAI,aAAa,OAAOI,EAAKC,EAAM,SAAS,CAAC,CAC/C,EACA,IAAMnB,EAAW,MAAM,MAAMc,EAAK,CAChC,QAAS,CACP,eAAgB,mBAChB,YAAanB,CACf,CACF,CAAC,EACKM,EAAO,
MAAMD,EAAS,KAAK,EACjC,OAAIA,EAAS,SAAW,KACtBE,EACED,EACA,kFACF,EACO,OAEDgB,EAAAhB,EAAK,QAAL,KAAAgB,EAAc,CAAC,CACzB,OAASd,EAAG,CACV,eAAQ,KACN,qFAAqFA,CAAC,EACxF,EACO,IACT,CACF,GAEaiB,GAA4BC,GASkBxB,EAAA,QATlBwB,GASkB,UATlB,CACvC,cAAAC,EACA,gBAAAC,EACA,SAAAC,EAAW,CAAC,EACZ,oBAAAC,EAAsB,KACtB,wBAAAC,EAA0B,KAC1B,mBAAAC,EAAqB,GACrB,QAAAC,EACA,QAAAC,EAAU,IACZ,EAA2D,CACzD,IAAMC,EAAU,CACd,gBAAAP,EACA,SAAAC,EACA,oBAAAC,EACA,wBAAAC,EACA,mBAAAC,CACF,EAEMI,EAAU,CACd,YAAaH,EACb,eAAgB,kBAClB,EAEA,GAAI,CAEF,IAAM5B,EAAW,MAAM,MACrB,GAAGP,CAAmB,cAAc,mBAClC6B,CACF,CAAC,OACD,CACE,OAAQ,OACR,QAASS,EACT,KAAM,KAAK,UAAUD,CAAO,CAC9B,CACF,EAEA,GAAI9B,EAAS,SAAW,IAEtB,MAAO,CACL,QAAS,GACT,QAAS,4BAHO,MAAMA,EAAS,KAAK,EAAE,MAAM,KAAO,CAAC,EAAE,GAI1C,OAASA,EAAS,UAC9B,EACF,EAGF,IAAMgC,EAAS,MAAMhC,EAAS,KAAK,EAC/BgC,EAAO,SACT,QAAQ,KAAK,YAAYA,EAAO,OAAO,EAAE,EAE3C,IAAMC,EAAeD,EAAO,8BAC5B,GAAI,CAACC,EACH,eAAQ,IAAI,4CAA4C,EACjD,CAAE,QAAS,GAAO,QAAS,wBAAyB,EAG7D,IAAMC,EAAe,oBAAoBD,CAAY,GAa/CE,GAFoB,MARN,MAAM,MACxB,GAAG1C,CAAmB,wCAAwCyC,CAAY,GAC1E,CACE,OAAQ,OACR,QAASH,CACX,CACF,GAE4C,KAAK,GAEZ,cAAc,MAG7CK,EAAO,IAAIC,GAAK,SAAS,CAAE,MAAOF,CAAW,CAAC,EAEpD,GAAI,CAEF,IAAMG,EAAe,MAAMC,GACzBH,EACAF,EACAL,CACF,EACA,OAAAO,EAAK,MAAM,EACJE,CACT,QAAE,CAEAF,EAAK,MAAM,CACb,CACF,OAASI,EAAO,CACd,cAAQ,MACN,2BACEA,aAAiB,MAAQA,EAAM,QAAUA,CAC3C,EACF,EACMA,CACR,CACF,GAEA,SAAeD,GACbH,EACAF,EACAL,EACc,QAAAhC,EAAA,sBACd,IAAM4C,EAAUL,EAAK,SAAS,IAAIF,CAAY,EAE9C,OAAO,IAAI,QAAQ,CAAOQ,EAASC,IAAW9C,EAAA,sBAC5C,IAAI+C,EAAe,KAEbC,EAAmBC,GAA0B,CAC7CA,EAAQ,OAAS,0BAEnBF,EADqB,KAAK,MAAME,EAAQ,IAAc,EAC/B,aACvB,aAAaC,CAAK,EAClBN,EAAQ,YAAY,wBAAyBI,CAAe,EAC5DH,EAAQE,CAAO,EAEnB,EAGMG,EAAQ,WAAW,IAAM,CAC7BN,EAAQ,YAAY,wBAAyBI,CAAe,EAC5DF,EAAO,IAAI,MAAM,wDAAwD,CAAC,CAC5E,EAAGd,CAAO,EAEV,GAAI,CAEF,MAAMY,EAAQ,UAAU,wBAAyBI,CAAe,CAClE,OAASG,EAAK,CACZ,aAAaD,CAAK,EAClBJ,EAAOK,CAAG,CACZ,CACF,EAAC,CACH,GAEA,IAAMC,EAAoBL,GAAmD,CA7d7E,IAAA3B,EAAAiC,EAAAC,EAAAC,EAAAC,EAAAC,EAAAC,EAAAC,EAAAC,EA8dE,IAAIC,EAAuD,KACvDC,EAEE3D,EAA2B,CAC/B,GAAI,GACJ,QAAS,CAAC,EACV,QAAS,KAAK,IAAI,EAClB,MAAO,GACP,OAAQ,iBACV,EACM4D,EAAahB,EAAQ,GAAG,EAAE,EAChC,GAAI,CAACgB,EAAY,OAAO5D,EACxB,IAAI6D,EACJ,QAAW7B,KAAUY,EAAS,CAC5B,GAAIZ,EAAO,QAAQ,SAAW,EAAG,SACjC,IAAM8B,EAAQ9B,EAAO,QAAQ,CAAC,EAAE,MAE5B8B,EAAM,UACRJ,EAAU,GAAGA,GAAW,EAAE,GAAGI,EAAM,SAAW,EAAE,IAE9CA,EAAM,gBACRH,EAAe,CACb,KAAM,GAAGA,EAAeA,EAAa,KAAO,EAAE,GAC5CG,EAAM,cAAc,MAAQ,EAC9B,GACA,UAAW,GAAGH,EAAeA,EAAa,UAAY,EAAE,GACtDG,EAAM,cAAc,WAAa,EACnC,EACF,GAEF,IAAMC,GAAW9C,EAAA6C,EAAM,aAAN,YAAA7C,EAAmB,GACpC,GAAI8C,EAAU,CACZF,EAAYA,GAAa,CAAC,EAC1B,IAAMG,EAAeH,EAAU,GAAG,EAAE,EACpC,GAAI,CAACG,GAAgBD,EAAS,GAAI,CAChCF,EAAU,KAAK,CACb,GAAIE,EAAS,IAAM,GACnB,KAAMA,EAAS,MAAQ,WACvB,SAAU,CACR,OAAMb,EAAAa,EAAS,WAAT,YAAAb,EAAmB,OAAQ,GACjC,YAAWC,EAAAY,EAAS,WAAT,YAAAZ,EAAmB,YAAa,EAC7C,CACF,CAAC,EACD,QACF,CACAa,EAAa,SAAS,KAAO,GAAGA,EAAa,SAAS,IAAI,KACxDZ,EAAAW,EAAS,WAAT,YAAAX,EAAmB,OAAQ,EAC7B,GACAY,EAAa,SAAS,UAAY,GAAGA,EAAa,SAAS,SAAS,KAClEX,EAAAU,EAAS,WAAT,YAAAV,EAAmB,YAAa,EAClC,EACF,CACF,CACA,IAAMY,EAAcrB,EAAQ,CAAC,EAAE,QAAQ,GAAG,CAAC,EAC3C,OAAA5C,EAAS,QAAQ,KAAK,CACpB,eAAesD,EAAAW,GAAA,YAAAA,EAAa,gBAAb,KAAAX,EAA8B,OAC7C,OAAOC,EAAAU,GAAA,YAAAA,EAAa,QAAb,KAAAV,EAAsB,EAC7B,UAAUC,EAAAS,GAAA,YAAAA,EAAa,WAAb,KAAAT,EAAyB,KACnC,QAAS,CACP,KAAM,YACN,QAAAE,EACA,cAAeC,GAA8B,OAC7C,WAAYE,GAAwB,OACpC,SAASJ,EAAAQ,GAAA,YAAAA,EAAa,MAAM,UAAnB,KAAAR,EAA8B,IACzC,CACF,CAAC,EACDzD,EAAS,GAAK4D,EAAW,GACzB5D,EAAS,MAAQ4D,EAAW,MAC5B5D,EAAS,QAAU4D,EAAW,QAC9B5D,EAAS,mBAAqB4D,EAAW,mBACzC5D,EAAS,MAAQ4D,EAAW,MACrB5D,CACT,EAEMkE,EAA0BtB,GAA2C,CACzE,IAAI5C,EAAoB,CACtB,GAAI,GACJ,MAAO,GACP,QAAS,CAAC,EACV,KAAM,YACN,KAAM
,UACN,YAAa,gBACb,cAAe,KACf,MAAO,CACL,aAAc,EACd,cAAe,CACjB,CACF,EAEA,GAAI,CADe4C,EAAQ,GAAG,EAAE,EACf,OAAO5C,EACxB,IAAI0D,EAAU,GACd,QAAW1B,KAAUY,EACnB,OAAQZ,EAAO,KAAM,CACnB,IAAK,gBAAiB,CACpBhC,EAAWM,EAAA,GACN0B,EAAO,SAEZ,KACF,CACA,IAAK,sBACCA,EAAO,MAAM,OAAS,eACxB0B,EAAU,GAAGA,CAAO,GAAG1B,EAAO,MAAM,IAAI,IAE5C,IAAK,gBACC,UAAWA,IACbhC,EAAS,MAAM,cAAgBgC,EAAO,MAAM,eAC1C,gBAAiBA,EAAO,QAC1BhC,EAAS,YAAcgC,EAAO,MAAM,aAExC,QACE,KAEJ,CAEF,OAAAhC,EAAS,QAAQ,KAAK,CACpB,KAAM,OACN,KAAM0D,CACR,CAAC,EACM1D,CACT,EAEMmE,GAAiB,CACrBvB,EACAwB,EAAgB,mCACb,CACH,GAAI,eAAgBxB,EAAQ,CAAC,EAC3B,OAAOA,EAAQ,OACb,CAACyB,EAAMC,IAAajE,EAAAC,EAAA,GACfgE,GADe,CAElB,WAAY,GAAGD,EAAK,UAAU,GAAGC,EAAQ,UAAU,EACrD,GACA,CAAC,CACH,EAGF,GAAIF,IAAkB,4BACpB,OAAOF,EAAuBtB,CAAO,EAEvC,GAAI,SAAUA,EAAQ,CAAC,EAAE,QAAQ,CAAC,EAAG,CACnC,IAAI5C,EAAW,GACf,QAAWgC,KAAUY,EACnB5C,EAAW,GAAGA,CAAQ,GAAGgC,EAAO,QAAQ,CAAC,EAAE,IAAI,GAEjD,IAAMuC,EAAe,gBAAgB3B,EAAQ,GAAG,EAAE,CAAC,EACnD,OAAA2B,EAAa,QAAQ,CAAC,EAAE,KAAOvE,EACxBuE,CACT,CAEA,GAAI,UAAW3B,EAAQ,CAAC,EAAE,QAAQ,CAAC,EAAG,CACpC,IAAM5C,EAAWiD,EAAiBL,CAAO,EACzC,OAAA5C,EAAS,QAAQ,CAAC,EAAIM,IAAA,GACjBN,EAAS,QAAQ,CAAC,GAClBA,EAAS,QAAQ,CAAC,EAAE,SAElBA,CACT,CAEA,MAAO,EACT,EAEA,SAAgBF,GACdH,EACA6E,EACA5E,EACA,QAAA6E,EAAA,sBACA,IAAM7B,EAAU,CAAC,EACjB,YAAA8B,EAAAC,EAA0BH,GAA1BI,EAAAC,EAAArC,EAAAoC,EAAA,EAAAC,EAAA,UAAAC,EAAAJ,EAAA,cAAAE,EAAA,GACE,CADS,IAAMzD,EAAjB0D,EAAA,MACE,MAAMjF,EAAK,aAAe,CAACuB,EAAO,IAAI,EAAIA,EAC1CyB,EAAQ,KAAKzB,CAAK,SAFpB0D,EApoBF,CAooBErC,EAAA,CAAAqC,UAAA,KAAAD,IAAAC,EAAAH,EAAA,oBAAAI,EAAAD,EAAA,KAAAH,YAAA,IAAAlC,EAAA,MAAAA,EAAA,IAIA,IAAMuC,EAAmBZ,GAAevB,EAAShD,EAAK,aAAa,EAC7DI,EAAW,UAAA8E,EAAM/E,GAAsBJ,EAAQU,EAAAC,EAAA,GAChDV,GADgD,CAEnD,iBAAAmF,EACA,iBAAkB,IAAI,KAAK,EAAE,YAAY,CAC3C,EAAC,GACD,GAAI/E,GACEJ,EAAK,aAAc,CACrB,IAAMoF,EAAchF,EAAiB,CAAC,EAEtC,KAAM,CADa4C,EAAQ,GAAG,EAAE,EACboC,CAAU,CAC/B,CAEJ,GAEA,IAAM9E,EAAoB,CAAC6E,EAAuBE,IAAyB,CACzE,GAAI,CACF,QAAQ,KAAK,GAAGA,CAAY,KAAKF,EAAiB,OAAO,EAAE,CAC7D,OAAS5E,EAAG,CACV,QAAQ,KAAK,GAAG8E,CAAY,KAAKF,CAAgB,EAAE,CACrD,CACF,EAEMG,GAAsBtF,GAAuBC,EAAA,wBACjD,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,iBAAkB,CACnE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUG,CAAI,CAC3B,CAAC,EACD,OAAII,EAAS,SAAW,KACtBE,EACEF,EACA,mFACF,EACKA,EAAS,KAAK,CACvB,OAAS,EAAG,CACV,QAAQ,KACN,4EAA4E,CAAC,EAC/E,CACF,CACA,MAAO,CAAC,CACV,GAEMmF,EAA0BvC,GAA0B,CACxD,IAAM5C,EAAuB,CAC3B,GAAI,GACJ,QAAS,CACP,CACE,cAAe,OACf,MAAO,EACP,KAAM,GACN,SAAU,IACZ,CACF,EACA,QAAS,KAAK,IAAI,EAClB,MAAO,GACP,OAAQ,iBACV,EACM4D,EAAahB,EAAQ,GAAG,EAAE,EAChC,GAAI,CAACgB,EAAY,OAAO5D,EACxB,IAAIoF,EAAO,GACX,QAAWpD,KAAUY,EACfZ,EAAO,QAAQ,OAAS,GAAKA,EAAO,QAAQ,CAAC,EAAE,OACjDoD,EAAO,GAAGA,CAAI,GAAGpD,EAAO,QAAQ,CAAC,EAAE,IAAI,IAG3C,OAAAhC,EAAS,QAAQ,CAAC,EAAE,KAAOoF,EAC3BpF,EAAS,GAAK4D,EAAW,GACzB5D,EAAS,QAAU4D,EAAW,QAC9B5D,EAAS,MAAQ4D,EAAW,MAC5B5D,EAAS,mBAAqB4D,EAAW,mBACzC5D,EAAS,MAAQ4D,EAAW,MACrB5D,CACT,EAEMqF,GAA6BzC,GAAmC,CACpE,IAAM5C,EAAgC,CACpC,WAAY,GACZ,GAAI,GACJ,MAAO,GACP,YAAa,GACb,KAAM,YACR,EACM4D,EAAahB,EAAQ,GAAG,EAAE,EAChC,GAAI,CAACgB,EAAY,OAAO5D,EACxB,IAAIsF,EAAa,GACjB,QAAWtD,KAAUY,EACnB0C,EAAa,GAAGA,CAAU,GAAGtD,EAAO,UAAU,GAEhD,OAAAhC,EAAS,WAAasF,EACtBtF,EAAS,GAAK4D,EAAW,GACzB5D,EAAS,MAAQ4D,EAAW,MAC5B5D,EAAS,YAAc4D,EAAW,YAC3B5D,CACT,EAEA,SAAgBuF,GACdf,EACAgB,EACAC,EACA,QAAAhB,EAAA,sBACA,IAAMxE,EAIF,CACF,WAAY,KACZ,aAAc,KACd,iBAAkB,IACpB,EACM2C,EAAU,CAAC,EACjB,YAAA8B,EAAAC,EAA2BH,GAA3BI,EAAAC,EAAArC,EAAAoC,EAAA,EAAAC,EAAA,UAAAC,EAAAJ,EAAA,cAAAE,EAAA,GACE,CADS,IAAM5C,EAAjB6C,EAAA,MACEjC,EAAQ,KAAKZ,CAAM,EACnB/B,EAAK,aAAe+B,EACpB,MAAM/B,SAHR4E,EA1vBF,CA0vBErC,EAAA,CAAAqC,UAAA,KAAAD,IAAAC,EAAAH,EAAA,oB
AAAI,EAAAD,EAAA,KAAAH,YAAA,IAAAlC,EAAA,MAAAA,EAAA,IAKA,IAAMuC,EAAmBU,EAAW7C,CAAO,EACrC5C,EAAW,UAAA8E,EAAMU,EAAY,CAAE,iBAAAT,CAAiB,CAAC,GACvD9E,EAAK,WAAaD,EAAS,WAC3BC,EAAK,iBAAmBD,EAAS,iBACjC,MAAMC,CACR,GAEA,IAAMyF,GAAoB,CAAOC,EAAoBC,IAAgB/F,EAAA,wBACnE,OAAO8F,EAAO,KAAK,YAAY,OAAOC,CAAM,CAC9C,GAEMC,GAA2B,CAAOF,EAAoBC,IAAgB/F,EAAA,wBAC1E,OAAO8F,EAAO,YAAY,OAAOC,CAAM,CACzC,GAEME,GAA8B,CAClC,KAAMJ,GACN,WAAYG,EACd,EAEME,GAAgB,CACpBC,EACAJ,IACG/F,EAAA,wBACH,IAAMoG,EAAS,EAAQ,QAAQ,EAAE,QAC3BN,EAAS,IAAIM,EAAO,CACxB,QAASL,EAAO,OAClB,CAAC,EACKM,EACJJ,GAA4BE,EAAgB,gBAAgB,IAAI,EAClE,OAAOE,EAAcP,EAAQC,CAAM,CACrC,GAEMO,GAAqB,CACzBH,EACAJ,IACG/F,EAAA,wBACH,IAAMoG,EAAS,EAAQ,QAAQ,EAAE,YAC3BN,EAAS,IAAIM,EAAO,CACxB,SAAUL,EAAO,OACnB,CAAC,EACDA,GAAA,aAAAA,EAAe,QACf,IAAMM,EACJJ,GAA4BE,EAAgB,gBAAgB,IAAI,EAClE,OAAOE,EAAcP,EAAQC,CAAM,CACrC,GAEMQ,GAAuB,CAAOT,EAAuBC,IAAgB/F,EAAA,wBACzE,OAAO8F,EAAO,SAAS,OAAOC,CAAM,CACtC,GAEMS,GAA8B,CAClCV,EACAC,IACG/F,EAAA,wBACH,OAAO8F,EAAO,YAAY,OAAOC,CAAM,CACzC,GAEMU,GAAiC,CACrC,KAAMF,GACN,WAAYC,EACd,EAEME,GAAmB,CACvBP,EACAJ,IACG/F,EAAA,wBACH,IAAM2G,EAAY,EAAQ,mBAAmB,EAAE,QACzCb,EAAS,IAAIa,EAAU,CAC3B,QAASZ,EAAO,OAClB,CAAC,EACKM,EACJI,GAA+BN,EAAgB,gBAAgB,IAAI,EACrE,OAAOE,EAAcP,EAAQC,CAAM,CACrC,GAEMa,GAAiB,CACrB9G,EACAC,IAC+BC,EAAA,wBAC/B,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,eAAgB,CACjE,OAAQ,OACR,QAAS,CACP,YAAaE,EACb,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUC,CAAI,CAC3B,CAAC,EACD,OAAII,EAAS,SAAW,KACtBE,EACEF,EACA,yEACF,EACO,MAEFA,EAAS,KAAK,CACvB,OAASG,EAAG,CACV,eAAQ,KACN,4EAA4EA,CAAC,EAC/E,EACO,IACT,CACF,GCp2BO,IAAMuG,EAAN,KAAmB,CAGxB,YAAYC,EAAgB,CAI5B,YAAS,IAAMC,GAAuB,KAAK,MAAM,EAH/C,KAAK,OAASD,CAChB,CAGF,ECVA,UAAYE,OAAmB,qBAC/B,OAAQ,uBAAAC,OAA0B,gCAClC,OAAQ,sBAAAC,OAAyB,gCCFjC,OAAoB,YAAAC,EAAU,kBAAAC,MAAqB,qBAEnD,OAAQ,oBAAAC,MAAuB,sBAG/B,IAAMC,EAAN,KAAsD,CAKpD,YAAYC,EAAwBC,EAAiB,CACnD,KAAK,OAASA,GAAU,QAAQ,IAAI,oBACpC,KAAK,cAAgBD,EACrB,KAAK,IAAM,GAAGE,CAAmB,aACnC,CAEQ,mBAAmBC,EAAyD,CAClF,OAAKA,EACE,OAAO,YAAY,OAAO,QAAQA,CAAU,CAAC,EAD5B,CAAC,CAE3B,CAEQ,iBAAiBC,EAAwB,CAQ/C,MAP0C,CACxC,CAACC,EAAS,QAAQ,EAAG,oBACrB,CAACA,EAAS,MAAM,EAAG,kBACnB,CAACA,EAAS,MAAM,EAAG,kBACnB,CAACA,EAAS,QAAQ,EAAG,oBACrB,CAACA,EAAS,QAAQ,EAAG,mBACvB,EACeD,CAAI,GAAK,mBAC1B,CAEQ,mBAAmBE,EAA8B,CAMvD,MALkD,CAChD,CAACC,EAAe,KAAK,EAAG,mBACxB,CAACA,EAAe,EAAE,EAAG,gBACrB,CAACA,EAAe,KAAK,EAAG,kBAC1B,EACiBD,CAAI,GAAK,kBAC5B,CAEQ,cAAcE,EAAgC,CACpD,OAAQ,OAAOA,EAAK,CAAC,CAAC,EAAI,OAAO,GAAG,EAAI,OAAOA,EAAK,CAAC,CAAC,GAAG,SAAS,CACpE,CAEA,OAAOC,EAAkD,CACvD,GAAI,CAAC,KAAK,cACR,OAAO,QAAQ,QAAQC,EAAiB,OAAO,EAGjD,IAAMC,EAAcF,EAAM,IAAIG,GAAK,CAlDvC,IAAAC,EAkD2C,OACrC,KAAMD,EAAK,KACX,QAAS,CACP,SAAUA,EAAK,YAAY,EAAE,QAC7B,QAASA,EAAK,YAAY,EAAE,OAC5B,cAAaC,EAAAD,EAAK,YAAY,EAAE,aAAnB,YAAAC,EAA+B,cAAe,EAC7D,EACA,KAAM,KAAK,iBAAiBD,EAAK,IAAI,EACrC,UAAWA,EAAK,cAAgB,KAChC,WAAY,KAAK,cAAcA,EAAK,SAAS,EAC7C,SAAU,KAAK,cAAcA,EAAK,OAAO,EACzC,OAAQ,CACN,YAAa,KAAK,mBAAmBA,EAAK,OAAO,IAAI,EACrD,YAAaA,EAAK,OAAO,OAC3B,EACA,WAAY,KAAK,mBAAmBA,EAAK,UAAU,EACnD,OAAQA,EAAK,OAAO,IAAIE,IAAU,CAChC,KAAMA,EAAM,KACZ,UAAW,KAAK,cAAcA,EAAM,IAAI,EACxC,WAAY,KAAK,mBAAmBA,EAAM,UAAU,CACtD,EAAE,EACF,MAAOF,EAAK,MAAM,IAAIG,IAAS,CAC7B,QAASA,EAAK,QACd,WAAY,KAAK,mBAAmBA,EAAK,UAAU,CACrD,EAAE,EACF,SAAU,CACR,WAAYC,EAAAC,EAAA,GACPL,EAAK,SAAS,YADP,CAEV,eAAgB,iBAClB,GACA,WAAY,EACd,CACF,EAAE,EAEF,OAAO,MAAM,KAAK,IAAK,CACrB,OAAQ,OACR,QAAS,CACP,eAAgB,mBAChB,YAAa,KAAK,QAAU,EAC9B,EACA,KAAM,KAAK,UAAU,CACnB,MAAOD,CACT,CAAC,CACH,CAAC,EACE,KAAKO,GACCA,EAAS,GAIPR,EAAiB,SAHtB,QAAQ,MAAM;AAAA,sBAA8CQ,EAAS,MAAM,EAAE,EACtER,EAAiB,OAG3B,EACA,MAAOS,IACN,QAAQ,MAAM,yBAA0BA,CAAK,EACtCT,EAAiB,OACzB,CACL,CAEA,UAA0B,CACxB,
OAAO,QAAQ,QAAQ,CACzB,CACF,EAEOU,GAAQrB,ED3GR,IAAMsB,EAAY,CAACC,EAAe,uBAClB,SAAM,UAAUA,CAAI,EAG9BC,GAAe,CAACC,EAAwBC,IAAoB,CACvE,IAAMC,EAAW,IAAIC,GACfC,EAAW,IAAIC,GAAwBL,EAAeC,CAAM,EAC5DK,EAAY,IAAIC,GAAoBH,CAAQ,EAClDF,EAAS,iBAAiBI,CAAS,EACnCJ,EAAS,SAAS,CACpB,EEZA,IAAMM,GAASC,EAAU,EAEZC,EAAkB,CAC7BC,EACAC,EACAC,EAAgB,GAChBC,EAAW,WACR,CACH,IAAMC,EAA6B,CACjC,UAAW,CAACC,EAAQC,IAAS,CAC3B,IAAMC,EAAY,QAAQ,UAAUF,EAAQC,CAAI,EAChD,cAAO,iBAAiBC,EAAW,CACjC,cAAe,CACb,MAAOL,EACP,SAAU,EACZ,EACA,SAAU,CACR,MAAOC,CACT,CACF,CAAC,EACM,IAAI,MAAMI,EAAWH,CAAO,CACrC,EACA,IAAK,CAACC,EAAQG,EAAMC,IAAa,CAC/B,IAAMC,EAAQL,EAAOG,CAAI,EACnBN,EAAgB,GAAG,QAAQ,IAC/BG,EACA,eACF,CAAC,IAAIG,EAAK,SAAS,CAAC,GAEpB,OAAI,OAAOE,GAAU,UACnB,OAAO,iBAAiBA,EAAO,CAC7B,cAAe,CACb,MAAOR,EACP,SAAU,EACZ,EACA,SAAU,CACR,MAAOC,CACT,CACF,CAAC,EACM,IAAI,MAAMO,EAAON,CAAO,GAG7B,OAAOM,GAAU,WACZ,IAAIJ,IAAgB,CA9CnC,IAAAK,EAAAC,EAAAC,EAAAC,EA+CU,IAAMC,EAAqB,IAAI,KAAK,EAAE,YAAY,EAC5CC,EAAgB,QAAQ,IAAIX,EAAQ,UAAU,EAC9CY,GAAeN,EAAAL,EAAK,CAAC,IAAN,YAAAK,EAAS,aACxBO,GAAUN,EAAAN,EAAK,CAAC,IAAN,YAAAM,EAAS,QACzB,OAAAC,EAAOP,EAAK,CAAC,IAAb,aAAAO,EAAgB,cAChBC,EAAOR,EAAK,CAAC,IAAb,aAAAQ,EAAgB,QAETjB,GAAO,gBAAgB,GAAGmB,CAAa,IAAId,CAAa,GAAWiB,GAAcC,EAAA,wBACtF,GAAI,CACFD,EAAK,aAAa,iBAAkB,KAAK,UAAUb,CAAI,CAAC,EACxD,IAAMe,EAAW,QAAQ,MAAMX,EAAOL,EAAQC,CAAI,EAC5CgB,EAASH,EAAK,YAAY,EAAE,OAElC,OAAIE,aAAoB,QACf,IAAI,QAAQ,CAACE,EAASC,IAAW,CACtCH,EACG,KAAYI,GAAqBL,EAAA,wBAChC,IAAMC,EAAW,MAAMK,GAAsB1B,EAAQ,CACnD,QAASA,EACT,cAAAgB,EACA,cAAAd,EACA,mBAAAa,EACA,iBAAkB,IAAI,KAAK,EAAE,YAAY,EACzC,iBAAAU,EACA,OAAQnB,EAAK,CAAC,EACd,aAAAW,EACA,KAAMC,EACN,QAASI,CACX,CAAC,EAEDH,EAAK,aAAa,kBAAmB,KAAK,UAAUE,CAAQ,CAAC,EAC7DF,EAAK,aAAa,kBAAmB,SAAS,EAC9CA,EAAK,IAAI,EACTI,EAAQF,CAAQ,CAClB,EAAC,EACA,MAAOM,GAAU,CAChBR,EAAK,gBAAgBQ,CAAK,EAC1BR,EAAK,aAAa,kBAAmB,OAAO,EAC5CA,EAAK,IAAI,EACTK,EAAOG,CAAK,CACd,CAAC,CACL,CAAC,GAGHR,EAAK,aAAa,kBAAmB,KAAK,UAAUE,CAAQ,CAAC,EAC7DF,EAAK,aAAa,kBAAmB,SAAS,EAC9CA,EAAK,IAAI,EACFE,EACT,OAASM,EAAO,CACd,MAAAR,EAAK,gBAAgBQ,CAAK,EAC1BR,EAAK,aAAa,kBAAmB,OAAO,EAC5CA,EAAK,IAAI,EACHQ,CACR,CACF,EAAC,CACH,EAGK,QAAQ,IAAItB,EAAQG,EAAMC,CAAQ,CAC3C,CACF,EAEA,OAAO,IAAI,MAAMR,EAAKG,CAAO,CAC/B,EC9GA,UAAYwB,MAAmB,qBAGxB,IAAMC,GAAe,CAACC,EAAsBC,EAAgBC,IAC1D,YAAaC,EAAa,CAC/B,IAAMC,EAASC,EAAU,EAEnBC,EAAmBC,GAA6B,CACpD,GAAI,CACEL,GACF,OAAO,QAAQA,CAAU,EAAE,QAAQ,CAAC,CAACM,EAAKC,CAAK,IAAM,CACnDF,EAAK,aAAaC,EAAKC,CAAK,CAC9B,CAAC,EAGHF,EAAK,aAAa,iBAAkB,KAAK,UAAUJ,CAAI,CAAC,EACxD,IAAMO,EAAST,EAAK,GAAGE,CAAI,EAE3B,OAAIO,aAAkB,QACbA,EAAO,KAAMC,IAClBJ,EAAK,aAAa,kBAAmB,KAAK,UAAUI,CAAc,CAAC,EACnEJ,EAAK,UAAU,CAAE,KAAoB,iBAAe,EAAG,CAAC,EACjDI,EACR,EAAE,MAAOC,GAAU,CAClB,MAAAC,GAAYN,EAAMK,EAAOT,CAAI,EACvBS,CACR,CAAC,EAAE,QAAQ,IAAML,EAAK,IAAI,CAAC,GAE3BA,EAAK,aAAa,kBAAmB,KAAK,UAAUG,CAAM,CAAC,EAC3DH,EAAK,UAAU,CAAE,KAAoB,iBAAe,EAAG,CAAC,EACxDA,EAAK,IAAI,EACFG,EAEX,OAASE,EAAO,CACd,MAAAC,GAAYN,EAAMK,EAAOT,CAAI,EACvBS,CACR,CACF,EAEA,OAAOR,EAAO,gBAAgBJ,EAAcM,CAAe,CAC7D,EAGIO,GAAc,CAACN,EAA0BK,EAAYT,IAAgB,CACzEI,EAAK,aAAa,iBAAkB,KAAK,UAAUJ,CAAI,CAAC,EACxDI,EAAK,UAAU,CACb,KAAoB,iBAAe,MACnC,QAASK,aAAiB,MAAQA,EAAM,QAAU,eACpD,CAAC,EACDL,EAAK,IAAI,CACX,ECvCO,IAAMO,EAAN,KAAsB,CAG3B,YAAYC,EAAgB,CAI5B,SAAM,CAACC,EAAoBC,IACzBC,GAAkB,KAAK,OAAQF,EAAYC,CAAM,EAEnD,aAAWE,GACTC,GAAsB,KAAK,OAAQD,CAAI,EAEzC,SAAOF,GAAwBI,GAAsB,KAAK,OAAQJ,CAAM,EATtE,KAAK,OAASF,CAChB,CASF,ECjBA,IAAMO,GAAW,CAACC,EAAgBC,IAA0C,CAC1E,GAAI,EAAEA,EAAK,oBAAoB,QAC7B,MAAM,IAAI,MAAM,0CAA0C,EAE5D,OAAW,CAACC,EAAKC,CAAK,IAAK,OAAO,QAAQF,EAAK,QAAQ,EACrD,GAAI,OAAOC,GAAQ,UAAY,OAAOC,GAAU,SAC9C,MAAM,IAAI,MACR,yEACF,EAGJ,OAAOC,GAAyBJ,EAAQC,CAAI,CAC9C,EAEMI,GAAQ,CAACL,
EAAgBC,IAAuC,CACpE,GAAI,OAAOA,EAAK,OAAU,SACxB,MAAM,IAAI,MAAM,wBAAwB,EAE1C,GAAIA,EAAK,MAAQ,GAAKA,EAAK,MAAQ,IACjC,MAAM,IAAI,MAAM,2CAA2C,EAE7D,OAAOK,GAAsBN,EAAQC,CAAI,CAC3C,EAEMM,GAAS,CAACP,EAAgBC,IAAwC,CACtE,GAAI,EAAEA,EAAK,kCAAkC,QAC3C,MAAM,IAAI,MAAM,yDAAyD,EAE3E,OAAOO,GAAuBR,EAAQC,CAAI,CAC5C,EAEMQ,GAAQ,CAACT,EAAgBC,IAC7BS,GAAsBV,EAAQC,CAAI,EAEvBU,EAAN,KAAmB,CAGxB,YAAYX,EAAgB,CAI5B,WAASC,GAAqBQ,GAAM,KAAK,OAAQR,CAAI,EAErD,cAAYA,GAAwBF,GAAS,KAAK,OAAQE,CAAI,EAE9D,YAAUA,GAAsBM,GAAO,KAAK,OAAQN,CAAI,EAExD,WAASA,GAAqBI,GAAM,KAAK,OAAQJ,CAAI,EATnD,KAAK,OAASD,CAChB,CASF,ECpCA,UAAYY,OAAmB,qBAE/B,IAAMC,GAAgC,CACpC,OAAQ,CACN,KAAM,CACJ,cAAe,iCACf,gBAAiBC,CACnB,EACA,WAAY,CACV,cAAe,4BACf,gBAAiBC,CACnB,CACF,EACA,UAAW,CACT,KAAM,CACJ,cAAe,4BACf,gBAAiBC,CACnB,EACA,WAAY,CACV,cAAe,+BACf,gBAAiBC,EACnB,CACF,EACA,eAAgB,CACd,KAAM,CACJ,cAAe,6CACf,gBAAiBH,CACnB,EACA,WAAY,CACV,cAAe,wCACf,gBAAiBC,CACnB,CACF,CACF,EAEMG,GAAgD,CACpD,OAAQC,GACR,UAAWC,GACX,eAAgBC,EAClB,EAQMC,GAAyBC,GAAsB,CACnD,GAAI,CAACA,GAAO,OAAOA,GAAQ,UAAY,MAAM,QAAQA,CAAG,EACtD,MAAO,GAGT,IAAMC,EAAgB,CACpB,SACA,QACA,gBACA,oBACA,gBACF,EAGA,OAFe,OAAO,OAAOD,CAAG,EAElB,MAAOE,GACf,OAAOA,GAAQ,UAAYA,IAAQ,KAAa,GAC7CD,EAAc,MAAOE,GAAQA,KAAOD,CAAG,CAC/C,CACH,EAEaE,GAAN,KAAkB,CAQvB,YAAY,CACV,OAAAC,EAAS,QAAQ,IAAI,oBACrB,cAAAC,EAAgB,EAClB,EAAmB,CAAC,EAAG,CACrB,GAAID,IAAW,OACb,MAAM,IAAI,MACR,0HACF,EAGF,KAAK,OAASA,EACd,KAAK,cAAgBC,EACrB,KAAK,UAAY,IAAIC,EAAgBF,CAAM,EAC3C,KAAK,MAAQ,IAAIG,EAAaH,CAAM,EACpC,KAAK,MAAQ,IAAII,EAAaJ,CAAM,EACpC,KAAK,aAAeK,GAEhBJ,GACFK,GAAaL,EAAeD,CAAM,CAEtC,CAEA,IAAI,WAAY,CACd,GAAI,CACF,IAAMO,EAAS,EAAQ,mBAAmB,EAAE,QAC5C,OAAOC,EAAgB,KAAK,OAAQD,EAAQ,YAAa,WAAW,CACtE,OAAS,EAAG,CACV,QAAQ,MACN,8EACF,CACF,CACF,CAEA,IAAI,QAAS,CACX,GAAI,CACF,IAAMA,EAAS,EAAQ,QAAQ,EAAE,QACjC,OAAOC,EAAgB,KAAK,OAAQD,EAAQ,SAAU,QAAQ,CAChE,OAAS,EAAG,CACV,QAAQ,MACN,qEACF,CACF,CACF,CAEM,IAAIE,EAUK,QAAAC,EAAA,yBAVL,CACR,WAAAC,EACA,cAAAC,EACA,mBAAAC,EACA,eAAAC,EACA,KAAAC,EACA,SAAAC,EACA,QAAAC,EACA,wBAAAC,EACA,OAAAC,EAAS,EACX,EAAe,CAGb,OAFeC,EAAU,EAEX,gBAAgB,kBAA0BC,GAASX,EAAA,sBAC/D,GAAI,CACF,IAAMY,EAAgB,CACpB,WAAAX,EACA,cAAAC,EACA,mBAAAC,EACA,eAAAC,EACA,KAAAC,EACA,SAAAC,EACA,QAAAC,EACA,wBAAAC,EACA,OAAAC,CACF,EACAE,EAAK,aAAa,iBAAkB,KAAK,UAAUC,CAAa,CAAC,EAEjE,IAAMC,EAAyBT,EACzBU,EAA6C,CACjD,MAAOX,EACP,QAASD,EACT,iBAAkBI,CACpB,EACIF,IAAgBU,EAAkB,gBAAkBV,GAExD,IAAMW,EAAkB,MAAM,KAAK,UAAU,IAC3Cd,EACAa,CACF,EAEA,GAAI,CAACC,EAAiB,MAAM,IAAI,MAAM,kBAAkB,EAExD,IAAMC,EAAiBD,EAAgB,gBACvC,GAAI,CAACA,EAAgB,WACnB,MAAM,IAAI,MACR,WAAWd,CAAU,oDACvB,EAGF,IAAMgB,EAA0BF,EAAgB,SAChD,GAAI,CAACE,EACH,MAAM,IAAI,MACR,WAAWhB,CAAU,kDACvB,EAGF,IAAMiB,EAAuBD,EAAwB,MACrD,GAAI,CAACC,EACH,MAAM,IAAI,MACR,WAAWjB,CAAU,wDACvB,EAGF,IAAMkB,EAAgBD,EAAqB,SAErCE,EAAqB,IAAI,KAAK,EAAE,YAAY,EAC5CC,EAASC,IAAA,GACVP,EAAgB,YACfP,GAA2B,CAAC,GAE5Be,EACJhD,GACE4C,CACF,EAAEH,EAAe,IAAI,EACjBQ,EAAgBD,EAAO,cAEvBE,EAAkBF,EAAO,gBACzBG,GAAmB9C,GAAyBuC,CAAa,EACzDQ,EAAoBZ,EAAgB,kBACtCY,IACFN,EAAO,QAAaM,EAAkB,KAExCN,EAAO,OAAYZ,EACfA,GAAU,CAAC,SAAU,cAAc,EAAE,SAASU,CAAa,IAC7DE,EAAO,eAAoB,CAAE,cAAe,EAAK,GAGnD,IAAMO,EAAW,MAAMF,GAAiBX,EAAiBM,CAAM,EAEzDQ,EAAiBC,IAAiB,CACtC,IAAMC,GAAmB,IAAI,KAAK,EAAE,YAAY,EAChD,OAAOC,GAAaV,EAAA,CAClB,cAAAE,EACA,cAAAL,EACA,KAAM,CAAC,EACP,OAAAE,EACA,KAAAhB,EACA,mBAAAe,EACA,iBAAAW,GACA,QAAS,KAAK,OACd,SAAAzB,EACA,UAAWS,EAAgB,GAC3B,eAAgBA,EAAgB,QAChC,uBAAAF,EACA,SAAUN,EACV,wBAAyB,GACzB,QAASI,EAAK,YAAY,EAAE,QACzBmB,GACJ,CACH,EAEA,GAAIrB,EACF,OAAOwB,GAAeL,EAAUC,EAAeJ,CAAe,EAChE,IAAMS,EAAa,MAAML,EAAc,CAAE,iBAAkBD,CAAS,CAAC,EAE/DO,EAAiB,CACrB,WAAYD,EAAW,WACvB,aAAcN,EACd,iBAAkBM,EAAW,gBAC/B,EACA,OAAAvB,EAAK,aAAa,kBAAmB,KAAK,UAAUw
B,CAAc,CAAC,EAE5DA,CACT,OAASC,EAAO,CACd,MAAAzB,EAAK,UAAU,CACb,KAAoB,kBAAe,MACnC,QAASyB,aAAiB,MAAQA,EAAM,QAAU,eACpD,CAAC,EACKA,CACR,QAAE,CACAzB,EAAK,IAAI,CACX,CACF,EAAC,CACH,GAEM,YAAYZ,EAO6B,QAAAC,EAAA,yBAP7B,CAChB,aAAAqC,EACA,eAAAjC,EAAiB,CAAC,EAClB,SAAAE,EAAW,CAAC,EACZ,kBAAAgC,EAAoB,KACpB,gBAAAC,EAAkB,KAClB,iBAAAC,EAAmB,EACrB,EAA+C,CAC7C,GAAI,CACF,IAAMC,EAAS,MAAMC,GAAmB,CACtC,cAAeL,EACf,gBAAiBjC,EACjB,SAAAE,EACA,oBAAqBgC,EACrB,wBAAyBC,EACzB,mBAAoBC,EACpB,QAAS,KAAK,MAChB,CAAC,EAED,GAAI,CAACA,GACCxD,GAAsByD,CAAM,EAAG,CAGjC,IAAME,EAFa,OAAO,OAAOF,CAAM,EAER,OAC5BG,GAAcA,EAAK,iBAAmB,EACzC,EAEA,GAAID,EAAY,SAAW,EACzB,MAAM,IAAI,MAAM,KAAK,UAAUF,EAAQ,KAAM,CAAC,CAAC,EAMjD,GAAI,CAHqBE,EAAY,KAClCC,GAAcA,EAAK,SAAW,SACjC,EAEE,MAAM,IAAI,MAAM,KAAK,UAAUH,EAAQ,KAAM,CAAC,CAAC,CAEnD,CAGF,OAAOA,CACT,OAASL,EAAO,CACd,MAAIA,aAAiB,OACnB,QAAQ,MAAM,0BAA2BA,EAAM,OAAO,EAChD,IAAI,MAAM,2BAA2BA,EAAM,OAAO,EAAE,IAE1D,QAAQ,MAAM,kCAAmCA,CAAK,EAChD,IAAI,MAAM,gCAAgC,EAEpD,CACF,GAEM,WAAWN,EAAkB,QAAA9B,EAAA,sBACjC,OAAO6C,GAAe,KAAK,OAAQf,CAAI,CACzC,GACF","names":["Ably","URL_API_PROMPTLAYER","promptlayerApiHandler","apiKey","body","__async","proxyGenerator","promptLayerApiRequest","response","data","warnOnBadResponse","e","promptLayerTrackMetadata","__spreadProps","__spreadValues","promptLayerTrackScore","promptLayerTrackPrompt","promptLayerTrackGroup","promptLayerCreateGroup","getPromptTemplate","promptName","params","url","publishPromptTemplate","getAllPromptTemplates","_a","key","value","runWorkflowRequest","_0","workflow_name","input_variables","metadata","workflow_label_name","workflow_version_number","return_all_outputs","api_key","timeout","payload","headers","result","execution_id","channel_name","ably_token","ably","Ably","final_output","waitForWorkflowCompletion","error","channel","resolve","reject","results","messageListener","message","timer","err","openaiStreamChat","_b","_c","_d","_e","_f","_g","_h","_i","content","functionCall","lastResult","toolCalls","delta","toolCall","lastToolCall","firstChoice","anthropicStreamMessage","cleaned_result","function_name","prev","current","final_result","generator","__asyncGenerator","iter","__forAwait","more","temp","__await","request_response","request_id","main_message","trackRequest","openaiStreamCompletion","text","anthropicStreamCompletion","completion","streamResponse","afterStream","mapResults","openaiChatRequest","client","kwargs","openaiCompletionsRequest","MAP_TYPE_TO_OPENAI_FUNCTION","openaiRequest","promptBlueprint","OpenAI","requestToMake","azureOpenAIRequest","anthropicChatRequest","anthropicCompletionsRequest","MAP_TYPE_TO_ANTHROPIC_FUNCTION","anthropicRequest","Anthropic","utilLogRequest","GroupManager","apiKey","promptLayerCreateGroup","opentelemetry","SimpleSpanProcessor","NodeTracerProvider","SpanKind","SpanStatusCode","ExportResultCode","PromptLayerSpanExporter","enableTracing","apiKey","URL_API_PROMPTLAYER","attributes","kind","SpanKind","code","SpanStatusCode","time","spans","ExportResultCode","requestData","span","_a","event","link","__spreadProps","__spreadValues","response","error","span_exporter_default","getTracer","name","setupTracing","enableTracing","apiKey","provider","NodeTracerProvider","exporter","span_exporter_default","processor","SimpleSpanProcessor","tracer","getTracer","promptLayerBase","apiKey","llm","function_name","provider","handler","target","args","newTarget","prop","receiver","value","_a","_b","_c","_d","request_start_time","provider_type","return_pl_id","pl_tags","span","__async","response","spanId","resolve","reject","request_response
","promptlayerApiHandler","error","opentelemetry","wrapWithSpan","functionName","func","attributes","args","tracer","getTracer","wrapperFunction","span","key","value","result","resolvedResult","error","handleError","TemplateManager","apiKey","promptName","params","getPromptTemplate","body","publishPromptTemplate","getAllPromptTemplates","metadata","apiKey","body","key","value","promptLayerTrackMetadata","score","promptLayerTrackScore","prompt","promptLayerTrackPrompt","group","promptLayerTrackGroup","TrackManager","opentelemetry","MAP_PROVIDER_TO_FUNCTION_NAME","openaiStreamChat","openaiStreamCompletion","anthropicStreamMessage","anthropicStreamCompletion","MAP_PROVIDER_TO_FUNCTION","openaiRequest","anthropicRequest","azureOpenAIRequest","isWorkflowResultsDict","obj","REQUIRED_KEYS","val","key","PromptLayer","apiKey","enableTracing","TemplateManager","GroupManager","TrackManager","wrapWithSpan","setupTracing","module","promptLayerBase","_0","__async","promptName","promptVersion","promptReleaseLabel","inputVariables","tags","metadata","groupId","modelParameterOverrides","stream","getTracer","span","functionInput","prompt_input_variables","templateGetParams","promptBlueprint","promptTemplate","promptBlueprintMetadata","promptBlueprintModel","provider_type","request_start_time","kwargs","__spreadValues","config","function_name","stream_function","request_function","provider_base_url","response","_trackRequest","body","request_end_time","trackRequest","streamResponse","requestLog","functionOutput","error","workflowName","workflowLabelName","workflowVersion","returnAllOutputs","result","runWorkflowRequest","outputNodes","node","utilLogRequest"]}
package/dist/index.js CHANGED
@@ -1,3 +1,3 @@
- "use strict";var $e=Object.create;var I=Object.defineProperty,ve=Object.defineProperties,Ge=Object.getOwnPropertyDescriptor,Ke=Object.getOwnPropertyDescriptors,je=Object.getOwnPropertyNames,ee=Object.getOwnPropertySymbols,Me=Object.getPrototypeOf,oe=Object.prototype.hasOwnProperty,Ue=Object.prototype.propertyIsEnumerable;var te=(r,e)=>{if(e=Symbol[r])return e;throw Error("Symbol."+r+" is not defined")};var re=(r,e,t)=>e in r?I(r,e,{enumerable:!0,configurable:!0,writable:!0,value:t}):r[e]=t,g=(r,e)=>{for(var t in e||(e={}))oe.call(e,t)&&re(r,t,e[t]);if(ee)for(var t of ee(e))Ue.call(e,t)&&re(r,t,e[t]);return r},O=(r,e)=>ve(r,Ke(e));var Je=(r,e)=>{for(var t in e)I(r,t,{get:e[t],enumerable:!0})},ne=(r,e,t,o)=>{if(e&&typeof e=="object"||typeof e=="function")for(let n of je(e))!oe.call(r,n)&&n!==t&&I(r,n,{get:()=>e[n],enumerable:!(o=Ge(e,n))||o.enumerable});return r};var x=(r,e,t)=>(t=r!=null?$e(Me(r)):{},ne(e||!r||!r.__esModule?I(t,"default",{value:r,enumerable:!0}):t,r)),De=r=>ne(I({},"__esModule",{value:!0}),r);var l=(r,e,t)=>new Promise((o,n)=>{var s=i=>{try{c(t.next(i))}catch(p){n(p)}},a=i=>{try{c(t.throw(i))}catch(p){n(p)}},c=i=>i.done?o(i.value):Promise.resolve(i.value).then(s,a);c((t=t.apply(r,e)).next())}),A=function(r,e){this[0]=r,this[1]=e},J=(r,e,t)=>{var o=(a,c,i,p)=>{try{var u=t[a](c),f=(c=u.value)instanceof A,h=u.done;Promise.resolve(f?c[0]:c).then(d=>f?o(a==="return"?a:"next",c[1]?{done:d.done,value:d.value}:d,i,p):i({value:d,done:h})).catch(d=>o("throw",d,i,p))}catch(d){p(d)}},n=a=>s[a]=c=>new Promise((i,p)=>o(a,c,i,p)),s={};return t=t.apply(r,e),s[Symbol.asyncIterator]=()=>s,n("next"),n("throw"),n("return"),s};var D=(r,e,t)=>(e=r[te("asyncIterator")])?e.call(r):(r=r[te("iterator")](),e={},t=(o,n)=>(n=r[o])&&(e[o]=s=>new Promise((a,c,i)=>(s=n.call(r,s),i=s.done,Promise.resolve(s.value).then(p=>a({value:p,done:i}),c)))),t("next"),t("return"),e);var it={};Je(it,{PromptLayer:()=>X});module.exports=De(it);var se=x(require("ably"));var w=process.env.URL_API_PROMPTLAYER||"https://api.promptlayer.com",ae=(r,e)=>l(void 0,null,function*(){return e.request_response[Symbol.asyncIterator]!==void 0?ze(r,e.request_response,e):yield ie(r,e)}),ie=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/track-request`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(e)}),o=yield t.json();if(t.status!==200&&T(o,"WARNING: While logging your request, PromptLayer experienced the following error:"),o&&e.return_pl_id)return[e.request_response,o.request_id]}catch(t){console.warn(`WARNING: While logging your request PromptLayer had the following error: ${t}`)}return e.request_response}),ce=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/library-track-metadata`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(O(g({},e),{api_key:r}))}),o=yield t.json();if(t.status!==200)return T(o,"WARNING: While logging metadata to your request, PromptLayer experienced the following error"),!1}catch(t){return console.warn(`WARNING: While logging metadata to your request, PromptLayer experienced the following error: ${t}`),!1}return!0}),pe=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/library-track-score`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(O(g({},e),{api_key:r}))}),o=yield t.json();if(t.status!==200)return T(o,"WARNING: While scoring your request, PromptLayer experienced the following error"),!1}catch(t){return console.warn(`WARNING: While scoring your request, PromptLayer experienced 
the following error: ${t}`),!1}return!0}),le=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/library-track-prompt`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(O(g({},e),{api_key:r}))}),o=yield t.json();if(t.status!==200)return T(o,"WARNING: While associating your request with a prompt template, PromptLayer experienced the following error"),!1}catch(t){return console.warn(`WARNING: While associating your request with a prompt template, PromptLayer experienced the following error: ${t}`),!1}return!0}),ue=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/track-group`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(O(g({},e),{api_key:r}))}),o=yield t.json();if(t.status!==200)return T(o,"WARNING: While associating your request with a group, PromptLayer experienced the following error"),!1}catch(t){return console.warn(`WARNING: While associating your request with a group, PromptLayer experienced the following error: ${t}`),!1}return!0}),me=r=>l(void 0,null,function*(){try{let e=yield fetch(`${w}/create-group`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({api_key:r})}),t=yield e.json();return e.status!==200?(T(t,"WARNING: While creating a group PromptLayer had the following error"),!1):t.id}catch(e){return console.warn(`WARNING: While creating a group PromptLayer had the following error: ${e}`),!1}}),fe=(r,e,t)=>l(void 0,null,function*(){try{let o=new URL(`${w}/prompt-templates/${e}`),n=yield fetch(o,{method:"POST",headers:{"Content-Type":"application/json","X-API-KEY":r},body:JSON.stringify(t)}),s=yield n.json();return n.status!==200?(T(s,"WARNING: While fetching a prompt template PromptLayer had the following error"),null):(s.warning&&console.warn(`WARNING: While tracking your prompt PromptLayer had the following error: ${s.warning}`),s)}catch(o){return console.warn(`WARNING: While fetching a prompt template PromptLayer had the following error: ${o}`),null}}),de=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/rest/prompt-templates`,{method:"POST",headers:{"Content-Type":"application/json","X-API-KEY":r},body:JSON.stringify({prompt_template:g({},e),prompt_version:g({},e),release_labels:e.release_labels?e.release_labels:void 0})}),o=yield t.json();return t.status===400&&T(o,"WARNING: While publishing a prompt template PromptLayer had the following error"),o}catch(t){console.warn(`WARNING: While publishing a prompt template PromptLayer had the following error: ${t}`)}}),he=(r,e)=>l(void 0,null,function*(){var t;try{let o=new URL(`${w}/prompt-templates`);Object.entries(e||{}).forEach(([a,c])=>o.searchParams.append(a,c.toString()));let n=yield fetch(o,{headers:{"Content-Type":"application/json","X-API-KEY":r}}),s=yield n.json();return n.status!==200?(T(s,"WARNING: While fetching all prompt templates PromptLayer had the following error"),null):(t=s.items)!=null?t:[]}catch(o){return console.warn(`WARNING: While fetching all prompt templates PromptLayer had the following error: ${o}`),null}}),ye=i=>l(void 0,[i],function*({workflow_name:r,input_variables:e,metadata:t={},workflow_label_name:o=null,workflow_version_number:n=null,return_all_outputs:s=!1,api_key:a,timeout:c=12e4}){let p={input_variables:e,metadata:t,workflow_label_name:o,workflow_version_number:n,return_all_outputs:s},u={"X-API-KEY":a,"Content-Type":"application/json"};try{let f=yield 
fetch(`${w}/workflows/${encodeURIComponent(r)}/run`,{method:"POST",headers:u,body:JSON.stringify(p)});if(f.status!==201)return{success:!1,message:`Failed to run workflow: ${(yield f.json().catch(()=>({}))).error||f.statusText}`};let h=yield f.json();h.warning&&console.warn(`WARNING: ${h.warning}`);let d=h.workflow_version_execution_id;if(!d)return console.log("No execution ID returned from workflow run"),{success:!1,message:"Failed to run workflow"};let b=`workflow_updates:${d}`,P=(yield(yield fetch(`${w}/ws-token-request-library?capability=${b}`,{method:"POST",headers:u})).json()).token_details.token,y=new se.default.Realtime({token:P});try{let m=yield Ye(y,b,c);return y.close(),m}finally{y.close()}}catch(f){throw console.error(`Failed to run workflow: ${f instanceof Error?f.message:f}`),f}});function Ye(r,e,t){return l(this,null,function*(){let o=r.channels.get(e);return new Promise((n,s)=>l(this,null,function*(){let a=null,c=p=>{p.name==="SET_WORKFLOW_COMPLETE"&&(a=JSON.parse(p.data).final_output,clearTimeout(i),o.unsubscribe("SET_WORKFLOW_COMPLETE",c),n(a))},i=setTimeout(()=>{o.unsubscribe("SET_WORKFLOW_COMPLETE",c),s(new Error("Workflow execution did not complete properly (timeout)"))},t);try{yield o.subscribe("SET_WORKFLOW_COMPLETE",c)}catch(p){clearTimeout(i),s(p)}}))})}var $=r=>{var c,i,p,u,f,h,d,b,k;let e=null,t,o={id:"",choices:[],created:Date.now(),model:"",object:"chat.completion"},n=r.at(-1);if(!n)return o;let s;for(let _ of r){if(_.choices.length===0)continue;let P=_.choices[0].delta;P.content&&(e=`${e||""}${P.content||""}`),P.function_call&&(t={name:`${t?t.name:""}${P.function_call.name||""}`,arguments:`${t?t.arguments:""}${P.function_call.arguments||""}`});let y=(c=P.tool_calls)==null?void 0:c[0];if(y){s=s||[];let m=s.at(-1);if(!m||y.id){s.push({id:y.id||"",type:y.type||"function",function:{name:((i=y.function)==null?void 0:i.name)||"",arguments:((p=y.function)==null?void 0:p.arguments)||""}});continue}m.function.name=`${m.function.name}${((u=y.function)==null?void 0:u.name)||""}`,m.function.arguments=`${m.function.arguments}${((f=y.function)==null?void 0:f.arguments)||""}`}}let a=r[0].choices.at(0);return o.choices.push({finish_reason:(h=a==null?void 0:a.finish_reason)!=null?h:"stop",index:(d=a==null?void 0:a.index)!=null?d:0,logprobs:(b=a==null?void 0:a.logprobs)!=null?b:null,message:{role:"assistant",content:e,function_call:t||void 0,tool_calls:s||void 0,refusal:(k=a==null?void 0:a.delta.refusal)!=null?k:null}}),o.id=n.id,o.model=n.model,o.created=n.created,o.system_fingerprint=n.system_fingerprint,o.usage=n.usage,o},Y=r=>{let e={id:"",model:"",content:[],role:"assistant",type:"message",stop_reason:"stop_sequence",stop_sequence:null,usage:{input_tokens:0,output_tokens:0}};if(!r.at(-1))return e;let o="";for(let n of r)switch(n.type){case"message_start":{e=g({},n.message);break}case"content_block_delta":n.delta.type==="text_delta"&&(o=`${o}${n.delta.text}`);case"message_delta":"usage"in n&&(e.usage.output_tokens=n.usage.output_tokens),"stop_reason"in n.delta&&(e.stop_reason=n.delta.stop_reason);default:break}return e.content.push({type:"text",text:o}),e},Fe=(r,e="openai.chat.completions.create")=>{if("completion"in r[0])return r.reduce((t,o)=>O(g({},o),{completion:`${t.completion}${o.completion}`}),{});if(e==="anthropic.messages.create")return Y(r);if("text"in r[0].choices[0]){let t="";for(let n of r)t=`${t}${n.choices[0].text}`;let o=structuredClone(r.at(-1));return o.choices[0].text=t,o}if("delta"in r[0].choices[0]){let t=$(r);return 
t.choices[0]=g(g({},t.choices[0]),t.choices[0].message),t}return""};function ze(r,e,t){return J(this,null,function*(){let o=[];try{for(var a=D(e),c,i,p;c=!(i=yield new A(a.next())).done;c=!1){let u=i.value;yield t.return_pl_id?[u,null]:u,o.push(u)}}catch(i){p=[i]}finally{try{c&&(i=a.return)&&(yield new A(i.call(a)))}finally{if(p)throw p[0]}}let n=Fe(o,t.function_name),s=yield new A(ie(r,O(g({},t),{request_response:n,request_end_time:new Date().toISOString()})));if(s&&t.return_pl_id){let u=s[1];yield[o.at(-1),u]}})}var T=(r,e)=>{try{console.warn(`${e}: ${r.message}`)}catch(t){console.warn(`${e}: ${r}`)}},ge=r=>l(void 0,null,function*(){try{let e=yield fetch(`${w}/track-request`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(r)});return e.status!==200&&T(e,"WARNING: While logging your request, PromptLayer experienced the following error:"),e.json()}catch(e){console.warn(`WARNING: While logging your request PromptLayer had the following error: ${e}`)}return{}}),F=r=>{let e={id:"",choices:[{finish_reason:"stop",index:0,text:"",logprobs:null}],created:Date.now(),model:"",object:"text_completion"},t=r.at(-1);if(!t)return e;let o="";for(let n of r)n.choices.length>0&&n.choices[0].text&&(o=`${o}${n.choices[0].text}`);return e.choices[0].text=o,e.id=t.id,e.created=t.created,e.model=t.model,e.system_fingerprint=t.system_fingerprint,e.usage=t.usage,e},_e=r=>{let e={completion:"",id:"",model:"",stop_reason:"",type:"completion"},t=r.at(-1);if(!t)return e;let o="";for(let n of r)o=`${o}${n.completion}`;return e.completion=o,e.id=t.id,e.model=t.model,e.stop_reason=t.stop_reason,e};function we(r,e,t){return J(this,null,function*(){let o={request_id:null,raw_response:null,prompt_blueprint:null},n=[];try{for(var c=D(r),i,p,u;i=!(p=yield new A(c.next())).done;i=!1){let f=p.value;n.push(f),o.raw_response=f,yield o}}catch(p){u=[p]}finally{try{i&&(p=c.return)&&(yield new A(p.call(c)))}finally{if(u)throw u[0]}}let s=t(n),a=yield new A(e({request_response:s}));o.request_id=a.request_id,o.prompt_blueprint=a.prompt_blueprint,yield o})}var Be=(r,e)=>l(void 0,null,function*(){return r.chat.completions.create(e)}),Xe=(r,e)=>l(void 0,null,function*(){return r.completions.create(e)}),Pe={chat:Be,completion:Xe},Re=(r,e)=>l(void 0,null,function*(){let t=require("openai").default,o=new t({baseURL:e.baseURL}),n=Pe[r.prompt_template.type];return n(o,e)}),Te=(r,e)=>l(void 0,null,function*(){let t=require("openai").AzureOpenAI,o=new t({endpoint:e.baseURL});e==null||delete e.baseURL;let n=Pe[r.prompt_template.type];return n(o,e)}),He=(r,e)=>l(void 0,null,function*(){return r.messages.create(e)}),Ve=(r,e)=>l(void 0,null,function*(){return r.completions.create(e)}),Qe={chat:He,completion:Ve},Se=(r,e)=>l(void 0,null,function*(){let t=require("@anthropic-ai/sdk").default,o=new t({baseURL:e.baseURL}),n=Qe[r.prompt_template.type];return n(o,e)}),be=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/log-request`,{method:"POST",headers:{"X-API-KEY":r,"Content-Type":"application/json"},body:JSON.stringify(e)});return t.status!==201?(T(t,"WARNING: While logging your request PromptLayer had the following error"),null):t.json()}catch(t){return console.warn(`WARNING: While tracking your prompt PromptLayer had the following error: ${t}`),null}});var v=class{constructor(e){this.create=()=>me(this.apiKey);this.apiKey=e}};var Oe=x(require("@opentelemetry/api")),Ae=require("@opentelemetry/sdk-trace-base"),Ne=require("@opentelemetry/sdk-trace-node");var 
S=require("@opentelemetry/api"),q=require("@opentelemetry/core");var z=class{constructor(e,t){this.apiKey=t||process.env.PROMPTLAYER_API_KEY,this.enableTracing=e,this.url=`${w}/spans-bulk`}attributesToObject(e){return e?Object.fromEntries(Object.entries(e)):{}}spanKindToString(e){return{[S.SpanKind.INTERNAL]:"SpanKind.INTERNAL",[S.SpanKind.SERVER]:"SpanKind.SERVER",[S.SpanKind.CLIENT]:"SpanKind.CLIENT",[S.SpanKind.PRODUCER]:"SpanKind.PRODUCER",[S.SpanKind.CONSUMER]:"SpanKind.CONSUMER"}[e]||"SpanKind.INTERNAL"}statusCodeToString(e){return{[S.SpanStatusCode.ERROR]:"StatusCode.ERROR",[S.SpanStatusCode.OK]:"StatusCode.OK",[S.SpanStatusCode.UNSET]:"StatusCode.UNSET"}[e]||"StatusCode.UNSET"}toNanoseconds(e){return(BigInt(e[0])*BigInt(1e9)+BigInt(e[1])).toString()}export(e){if(!this.enableTracing)return Promise.resolve(q.ExportResultCode.SUCCESS);let t=e.map(o=>{var n;return{name:o.name,context:{trace_id:o.spanContext().traceId,span_id:o.spanContext().spanId,trace_state:((n=o.spanContext().traceState)==null?void 0:n.serialize())||""},kind:this.spanKindToString(o.kind),parent_id:o.parentSpanId||null,start_time:this.toNanoseconds(o.startTime),end_time:this.toNanoseconds(o.endTime),status:{status_code:this.statusCodeToString(o.status.code),description:o.status.message},attributes:this.attributesToObject(o.attributes),events:o.events.map(s=>({name:s.name,timestamp:this.toNanoseconds(s.time),attributes:this.attributesToObject(s.attributes)})),links:o.links.map(s=>({context:s.context,attributes:this.attributesToObject(s.attributes)})),resource:{attributes:O(g({},o.resource.attributes),{"service.name":"prompt-layer-js"}),schema_url:""}}});return fetch(this.url,{method:"POST",headers:{"Content-Type":"application/json","X-API-KEY":this.apiKey||""},body:JSON.stringify({spans:t})}).then(o=>o.ok?q.ExportResultCode.SUCCESS:(console.error(`Error exporting spans
+ "use strict";var $e=Object.create;var I=Object.defineProperty,ve=Object.defineProperties,Ge=Object.getOwnPropertyDescriptor,Ke=Object.getOwnPropertyDescriptors,je=Object.getOwnPropertyNames,ee=Object.getOwnPropertySymbols,Me=Object.getPrototypeOf,oe=Object.prototype.hasOwnProperty,Ue=Object.prototype.propertyIsEnumerable;var te=(r,e)=>{if(e=Symbol[r])return e;throw Error("Symbol."+r+" is not defined")};var re=(r,e,t)=>e in r?I(r,e,{enumerable:!0,configurable:!0,writable:!0,value:t}):r[e]=t,g=(r,e)=>{for(var t in e||(e={}))oe.call(e,t)&&re(r,t,e[t]);if(ee)for(var t of ee(e))Ue.call(e,t)&&re(r,t,e[t]);return r},O=(r,e)=>ve(r,Ke(e));var Je=(r,e)=>{for(var t in e)I(r,t,{get:e[t],enumerable:!0})},ne=(r,e,t,o)=>{if(e&&typeof e=="object"||typeof e=="function")for(let n of je(e))!oe.call(r,n)&&n!==t&&I(r,n,{get:()=>e[n],enumerable:!(o=Ge(e,n))||o.enumerable});return r};var x=(r,e,t)=>(t=r!=null?$e(Me(r)):{},ne(e||!r||!r.__esModule?I(t,"default",{value:r,enumerable:!0}):t,r)),De=r=>ne(I({},"__esModule",{value:!0}),r);var l=(r,e,t)=>new Promise((o,n)=>{var s=i=>{try{c(t.next(i))}catch(p){n(p)}},a=i=>{try{c(t.throw(i))}catch(p){n(p)}},c=i=>i.done?o(i.value):Promise.resolve(i.value).then(s,a);c((t=t.apply(r,e)).next())}),A=function(r,e){this[0]=r,this[1]=e},J=(r,e,t)=>{var o=(a,c,i,p)=>{try{var u=t[a](c),f=(c=u.value)instanceof A,h=u.done;Promise.resolve(f?c[0]:c).then(d=>f?o(a==="return"?a:"next",c[1]?{done:d.done,value:d.value}:d,i,p):i({value:d,done:h})).catch(d=>o("throw",d,i,p))}catch(d){p(d)}},n=a=>s[a]=c=>new Promise((i,p)=>o(a,c,i,p)),s={};return t=t.apply(r,e),s[Symbol.asyncIterator]=()=>s,n("next"),n("throw"),n("return"),s};var D=(r,e,t)=>(e=r[te("asyncIterator")])?e.call(r):(r=r[te("iterator")](),e={},t=(o,n)=>(n=r[o])&&(e[o]=s=>new Promise((a,c,i)=>(s=n.call(r,s),i=s.done,Promise.resolve(s.value).then(p=>a({value:p,done:i}),c)))),t("next"),t("return"),e);var it={};Je(it,{PromptLayer:()=>X});module.exports=De(it);var se=x(require("ably"));var w=process.env.URL_API_PROMPTLAYER||"https://api.promptlayer.com",ae=(r,e)=>l(void 0,null,function*(){return e.request_response[Symbol.asyncIterator]!==void 0?ze(r,e.request_response,e):yield ie(r,e)}),ie=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/track-request`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(e)}),o=yield t.json();if(t.status!==200&&T(o,"WARNING: While logging your request, PromptLayer experienced the following error:"),o&&e.return_pl_id)return[e.request_response,o.request_id]}catch(t){console.warn(`WARNING: While logging your request PromptLayer had the following error: ${t}`)}return e.request_response}),ce=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/library-track-metadata`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(O(g({},e),{api_key:r}))}),o=yield t.json();if(t.status!==200)return T(o,"WARNING: While logging metadata to your request, PromptLayer experienced the following error"),!1}catch(t){return console.warn(`WARNING: While logging metadata to your request, PromptLayer experienced the following error: ${t}`),!1}return!0}),pe=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/library-track-score`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(O(g({},e),{api_key:r}))}),o=yield t.json();if(t.status!==200)return T(o,"WARNING: While scoring your request, PromptLayer experienced the following error"),!1}catch(t){return console.warn(`WARNING: While scoring your request, PromptLayer experienced 
the following error: ${t}`),!1}return!0}),le=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/library-track-prompt`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(O(g({},e),{api_key:r}))}),o=yield t.json();if(t.status!==200)return T(o,"WARNING: While associating your request with a prompt template, PromptLayer experienced the following error"),!1}catch(t){return console.warn(`WARNING: While associating your request with a prompt template, PromptLayer experienced the following error: ${t}`),!1}return!0}),ue=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/track-group`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(O(g({},e),{api_key:r}))}),o=yield t.json();if(t.status!==200)return T(o,"WARNING: While associating your request with a group, PromptLayer experienced the following error"),!1}catch(t){return console.warn(`WARNING: While associating your request with a group, PromptLayer experienced the following error: ${t}`),!1}return!0}),me=r=>l(void 0,null,function*(){try{let e=yield fetch(`${w}/create-group`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({api_key:r})}),t=yield e.json();return e.status!==200?(T(t,"WARNING: While creating a group PromptLayer had the following error"),!1):t.id}catch(e){return console.warn(`WARNING: While creating a group PromptLayer had the following error: ${e}`),!1}}),fe=(r,e,t)=>l(void 0,null,function*(){try{let o=new URL(`${w}/prompt-templates/${e}`),n=yield fetch(o,{method:"POST",headers:{"Content-Type":"application/json","X-API-KEY":r},body:JSON.stringify(t)}),s=yield n.json();return n.status!==200?(T(s,"WARNING: While fetching a prompt template PromptLayer had the following error"),null):(s.warning&&console.warn(`WARNING: While fetching your prompt PromptLayer had the following error: ${s.warning}`),s)}catch(o){return console.warn(`WARNING: While fetching a prompt template PromptLayer had the following error: ${o}`),null}}),de=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/rest/prompt-templates`,{method:"POST",headers:{"Content-Type":"application/json","X-API-KEY":r},body:JSON.stringify({prompt_template:g({},e),prompt_version:g({},e),release_labels:e.release_labels?e.release_labels:void 0})}),o=yield t.json();return t.status===400&&T(o,"WARNING: While publishing a prompt template PromptLayer had the following error"),o}catch(t){console.warn(`WARNING: While publishing a prompt template PromptLayer had the following error: ${t}`)}}),he=(r,e)=>l(void 0,null,function*(){var t;try{let o=new URL(`${w}/prompt-templates`);Object.entries(e||{}).forEach(([a,c])=>o.searchParams.append(a,c.toString()));let n=yield fetch(o,{headers:{"Content-Type":"application/json","X-API-KEY":r}}),s=yield n.json();return n.status!==200?(T(s,"WARNING: While fetching all prompt templates PromptLayer had the following error"),null):(t=s.items)!=null?t:[]}catch(o){return console.warn(`WARNING: While fetching all prompt templates PromptLayer had the following error: ${o}`),null}}),ye=i=>l(void 0,[i],function*({workflow_name:r,input_variables:e,metadata:t={},workflow_label_name:o=null,workflow_version_number:n=null,return_all_outputs:s=!1,api_key:a,timeout:c=36e5}){let p={input_variables:e,metadata:t,workflow_label_name:o,workflow_version_number:n,return_all_outputs:s},u={"X-API-KEY":a,"Content-Type":"application/json"};try{let f=yield 
fetch(`${w}/workflows/${encodeURIComponent(r)}/run`,{method:"POST",headers:u,body:JSON.stringify(p)});if(f.status!==201)return{success:!1,message:`Failed to run workflow: ${(yield f.json().catch(()=>({}))).error||f.statusText}`};let h=yield f.json();h.warning&&console.warn(`WARNING: ${h.warning}`);let d=h.workflow_version_execution_id;if(!d)return console.log("No execution ID returned from workflow run"),{success:!1,message:"Failed to run workflow"};let b=`workflow_updates:${d}`,P=(yield(yield fetch(`${w}/ws-token-request-library?capability=${b}`,{method:"POST",headers:u})).json()).token_details.token,y=new se.default.Realtime({token:P});try{let m=yield Ye(y,b,c);return y.close(),m}finally{y.close()}}catch(f){throw console.error(`Failed to run workflow: ${f instanceof Error?f.message:f}`),f}});function Ye(r,e,t){return l(this,null,function*(){let o=r.channels.get(e);return new Promise((n,s)=>l(this,null,function*(){let a=null,c=p=>{p.name==="SET_WORKFLOW_COMPLETE"&&(a=JSON.parse(p.data).final_output,clearTimeout(i),o.unsubscribe("SET_WORKFLOW_COMPLETE",c),n(a))},i=setTimeout(()=>{o.unsubscribe("SET_WORKFLOW_COMPLETE",c),s(new Error("Workflow execution did not complete properly (timeout)"))},t);try{yield o.subscribe("SET_WORKFLOW_COMPLETE",c)}catch(p){clearTimeout(i),s(p)}}))})}var $=r=>{var c,i,p,u,f,h,d,b,k;let e=null,t,o={id:"",choices:[],created:Date.now(),model:"",object:"chat.completion"},n=r.at(-1);if(!n)return o;let s;for(let _ of r){if(_.choices.length===0)continue;let P=_.choices[0].delta;P.content&&(e=`${e||""}${P.content||""}`),P.function_call&&(t={name:`${t?t.name:""}${P.function_call.name||""}`,arguments:`${t?t.arguments:""}${P.function_call.arguments||""}`});let y=(c=P.tool_calls)==null?void 0:c[0];if(y){s=s||[];let m=s.at(-1);if(!m||y.id){s.push({id:y.id||"",type:y.type||"function",function:{name:((i=y.function)==null?void 0:i.name)||"",arguments:((p=y.function)==null?void 0:p.arguments)||""}});continue}m.function.name=`${m.function.name}${((u=y.function)==null?void 0:u.name)||""}`,m.function.arguments=`${m.function.arguments}${((f=y.function)==null?void 0:f.arguments)||""}`}}let a=r[0].choices.at(0);return o.choices.push({finish_reason:(h=a==null?void 0:a.finish_reason)!=null?h:"stop",index:(d=a==null?void 0:a.index)!=null?d:0,logprobs:(b=a==null?void 0:a.logprobs)!=null?b:null,message:{role:"assistant",content:e,function_call:t||void 0,tool_calls:s||void 0,refusal:(k=a==null?void 0:a.delta.refusal)!=null?k:null}}),o.id=n.id,o.model=n.model,o.created=n.created,o.system_fingerprint=n.system_fingerprint,o.usage=n.usage,o},Y=r=>{let e={id:"",model:"",content:[],role:"assistant",type:"message",stop_reason:"stop_sequence",stop_sequence:null,usage:{input_tokens:0,output_tokens:0}};if(!r.at(-1))return e;let o="";for(let n of r)switch(n.type){case"message_start":{e=g({},n.message);break}case"content_block_delta":n.delta.type==="text_delta"&&(o=`${o}${n.delta.text}`);case"message_delta":"usage"in n&&(e.usage.output_tokens=n.usage.output_tokens),"stop_reason"in n.delta&&(e.stop_reason=n.delta.stop_reason);default:break}return e.content.push({type:"text",text:o}),e},Fe=(r,e="openai.chat.completions.create")=>{if("completion"in r[0])return r.reduce((t,o)=>O(g({},o),{completion:`${t.completion}${o.completion}`}),{});if(e==="anthropic.messages.create")return Y(r);if("text"in r[0].choices[0]){let t="";for(let n of r)t=`${t}${n.choices[0].text}`;let o=structuredClone(r.at(-1));return o.choices[0].text=t,o}if("delta"in r[0].choices[0]){let t=$(r);return 
t.choices[0]=g(g({},t.choices[0]),t.choices[0].message),t}return""};function ze(r,e,t){return J(this,null,function*(){let o=[];try{for(var a=D(e),c,i,p;c=!(i=yield new A(a.next())).done;c=!1){let u=i.value;yield t.return_pl_id?[u,null]:u,o.push(u)}}catch(i){p=[i]}finally{try{c&&(i=a.return)&&(yield new A(i.call(a)))}finally{if(p)throw p[0]}}let n=Fe(o,t.function_name),s=yield new A(ie(r,O(g({},t),{request_response:n,request_end_time:new Date().toISOString()})));if(s&&t.return_pl_id){let u=s[1];yield[o.at(-1),u]}})}var T=(r,e)=>{try{console.warn(`${e}: ${r.message}`)}catch(t){console.warn(`${e}: ${r}`)}},ge=r=>l(void 0,null,function*(){try{let e=yield fetch(`${w}/track-request`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(r)});return e.status!==200&&T(e,"WARNING: While logging your request, PromptLayer experienced the following error:"),e.json()}catch(e){console.warn(`WARNING: While logging your request PromptLayer had the following error: ${e}`)}return{}}),F=r=>{let e={id:"",choices:[{finish_reason:"stop",index:0,text:"",logprobs:null}],created:Date.now(),model:"",object:"text_completion"},t=r.at(-1);if(!t)return e;let o="";for(let n of r)n.choices.length>0&&n.choices[0].text&&(o=`${o}${n.choices[0].text}`);return e.choices[0].text=o,e.id=t.id,e.created=t.created,e.model=t.model,e.system_fingerprint=t.system_fingerprint,e.usage=t.usage,e},_e=r=>{let e={completion:"",id:"",model:"",stop_reason:"",type:"completion"},t=r.at(-1);if(!t)return e;let o="";for(let n of r)o=`${o}${n.completion}`;return e.completion=o,e.id=t.id,e.model=t.model,e.stop_reason=t.stop_reason,e};function we(r,e,t){return J(this,null,function*(){let o={request_id:null,raw_response:null,prompt_blueprint:null},n=[];try{for(var c=D(r),i,p,u;i=!(p=yield new A(c.next())).done;i=!1){let f=p.value;n.push(f),o.raw_response=f,yield o}}catch(p){u=[p]}finally{try{i&&(p=c.return)&&(yield new A(p.call(c)))}finally{if(u)throw u[0]}}let s=t(n),a=yield new A(e({request_response:s}));o.request_id=a.request_id,o.prompt_blueprint=a.prompt_blueprint,yield o})}var Be=(r,e)=>l(void 0,null,function*(){return r.chat.completions.create(e)}),Xe=(r,e)=>l(void 0,null,function*(){return r.completions.create(e)}),Pe={chat:Be,completion:Xe},Re=(r,e)=>l(void 0,null,function*(){let t=require("openai").default,o=new t({baseURL:e.baseURL}),n=Pe[r.prompt_template.type];return n(o,e)}),Te=(r,e)=>l(void 0,null,function*(){let t=require("openai").AzureOpenAI,o=new t({endpoint:e.baseURL});e==null||delete e.baseURL;let n=Pe[r.prompt_template.type];return n(o,e)}),He=(r,e)=>l(void 0,null,function*(){return r.messages.create(e)}),Ve=(r,e)=>l(void 0,null,function*(){return r.completions.create(e)}),Qe={chat:He,completion:Ve},Se=(r,e)=>l(void 0,null,function*(){let t=require("@anthropic-ai/sdk").default,o=new t({baseURL:e.baseURL}),n=Qe[r.prompt_template.type];return n(o,e)}),be=(r,e)=>l(void 0,null,function*(){try{let t=yield fetch(`${w}/log-request`,{method:"POST",headers:{"X-API-KEY":r,"Content-Type":"application/json"},body:JSON.stringify(e)});return t.status!==201?(T(t,"WARNING: While logging your request PromptLayer had the following error"),null):t.json()}catch(t){return console.warn(`WARNING: While tracking your prompt PromptLayer had the following error: ${t}`),null}});var v=class{constructor(e){this.create=()=>me(this.apiKey);this.apiKey=e}};var Oe=x(require("@opentelemetry/api")),Ae=require("@opentelemetry/sdk-trace-base"),Ne=require("@opentelemetry/sdk-trace-node");var 
S=require("@opentelemetry/api"),q=require("@opentelemetry/core");var z=class{constructor(e,t){this.apiKey=t||process.env.PROMPTLAYER_API_KEY,this.enableTracing=e,this.url=`${w}/spans-bulk`}attributesToObject(e){return e?Object.fromEntries(Object.entries(e)):{}}spanKindToString(e){return{[S.SpanKind.INTERNAL]:"SpanKind.INTERNAL",[S.SpanKind.SERVER]:"SpanKind.SERVER",[S.SpanKind.CLIENT]:"SpanKind.CLIENT",[S.SpanKind.PRODUCER]:"SpanKind.PRODUCER",[S.SpanKind.CONSUMER]:"SpanKind.CONSUMER"}[e]||"SpanKind.INTERNAL"}statusCodeToString(e){return{[S.SpanStatusCode.ERROR]:"StatusCode.ERROR",[S.SpanStatusCode.OK]:"StatusCode.OK",[S.SpanStatusCode.UNSET]:"StatusCode.UNSET"}[e]||"StatusCode.UNSET"}toNanoseconds(e){return(BigInt(e[0])*BigInt(1e9)+BigInt(e[1])).toString()}export(e){if(!this.enableTracing)return Promise.resolve(q.ExportResultCode.SUCCESS);let t=e.map(o=>{var n;return{name:o.name,context:{trace_id:o.spanContext().traceId,span_id:o.spanContext().spanId,trace_state:((n=o.spanContext().traceState)==null?void 0:n.serialize())||""},kind:this.spanKindToString(o.kind),parent_id:o.parentSpanId||null,start_time:this.toNanoseconds(o.startTime),end_time:this.toNanoseconds(o.endTime),status:{status_code:this.statusCodeToString(o.status.code),description:o.status.message},attributes:this.attributesToObject(o.attributes),events:o.events.map(s=>({name:s.name,timestamp:this.toNanoseconds(s.time),attributes:this.attributesToObject(s.attributes)})),links:o.links.map(s=>({context:s.context,attributes:this.attributesToObject(s.attributes)})),resource:{attributes:O(g({},o.resource.attributes),{"service.name":"prompt-layer-js"}),schema_url:""}}});return fetch(this.url,{method:"POST",headers:{"Content-Type":"application/json","X-API-KEY":this.apiKey||""},body:JSON.stringify({spans:t})}).then(o=>o.ok?q.ExportResultCode.SUCCESS:(console.error(`Error exporting spans
  HTTP error! status: ${o.status}`),q.ExportResultCode.FAILED)).catch(o=>(console.error("Error exporting spans:",o),q.ExportResultCode.FAILED))}shutdown(){return Promise.resolve()}},ke=z;var C=(r="promptlayer-tracer")=>Oe.trace.getTracer(r),Ee=(r,e)=>{let t=new Ne.NodeTracerProvider,o=new ke(r,e),n=new Ae.SimpleSpanProcessor(o);t.addSpanProcessor(n),t.register()};var Ze=C(),B=(r,e,t="",o="openai")=>{let n={construct:(s,a)=>{let c=Reflect.construct(s,a);return Object.defineProperties(c,{function_name:{value:t,writable:!0},provider:{value:o}}),new Proxy(c,n)},get:(s,a,c)=>{let i=s[a],p=`${Reflect.get(s,"function_name")}.${a.toString()}`;return typeof i=="object"?(Object.defineProperties(i,{function_name:{value:p,writable:!0},provider:{value:o}}),new Proxy(i,n)):typeof i=="function"?(...u)=>{var k,_,P,y;let f=new Date().toISOString(),h=Reflect.get(s,"provider"),d=(k=u[0])==null?void 0:k.return_pl_id,b=(_=u[0])==null?void 0:_.pl_tags;return(P=u[0])==null||delete P.return_pl_id,(y=u[0])==null||delete y.pl_tags,Ze.startActiveSpan(`${h}.${p}`,m=>l(void 0,null,function*(){try{m.setAttribute("function_input",JSON.stringify(u));let R=Reflect.apply(i,s,u),M=m.spanContext().spanId;return R instanceof Promise?new Promise((N,L)=>{R.then(E=>l(void 0,null,function*(){let W=yield ae(r,{api_key:r,provider_type:h,function_name:p,request_start_time:f,request_end_time:new Date().toISOString(),request_response:E,kwargs:u[0],return_pl_id:d,tags:b,span_id:M});m.setAttribute("function_output",JSON.stringify(W)),m.setAttribute("response_status","success"),m.end(),N(W)})).catch(E=>{m.recordException(E),m.setAttribute("response_status","error"),m.end(),L(E)})}):(m.setAttribute("function_output",JSON.stringify(R)),m.setAttribute("response_status","success"),m.end(),R)}catch(R){throw m.recordException(R),m.setAttribute("response_status","error"),m.end(),R}}))}:Reflect.get(s,a,c)}};return new Proxy(e,n)};var G=x(require("@opentelemetry/api"));var Ie=(r,e,t)=>function(...o){let n=C(),s=a=>{try{t&&Object.entries(t).forEach(([i,p])=>{a.setAttribute(i,p)}),a.setAttribute("function_input",JSON.stringify(o));let c=e(...o);return c instanceof Promise?c.then(i=>(a.setAttribute("function_output",JSON.stringify(i)),a.setStatus({code:G.SpanStatusCode.OK}),i)).catch(i=>{throw Ce(a,i,o),i}).finally(()=>a.end()):(a.setAttribute("function_output",JSON.stringify(c)),a.setStatus({code:G.SpanStatusCode.OK}),a.end(),c)}catch(c){throw Ce(a,c,o),c}};return n.startActiveSpan(r,s)},Ce=(r,e,t)=>{r.setAttribute("function_input",JSON.stringify(t)),r.setStatus({code:G.SpanStatusCode.ERROR,message:e instanceof Error?e.message:"Unknown error"}),r.end()};var K=class{constructor(e){this.get=(e,t)=>fe(this.apiKey,e,t);this.publish=e=>de(this.apiKey,e);this.all=e=>he(this.apiKey,e);this.apiKey=e}};var et=(r,e)=>{if(!(e.metadata instanceof Object))throw new Error("Please provide a dictionary of metadata.");for(let[t,o]of Object.entries(e.metadata))if(typeof t!="string"||typeof o!="string")throw new Error("Please provide a dictionary of metadata with key value pair of strings.");return ce(r,e)},tt=(r,e)=>{if(typeof e.score!="number")throw new Error("Score must be a number");if(e.score<0||e.score>100)throw new Error("Score must be a number between 0 and 100.");return pe(r,e)},rt=(r,e)=>{if(!(e.prompt_input_variables instanceof Object))throw new Error("Prompt template input variable dictionary not provided.");return 
le(r,e)},ot=(r,e)=>ue(r,e),j=class{constructor(e){this.group=e=>ot(this.apiKey,e);this.metadata=e=>et(this.apiKey,e);this.prompt=e=>rt(this.apiKey,e);this.score=e=>tt(this.apiKey,e);this.apiKey=e}};var qe=x(require("@opentelemetry/api"));var nt={openai:{chat:{function_name:"openai.chat.completions.create",stream_function:$},completion:{function_name:"openai.completions.create",stream_function:F}},anthropic:{chat:{function_name:"anthropic.messages.create",stream_function:Y},completion:{function_name:"anthropic.completions.create",stream_function:_e}},"openai.azure":{chat:{function_name:"openai.AzureOpenAI.chat.completions.create",stream_function:$},completion:{function_name:"openai.AzureOpenAI.completions.create",stream_function:F}}},st={openai:Re,anthropic:Se,"openai.azure":Te},at=r=>{if(!r||typeof r!="object"||Array.isArray(r))return!1;let e=["status","value","error_message","raw_error_message","is_output_node"];return Object.values(r).every(o=>typeof o!="object"||o===null?!1:e.every(n=>n in o))},X=class{constructor({apiKey:e=process.env.PROMPTLAYER_API_KEY,enableTracing:t=!1}={}){if(e===void 0)throw new Error("PromptLayer API key not provided. Please set the PROMPTLAYER_API_KEY environment variable or pass the api_key parameter.");this.apiKey=e,this.enableTracing=t,this.templates=new K(e),this.group=new v(e),this.track=new j(e),this.wrapWithSpan=Ie,t&&Ee(t,e)}get Anthropic(){try{let e=require("@anthropic-ai/sdk").default;return B(this.apiKey,e,"anthropic","anthropic")}catch(e){console.error("To use the Anthropic module, you must install the @anthropic-ai/sdk package.")}}get OpenAI(){try{let e=require("openai").default;return B(this.apiKey,e,"openai","openai")}catch(e){console.error("To use the OpenAI module, you must install the @openai/api package.")}}run(u){return l(this,arguments,function*({promptName:e,promptVersion:t,promptReleaseLabel:o,inputVariables:n,tags:s,metadata:a,groupId:c,modelParameterOverrides:i,stream:p=!1}){return C().startActiveSpan("PromptLayer Run",h=>l(this,null,function*(){try{let d={promptName:e,promptVersion:t,promptReleaseLabel:o,inputVariables:n,tags:s,metadata:a,groupId:c,modelParameterOverrides:i,stream:p};h.setAttribute("function_input",JSON.stringify(d));let b=n,k={label:o,version:t,metadata_filters:a};n&&(k.input_variables=n);let _=yield this.templates.get(e,k);if(!_)throw new Error("Prompt not found");let P=_.prompt_template;if(!_.llm_kwargs)throw new Error(`Prompt '${e}' does not have any LLM kwargs associated with it.`);let y=_.metadata;if(!y)throw new Error(`Prompt '${e}' does not have any metadata associated with it.`);let m=y.model;if(!m)throw new Error(`Prompt '${e}' does not have a model parameters associated with it.`);let R=m.provider,M=new Date().toISOString(),N=g(g({},_.llm_kwargs),i||{}),L=nt[R][P.type],E=L.function_name,W=L.stream_function,Le=st[R],H=_.provider_base_url;H&&(N.baseURL=H.url),N.stream=p,p&&["openai","openai.azure"].includes(R)&&(N.stream_options={include_usage:!0});let U=yield Le(_,N),V=We=>{let xe=new Date().toISOString();return ge(g({function_name:E,provider_type:R,args:[],kwargs:N,tags:s,request_start_time:M,request_end_time:xe,api_key:this.apiKey,metadata:a,prompt_id:_.id,prompt_version:_.version,prompt_input_variables:b,group_id:c,return_prompt_blueprint:!0,span_id:h.spanContext().spanId},We))};if(p)return we(U,V,W);let Q=yield V({request_response:U}),Z={request_id:Q.request_id,raw_response:U,prompt_blueprint:Q.prompt_blueprint};return h.setAttribute("function_output",JSON.stringify(Z)),Z}catch(d){throw 
h.setStatus({code:qe.SpanStatusCode.ERROR,message:d instanceof Error?d.message:"Unknown error"}),d}finally{h.end()}}))})}runWorkflow(c){return l(this,arguments,function*({workflowName:e,inputVariables:t={},metadata:o={},workflowLabelName:n=null,workflowVersion:s=null,returnAllOutputs:a=!1}){try{let i=yield ye({workflow_name:e,input_variables:t,metadata:o,workflow_label_name:n,workflow_version_number:s,return_all_outputs:a,api_key:this.apiKey});if(!a&&at(i)){let u=Object.values(i).filter(h=>h.is_output_node===!0);if(u.length===0)throw new Error(JSON.stringify(i,null,2));if(!u.some(h=>h.status==="SUCCESS"))throw new Error(JSON.stringify(i,null,2))}return i}catch(i){throw i instanceof Error?(console.error("Error running workflow:",i.message),new Error(`Error running workflow: ${i.message}`)):(console.error("Unknown error running workflow:",i),new Error("Unknown error running workflow"))}})}logRequest(e){return l(this,null,function*(){return be(this.apiKey,e)})}};0&&(module.exports={PromptLayer});
  //# sourceMappingURL=index.js.map
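Taken together, the bundle above exports a single PromptLayer class (module.exports = { PromptLayer }) whose constructor accepts apiKey and enableTracing and which exposes run, runWorkflow, and logRequest. Below is a minimal usage sketch based on the signatures visible in dist/index.js, assuming the package is imported under its published name promptlayer; the prompt name, workflow name, and input variables are placeholders rather than values taken from the package.

// Minimal usage sketch of the exported client, based on the constructor
// and method signatures visible in dist/index.js above.
import { PromptLayer } from "promptlayer";

// Throws if no API key is provided via the option or PROMPTLAYER_API_KEY.
const client = new PromptLayer({
  apiKey: process.env.PROMPTLAYER_API_KEY,
  enableTracing: false,
});

async function main() {
  // run(): fetches a prompt template, calls the mapped provider, tracks the
  // request, and returns { request_id, raw_response, prompt_blueprint }.
  const result = await client.run({
    promptName: "example-prompt",               // placeholder
    inputVariables: { topic: "observability" }, // placeholder
    stream: false,
  });
  console.log(result.request_id);

  // runWorkflow(): starts a workflow run and waits for completion over an
  // Ably channel before resolving.
  const output = await client.runWorkflow({
    workflowName: "example-workflow",        // placeholder
    inputVariables: { query: "hello" },      // placeholder
    returnAllOutputs: false,
  });
  console.log(output);
}

main().catch(console.error);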
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/utils.ts","../src/groups.ts","../src/tracing.ts","../src/span-exporter.ts","../src/promptlayer.ts","../src/span-wrapper.ts","../src/templates.ts","../src/track.ts"],"sourcesContent":["import { GroupManager } from \"@/groups\";\nimport { promptLayerBase } from \"@/promptlayer\";\nimport { wrapWithSpan } from \"@/span-wrapper\";\nimport { TemplateManager } from \"@/templates\";\nimport { getTracer, setupTracing } from \"@/tracing\";\nimport { TrackManager } from \"@/track\";\nimport { GetPromptTemplateParams, LogRequest, RunRequest, WorkflowRequest, WorkflowResponse } from \"@/types\";\nimport {\n anthropicRequest,\n anthropicStreamCompletion,\n anthropicStreamMessage,\n azureOpenAIRequest,\n openaiRequest,\n openaiStreamChat,\n openaiStreamCompletion,\n runWorkflowRequest,\n streamResponse,\n trackRequest,\n utilLogRequest,\n} from \"@/utils\";\nimport * as opentelemetry from \"@opentelemetry/api\";\n\nconst MAP_PROVIDER_TO_FUNCTION_NAME = {\n openai: {\n chat: {\n function_name: \"openai.chat.completions.create\",\n stream_function: openaiStreamChat,\n },\n completion: {\n function_name: \"openai.completions.create\",\n stream_function: openaiStreamCompletion,\n },\n },\n anthropic: {\n chat: {\n function_name: \"anthropic.messages.create\",\n stream_function: anthropicStreamMessage,\n },\n completion: {\n function_name: \"anthropic.completions.create\",\n stream_function: anthropicStreamCompletion,\n },\n },\n \"openai.azure\": {\n chat: {\n function_name: \"openai.AzureOpenAI.chat.completions.create\",\n stream_function: openaiStreamChat,\n },\n completion: {\n function_name: \"openai.AzureOpenAI.completions.create\",\n stream_function: openaiStreamCompletion,\n },\n },\n};\n\nconst MAP_PROVIDER_TO_FUNCTION: Record<string, any> = {\n openai: openaiRequest,\n anthropic: anthropicRequest,\n \"openai.azure\": azureOpenAIRequest,\n};\n\nexport interface ClientOptions {\n apiKey?: string;\n enableTracing?: boolean;\n workspaceId?: number;\n}\n\nconst isWorkflowResultsDict = (obj: any): boolean => {\n if (!obj || typeof obj !== \"object\" || Array.isArray(obj)) {\n return false;\n }\n\n const REQUIRED_KEYS = [\n \"status\",\n \"value\",\n \"error_message\",\n \"raw_error_message\",\n \"is_output_node\",\n ];\n const values = Object.values(obj);\n\n return values.every((val) => {\n if (typeof val !== \"object\" || val === null) return false;\n return REQUIRED_KEYS.every((key) => key in val);\n });\n}\n\nexport class PromptLayer {\n apiKey: string;\n templates: TemplateManager;\n group: GroupManager;\n track: TrackManager;\n enableTracing: boolean;\n wrapWithSpan: typeof wrapWithSpan;\n\n constructor({\n apiKey = process.env.PROMPTLAYER_API_KEY,\n enableTracing = false,\n }: ClientOptions = {}) {\n if (apiKey === undefined) {\n throw new Error(\n \"PromptLayer API key not provided. 
Please set the PROMPTLAYER_API_KEY environment variable or pass the api_key parameter.\"\n );\n }\n\n this.apiKey = apiKey;\n this.enableTracing = enableTracing;\n this.templates = new TemplateManager(apiKey);\n this.group = new GroupManager(apiKey);\n this.track = new TrackManager(apiKey);\n this.wrapWithSpan = wrapWithSpan;\n\n if (enableTracing) {\n setupTracing(enableTracing, apiKey);\n }\n }\n\n get Anthropic() {\n try {\n const module = require(\"@anthropic-ai/sdk\").default;\n return promptLayerBase(this.apiKey, module, \"anthropic\", \"anthropic\");\n } catch (e) {\n console.error(\n \"To use the Anthropic module, you must install the @anthropic-ai/sdk package.\"\n );\n }\n }\n\n get OpenAI() {\n try {\n const module = require(\"openai\").default;\n return promptLayerBase(this.apiKey, module, \"openai\", \"openai\");\n } catch (e) {\n console.error(\n \"To use the OpenAI module, you must install the @openai/api package.\"\n );\n }\n }\n\n async run({\n promptName,\n promptVersion,\n promptReleaseLabel,\n inputVariables,\n tags,\n metadata,\n groupId,\n modelParameterOverrides,\n stream = false,\n }: RunRequest) {\n const tracer = getTracer();\n\n return tracer.startActiveSpan(\"PromptLayer Run\", async (span) => {\n try {\n const functionInput = {\n promptName,\n promptVersion,\n promptReleaseLabel,\n inputVariables,\n tags,\n metadata,\n groupId,\n modelParameterOverrides,\n stream,\n };\n span.setAttribute(\"function_input\", JSON.stringify(functionInput));\n\n const prompt_input_variables = inputVariables;\n const templateGetParams: GetPromptTemplateParams = {\n label: promptReleaseLabel,\n version: promptVersion,\n metadata_filters: metadata,\n };\n if (inputVariables) templateGetParams.input_variables = inputVariables;\n\n const promptBlueprint = await this.templates.get(\n promptName,\n templateGetParams\n );\n\n if (!promptBlueprint) throw new Error(\"Prompt not found\");\n\n const promptTemplate = promptBlueprint.prompt_template;\n if (!promptBlueprint.llm_kwargs) {\n throw new Error(\n `Prompt '${promptName}' does not have any LLM kwargs associated with it.`\n );\n }\n\n const promptBlueprintMetadata = promptBlueprint.metadata;\n if (!promptBlueprintMetadata) {\n throw new Error(\n `Prompt '${promptName}' does not have any metadata associated with it.`\n );\n }\n\n const promptBlueprintModel = promptBlueprintMetadata.model;\n if (!promptBlueprintModel) {\n throw new Error(\n `Prompt '${promptName}' does not have a model parameters associated with it.`\n );\n }\n\n const provider_type = promptBlueprintModel.provider;\n\n const request_start_time = new Date().toISOString();\n const kwargs = {\n ...promptBlueprint.llm_kwargs,\n ...(modelParameterOverrides || {}),\n };\n const config =\n MAP_PROVIDER_TO_FUNCTION_NAME[\n provider_type as keyof typeof MAP_PROVIDER_TO_FUNCTION_NAME\n ][promptTemplate.type];\n const function_name = config.function_name;\n\n const stream_function = config.stream_function;\n const request_function = MAP_PROVIDER_TO_FUNCTION[provider_type];\n const provider_base_url = promptBlueprint.provider_base_url;\n if (provider_base_url) {\n kwargs[\"baseURL\"] = provider_base_url.url;\n }\n kwargs[\"stream\"] = stream;\n if (stream && [\"openai\", \"openai.azure\"].includes(provider_type)) {\n kwargs[\"stream_options\"] = { include_usage: true };\n }\n\n const response = await request_function(promptBlueprint, kwargs);\n\n const _trackRequest = (body: object) => {\n const request_end_time = new Date().toISOString();\n return trackRequest({\n function_name,\n 
provider_type,\n args: [],\n kwargs,\n tags,\n request_start_time,\n request_end_time,\n api_key: this.apiKey,\n metadata,\n prompt_id: promptBlueprint.id,\n prompt_version: promptBlueprint.version,\n prompt_input_variables,\n group_id: groupId,\n return_prompt_blueprint: true,\n span_id: span.spanContext().spanId,\n ...body,\n });\n };\n\n if (stream)\n return streamResponse(response, _trackRequest, stream_function);\n const requestLog = await _trackRequest({ request_response: response });\n\n const functionOutput = {\n request_id: requestLog.request_id,\n raw_response: response,\n prompt_blueprint: requestLog.prompt_blueprint,\n };\n span.setAttribute(\"function_output\", JSON.stringify(functionOutput));\n\n return functionOutput;\n } catch (error) {\n span.setStatus({\n code: opentelemetry.SpanStatusCode.ERROR,\n message: error instanceof Error ? error.message : \"Unknown error\",\n });\n throw error;\n } finally {\n span.end();\n }\n });\n }\n\n async runWorkflow({\n workflowName,\n inputVariables = {},\n metadata = {},\n workflowLabelName = null,\n workflowVersion = null, // This is the version number, not the version ID\n returnAllOutputs = false,\n }: WorkflowRequest): Promise<WorkflowResponse> {\n try {\n const result = await runWorkflowRequest({\n workflow_name: workflowName,\n input_variables: inputVariables,\n metadata,\n workflow_label_name: workflowLabelName,\n workflow_version_number: workflowVersion,\n return_all_outputs: returnAllOutputs,\n api_key: this.apiKey,\n });\n\n if (!returnAllOutputs) {\n if (isWorkflowResultsDict(result)) {\n const nodeValues = Object.values(result);\n\n const outputNodes = nodeValues.filter(\n (node: any) => node.is_output_node === true\n );\n\n if (outputNodes.length === 0) {\n throw new Error(JSON.stringify(result, null, 2));\n }\n\n const anyOutputSuccess = outputNodes.some(\n (node: any) => node.status === \"SUCCESS\"\n );\n if (!anyOutputSuccess) {\n throw new Error(JSON.stringify(result, null, 2));\n }\n }\n }\n\n return result;\n } catch (error) {\n if (error instanceof Error) {\n console.error(\"Error running workflow:\", error.message);\n throw new Error(`Error running workflow: ${error.message}`);\n } else {\n console.error(\"Unknown error running workflow:\", error);\n throw new Error(\"Unknown error running workflow\");\n }\n }\n }\n\n async logRequest(body: LogRequest) {\n return utilLogRequest(this.apiKey, body);\n }\n}\n","import {\n GetPromptTemplateParams,\n GetPromptTemplateResponse,\n ListPromptTemplatesResponse,\n LogRequest,\n Pagination,\n PublishPromptTemplate,\n PublishPromptTemplateResponse,\n RequestLog,\n RunWorkflowRequestParams,\n TrackGroup,\n TrackMetadata,\n TrackPrompt,\n TrackRequest,\n TrackScore,\n WorkflowResponse,\n} from \"@/types\";\nimport type TypeAnthropic from \"@anthropic-ai/sdk\";\nimport {\n Completion as AnthropicCompletion,\n Message,\n MessageStreamEvent,\n} from \"@anthropic-ai/sdk/resources\";\nimport Ably from \"ably\";\nimport type TypeOpenAI from \"openai\";\nimport {\n ChatCompletion,\n ChatCompletionChunk,\n Completion,\n} from \"openai/resources\";\n\nexport const URL_API_PROMPTLAYER =\n process.env.URL_API_PROMPTLAYER || \"https://api.promptlayer.com\";\n\nconst promptlayerApiHandler = async <Item>(\n apiKey: string,\n body: TrackRequest & {\n request_response: AsyncIterable<Item> | any;\n }\n) => {\n const isGenerator = body.request_response[Symbol.asyncIterator] !== undefined;\n if (isGenerator) {\n return proxyGenerator(apiKey, body.request_response, body);\n }\n return await 
promptLayerApiRequest(apiKey, body);\n};\n\nconst promptLayerApiRequest = async (apiKey: string, body: TrackRequest) => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/track-request`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify(body),\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While logging your request, PromptLayer experienced the following error:\"\n );\n }\n if (data && body.return_pl_id) {\n return [body.request_response, data.request_id];\n }\n } catch (e) {\n console.warn(\n `WARNING: While logging your request PromptLayer had the following error: ${e}`\n );\n }\n return body.request_response;\n};\n\nconst promptLayerTrackMetadata = async (\n apiKey: string,\n body: TrackMetadata\n): Promise<boolean> => {\n try {\n const response = await fetch(\n `${URL_API_PROMPTLAYER}/library-track-metadata`,\n {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n ...body,\n api_key: apiKey,\n }),\n }\n );\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While logging metadata to your request, PromptLayer experienced the following error\"\n );\n return false;\n }\n } catch (e) {\n console.warn(\n `WARNING: While logging metadata to your request, PromptLayer experienced the following error: ${e}`\n );\n return false;\n }\n return true;\n};\n\nconst promptLayerTrackScore = async (\n apiKey: string,\n body: TrackScore\n): Promise<boolean> => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/library-track-score`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n ...body,\n api_key: apiKey,\n }),\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While scoring your request, PromptLayer experienced the following error\"\n );\n return false;\n }\n } catch (e) {\n console.warn(\n `WARNING: While scoring your request, PromptLayer experienced the following error: ${e}`\n );\n return false;\n }\n return true;\n};\n\nconst promptLayerTrackPrompt = async (\n apiKey: string,\n body: TrackPrompt\n): Promise<boolean> => {\n try {\n const response = await fetch(\n `${URL_API_PROMPTLAYER}/library-track-prompt`,\n {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n ...body,\n api_key: apiKey,\n }),\n }\n );\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While associating your request with a prompt template, PromptLayer experienced the following error\"\n );\n return false;\n }\n } catch (e) {\n console.warn(\n `WARNING: While associating your request with a prompt template, PromptLayer experienced the following error: ${e}`\n );\n return false;\n }\n return true;\n};\n\nconst promptLayerTrackGroup = async (\n apiKey: string,\n body: TrackGroup\n): Promise<boolean> => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/track-group`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n ...body,\n api_key: apiKey,\n }),\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While associating your request with a group, PromptLayer experienced the following 
error\"\n );\n return false;\n }\n } catch (e) {\n console.warn(\n `WARNING: While associating your request with a group, PromptLayer experienced the following error: ${e}`\n );\n return false;\n }\n return true;\n};\n\nconst promptLayerCreateGroup = async (\n apiKey: string\n): Promise<number | boolean> => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/create-group`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n api_key: apiKey,\n }),\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While creating a group PromptLayer had the following error\"\n );\n return false;\n }\n return data.id;\n } catch (e) {\n console.warn(\n `WARNING: While creating a group PromptLayer had the following error: ${e}`\n );\n return false;\n }\n};\n\nconst getPromptTemplate = async (\n apiKey: string,\n promptName: string,\n params?: Partial<GetPromptTemplateParams>\n) => {\n try {\n const url = new URL(\n `${URL_API_PROMPTLAYER}/prompt-templates/${promptName}`\n );\n const response = await fetch(url, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n \"X-API-KEY\": apiKey,\n },\n body: JSON.stringify(params),\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While fetching a prompt template PromptLayer had the following error\"\n );\n return null;\n }\n if (data.warning) {\n console.warn(\n `WARNING: While tracking your prompt PromptLayer had the following error: ${data.warning}`\n );\n }\n return data as Promise<GetPromptTemplateResponse>;\n } catch (e) {\n console.warn(\n `WARNING: While fetching a prompt template PromptLayer had the following error: ${e}`\n );\n return null;\n }\n};\n\nconst publishPromptTemplate = async (\n apiKey: string,\n body: PublishPromptTemplate\n) => {\n try {\n const response = await fetch(\n `${URL_API_PROMPTLAYER}/rest/prompt-templates`,\n {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n \"X-API-KEY\": apiKey,\n },\n body: JSON.stringify({\n prompt_template: { ...body },\n prompt_version: { ...body },\n release_labels: body.release_labels ? body.release_labels : undefined,\n }),\n }\n );\n const data = await response.json();\n if (response.status === 400) {\n warnOnBadResponse(\n data,\n \"WARNING: While publishing a prompt template PromptLayer had the following error\"\n );\n }\n return data as Promise<PublishPromptTemplateResponse>;\n } catch (e) {\n console.warn(\n `WARNING: While publishing a prompt template PromptLayer had the following error: ${e}`\n );\n }\n};\n\nconst getAllPromptTemplates = async (\n apiKey: string,\n params?: Partial<Pagination>\n) => {\n try {\n const url = new URL(`${URL_API_PROMPTLAYER}/prompt-templates`);\n Object.entries(params || {}).forEach(([key, value]) =>\n url.searchParams.append(key, value.toString())\n );\n const response = await fetch(url, {\n headers: {\n \"Content-Type\": \"application/json\",\n \"X-API-KEY\": apiKey,\n },\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While fetching all prompt templates PromptLayer had the following error\"\n );\n return null;\n }\n return (data.items ?? 
[]) as Promise<Array<ListPromptTemplatesResponse>>;\n } catch (e) {\n console.warn(\n `WARNING: While fetching all prompt templates PromptLayer had the following error: ${e}`\n );\n return null;\n }\n};\n\nexport const runWorkflowRequest = async ({\n workflow_name,\n input_variables,\n metadata = {},\n workflow_label_name = null,\n workflow_version_number = null,\n return_all_outputs = false,\n api_key,\n timeout = 120000, // Default timeout is 2 minutes in milliseconds\n}: RunWorkflowRequestParams): Promise<WorkflowResponse> => {\n const payload = {\n input_variables,\n metadata,\n workflow_label_name,\n workflow_version_number,\n return_all_outputs,\n };\n\n const headers = {\n \"X-API-KEY\": api_key,\n \"Content-Type\": \"application/json\",\n };\n\n try {\n // Start the workflow by making a POST request\n const response = await fetch(\n `${URL_API_PROMPTLAYER}/workflows/${encodeURIComponent(\n workflow_name\n )}/run`,\n {\n method: \"POST\",\n headers: headers,\n body: JSON.stringify(payload),\n }\n );\n\n if (response.status !== 201) {\n const errorData = await response.json().catch(() => ({}));\n return {\n success: false,\n message: `Failed to run workflow: ${\n errorData.error || response.statusText\n }`,\n };\n }\n\n const result = await response.json();\n if (result.warning) {\n console.warn(`WARNING: ${result.warning}`);\n }\n const execution_id = result.workflow_version_execution_id;\n if (!execution_id) {\n console.log(\"No execution ID returned from workflow run\");\n return { success: false, message: \"Failed to run workflow\" };\n }\n\n const channel_name = `workflow_updates:${execution_id}`;\n\n // Request a token to subscribe to the channel\n const ws_response = await fetch(\n `${URL_API_PROMPTLAYER}/ws-token-request-library?capability=${channel_name}`,\n {\n method: \"POST\",\n headers: headers,\n }\n );\n\n const ws_token_response = await ws_response.json();\n\n const ably_token = ws_token_response.token_details.token;\n\n // Initialize Ably client using the Promise-based client\n const ably = new Ably.Realtime({ token: ably_token });\n\n try {\n // Wait for the workflow to complete and get the final output\n const final_output = await waitForWorkflowCompletion(\n ably,\n channel_name,\n timeout\n );\n ably.close();\n return final_output;\n } finally {\n // Ensure the Ably client is closed in all cases\n ably.close();\n }\n } catch (error) {\n console.error(\n `Failed to run workflow: ${\n error instanceof Error ? 
error.message : error\n }`\n );\n throw error;\n }\n};\n\nasync function waitForWorkflowCompletion(\n ably: Ably.Realtime,\n channel_name: string,\n timeout: number\n): Promise<any> {\n const channel = ably.channels.get(channel_name);\n\n return new Promise(async (resolve, reject) => {\n let results: any = null;\n\n const messageListener = (message: Ably.Message) => {\n if (message.name === \"SET_WORKFLOW_COMPLETE\") {\n const message_data = JSON.parse(message.data as string);\n results = message_data.final_output;\n clearTimeout(timer);\n channel.unsubscribe(\"SET_WORKFLOW_COMPLETE\", messageListener);\n resolve(results);\n }\n };\n\n // Set up a timeout to reject the promise if no message is received in time\n const timer = setTimeout(() => {\n channel.unsubscribe(\"SET_WORKFLOW_COMPLETE\", messageListener);\n reject(new Error(\"Workflow execution did not complete properly (timeout)\"));\n }, timeout);\n\n try {\n // Subscribe to the channel to receive updates\n await channel.subscribe(\"SET_WORKFLOW_COMPLETE\", messageListener);\n } catch (err) {\n clearTimeout(timer);\n reject(err);\n }\n });\n}\n\nconst openaiStreamChat = (results: ChatCompletionChunk[]): ChatCompletion => {\n let content: ChatCompletion.Choice[\"message\"][\"content\"] = null;\n let functionCall: ChatCompletion.Choice[\"message\"][\"function_call\"] =\n undefined;\n const response: ChatCompletion = {\n id: \"\",\n choices: [],\n created: Date.now(),\n model: \"\",\n object: \"chat.completion\",\n };\n const lastResult = results.at(-1);\n if (!lastResult) return response;\n let toolCalls: ChatCompletion.Choice[\"message\"][\"tool_calls\"] = undefined;\n for (const result of results) {\n if (result.choices.length === 0) continue;\n const delta = result.choices[0].delta;\n\n if (delta.content) {\n content = `${content || \"\"}${delta.content || \"\"}`;\n }\n if (delta.function_call) {\n functionCall = {\n name: `${functionCall ? functionCall.name : \"\"}${\n delta.function_call.name || \"\"\n }`,\n arguments: `${functionCall ? functionCall.arguments : \"\"}${\n delta.function_call.arguments || \"\"\n }`,\n };\n }\n const toolCall = delta.tool_calls?.[0];\n if (toolCall) {\n toolCalls = toolCalls || [];\n const lastToolCall = toolCalls.at(-1);\n if (!lastToolCall || toolCall.id) {\n toolCalls.push({\n id: toolCall.id || \"\",\n type: toolCall.type || \"function\",\n function: {\n name: toolCall.function?.name || \"\",\n arguments: toolCall.function?.arguments || \"\",\n },\n });\n continue;\n }\n lastToolCall.function.name = `${lastToolCall.function.name}${\n toolCall.function?.name || \"\"\n }`;\n lastToolCall.function.arguments = `${lastToolCall.function.arguments}${\n toolCall.function?.arguments || \"\"\n }`;\n }\n }\n const firstChoice = results[0].choices.at(0);\n response.choices.push({\n finish_reason: firstChoice?.finish_reason ?? \"stop\",\n index: firstChoice?.index ?? 0,\n logprobs: firstChoice?.logprobs ?? null,\n message: {\n role: \"assistant\",\n content,\n function_call: functionCall ? functionCall : undefined,\n tool_calls: toolCalls ? toolCalls : undefined,\n refusal: firstChoice?.delta.refusal ?? 
null,\n },\n });\n response.id = lastResult.id;\n response.model = lastResult.model;\n response.created = lastResult.created;\n response.system_fingerprint = lastResult.system_fingerprint;\n response.usage = lastResult.usage;\n return response;\n};\n\nconst anthropicStreamMessage = (results: MessageStreamEvent[]): Message => {\n let response: Message = {\n id: \"\",\n model: \"\",\n content: [],\n role: \"assistant\",\n type: \"message\",\n stop_reason: \"stop_sequence\",\n stop_sequence: null,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n };\n const lastResult = results.at(-1);\n if (!lastResult) return response;\n let content = \"\";\n for (const result of results) {\n switch (result.type) {\n case \"message_start\": {\n response = {\n ...result.message,\n };\n break;\n }\n case \"content_block_delta\": {\n if (result.delta.type === \"text_delta\")\n content = `${content}${result.delta.text}`;\n }\n case \"message_delta\": {\n if (\"usage\" in result)\n response.usage.output_tokens = result.usage.output_tokens;\n if (\"stop_reason\" in result.delta)\n response.stop_reason = result.delta.stop_reason;\n }\n default: {\n break;\n }\n }\n }\n response.content.push({\n type: \"text\",\n text: content,\n });\n return response;\n};\n\nconst cleaned_result = (\n results: any[],\n function_name = \"openai.chat.completions.create\"\n) => {\n if (\"completion\" in results[0]) {\n return results.reduce(\n (prev, current) => ({\n ...current,\n completion: `${prev.completion}${current.completion}`,\n }),\n {}\n );\n }\n\n if (function_name === \"anthropic.messages.create\")\n return anthropicStreamMessage(results);\n\n if (\"text\" in results[0].choices[0]) {\n let response = \"\";\n for (const result of results) {\n response = `${response}${result.choices[0].text}`;\n }\n const final_result = structuredClone(results.at(-1));\n final_result.choices[0].text = response;\n return final_result;\n }\n\n if (\"delta\" in results[0].choices[0]) {\n const response = openaiStreamChat(results);\n response.choices[0] = {\n ...response.choices[0],\n ...response.choices[0].message,\n };\n return response;\n }\n\n return \"\";\n};\n\nasync function* proxyGenerator<Item>(\n apiKey: string,\n generator: AsyncIterable<Item>,\n body: TrackRequest\n) {\n const results = [];\n for await (const value of generator) {\n yield body.return_pl_id ? 
[value, null] : value;\n results.push(value);\n }\n const request_response = cleaned_result(results, body.function_name);\n const response = await promptLayerApiRequest(apiKey, {\n ...body,\n request_response,\n request_end_time: new Date().toISOString(),\n });\n if (response) {\n if (body.return_pl_id) {\n const request_id = (response as any)[1];\n const lastResult = results.at(-1);\n yield [lastResult, request_id];\n }\n }\n}\n\nconst warnOnBadResponse = (request_response: any, main_message: string) => {\n try {\n console.warn(`${main_message}: ${request_response.message}`);\n } catch (e) {\n console.warn(`${main_message}: ${request_response}`);\n }\n};\n\nconst trackRequest = async (body: TrackRequest) => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/track-request`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify(body),\n });\n if (response.status !== 200)\n warnOnBadResponse(\n response,\n \"WARNING: While logging your request, PromptLayer experienced the following error:\"\n );\n return response.json();\n } catch (e) {\n console.warn(\n `WARNING: While logging your request PromptLayer had the following error: ${e}`\n );\n }\n return {};\n};\n\nconst openaiStreamCompletion = (results: Completion[]) => {\n const response: Completion = {\n id: \"\",\n choices: [\n {\n finish_reason: \"stop\",\n index: 0,\n text: \"\",\n logprobs: null,\n },\n ],\n created: Date.now(),\n model: \"\",\n object: \"text_completion\",\n };\n const lastResult = results.at(-1);\n if (!lastResult) return response;\n let text = \"\";\n for (const result of results) {\n if (result.choices.length > 0 && result.choices[0].text) {\n text = `${text}${result.choices[0].text}`;\n }\n }\n response.choices[0].text = text;\n response.id = lastResult.id;\n response.created = lastResult.created;\n response.model = lastResult.model;\n response.system_fingerprint = lastResult.system_fingerprint;\n response.usage = lastResult.usage;\n return response;\n};\n\nconst anthropicStreamCompletion = (results: AnthropicCompletion[]) => {\n const response: AnthropicCompletion = {\n completion: \"\",\n id: \"\",\n model: \"\",\n stop_reason: \"\",\n type: \"completion\",\n };\n const lastResult = results.at(-1);\n if (!lastResult) return response;\n let completion = \"\";\n for (const result of results) {\n completion = `${completion}${result.completion}`;\n }\n response.completion = completion;\n response.id = lastResult.id;\n response.model = lastResult.model;\n response.stop_reason = lastResult.stop_reason;\n return response;\n};\n\nasync function* streamResponse<Item>(\n generator: AsyncIterable<Item>,\n afterStream: (body: object) => any,\n mapResults: any\n) {\n const data: {\n request_id: number | null;\n raw_response: any;\n prompt_blueprint: any;\n } = {\n request_id: null,\n raw_response: null,\n prompt_blueprint: null,\n };\n const results = [];\n for await (const result of generator) {\n results.push(result);\n data.raw_response = result;\n yield data;\n }\n const request_response = mapResults(results);\n const response = await afterStream({ request_response });\n data.request_id = response.request_id;\n data.prompt_blueprint = response.prompt_blueprint;\n yield data;\n}\n\nconst openaiChatRequest = async (client: TypeOpenAI, kwargs: any) => {\n return client.chat.completions.create(kwargs);\n};\n\nconst openaiCompletionsRequest = async (client: TypeOpenAI, kwargs: any) => {\n return client.completions.create(kwargs);\n};\n\nconst 
MAP_TYPE_TO_OPENAI_FUNCTION = {\n chat: openaiChatRequest,\n completion: openaiCompletionsRequest,\n};\n\nconst openaiRequest = async (\n promptBlueprint: GetPromptTemplateResponse,\n kwargs: any\n) => {\n const OpenAI = require(\"openai\").default;\n const client = new OpenAI({\n baseURL: kwargs.baseURL,\n });\n const requestToMake =\n MAP_TYPE_TO_OPENAI_FUNCTION[promptBlueprint.prompt_template.type];\n return requestToMake(client, kwargs);\n};\n\nconst azureOpenAIRequest = async (\n promptBlueprint: GetPromptTemplateResponse,\n kwargs: any\n) => {\n const OpenAI = require(\"openai\").AzureOpenAI;\n const client = new OpenAI({\n endpoint: kwargs.baseURL,\n });\n delete kwargs?.baseURL;\n const requestToMake =\n MAP_TYPE_TO_OPENAI_FUNCTION[promptBlueprint.prompt_template.type];\n return requestToMake(client, kwargs);\n};\n\nconst anthropicChatRequest = async (client: TypeAnthropic, kwargs: any) => {\n return client.messages.create(kwargs);\n};\n\nconst anthropicCompletionsRequest = async (\n client: TypeAnthropic,\n kwargs: any\n) => {\n return client.completions.create(kwargs);\n};\n\nconst MAP_TYPE_TO_ANTHROPIC_FUNCTION = {\n chat: anthropicChatRequest,\n completion: anthropicCompletionsRequest,\n};\n\nconst anthropicRequest = async (\n promptBlueprint: GetPromptTemplateResponse,\n kwargs: any\n) => {\n const Anthropic = require(\"@anthropic-ai/sdk\").default;\n const client = new Anthropic({\n baseURL: kwargs.baseURL,\n });\n const requestToMake =\n MAP_TYPE_TO_ANTHROPIC_FUNCTION[promptBlueprint.prompt_template.type];\n return requestToMake(client, kwargs);\n};\n\nconst utilLogRequest = async (\n apiKey: string,\n body: LogRequest\n): Promise<RequestLog | null> => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/log-request`, {\n method: \"POST\",\n headers: {\n \"X-API-KEY\": apiKey,\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify(body),\n });\n if (response.status !== 201) {\n warnOnBadResponse(\n response,\n \"WARNING: While logging your request PromptLayer had the following error\"\n );\n return null;\n }\n return response.json();\n } catch (e) {\n console.warn(\n `WARNING: While tracking your prompt PromptLayer had the following error: ${e}`\n );\n return null;\n }\n};\n\nexport {\n anthropicRequest,\n anthropicStreamCompletion,\n anthropicStreamMessage,\n azureOpenAIRequest,\n getAllPromptTemplates,\n getPromptTemplate,\n openaiRequest,\n openaiStreamChat,\n openaiStreamCompletion,\n promptlayerApiHandler,\n promptLayerApiRequest,\n promptLayerCreateGroup,\n promptLayerTrackGroup,\n promptLayerTrackMetadata,\n promptLayerTrackPrompt,\n promptLayerTrackScore,\n publishPromptTemplate,\n streamResponse,\n trackRequest,\n utilLogRequest,\n};\n","import { promptLayerCreateGroup } from \"@/utils\";\n\nexport class GroupManager {\n apiKey: string;\n\n constructor(apiKey: string) {\n this.apiKey = apiKey;\n }\n\n create = () => promptLayerCreateGroup(this.apiKey);\n}\n","import * as opentelemetry from '@opentelemetry/api';\nimport {SimpleSpanProcessor} from '@opentelemetry/sdk-trace-base';\nimport {NodeTracerProvider} from '@opentelemetry/sdk-trace-node';\nimport PromptLayerSpanExporter from '@/span-exporter';\n\nexport const getTracer = (name: string = 'promptlayer-tracer') => {\n return opentelemetry.trace.getTracer(name);\n}\n\nexport const setupTracing = (enableTracing: boolean, apiKey?: string) => {\n const provider = new NodeTracerProvider();\n const exporter = new PromptLayerSpanExporter(enableTracing, apiKey);\n const processor = new 
SimpleSpanProcessor(exporter);\n provider.addSpanProcessor(processor);\n provider.register();\n}\n","import {Attributes, SpanKind, SpanStatusCode} from '@opentelemetry/api';\nimport {ReadableSpan, SpanExporter} from '@opentelemetry/sdk-trace-base';\nimport {ExportResultCode} from '@opentelemetry/core';\nimport {URL_API_PROMPTLAYER} from '@/utils';\n\nclass PromptLayerSpanExporter implements SpanExporter {\n private apiKey: string | undefined;\n private enableTracing: boolean;\n private url: string;\n\n constructor(enableTracing: boolean, apiKey?: string) {\n this.apiKey = apiKey || process.env.PROMPTLAYER_API_KEY;\n this.enableTracing = enableTracing;\n this.url = `${URL_API_PROMPTLAYER}/spans-bulk`;\n }\n\n private attributesToObject(attributes: Attributes | undefined): Record<string, any> {\n if (!attributes) return {};\n return Object.fromEntries(Object.entries(attributes));\n }\n\n private spanKindToString(kind: SpanKind): string {\n const kindMap: Record<SpanKind, string> = {\n [SpanKind.INTERNAL]: 'SpanKind.INTERNAL',\n [SpanKind.SERVER]: 'SpanKind.SERVER',\n [SpanKind.CLIENT]: 'SpanKind.CLIENT',\n [SpanKind.PRODUCER]: 'SpanKind.PRODUCER',\n [SpanKind.CONSUMER]: 'SpanKind.CONSUMER',\n };\n return kindMap[kind] || 'SpanKind.INTERNAL';\n }\n\n private statusCodeToString(code: SpanStatusCode): string {\n const statusMap: Record<SpanStatusCode, string> = {\n [SpanStatusCode.ERROR]: 'StatusCode.ERROR',\n [SpanStatusCode.OK]: 'StatusCode.OK',\n [SpanStatusCode.UNSET]: 'StatusCode.UNSET',\n };\n return statusMap[code] || 'StatusCode.UNSET';\n }\n\n private toNanoseconds(time: [number, number]): string {\n return (BigInt(time[0]) * BigInt(1e9) + BigInt(time[1])).toString();\n };\n\n export(spans: ReadableSpan[]): Promise<ExportResultCode> {\n if (!this.enableTracing) {\n return Promise.resolve(ExportResultCode.SUCCESS);\n }\n\n const requestData = spans.map(span => ({\n name: span.name,\n context: {\n trace_id: span.spanContext().traceId,\n span_id: span.spanContext().spanId,\n trace_state: span.spanContext().traceState?.serialize() || '',\n },\n kind: this.spanKindToString(span.kind),\n parent_id: span.parentSpanId || null,\n start_time: this.toNanoseconds(span.startTime),\n end_time: this.toNanoseconds(span.endTime),\n status: {\n status_code: this.statusCodeToString(span.status.code),\n description: span.status.message,\n },\n attributes: this.attributesToObject(span.attributes),\n events: span.events.map(event => ({\n name: event.name,\n timestamp: this.toNanoseconds(event.time),\n attributes: this.attributesToObject(event.attributes),\n })),\n links: span.links.map(link => ({\n context: link.context,\n attributes: this.attributesToObject(link.attributes),\n })),\n resource: {\n attributes: {\n ...span.resource.attributes,\n \"service.name\": \"prompt-layer-js\",\n },\n schema_url: '',\n },\n }));\n\n return fetch(this.url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'X-API-KEY': this.apiKey || '',\n },\n body: JSON.stringify({\n spans: requestData,\n }),\n })\n .then(response => {\n if (!response.ok) {\n console.error(`Error exporting spans\\nHTTP error! 
status: ${response.status}`);\n return ExportResultCode.FAILED;\n }\n return ExportResultCode.SUCCESS;\n })\n .catch((error) => {\n console.error('Error exporting spans:', error);\n return ExportResultCode.FAILED;\n });\n }\n\n shutdown(): Promise<void> {\n return Promise.resolve();\n }\n}\n\nexport default PromptLayerSpanExporter;\n","import {getTracer} from \"@/tracing\";\nimport {promptlayerApiHandler} from \"@/utils\";\n\nconst tracer = getTracer();\n\nexport const promptLayerBase = (\n apiKey: string,\n llm: object,\n function_name = \"\",\n provider = \"openai\"\n) => {\n const handler: ProxyHandler<any> = {\n construct: (target, args) => {\n const newTarget = Reflect.construct(target, args);\n Object.defineProperties(newTarget, {\n function_name: {\n value: function_name,\n writable: true,\n },\n provider: {\n value: provider,\n },\n });\n return new Proxy(newTarget, handler);\n },\n get: (target, prop, receiver) => {\n const value = target[prop];\n const function_name = `${Reflect.get(\n target,\n \"function_name\"\n )}.${prop.toString()}`;\n\n if (typeof value === \"object\") {\n Object.defineProperties(value, {\n function_name: {\n value: function_name,\n writable: true,\n },\n provider: {\n value: provider,\n },\n });\n return new Proxy(value, handler);\n }\n\n if (typeof value === \"function\") {\n return (...args: any[]) => {\n const request_start_time = new Date().toISOString();\n const provider_type = Reflect.get(target, \"provider\");\n const return_pl_id = args[0]?.return_pl_id;\n const pl_tags = args[0]?.pl_tags;\n delete args[0]?.return_pl_id;\n delete args[0]?.pl_tags;\n\n return tracer.startActiveSpan(`${provider_type}.${function_name}`, async (span: any) => {\n try {\n span.setAttribute('function_input', JSON.stringify(args));\n const response = Reflect.apply(value, target, args);\n const spanId = span.spanContext().spanId;\n\n if (response instanceof Promise) {\n return new Promise((resolve, reject) => {\n response\n .then(async (request_response) => {\n const response = await promptlayerApiHandler(apiKey, {\n api_key: apiKey,\n provider_type,\n function_name,\n request_start_time,\n request_end_time: new Date().toISOString(),\n request_response,\n kwargs: args[0],\n return_pl_id,\n tags: pl_tags,\n span_id: spanId,\n });\n\n span.setAttribute('function_output', JSON.stringify(response));\n span.setAttribute('response_status', 'success');\n span.end();\n resolve(response);\n })\n .catch((error) => {\n span.recordException(error);\n span.setAttribute('response_status', 'error');\n span.end();\n reject(error);\n });\n });\n }\n\n span.setAttribute('function_output', JSON.stringify(response));\n span.setAttribute('response_status', 'success');\n span.end();\n return response;\n } catch (error) {\n span.recordException(error);\n span.setAttribute('response_status', 'error');\n span.end();\n throw error;\n }\n });\n };\n }\n\n return Reflect.get(target, prop, receiver);\n },\n };\n\n return new Proxy(llm, handler);\n};\n","import * as opentelemetry from '@opentelemetry/api';\nimport { getTracer } from '@/tracing';\n\nexport const wrapWithSpan = (functionName: string, func: Function, attributes?: Record<string, any>) => {\n return function (...args: any[]) {\n const tracer = getTracer();\n\n const wrapperFunction = (span: opentelemetry.Span) => {\n try {\n if (attributes) {\n Object.entries(attributes).forEach(([key, value]) => {\n span.setAttribute(key, value);\n });\n }\n\n span.setAttribute('function_input', JSON.stringify(args));\n const result = func(...args);\n\n if 
(result instanceof Promise) {\n return result.then((resolvedResult) => {\n span.setAttribute('function_output', JSON.stringify(resolvedResult));\n span.setStatus({ code: opentelemetry.SpanStatusCode.OK });\n return resolvedResult;\n }).catch((error) => {\n handleError(span, error, args);\n throw error;\n }).finally(() => span.end());\n } else {\n span.setAttribute('function_output', JSON.stringify(result));\n span.setStatus({ code: opentelemetry.SpanStatusCode.OK });\n span.end();\n return result;\n }\n } catch (error) {\n handleError(span, error, args);\n throw error;\n }\n };\n\n return tracer.startActiveSpan(functionName, wrapperFunction);\n };\n};\n\nconst handleError = (span: opentelemetry.Span, error: any, args: any[]) => {\n span.setAttribute('function_input', JSON.stringify(args));\n span.setStatus({\n code: opentelemetry.SpanStatusCode.ERROR,\n message: error instanceof Error ? error.message : 'Unknown error',\n });\n span.end();\n}\n","import {\n GetPromptTemplateParams,\n Pagination,\n PublishPromptTemplate,\n} from \"@/types\";\nimport {\n getAllPromptTemplates,\n getPromptTemplate,\n publishPromptTemplate,\n} from \"@/utils\";\n\nexport class TemplateManager {\n apiKey: string;\n\n constructor(apiKey: string) {\n this.apiKey = apiKey;\n }\n\n get = (promptName: string, params?: Partial<GetPromptTemplateParams>) =>\n getPromptTemplate(this.apiKey, promptName, params);\n\n publish = (body: PublishPromptTemplate) =>\n publishPromptTemplate(this.apiKey, body);\n\n all = (params?: Pagination) => getAllPromptTemplates(this.apiKey, params);\n}\n","import { TrackGroup, TrackMetadata, TrackPrompt, TrackScore } from \"@/types\";\nimport {\n promptLayerTrackGroup,\n promptLayerTrackMetadata,\n promptLayerTrackPrompt,\n promptLayerTrackScore,\n} from \"@/utils\";\n\nconst metadata = (apiKey: string, body: TrackMetadata): Promise<boolean> => {\n if (!(body.metadata instanceof Object)) {\n throw new Error(\"Please provide a dictionary of metadata.\");\n }\n for (const [key, value] of Object.entries(body.metadata)) {\n if (typeof key !== \"string\" || typeof value !== \"string\") {\n throw new Error(\n \"Please provide a dictionary of metadata with key value pair of strings.\"\n );\n }\n }\n return promptLayerTrackMetadata(apiKey, body);\n};\n\nconst score = (apiKey: string, body: TrackScore): Promise<boolean> => {\n if (typeof body.score !== \"number\") {\n throw new Error(\"Score must be a number\");\n }\n if (body.score < 0 || body.score > 100) {\n throw new Error(\"Score must be a number between 0 and 100.\");\n }\n return promptLayerTrackScore(apiKey, body);\n};\n\nconst prompt = (apiKey: string, body: TrackPrompt): Promise<boolean> => {\n if (!(body.prompt_input_variables instanceof Object)) {\n throw new Error(\"Prompt template input variable dictionary not provided.\");\n }\n return promptLayerTrackPrompt(apiKey, body);\n};\n\nconst group = (apiKey: string, body: TrackGroup) =>\n promptLayerTrackGroup(apiKey, body);\n\nexport class TrackManager {\n apiKey: string;\n\n constructor(apiKey: string) {\n this.apiKey = apiKey;\n }\n\n group = (body: TrackGroup) => group(this.apiKey, body);\n\n metadata = (body: TrackMetadata) => metadata(this.apiKey, body);\n\n prompt = (body: TrackPrompt) => prompt(this.apiKey, body);\n\n score = (body: TrackScore) => score(this.apiKey, 
body);\n}\n"],"mappings":"+1DAAA,IAAAA,GAAA,GAAAC,GAAAD,GAAA,iBAAAE,IAAA,eAAAC,GAAAH,ICuBA,IAAAI,GAAiB,mBAQV,IAAMC,EACX,QAAQ,IAAI,qBAAuB,8BAE/BC,GAAwB,CAC5BC,EACAC,IAGGC,EAAA,wBAEH,OADoBD,EAAK,iBAAiB,OAAO,aAAa,IAAM,OAE3DE,GAAeH,EAAQC,EAAK,iBAAkBA,CAAI,EAEpD,MAAMG,GAAsBJ,EAAQC,CAAI,CACjD,GAEMG,GAAwB,CAAOJ,EAAgBC,IAAuBC,EAAA,wBAC1E,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,iBAAkB,CACnE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUG,CAAI,CAC3B,CAAC,EACKK,EAAO,MAAMD,EAAS,KAAK,EAOjC,GANIA,EAAS,SAAW,KACtBE,EACED,EACA,mFACF,EAEEA,GAAQL,EAAK,aACf,MAAO,CAACA,EAAK,iBAAkBK,EAAK,UAAU,CAElD,OAASE,EAAG,CACV,QAAQ,KACN,4EAA4EA,CAAC,EAC/E,CACF,CACA,OAAOP,EAAK,gBACd,GAEMQ,GAA2B,CAC/BT,EACAC,IACqBC,EAAA,wBACrB,GAAI,CACF,IAAMG,EAAW,MAAM,MACrB,GAAGP,CAAmB,0BACtB,CACE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUY,EAAAC,EAAA,GAChBV,GADgB,CAEnB,QAASD,CACX,EAAC,CACH,CACF,EACMM,EAAO,MAAMD,EAAS,KAAK,EACjC,GAAIA,EAAS,SAAW,IACtB,OAAAE,EACED,EACA,8FACF,EACO,EAEX,OAASE,EAAG,CACV,eAAQ,KACN,iGAAiGA,CAAC,EACpG,EACO,EACT,CACA,MAAO,EACT,GAEMI,GAAwB,CAC5BZ,EACAC,IACqBC,EAAA,wBACrB,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,uBAAwB,CACzE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUY,EAAAC,EAAA,GAChBV,GADgB,CAEnB,QAASD,CACX,EAAC,CACH,CAAC,EACKM,EAAO,MAAMD,EAAS,KAAK,EACjC,GAAIA,EAAS,SAAW,IACtB,OAAAE,EACED,EACA,kFACF,EACO,EAEX,OAASE,EAAG,CACV,eAAQ,KACN,qFAAqFA,CAAC,EACxF,EACO,EACT,CACA,MAAO,EACT,GAEMK,GAAyB,CAC7Bb,EACAC,IACqBC,EAAA,wBACrB,GAAI,CACF,IAAMG,EAAW,MAAM,MACrB,GAAGP,CAAmB,wBACtB,CACE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUY,EAAAC,EAAA,GAChBV,GADgB,CAEnB,QAASD,CACX,EAAC,CACH,CACF,EACMM,EAAO,MAAMD,EAAS,KAAK,EACjC,GAAIA,EAAS,SAAW,IACtB,OAAAE,EACED,EACA,6GACF,EACO,EAEX,OAASE,EAAG,CACV,eAAQ,KACN,gHAAgHA,CAAC,EACnH,EACO,EACT,CACA,MAAO,EACT,GAEMM,GAAwB,CAC5Bd,EACAC,IACqBC,EAAA,wBACrB,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,eAAgB,CACjE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUY,EAAAC,EAAA,GAChBV,GADgB,CAEnB,QAASD,CACX,EAAC,CACH,CAAC,EACKM,EAAO,MAAMD,EAAS,KAAK,EACjC,GAAIA,EAAS,SAAW,IACtB,OAAAE,EACED,EACA,mGACF,EACO,EAEX,OAASE,EAAG,CACV,eAAQ,KACN,sGAAsGA,CAAC,EACzG,EACO,EACT,CACA,MAAO,EACT,GAEMO,GACJf,GAC8BE,EAAA,wBAC9B,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,gBAAiB,CAClE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAU,CACnB,QAASE,CACX,CAAC,CACH,CAAC,EACKM,EAAO,MAAMD,EAAS,KAAK,EACjC,OAAIA,EAAS,SAAW,KACtBE,EACED,EACA,qEACF,EACO,IAEFA,EAAK,EACd,OAAS,EAAG,CACV,eAAQ,KACN,wEAAwE,CAAC,EAC3E,EACO,EACT,CACF,GAEMU,GAAoB,CACxBhB,EACAiB,EACAC,IACGhB,EAAA,wBACH,GAAI,CACF,IAAMiB,EAAM,IAAI,IACd,GAAGrB,CAAmB,qBAAqBmB,CAAU,EACvD,EACMZ,EAAW,MAAM,MAAMc,EAAK,CAChC,OAAQ,OACR,QAAS,CACP,eAAgB,mBAChB,YAAanB,CACf,EACA,KAAM,KAAK,UAAUkB,CAAM,CAC7B,CAAC,EACKZ,EAAO,MAAMD,EAAS,KAAK,EACjC,OAAIA,EAAS,SAAW,KACtBE,EACED,EACA,+EACF,EACO,OAELA,EAAK,SACP,QAAQ,KACN,4EAA4EA,EAAK,OAAO,EAC1F,EAEKA,EACT,OAASE,EAAG,CACV,eAAQ,KACN,kFAAkFA,CAAC,EACrF,EACO,IACT,CACF,GAEMY,GAAwB,CAC5BpB,EACAC,IACGC,EAAA,wBACH,GAAI,CACF,IAAMG,EAAW,MAAM,MACrB,GAAGP,CAAmB,yBACtB,CACE,OAAQ,OACR,QAAS,CACP,eAAgB,mBAChB,YAAaE,CACf,EACA,KAAM,KAAK,UAAU,CACnB,gBAAiBW,EAAA,GAAKV,GACtB,eAAgBU,EAAA,GAAKV,GACrB,eAAgBA,EAAK,eAAiBA,EAAK,eAAiB,MAC9D,CAAC,CACH,CACF,EACMK,EAAO,MAAMD,EAAS,KAAK,EACjC,OAAIA,EAAS,SAAW,KACtBE,EACED,EACA,iFACF,EAEKA,CACT,OAASE,EAAG,CACV,QAAQ,KACN,oFAAoFA,CAAC,EACvF,CACF,CACF,GAEMa,GAAwB,CAC5BrB,EACAkB,IACGhB,EAAA,wBA3TL,IAAAoB,EA4TE,GAAI,CACF,IAAMH,EAAM,IAAI,IAAI,GAAGrB,CAAmB,mBAAmB,EAC7D,OAAO,QAAQoB,GAAU,CAAC,CAAC,EAAE,QAAQ,CAAC,CAACK,EAAKC,CAAK,IAC/CL,EAAI,aAAa,OAAOI,EAAKC,EAAM,SAAS,CAAC,CAC/C,EACA,IAAMnB,EAAW,MAAM,MAAMc,EAA
K,CAChC,QAAS,CACP,eAAgB,mBAChB,YAAanB,CACf,CACF,CAAC,EACKM,EAAO,MAAMD,EAAS,KAAK,EACjC,OAAIA,EAAS,SAAW,KACtBE,EACED,EACA,kFACF,EACO,OAEDgB,EAAAhB,EAAK,QAAL,KAAAgB,EAAc,CAAC,CACzB,OAASd,EAAG,CACV,eAAQ,KACN,qFAAqFA,CAAC,EACxF,EACO,IACT,CACF,GAEaiB,GAA4BC,GASkBxB,EAAA,QATlBwB,GASkB,UATlB,CACvC,cAAAC,EACA,gBAAAC,EACA,SAAAC,EAAW,CAAC,EACZ,oBAAAC,EAAsB,KACtB,wBAAAC,EAA0B,KAC1B,mBAAAC,EAAqB,GACrB,QAAAC,EACA,QAAAC,EAAU,IACZ,EAA2D,CACzD,IAAMC,EAAU,CACd,gBAAAP,EACA,SAAAC,EACA,oBAAAC,EACA,wBAAAC,EACA,mBAAAC,CACF,EAEMI,EAAU,CACd,YAAaH,EACb,eAAgB,kBAClB,EAEA,GAAI,CAEF,IAAM5B,EAAW,MAAM,MACrB,GAAGP,CAAmB,cAAc,mBAClC6B,CACF,CAAC,OACD,CACE,OAAQ,OACR,QAASS,EACT,KAAM,KAAK,UAAUD,CAAO,CAC9B,CACF,EAEA,GAAI9B,EAAS,SAAW,IAEtB,MAAO,CACL,QAAS,GACT,QAAS,4BAHO,MAAMA,EAAS,KAAK,EAAE,MAAM,KAAO,CAAC,EAAE,GAI1C,OAASA,EAAS,UAC9B,EACF,EAGF,IAAMgC,EAAS,MAAMhC,EAAS,KAAK,EAC/BgC,EAAO,SACT,QAAQ,KAAK,YAAYA,EAAO,OAAO,EAAE,EAE3C,IAAMC,EAAeD,EAAO,8BAC5B,GAAI,CAACC,EACH,eAAQ,IAAI,4CAA4C,EACjD,CAAE,QAAS,GAAO,QAAS,wBAAyB,EAG7D,IAAMC,EAAe,oBAAoBD,CAAY,GAa/CE,GAFoB,MARN,MAAM,MACxB,GAAG1C,CAAmB,wCAAwCyC,CAAY,GAC1E,CACE,OAAQ,OACR,QAASH,CACX,CACF,GAE4C,KAAK,GAEZ,cAAc,MAG7CK,EAAO,IAAI,GAAAC,QAAK,SAAS,CAAE,MAAOF,CAAW,CAAC,EAEpD,GAAI,CAEF,IAAMG,EAAe,MAAMC,GACzBH,EACAF,EACAL,CACF,EACA,OAAAO,EAAK,MAAM,EACJE,CACT,QAAE,CAEAF,EAAK,MAAM,CACb,CACF,OAASI,EAAO,CACd,cAAQ,MACN,2BACEA,aAAiB,MAAQA,EAAM,QAAUA,CAC3C,EACF,EACMA,CACR,CACF,GAEA,SAAeD,GACbH,EACAF,EACAL,EACc,QAAAhC,EAAA,sBACd,IAAM4C,EAAUL,EAAK,SAAS,IAAIF,CAAY,EAE9C,OAAO,IAAI,QAAQ,CAAOQ,EAASC,IAAW9C,EAAA,sBAC5C,IAAI+C,EAAe,KAEbC,EAAmBC,GAA0B,CAC7CA,EAAQ,OAAS,0BAEnBF,EADqB,KAAK,MAAME,EAAQ,IAAc,EAC/B,aACvB,aAAaC,CAAK,EAClBN,EAAQ,YAAY,wBAAyBI,CAAe,EAC5DH,EAAQE,CAAO,EAEnB,EAGMG,EAAQ,WAAW,IAAM,CAC7BN,EAAQ,YAAY,wBAAyBI,CAAe,EAC5DF,EAAO,IAAI,MAAM,wDAAwD,CAAC,CAC5E,EAAGd,CAAO,EAEV,GAAI,CAEF,MAAMY,EAAQ,UAAU,wBAAyBI,CAAe,CAClE,OAASG,EAAK,CACZ,aAAaD,CAAK,EAClBJ,EAAOK,CAAG,CACZ,CACF,EAAC,CACH,GAEA,IAAMC,EAAoBL,GAAmD,CA7d7E,IAAA3B,EAAAiC,EAAAC,EAAAC,EAAAC,EAAAC,EAAAC,EAAAC,EAAAC,EA8dE,IAAIC,EAAuD,KACvDC,EAEE3D,EAA2B,CAC/B,GAAI,GACJ,QAAS,CAAC,EACV,QAAS,KAAK,IAAI,EAClB,MAAO,GACP,OAAQ,iBACV,EACM4D,EAAahB,EAAQ,GAAG,EAAE,EAChC,GAAI,CAACgB,EAAY,OAAO5D,EACxB,IAAI6D,EACJ,QAAW7B,KAAUY,EAAS,CAC5B,GAAIZ,EAAO,QAAQ,SAAW,EAAG,SACjC,IAAM8B,EAAQ9B,EAAO,QAAQ,CAAC,EAAE,MAE5B8B,EAAM,UACRJ,EAAU,GAAGA,GAAW,EAAE,GAAGI,EAAM,SAAW,EAAE,IAE9CA,EAAM,gBACRH,EAAe,CACb,KAAM,GAAGA,EAAeA,EAAa,KAAO,EAAE,GAC5CG,EAAM,cAAc,MAAQ,EAC9B,GACA,UAAW,GAAGH,EAAeA,EAAa,UAAY,EAAE,GACtDG,EAAM,cAAc,WAAa,EACnC,EACF,GAEF,IAAMC,GAAW9C,EAAA6C,EAAM,aAAN,YAAA7C,EAAmB,GACpC,GAAI8C,EAAU,CACZF,EAAYA,GAAa,CAAC,EAC1B,IAAMG,EAAeH,EAAU,GAAG,EAAE,EACpC,GAAI,CAACG,GAAgBD,EAAS,GAAI,CAChCF,EAAU,KAAK,CACb,GAAIE,EAAS,IAAM,GACnB,KAAMA,EAAS,MAAQ,WACvB,SAAU,CACR,OAAMb,EAAAa,EAAS,WAAT,YAAAb,EAAmB,OAAQ,GACjC,YAAWC,EAAAY,EAAS,WAAT,YAAAZ,EAAmB,YAAa,EAC7C,CACF,CAAC,EACD,QACF,CACAa,EAAa,SAAS,KAAO,GAAGA,EAAa,SAAS,IAAI,KACxDZ,EAAAW,EAAS,WAAT,YAAAX,EAAmB,OAAQ,EAC7B,GACAY,EAAa,SAAS,UAAY,GAAGA,EAAa,SAAS,SAAS,KAClEX,EAAAU,EAAS,WAAT,YAAAV,EAAmB,YAAa,EAClC,EACF,CACF,CACA,IAAMY,EAAcrB,EAAQ,CAAC,EAAE,QAAQ,GAAG,CAAC,EAC3C,OAAA5C,EAAS,QAAQ,KAAK,CACpB,eAAesD,EAAAW,GAAA,YAAAA,EAAa,gBAAb,KAAAX,EAA8B,OAC7C,OAAOC,EAAAU,GAAA,YAAAA,EAAa,QAAb,KAAAV,EAAsB,EAC7B,UAAUC,EAAAS,GAAA,YAAAA,EAAa,WAAb,KAAAT,EAAyB,KACnC,QAAS,CACP,KAAM,YACN,QAAAE,EACA,cAAeC,GAA8B,OAC7C,WAAYE,GAAwB,OACpC,SAASJ,EAAAQ,GAAA,YAAAA,EAAa,MAAM,UAAnB,KAAAR,EAA8B,IACzC,CACF,CAAC,EACDzD,EAAS,GAAK4D,EAAW,GACzB5D,EAAS,MAAQ4D,EAAW,MAC5B5D,EAAS,QAAU4D,EAAW,QAC9B5D,EAAS,mBAAqB4D,EAAW,mBACzC5D,EAAS,MAAQ4D,EAAW,MACrB5D,CACT,EAEMkE,EAA0BtB,GAA2C,CACzE
,IAAI5C,EAAoB,CACtB,GAAI,GACJ,MAAO,GACP,QAAS,CAAC,EACV,KAAM,YACN,KAAM,UACN,YAAa,gBACb,cAAe,KACf,MAAO,CACL,aAAc,EACd,cAAe,CACjB,CACF,EAEA,GAAI,CADe4C,EAAQ,GAAG,EAAE,EACf,OAAO5C,EACxB,IAAI0D,EAAU,GACd,QAAW1B,KAAUY,EACnB,OAAQZ,EAAO,KAAM,CACnB,IAAK,gBAAiB,CACpBhC,EAAWM,EAAA,GACN0B,EAAO,SAEZ,KACF,CACA,IAAK,sBACCA,EAAO,MAAM,OAAS,eACxB0B,EAAU,GAAGA,CAAO,GAAG1B,EAAO,MAAM,IAAI,IAE5C,IAAK,gBACC,UAAWA,IACbhC,EAAS,MAAM,cAAgBgC,EAAO,MAAM,eAC1C,gBAAiBA,EAAO,QAC1BhC,EAAS,YAAcgC,EAAO,MAAM,aAExC,QACE,KAEJ,CAEF,OAAAhC,EAAS,QAAQ,KAAK,CACpB,KAAM,OACN,KAAM0D,CACR,CAAC,EACM1D,CACT,EAEMmE,GAAiB,CACrBvB,EACAwB,EAAgB,mCACb,CACH,GAAI,eAAgBxB,EAAQ,CAAC,EAC3B,OAAOA,EAAQ,OACb,CAACyB,EAAMC,IAAajE,EAAAC,EAAA,GACfgE,GADe,CAElB,WAAY,GAAGD,EAAK,UAAU,GAAGC,EAAQ,UAAU,EACrD,GACA,CAAC,CACH,EAGF,GAAIF,IAAkB,4BACpB,OAAOF,EAAuBtB,CAAO,EAEvC,GAAI,SAAUA,EAAQ,CAAC,EAAE,QAAQ,CAAC,EAAG,CACnC,IAAI5C,EAAW,GACf,QAAWgC,KAAUY,EACnB5C,EAAW,GAAGA,CAAQ,GAAGgC,EAAO,QAAQ,CAAC,EAAE,IAAI,GAEjD,IAAMuC,EAAe,gBAAgB3B,EAAQ,GAAG,EAAE,CAAC,EACnD,OAAA2B,EAAa,QAAQ,CAAC,EAAE,KAAOvE,EACxBuE,CACT,CAEA,GAAI,UAAW3B,EAAQ,CAAC,EAAE,QAAQ,CAAC,EAAG,CACpC,IAAM5C,EAAWiD,EAAiBL,CAAO,EACzC,OAAA5C,EAAS,QAAQ,CAAC,EAAIM,IAAA,GACjBN,EAAS,QAAQ,CAAC,GAClBA,EAAS,QAAQ,CAAC,EAAE,SAElBA,CACT,CAEA,MAAO,EACT,EAEA,SAAgBF,GACdH,EACA6E,EACA5E,EACA,QAAA6E,EAAA,sBACA,IAAM7B,EAAU,CAAC,EACjB,YAAA8B,EAAAC,EAA0BH,GAA1BI,EAAAC,EAAArC,EAAAoC,EAAA,EAAAC,EAAA,UAAAC,EAAAJ,EAAA,cAAAE,EAAA,GACE,CADS,IAAMzD,EAAjB0D,EAAA,MACE,MAAMjF,EAAK,aAAe,CAACuB,EAAO,IAAI,EAAIA,EAC1CyB,EAAQ,KAAKzB,CAAK,SAFpB0D,EApoBF,CAooBErC,EAAA,CAAAqC,UAAA,KAAAD,IAAAC,EAAAH,EAAA,oBAAAI,EAAAD,EAAA,KAAAH,YAAA,IAAAlC,EAAA,MAAAA,EAAA,IAIA,IAAMuC,EAAmBZ,GAAevB,EAAShD,EAAK,aAAa,EAC7DI,EAAW,UAAA8E,EAAM/E,GAAsBJ,EAAQU,EAAAC,EAAA,GAChDV,GADgD,CAEnD,iBAAAmF,EACA,iBAAkB,IAAI,KAAK,EAAE,YAAY,CAC3C,EAAC,GACD,GAAI/E,GACEJ,EAAK,aAAc,CACrB,IAAMoF,EAAchF,EAAiB,CAAC,EAEtC,KAAM,CADa4C,EAAQ,GAAG,EAAE,EACboC,CAAU,CAC/B,CAEJ,GAEA,IAAM9E,EAAoB,CAAC6E,EAAuBE,IAAyB,CACzE,GAAI,CACF,QAAQ,KAAK,GAAGA,CAAY,KAAKF,EAAiB,OAAO,EAAE,CAC7D,OAAS5E,EAAG,CACV,QAAQ,KAAK,GAAG8E,CAAY,KAAKF,CAAgB,EAAE,CACrD,CACF,EAEMG,GAAsBtF,GAAuBC,EAAA,wBACjD,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,iBAAkB,CACnE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUG,CAAI,CAC3B,CAAC,EACD,OAAII,EAAS,SAAW,KACtBE,EACEF,EACA,mFACF,EACKA,EAAS,KAAK,CACvB,OAAS,EAAG,CACV,QAAQ,KACN,4EAA4E,CAAC,EAC/E,CACF,CACA,MAAO,CAAC,CACV,GAEMmF,EAA0BvC,GAA0B,CACxD,IAAM5C,EAAuB,CAC3B,GAAI,GACJ,QAAS,CACP,CACE,cAAe,OACf,MAAO,EACP,KAAM,GACN,SAAU,IACZ,CACF,EACA,QAAS,KAAK,IAAI,EAClB,MAAO,GACP,OAAQ,iBACV,EACM4D,EAAahB,EAAQ,GAAG,EAAE,EAChC,GAAI,CAACgB,EAAY,OAAO5D,EACxB,IAAIoF,EAAO,GACX,QAAWpD,KAAUY,EACfZ,EAAO,QAAQ,OAAS,GAAKA,EAAO,QAAQ,CAAC,EAAE,OACjDoD,EAAO,GAAGA,CAAI,GAAGpD,EAAO,QAAQ,CAAC,EAAE,IAAI,IAG3C,OAAAhC,EAAS,QAAQ,CAAC,EAAE,KAAOoF,EAC3BpF,EAAS,GAAK4D,EAAW,GACzB5D,EAAS,QAAU4D,EAAW,QAC9B5D,EAAS,MAAQ4D,EAAW,MAC5B5D,EAAS,mBAAqB4D,EAAW,mBACzC5D,EAAS,MAAQ4D,EAAW,MACrB5D,CACT,EAEMqF,GAA6BzC,GAAmC,CACpE,IAAM5C,EAAgC,CACpC,WAAY,GACZ,GAAI,GACJ,MAAO,GACP,YAAa,GACb,KAAM,YACR,EACM4D,EAAahB,EAAQ,GAAG,EAAE,EAChC,GAAI,CAACgB,EAAY,OAAO5D,EACxB,IAAIsF,EAAa,GACjB,QAAWtD,KAAUY,EACnB0C,EAAa,GAAGA,CAAU,GAAGtD,EAAO,UAAU,GAEhD,OAAAhC,EAAS,WAAasF,EACtBtF,EAAS,GAAK4D,EAAW,GACzB5D,EAAS,MAAQ4D,EAAW,MAC5B5D,EAAS,YAAc4D,EAAW,YAC3B5D,CACT,EAEA,SAAgBuF,GACdf,EACAgB,EACAC,EACA,QAAAhB,EAAA,sBACA,IAAMxE,EAIF,CACF,WAAY,KACZ,aAAc,KACd,iBAAkB,IACpB,EACM2C,EAAU,CAAC,EACjB,YAAA8B,EAAAC,EAA2BH,GAA3BI,EAAAC,EAAArC,EAAAoC,EAAA,EAAAC,EAAA,UAAAC,EAAAJ,EAAA,cAAAE,EAAA,GACE,CADS,IAAM5C,EAAjB6C,EAAA,MACEjC,EAAQ,KAAKZ,CAAM,EACnB/B,EAAK,aAAe+B,EACpB,MAA
M/B,SAHR4E,EA1vBF,CA0vBErC,EAAA,CAAAqC,UAAA,KAAAD,IAAAC,EAAAH,EAAA,oBAAAI,EAAAD,EAAA,KAAAH,YAAA,IAAAlC,EAAA,MAAAA,EAAA,IAKA,IAAMuC,EAAmBU,EAAW7C,CAAO,EACrC5C,EAAW,UAAA8E,EAAMU,EAAY,CAAE,iBAAAT,CAAiB,CAAC,GACvD9E,EAAK,WAAaD,EAAS,WAC3BC,EAAK,iBAAmBD,EAAS,iBACjC,MAAMC,CACR,GAEA,IAAMyF,GAAoB,CAAOC,EAAoBC,IAAgB/F,EAAA,wBACnE,OAAO8F,EAAO,KAAK,YAAY,OAAOC,CAAM,CAC9C,GAEMC,GAA2B,CAAOF,EAAoBC,IAAgB/F,EAAA,wBAC1E,OAAO8F,EAAO,YAAY,OAAOC,CAAM,CACzC,GAEME,GAA8B,CAClC,KAAMJ,GACN,WAAYG,EACd,EAEME,GAAgB,CACpBC,EACAJ,IACG/F,EAAA,wBACH,IAAMoG,EAAS,QAAQ,QAAQ,EAAE,QAC3BN,EAAS,IAAIM,EAAO,CACxB,QAASL,EAAO,OAClB,CAAC,EACKM,EACJJ,GAA4BE,EAAgB,gBAAgB,IAAI,EAClE,OAAOE,EAAcP,EAAQC,CAAM,CACrC,GAEMO,GAAqB,CACzBH,EACAJ,IACG/F,EAAA,wBACH,IAAMoG,EAAS,QAAQ,QAAQ,EAAE,YAC3BN,EAAS,IAAIM,EAAO,CACxB,SAAUL,EAAO,OACnB,CAAC,EACDA,GAAA,aAAAA,EAAe,QACf,IAAMM,EACJJ,GAA4BE,EAAgB,gBAAgB,IAAI,EAClE,OAAOE,EAAcP,EAAQC,CAAM,CACrC,GAEMQ,GAAuB,CAAOT,EAAuBC,IAAgB/F,EAAA,wBACzE,OAAO8F,EAAO,SAAS,OAAOC,CAAM,CACtC,GAEMS,GAA8B,CAClCV,EACAC,IACG/F,EAAA,wBACH,OAAO8F,EAAO,YAAY,OAAOC,CAAM,CACzC,GAEMU,GAAiC,CACrC,KAAMF,GACN,WAAYC,EACd,EAEME,GAAmB,CACvBP,EACAJ,IACG/F,EAAA,wBACH,IAAM2G,EAAY,QAAQ,mBAAmB,EAAE,QACzCb,EAAS,IAAIa,EAAU,CAC3B,QAASZ,EAAO,OAClB,CAAC,EACKM,EACJI,GAA+BN,EAAgB,gBAAgB,IAAI,EACrE,OAAOE,EAAcP,EAAQC,CAAM,CACrC,GAEMa,GAAiB,CACrB9G,EACAC,IAC+BC,EAAA,wBAC/B,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,eAAgB,CACjE,OAAQ,OACR,QAAS,CACP,YAAaE,EACb,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUC,CAAI,CAC3B,CAAC,EACD,OAAII,EAAS,SAAW,KACtBE,EACEF,EACA,yEACF,EACO,MAEFA,EAAS,KAAK,CACvB,OAASG,EAAG,CACV,eAAQ,KACN,4EAA4EA,CAAC,EAC/E,EACO,IACT,CACF,GCp2BO,IAAMuG,EAAN,KAAmB,CAGxB,YAAYC,EAAgB,CAI5B,YAAS,IAAMC,GAAuB,KAAK,MAAM,EAH/C,KAAK,OAASD,CAChB,CAGF,ECVA,IAAAE,GAA+B,iCAC/BC,GAAkC,yCAClCC,GAAiC,yCCFjC,IAAAC,EAAmD,8BAEnDC,EAA+B,+BAG/B,IAAMC,EAAN,KAAsD,CAKpD,YAAYC,EAAwBC,EAAiB,CACnD,KAAK,OAASA,GAAU,QAAQ,IAAI,oBACpC,KAAK,cAAgBD,EACrB,KAAK,IAAM,GAAGE,CAAmB,aACnC,CAEQ,mBAAmBC,EAAyD,CAClF,OAAKA,EACE,OAAO,YAAY,OAAO,QAAQA,CAAU,CAAC,EAD5B,CAAC,CAE3B,CAEQ,iBAAiBC,EAAwB,CAQ/C,MAP0C,CACxC,CAAC,WAAS,QAAQ,EAAG,oBACrB,CAAC,WAAS,MAAM,EAAG,kBACnB,CAAC,WAAS,MAAM,EAAG,kBACnB,CAAC,WAAS,QAAQ,EAAG,oBACrB,CAAC,WAAS,QAAQ,EAAG,mBACvB,EACeA,CAAI,GAAK,mBAC1B,CAEQ,mBAAmBC,EAA8B,CAMvD,MALkD,CAChD,CAAC,iBAAe,KAAK,EAAG,mBACxB,CAAC,iBAAe,EAAE,EAAG,gBACrB,CAAC,iBAAe,KAAK,EAAG,kBAC1B,EACiBA,CAAI,GAAK,kBAC5B,CAEQ,cAAcC,EAAgC,CACpD,OAAQ,OAAOA,EAAK,CAAC,CAAC,EAAI,OAAO,GAAG,EAAI,OAAOA,EAAK,CAAC,CAAC,GAAG,SAAS,CACpE,CAEA,OAAOC,EAAkD,CACvD,GAAI,CAAC,KAAK,cACR,OAAO,QAAQ,QAAQ,mBAAiB,OAAO,EAGjD,IAAMC,EAAcD,EAAM,IAAIE,GAAK,CAlDvC,IAAAC,EAkD2C,OACrC,KAAMD,EAAK,KACX,QAAS,CACP,SAAUA,EAAK,YAAY,EAAE,QAC7B,QAASA,EAAK,YAAY,EAAE,OAC5B,cAAaC,EAAAD,EAAK,YAAY,EAAE,aAAnB,YAAAC,EAA+B,cAAe,EAC7D,EACA,KAAM,KAAK,iBAAiBD,EAAK,IAAI,EACrC,UAAWA,EAAK,cAAgB,KAChC,WAAY,KAAK,cAAcA,EAAK,SAAS,EAC7C,SAAU,KAAK,cAAcA,EAAK,OAAO,EACzC,OAAQ,CACN,YAAa,KAAK,mBAAmBA,EAAK,OAAO,IAAI,EACrD,YAAaA,EAAK,OAAO,OAC3B,EACA,WAAY,KAAK,mBAAmBA,EAAK,UAAU,EACnD,OAAQA,EAAK,OAAO,IAAIE,IAAU,CAChC,KAAMA,EAAM,KACZ,UAAW,KAAK,cAAcA,EAAM,IAAI,EACxC,WAAY,KAAK,mBAAmBA,EAAM,UAAU,CACtD,EAAE,EACF,MAAOF,EAAK,MAAM,IAAIG,IAAS,CAC7B,QAASA,EAAK,QACd,WAAY,KAAK,mBAAmBA,EAAK,UAAU,CACrD,EAAE,EACF,SAAU,CACR,WAAYC,EAAAC,EAAA,GACPL,EAAK,SAAS,YADP,CAEV,eAAgB,iBAClB,GACA,WAAY,EACd,CACF,EAAE,EAEF,OAAO,MAAM,KAAK,IAAK,CACrB,OAAQ,OACR,QAAS,CACP,eAAgB,mBAChB,YAAa,KAAK,QAAU,EAC9B,EACA,KAAM,KAAK,UAAU,CACnB,MAAOD,CACT,CAAC,CACH,CAAC,EACE,KAAKO,GACCA,EAAS,GAIP,mBAAiB,SAHtB,QAAQ,MAAM;AAAA,sBAA8CA,EAAS,MAAM,EAAE,EACtE,mBAAiB,OAG3B,EACA,MAAOC,IACN,QAAQ,MAAM,yBAA0BA,CAAK,EACtC,mBAAiB,OACzB,CACL,CAEA
,UAA0B,CACxB,OAAO,QAAQ,QAAQ,CACzB,CACF,EAEOC,GAAQlB,ED3GR,IAAMmB,EAAY,CAACC,EAAe,uBAClB,SAAM,UAAUA,CAAI,EAG9BC,GAAe,CAACC,EAAwBC,IAAoB,CACvE,IAAMC,EAAW,IAAI,sBACfC,EAAW,IAAIC,GAAwBJ,EAAeC,CAAM,EAC5DI,EAAY,IAAI,uBAAoBF,CAAQ,EAClDD,EAAS,iBAAiBG,CAAS,EACnCH,EAAS,SAAS,CACpB,EEZA,IAAMI,GAASC,EAAU,EAEZC,EAAkB,CAC7BC,EACAC,EACAC,EAAgB,GAChBC,EAAW,WACR,CACH,IAAMC,EAA6B,CACjC,UAAW,CAACC,EAAQC,IAAS,CAC3B,IAAMC,EAAY,QAAQ,UAAUF,EAAQC,CAAI,EAChD,cAAO,iBAAiBC,EAAW,CACjC,cAAe,CACb,MAAOL,EACP,SAAU,EACZ,EACA,SAAU,CACR,MAAOC,CACT,CACF,CAAC,EACM,IAAI,MAAMI,EAAWH,CAAO,CACrC,EACA,IAAK,CAACC,EAAQG,EAAMC,IAAa,CAC/B,IAAMC,EAAQL,EAAOG,CAAI,EACnBN,EAAgB,GAAG,QAAQ,IAC/BG,EACA,eACF,CAAC,IAAIG,EAAK,SAAS,CAAC,GAEpB,OAAI,OAAOE,GAAU,UACnB,OAAO,iBAAiBA,EAAO,CAC7B,cAAe,CACb,MAAOR,EACP,SAAU,EACZ,EACA,SAAU,CACR,MAAOC,CACT,CACF,CAAC,EACM,IAAI,MAAMO,EAAON,CAAO,GAG7B,OAAOM,GAAU,WACZ,IAAIJ,IAAgB,CA9CnC,IAAAK,EAAAC,EAAAC,EAAAC,EA+CU,IAAMC,EAAqB,IAAI,KAAK,EAAE,YAAY,EAC5CC,EAAgB,QAAQ,IAAIX,EAAQ,UAAU,EAC9CY,GAAeN,EAAAL,EAAK,CAAC,IAAN,YAAAK,EAAS,aACxBO,GAAUN,EAAAN,EAAK,CAAC,IAAN,YAAAM,EAAS,QACzB,OAAAC,EAAOP,EAAK,CAAC,IAAb,aAAAO,EAAgB,cAChBC,EAAOR,EAAK,CAAC,IAAb,aAAAQ,EAAgB,QAETjB,GAAO,gBAAgB,GAAGmB,CAAa,IAAId,CAAa,GAAWiB,GAAcC,EAAA,wBACtF,GAAI,CACFD,EAAK,aAAa,iBAAkB,KAAK,UAAUb,CAAI,CAAC,EACxD,IAAMe,EAAW,QAAQ,MAAMX,EAAOL,EAAQC,CAAI,EAC5CgB,EAASH,EAAK,YAAY,EAAE,OAElC,OAAIE,aAAoB,QACf,IAAI,QAAQ,CAACE,EAASC,IAAW,CACtCH,EACG,KAAYI,GAAqBL,EAAA,wBAChC,IAAMC,EAAW,MAAMK,GAAsB1B,EAAQ,CACnD,QAASA,EACT,cAAAgB,EACA,cAAAd,EACA,mBAAAa,EACA,iBAAkB,IAAI,KAAK,EAAE,YAAY,EACzC,iBAAAU,EACA,OAAQnB,EAAK,CAAC,EACd,aAAAW,EACA,KAAMC,EACN,QAASI,CACX,CAAC,EAEDH,EAAK,aAAa,kBAAmB,KAAK,UAAUE,CAAQ,CAAC,EAC7DF,EAAK,aAAa,kBAAmB,SAAS,EAC9CA,EAAK,IAAI,EACTI,EAAQF,CAAQ,CAClB,EAAC,EACA,MAAOM,GAAU,CAChBR,EAAK,gBAAgBQ,CAAK,EAC1BR,EAAK,aAAa,kBAAmB,OAAO,EAC5CA,EAAK,IAAI,EACTK,EAAOG,CAAK,CACd,CAAC,CACL,CAAC,GAGHR,EAAK,aAAa,kBAAmB,KAAK,UAAUE,CAAQ,CAAC,EAC7DF,EAAK,aAAa,kBAAmB,SAAS,EAC9CA,EAAK,IAAI,EACFE,EACT,OAASM,EAAO,CACd,MAAAR,EAAK,gBAAgBQ,CAAK,EAC1BR,EAAK,aAAa,kBAAmB,OAAO,EAC5CA,EAAK,IAAI,EACHQ,CACR,CACF,EAAC,CACH,EAGK,QAAQ,IAAItB,EAAQG,EAAMC,CAAQ,CAC3C,CACF,EAEA,OAAO,IAAI,MAAMR,EAAKG,CAAO,CAC/B,EC9GA,IAAAwB,EAA+B,iCAGxB,IAAMC,GAAe,CAACC,EAAsBC,EAAgBC,IAC1D,YAAaC,EAAa,CAC/B,IAAMC,EAASC,EAAU,EAEnBC,EAAmBC,GAA6B,CACpD,GAAI,CACEL,GACF,OAAO,QAAQA,CAAU,EAAE,QAAQ,CAAC,CAACM,EAAKC,CAAK,IAAM,CACnDF,EAAK,aAAaC,EAAKC,CAAK,CAC9B,CAAC,EAGHF,EAAK,aAAa,iBAAkB,KAAK,UAAUJ,CAAI,CAAC,EACxD,IAAMO,EAAST,EAAK,GAAGE,CAAI,EAE3B,OAAIO,aAAkB,QACbA,EAAO,KAAMC,IAClBJ,EAAK,aAAa,kBAAmB,KAAK,UAAUI,CAAc,CAAC,EACnEJ,EAAK,UAAU,CAAE,KAAoB,iBAAe,EAAG,CAAC,EACjDI,EACR,EAAE,MAAOC,GAAU,CAClB,MAAAC,GAAYN,EAAMK,EAAOT,CAAI,EACvBS,CACR,CAAC,EAAE,QAAQ,IAAML,EAAK,IAAI,CAAC,GAE3BA,EAAK,aAAa,kBAAmB,KAAK,UAAUG,CAAM,CAAC,EAC3DH,EAAK,UAAU,CAAE,KAAoB,iBAAe,EAAG,CAAC,EACxDA,EAAK,IAAI,EACFG,EAEX,OAASE,EAAO,CACd,MAAAC,GAAYN,EAAMK,EAAOT,CAAI,EACvBS,CACR,CACF,EAEA,OAAOR,EAAO,gBAAgBJ,EAAcM,CAAe,CAC7D,EAGIO,GAAc,CAACN,EAA0BK,EAAYT,IAAgB,CACzEI,EAAK,aAAa,iBAAkB,KAAK,UAAUJ,CAAI,CAAC,EACxDI,EAAK,UAAU,CACb,KAAoB,iBAAe,MACnC,QAASK,aAAiB,MAAQA,EAAM,QAAU,eACpD,CAAC,EACDL,EAAK,IAAI,CACX,ECvCO,IAAMO,EAAN,KAAsB,CAG3B,YAAYC,EAAgB,CAI5B,SAAM,CAACC,EAAoBC,IACzBC,GAAkB,KAAK,OAAQF,EAAYC,CAAM,EAEnD,aAAWE,GACTC,GAAsB,KAAK,OAAQD,CAAI,EAEzC,SAAOF,GAAwBI,GAAsB,KAAK,OAAQJ,CAAM,EATtE,KAAK,OAASF,CAChB,CASF,ECjBA,IAAMO,GAAW,CAACC,EAAgBC,IAA0C,CAC1E,GAAI,EAAEA,EAAK,oBAAoB,QAC7B,MAAM,IAAI,MAAM,0CAA0C,EAE5D,OAAW,CAACC,EAAKC,CAAK,IAAK,OAAO,QAAQF,EAAK,QAAQ,EACrD,GAAI,OAAOC,GAAQ,UAAY,OAAOC,GAAU,SAC9C,MAAM,IAAI,MACR,yEACF,EAGJ,OAAOC,GAAyBJ,EAAQC,CAAI,CAC9C,EAEM
I,GAAQ,CAACL,EAAgBC,IAAuC,CACpE,GAAI,OAAOA,EAAK,OAAU,SACxB,MAAM,IAAI,MAAM,wBAAwB,EAE1C,GAAIA,EAAK,MAAQ,GAAKA,EAAK,MAAQ,IACjC,MAAM,IAAI,MAAM,2CAA2C,EAE7D,OAAOK,GAAsBN,EAAQC,CAAI,CAC3C,EAEMM,GAAS,CAACP,EAAgBC,IAAwC,CACtE,GAAI,EAAEA,EAAK,kCAAkC,QAC3C,MAAM,IAAI,MAAM,yDAAyD,EAE3E,OAAOO,GAAuBR,EAAQC,CAAI,CAC5C,EAEMQ,GAAQ,CAACT,EAAgBC,IAC7BS,GAAsBV,EAAQC,CAAI,EAEvBU,EAAN,KAAmB,CAGxB,YAAYX,EAAgB,CAI5B,WAASC,GAAqBQ,GAAM,KAAK,OAAQR,CAAI,EAErD,cAAYA,GAAwBF,GAAS,KAAK,OAAQE,CAAI,EAE9D,YAAUA,GAAsBM,GAAO,KAAK,OAAQN,CAAI,EAExD,WAASA,GAAqBI,GAAM,KAAK,OAAQJ,CAAI,EATnD,KAAK,OAASD,CAChB,CASF,ERpCA,IAAAY,GAA+B,iCAE/B,IAAMC,GAAgC,CACpC,OAAQ,CACN,KAAM,CACJ,cAAe,iCACf,gBAAiBC,CACnB,EACA,WAAY,CACV,cAAe,4BACf,gBAAiBC,CACnB,CACF,EACA,UAAW,CACT,KAAM,CACJ,cAAe,4BACf,gBAAiBC,CACnB,EACA,WAAY,CACV,cAAe,+BACf,gBAAiBC,EACnB,CACF,EACA,eAAgB,CACd,KAAM,CACJ,cAAe,6CACf,gBAAiBH,CACnB,EACA,WAAY,CACV,cAAe,wCACf,gBAAiBC,CACnB,CACF,CACF,EAEMG,GAAgD,CACpD,OAAQC,GACR,UAAWC,GACX,eAAgBC,EAClB,EAQMC,GAAyBC,GAAsB,CACnD,GAAI,CAACA,GAAO,OAAOA,GAAQ,UAAY,MAAM,QAAQA,CAAG,EACtD,MAAO,GAGT,IAAMC,EAAgB,CACpB,SACA,QACA,gBACA,oBACA,gBACF,EAGA,OAFe,OAAO,OAAOD,CAAG,EAElB,MAAOE,GACf,OAAOA,GAAQ,UAAYA,IAAQ,KAAa,GAC7CD,EAAc,MAAOE,GAAQA,KAAOD,CAAG,CAC/C,CACH,EAEaE,EAAN,KAAkB,CAQvB,YAAY,CACV,OAAAC,EAAS,QAAQ,IAAI,oBACrB,cAAAC,EAAgB,EAClB,EAAmB,CAAC,EAAG,CACrB,GAAID,IAAW,OACb,MAAM,IAAI,MACR,0HACF,EAGF,KAAK,OAASA,EACd,KAAK,cAAgBC,EACrB,KAAK,UAAY,IAAIC,EAAgBF,CAAM,EAC3C,KAAK,MAAQ,IAAIG,EAAaH,CAAM,EACpC,KAAK,MAAQ,IAAII,EAAaJ,CAAM,EACpC,KAAK,aAAeK,GAEhBJ,GACFK,GAAaL,EAAeD,CAAM,CAEtC,CAEA,IAAI,WAAY,CACd,GAAI,CACF,IAAMO,EAAS,QAAQ,mBAAmB,EAAE,QAC5C,OAAOC,EAAgB,KAAK,OAAQD,EAAQ,YAAa,WAAW,CACtE,OAAS,EAAG,CACV,QAAQ,MACN,8EACF,CACF,CACF,CAEA,IAAI,QAAS,CACX,GAAI,CACF,IAAMA,EAAS,QAAQ,QAAQ,EAAE,QACjC,OAAOC,EAAgB,KAAK,OAAQD,EAAQ,SAAU,QAAQ,CAChE,OAAS,EAAG,CACV,QAAQ,MACN,qEACF,CACF,CACF,CAEM,IAAIE,EAUK,QAAAC,EAAA,yBAVL,CACR,WAAAC,EACA,cAAAC,EACA,mBAAAC,EACA,eAAAC,EACA,KAAAC,EACA,SAAAC,EACA,QAAAC,EACA,wBAAAC,EACA,OAAAC,EAAS,EACX,EAAe,CAGb,OAFeC,EAAU,EAEX,gBAAgB,kBAA0BC,GAASX,EAAA,sBAC/D,GAAI,CACF,IAAMY,EAAgB,CACpB,WAAAX,EACA,cAAAC,EACA,mBAAAC,EACA,eAAAC,EACA,KAAAC,EACA,SAAAC,EACA,QAAAC,EACA,wBAAAC,EACA,OAAAC,CACF,EACAE,EAAK,aAAa,iBAAkB,KAAK,UAAUC,CAAa,CAAC,EAEjE,IAAMC,EAAyBT,EACzBU,EAA6C,CACjD,MAAOX,EACP,QAASD,EACT,iBAAkBI,CACpB,EACIF,IAAgBU,EAAkB,gBAAkBV,GAExD,IAAMW,EAAkB,MAAM,KAAK,UAAU,IAC3Cd,EACAa,CACF,EAEA,GAAI,CAACC,EAAiB,MAAM,IAAI,MAAM,kBAAkB,EAExD,IAAMC,EAAiBD,EAAgB,gBACvC,GAAI,CAACA,EAAgB,WACnB,MAAM,IAAI,MACR,WAAWd,CAAU,oDACvB,EAGF,IAAMgB,EAA0BF,EAAgB,SAChD,GAAI,CAACE,EACH,MAAM,IAAI,MACR,WAAWhB,CAAU,kDACvB,EAGF,IAAMiB,EAAuBD,EAAwB,MACrD,GAAI,CAACC,EACH,MAAM,IAAI,MACR,WAAWjB,CAAU,wDACvB,EAGF,IAAMkB,EAAgBD,EAAqB,SAErCE,EAAqB,IAAI,KAAK,EAAE,YAAY,EAC5CC,EAASC,IAAA,GACVP,EAAgB,YACfP,GAA2B,CAAC,GAE5Be,EACJhD,GACE4C,CACF,EAAEH,EAAe,IAAI,EACjBQ,EAAgBD,EAAO,cAEvBE,EAAkBF,EAAO,gBACzBG,GAAmB9C,GAAyBuC,CAAa,EACzDQ,EAAoBZ,EAAgB,kBACtCY,IACFN,EAAO,QAAaM,EAAkB,KAExCN,EAAO,OAAYZ,EACfA,GAAU,CAAC,SAAU,cAAc,EAAE,SAASU,CAAa,IAC7DE,EAAO,eAAoB,CAAE,cAAe,EAAK,GAGnD,IAAMO,EAAW,MAAMF,GAAiBX,EAAiBM,CAAM,EAEzDQ,EAAiBC,IAAiB,CACtC,IAAMC,GAAmB,IAAI,KAAK,EAAE,YAAY,EAChD,OAAOC,GAAaV,EAAA,CAClB,cAAAE,EACA,cAAAL,EACA,KAAM,CAAC,EACP,OAAAE,EACA,KAAAhB,EACA,mBAAAe,EACA,iBAAAW,GACA,QAAS,KAAK,OACd,SAAAzB,EACA,UAAWS,EAAgB,GAC3B,eAAgBA,EAAgB,QAChC,uBAAAF,EACA,SAAUN,EACV,wBAAyB,GACzB,QAASI,EAAK,YAAY,EAAE,QACzBmB,GACJ,CACH,EAEA,GAAIrB,EACF,OAAOwB,GAAeL,EAAUC,EAAeJ,CAAe,EAChE,IAAMS,EAAa,MAAML,EAAc,CAAE,iBAAkBD,CAAS,CAAC,EAE/DO,EAAiB,CACrB,WAAYD,EAAW,WACvB,aAAcN,EACd,iBAAkBM,EAAW,gBAC/B,EACA,OAAAvB,EAAK,aAAa,kBAA
mB,KAAK,UAAUwB,CAAc,CAAC,EAE5DA,CACT,OAASC,EAAO,CACd,MAAAzB,EAAK,UAAU,CACb,KAAoB,kBAAe,MACnC,QAASyB,aAAiB,MAAQA,EAAM,QAAU,eACpD,CAAC,EACKA,CACR,QAAE,CACAzB,EAAK,IAAI,CACX,CACF,EAAC,CACH,GAEM,YAAYZ,EAO6B,QAAAC,EAAA,yBAP7B,CAChB,aAAAqC,EACA,eAAAjC,EAAiB,CAAC,EAClB,SAAAE,EAAW,CAAC,EACZ,kBAAAgC,EAAoB,KACpB,gBAAAC,EAAkB,KAClB,iBAAAC,EAAmB,EACrB,EAA+C,CAC7C,GAAI,CACF,IAAMC,EAAS,MAAMC,GAAmB,CACtC,cAAeL,EACf,gBAAiBjC,EACjB,SAAAE,EACA,oBAAqBgC,EACrB,wBAAyBC,EACzB,mBAAoBC,EACpB,QAAS,KAAK,MAChB,CAAC,EAED,GAAI,CAACA,GACCxD,GAAsByD,CAAM,EAAG,CAGjC,IAAME,EAFa,OAAO,OAAOF,CAAM,EAER,OAC5BG,GAAcA,EAAK,iBAAmB,EACzC,EAEA,GAAID,EAAY,SAAW,EACzB,MAAM,IAAI,MAAM,KAAK,UAAUF,EAAQ,KAAM,CAAC,CAAC,EAMjD,GAAI,CAHqBE,EAAY,KAClCC,GAAcA,EAAK,SAAW,SACjC,EAEE,MAAM,IAAI,MAAM,KAAK,UAAUH,EAAQ,KAAM,CAAC,CAAC,CAEnD,CAGF,OAAOA,CACT,OAASL,EAAO,CACd,MAAIA,aAAiB,OACnB,QAAQ,MAAM,0BAA2BA,EAAM,OAAO,EAChD,IAAI,MAAM,2BAA2BA,EAAM,OAAO,EAAE,IAE1D,QAAQ,MAAM,kCAAmCA,CAAK,EAChD,IAAI,MAAM,gCAAgC,EAEpD,CACF,GAEM,WAAWN,EAAkB,QAAA9B,EAAA,sBACjC,OAAO6C,GAAe,KAAK,OAAQf,CAAI,CACzC,GACF","names":["src_exports","__export","PromptLayer","__toCommonJS","import_ably","URL_API_PROMPTLAYER","promptlayerApiHandler","apiKey","body","__async","proxyGenerator","promptLayerApiRequest","response","data","warnOnBadResponse","e","promptLayerTrackMetadata","__spreadProps","__spreadValues","promptLayerTrackScore","promptLayerTrackPrompt","promptLayerTrackGroup","promptLayerCreateGroup","getPromptTemplate","promptName","params","url","publishPromptTemplate","getAllPromptTemplates","_a","key","value","runWorkflowRequest","_0","workflow_name","input_variables","metadata","workflow_label_name","workflow_version_number","return_all_outputs","api_key","timeout","payload","headers","result","execution_id","channel_name","ably_token","ably","Ably","final_output","waitForWorkflowCompletion","error","channel","resolve","reject","results","messageListener","message","timer","err","openaiStreamChat","_b","_c","_d","_e","_f","_g","_h","_i","content","functionCall","lastResult","toolCalls","delta","toolCall","lastToolCall","firstChoice","anthropicStreamMessage","cleaned_result","function_name","prev","current","final_result","generator","__asyncGenerator","iter","__forAwait","more","temp","__await","request_response","request_id","main_message","trackRequest","openaiStreamCompletion","text","anthropicStreamCompletion","completion","streamResponse","afterStream","mapResults","openaiChatRequest","client","kwargs","openaiCompletionsRequest","MAP_TYPE_TO_OPENAI_FUNCTION","openaiRequest","promptBlueprint","OpenAI","requestToMake","azureOpenAIRequest","anthropicChatRequest","anthropicCompletionsRequest","MAP_TYPE_TO_ANTHROPIC_FUNCTION","anthropicRequest","Anthropic","utilLogRequest","GroupManager","apiKey","promptLayerCreateGroup","opentelemetry","import_sdk_trace_base","import_sdk_trace_node","import_api","import_core","PromptLayerSpanExporter","enableTracing","apiKey","URL_API_PROMPTLAYER","attributes","kind","code","time","spans","requestData","span","_a","event","link","__spreadProps","__spreadValues","response","error","span_exporter_default","getTracer","name","setupTracing","enableTracing","apiKey","provider","exporter","span_exporter_default","processor","tracer","getTracer","promptLayerBase","apiKey","llm","function_name","provider","handler","target","args","newTarget","prop","receiver","value","_a","_b","_c","_d","request_start_time","provider_type","return_pl_id","pl_tags","span","__async","response","spanId","resolve","reject","request_response","promptlayerApiHandler","erro
r","opentelemetry","wrapWithSpan","functionName","func","attributes","args","tracer","getTracer","wrapperFunction","span","key","value","result","resolvedResult","error","handleError","TemplateManager","apiKey","promptName","params","getPromptTemplate","body","publishPromptTemplate","getAllPromptTemplates","metadata","apiKey","body","key","value","promptLayerTrackMetadata","score","promptLayerTrackScore","prompt","promptLayerTrackPrompt","group","promptLayerTrackGroup","TrackManager","opentelemetry","MAP_PROVIDER_TO_FUNCTION_NAME","openaiStreamChat","openaiStreamCompletion","anthropicStreamMessage","anthropicStreamCompletion","MAP_PROVIDER_TO_FUNCTION","openaiRequest","anthropicRequest","azureOpenAIRequest","isWorkflowResultsDict","obj","REQUIRED_KEYS","val","key","PromptLayer","apiKey","enableTracing","TemplateManager","GroupManager","TrackManager","wrapWithSpan","setupTracing","module","promptLayerBase","_0","__async","promptName","promptVersion","promptReleaseLabel","inputVariables","tags","metadata","groupId","modelParameterOverrides","stream","getTracer","span","functionInput","prompt_input_variables","templateGetParams","promptBlueprint","promptTemplate","promptBlueprintMetadata","promptBlueprintModel","provider_type","request_start_time","kwargs","__spreadValues","config","function_name","stream_function","request_function","provider_base_url","response","_trackRequest","body","request_end_time","trackRequest","streamResponse","requestLog","functionOutput","error","workflowName","workflowLabelName","workflowVersion","returnAllOutputs","result","runWorkflowRequest","outputNodes","node","utilLogRequest"]}
1
+ {"version":3,"sources":["../src/index.ts","../src/utils.ts","../src/groups.ts","../src/tracing.ts","../src/span-exporter.ts","../src/promptlayer.ts","../src/span-wrapper.ts","../src/templates.ts","../src/track.ts"],"sourcesContent":["import { GroupManager } from \"@/groups\";\nimport { promptLayerBase } from \"@/promptlayer\";\nimport { wrapWithSpan } from \"@/span-wrapper\";\nimport { TemplateManager } from \"@/templates\";\nimport { getTracer, setupTracing } from \"@/tracing\";\nimport { TrackManager } from \"@/track\";\nimport { GetPromptTemplateParams, LogRequest, RunRequest, WorkflowRequest, WorkflowResponse } from \"@/types\";\nimport {\n anthropicRequest,\n anthropicStreamCompletion,\n anthropicStreamMessage,\n azureOpenAIRequest,\n openaiRequest,\n openaiStreamChat,\n openaiStreamCompletion,\n runWorkflowRequest,\n streamResponse,\n trackRequest,\n utilLogRequest,\n} from \"@/utils\";\nimport * as opentelemetry from \"@opentelemetry/api\";\n\nconst MAP_PROVIDER_TO_FUNCTION_NAME = {\n openai: {\n chat: {\n function_name: \"openai.chat.completions.create\",\n stream_function: openaiStreamChat,\n },\n completion: {\n function_name: \"openai.completions.create\",\n stream_function: openaiStreamCompletion,\n },\n },\n anthropic: {\n chat: {\n function_name: \"anthropic.messages.create\",\n stream_function: anthropicStreamMessage,\n },\n completion: {\n function_name: \"anthropic.completions.create\",\n stream_function: anthropicStreamCompletion,\n },\n },\n \"openai.azure\": {\n chat: {\n function_name: \"openai.AzureOpenAI.chat.completions.create\",\n stream_function: openaiStreamChat,\n },\n completion: {\n function_name: \"openai.AzureOpenAI.completions.create\",\n stream_function: openaiStreamCompletion,\n },\n },\n};\n\nconst MAP_PROVIDER_TO_FUNCTION: Record<string, any> = {\n openai: openaiRequest,\n anthropic: anthropicRequest,\n \"openai.azure\": azureOpenAIRequest,\n};\n\nexport interface ClientOptions {\n apiKey?: string;\n enableTracing?: boolean;\n workspaceId?: number;\n}\n\nconst isWorkflowResultsDict = (obj: any): boolean => {\n if (!obj || typeof obj !== \"object\" || Array.isArray(obj)) {\n return false;\n }\n\n const REQUIRED_KEYS = [\n \"status\",\n \"value\",\n \"error_message\",\n \"raw_error_message\",\n \"is_output_node\",\n ];\n const values = Object.values(obj);\n\n return values.every((val) => {\n if (typeof val !== \"object\" || val === null) return false;\n return REQUIRED_KEYS.every((key) => key in val);\n });\n}\n\nexport class PromptLayer {\n apiKey: string;\n templates: TemplateManager;\n group: GroupManager;\n track: TrackManager;\n enableTracing: boolean;\n wrapWithSpan: typeof wrapWithSpan;\n\n constructor({\n apiKey = process.env.PROMPTLAYER_API_KEY,\n enableTracing = false,\n }: ClientOptions = {}) {\n if (apiKey === undefined) {\n throw new Error(\n \"PromptLayer API key not provided. 
Please set the PROMPTLAYER_API_KEY environment variable or pass the api_key parameter.\"\n );\n }\n\n this.apiKey = apiKey;\n this.enableTracing = enableTracing;\n this.templates = new TemplateManager(apiKey);\n this.group = new GroupManager(apiKey);\n this.track = new TrackManager(apiKey);\n this.wrapWithSpan = wrapWithSpan;\n\n if (enableTracing) {\n setupTracing(enableTracing, apiKey);\n }\n }\n\n get Anthropic() {\n try {\n const module = require(\"@anthropic-ai/sdk\").default;\n return promptLayerBase(this.apiKey, module, \"anthropic\", \"anthropic\");\n } catch (e) {\n console.error(\n \"To use the Anthropic module, you must install the @anthropic-ai/sdk package.\"\n );\n }\n }\n\n get OpenAI() {\n try {\n const module = require(\"openai\").default;\n return promptLayerBase(this.apiKey, module, \"openai\", \"openai\");\n } catch (e) {\n console.error(\n \"To use the OpenAI module, you must install the @openai/api package.\"\n );\n }\n }\n\n async run({\n promptName,\n promptVersion,\n promptReleaseLabel,\n inputVariables,\n tags,\n metadata,\n groupId,\n modelParameterOverrides,\n stream = false,\n }: RunRequest) {\n const tracer = getTracer();\n\n return tracer.startActiveSpan(\"PromptLayer Run\", async (span) => {\n try {\n const functionInput = {\n promptName,\n promptVersion,\n promptReleaseLabel,\n inputVariables,\n tags,\n metadata,\n groupId,\n modelParameterOverrides,\n stream,\n };\n span.setAttribute(\"function_input\", JSON.stringify(functionInput));\n\n const prompt_input_variables = inputVariables;\n const templateGetParams: GetPromptTemplateParams = {\n label: promptReleaseLabel,\n version: promptVersion,\n metadata_filters: metadata,\n };\n if (inputVariables) templateGetParams.input_variables = inputVariables;\n\n const promptBlueprint = await this.templates.get(\n promptName,\n templateGetParams\n );\n\n if (!promptBlueprint) throw new Error(\"Prompt not found\");\n\n const promptTemplate = promptBlueprint.prompt_template;\n if (!promptBlueprint.llm_kwargs) {\n throw new Error(\n `Prompt '${promptName}' does not have any LLM kwargs associated with it.`\n );\n }\n\n const promptBlueprintMetadata = promptBlueprint.metadata;\n if (!promptBlueprintMetadata) {\n throw new Error(\n `Prompt '${promptName}' does not have any metadata associated with it.`\n );\n }\n\n const promptBlueprintModel = promptBlueprintMetadata.model;\n if (!promptBlueprintModel) {\n throw new Error(\n `Prompt '${promptName}' does not have a model parameters associated with it.`\n );\n }\n\n const provider_type = promptBlueprintModel.provider;\n\n const request_start_time = new Date().toISOString();\n const kwargs = {\n ...promptBlueprint.llm_kwargs,\n ...(modelParameterOverrides || {}),\n };\n const config =\n MAP_PROVIDER_TO_FUNCTION_NAME[\n provider_type as keyof typeof MAP_PROVIDER_TO_FUNCTION_NAME\n ][promptTemplate.type];\n const function_name = config.function_name;\n\n const stream_function = config.stream_function;\n const request_function = MAP_PROVIDER_TO_FUNCTION[provider_type];\n const provider_base_url = promptBlueprint.provider_base_url;\n if (provider_base_url) {\n kwargs[\"baseURL\"] = provider_base_url.url;\n }\n kwargs[\"stream\"] = stream;\n if (stream && [\"openai\", \"openai.azure\"].includes(provider_type)) {\n kwargs[\"stream_options\"] = { include_usage: true };\n }\n\n const response = await request_function(promptBlueprint, kwargs);\n\n const _trackRequest = (body: object) => {\n const request_end_time = new Date().toISOString();\n return trackRequest({\n function_name,\n 
provider_type,\n args: [],\n kwargs,\n tags,\n request_start_time,\n request_end_time,\n api_key: this.apiKey,\n metadata,\n prompt_id: promptBlueprint.id,\n prompt_version: promptBlueprint.version,\n prompt_input_variables,\n group_id: groupId,\n return_prompt_blueprint: true,\n span_id: span.spanContext().spanId,\n ...body,\n });\n };\n\n if (stream)\n return streamResponse(response, _trackRequest, stream_function);\n const requestLog = await _trackRequest({ request_response: response });\n\n const functionOutput = {\n request_id: requestLog.request_id,\n raw_response: response,\n prompt_blueprint: requestLog.prompt_blueprint,\n };\n span.setAttribute(\"function_output\", JSON.stringify(functionOutput));\n\n return functionOutput;\n } catch (error) {\n span.setStatus({\n code: opentelemetry.SpanStatusCode.ERROR,\n message: error instanceof Error ? error.message : \"Unknown error\",\n });\n throw error;\n } finally {\n span.end();\n }\n });\n }\n\n async runWorkflow({\n workflowName,\n inputVariables = {},\n metadata = {},\n workflowLabelName = null,\n workflowVersion = null, // This is the version number, not the version ID\n returnAllOutputs = false,\n }: WorkflowRequest): Promise<WorkflowResponse> {\n try {\n const result = await runWorkflowRequest({\n workflow_name: workflowName,\n input_variables: inputVariables,\n metadata,\n workflow_label_name: workflowLabelName,\n workflow_version_number: workflowVersion,\n return_all_outputs: returnAllOutputs,\n api_key: this.apiKey,\n });\n\n if (!returnAllOutputs) {\n if (isWorkflowResultsDict(result)) {\n const nodeValues = Object.values(result);\n\n const outputNodes = nodeValues.filter(\n (node: any) => node.is_output_node === true\n );\n\n if (outputNodes.length === 0) {\n throw new Error(JSON.stringify(result, null, 2));\n }\n\n const anyOutputSuccess = outputNodes.some(\n (node: any) => node.status === \"SUCCESS\"\n );\n if (!anyOutputSuccess) {\n throw new Error(JSON.stringify(result, null, 2));\n }\n }\n }\n\n return result;\n } catch (error) {\n if (error instanceof Error) {\n console.error(\"Error running workflow:\", error.message);\n throw new Error(`Error running workflow: ${error.message}`);\n } else {\n console.error(\"Unknown error running workflow:\", error);\n throw new Error(\"Unknown error running workflow\");\n }\n }\n }\n\n async logRequest(body: LogRequest) {\n return utilLogRequest(this.apiKey, body);\n }\n}\n","import {\n GetPromptTemplateParams,\n GetPromptTemplateResponse,\n ListPromptTemplatesResponse,\n LogRequest,\n Pagination,\n PublishPromptTemplate,\n PublishPromptTemplateResponse,\n RequestLog,\n RunWorkflowRequestParams,\n TrackGroup,\n TrackMetadata,\n TrackPrompt,\n TrackRequest,\n TrackScore,\n WorkflowResponse,\n} from \"@/types\";\nimport type TypeAnthropic from \"@anthropic-ai/sdk\";\nimport {\n Completion as AnthropicCompletion,\n Message,\n MessageStreamEvent,\n} from \"@anthropic-ai/sdk/resources\";\nimport Ably from \"ably\";\nimport type TypeOpenAI from \"openai\";\nimport {\n ChatCompletion,\n ChatCompletionChunk,\n Completion,\n} from \"openai/resources\";\n\nexport const URL_API_PROMPTLAYER =\n process.env.URL_API_PROMPTLAYER || \"https://api.promptlayer.com\";\n\nconst promptlayerApiHandler = async <Item>(\n apiKey: string,\n body: TrackRequest & {\n request_response: AsyncIterable<Item> | any;\n }\n) => {\n const isGenerator = body.request_response[Symbol.asyncIterator] !== undefined;\n if (isGenerator) {\n return proxyGenerator(apiKey, body.request_response, body);\n }\n return await 
promptLayerApiRequest(apiKey, body);\n};\n\nconst promptLayerApiRequest = async (apiKey: string, body: TrackRequest) => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/track-request`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify(body),\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While logging your request, PromptLayer experienced the following error:\"\n );\n }\n if (data && body.return_pl_id) {\n return [body.request_response, data.request_id];\n }\n } catch (e) {\n console.warn(\n `WARNING: While logging your request PromptLayer had the following error: ${e}`\n );\n }\n return body.request_response;\n};\n\nconst promptLayerTrackMetadata = async (\n apiKey: string,\n body: TrackMetadata\n): Promise<boolean> => {\n try {\n const response = await fetch(\n `${URL_API_PROMPTLAYER}/library-track-metadata`,\n {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n ...body,\n api_key: apiKey,\n }),\n }\n );\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While logging metadata to your request, PromptLayer experienced the following error\"\n );\n return false;\n }\n } catch (e) {\n console.warn(\n `WARNING: While logging metadata to your request, PromptLayer experienced the following error: ${e}`\n );\n return false;\n }\n return true;\n};\n\nconst promptLayerTrackScore = async (\n apiKey: string,\n body: TrackScore\n): Promise<boolean> => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/library-track-score`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n ...body,\n api_key: apiKey,\n }),\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While scoring your request, PromptLayer experienced the following error\"\n );\n return false;\n }\n } catch (e) {\n console.warn(\n `WARNING: While scoring your request, PromptLayer experienced the following error: ${e}`\n );\n return false;\n }\n return true;\n};\n\nconst promptLayerTrackPrompt = async (\n apiKey: string,\n body: TrackPrompt\n): Promise<boolean> => {\n try {\n const response = await fetch(\n `${URL_API_PROMPTLAYER}/library-track-prompt`,\n {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n ...body,\n api_key: apiKey,\n }),\n }\n );\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While associating your request with a prompt template, PromptLayer experienced the following error\"\n );\n return false;\n }\n } catch (e) {\n console.warn(\n `WARNING: While associating your request with a prompt template, PromptLayer experienced the following error: ${e}`\n );\n return false;\n }\n return true;\n};\n\nconst promptLayerTrackGroup = async (\n apiKey: string,\n body: TrackGroup\n): Promise<boolean> => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/track-group`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n ...body,\n api_key: apiKey,\n }),\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While associating your request with a group, PromptLayer experienced the following 
error\"\n );\n return false;\n }\n } catch (e) {\n console.warn(\n `WARNING: While associating your request with a group, PromptLayer experienced the following error: ${e}`\n );\n return false;\n }\n return true;\n};\n\nconst promptLayerCreateGroup = async (\n apiKey: string\n): Promise<number | boolean> => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/create-group`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({\n api_key: apiKey,\n }),\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While creating a group PromptLayer had the following error\"\n );\n return false;\n }\n return data.id;\n } catch (e) {\n console.warn(\n `WARNING: While creating a group PromptLayer had the following error: ${e}`\n );\n return false;\n }\n};\n\nconst getPromptTemplate = async (\n apiKey: string,\n promptName: string,\n params?: Partial<GetPromptTemplateParams>\n) => {\n try {\n const url = new URL(\n `${URL_API_PROMPTLAYER}/prompt-templates/${promptName}`\n );\n const response = await fetch(url, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n \"X-API-KEY\": apiKey,\n },\n body: JSON.stringify(params),\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While fetching a prompt template PromptLayer had the following error\"\n );\n return null;\n }\n if (data.warning) {\n console.warn(\n `WARNING: While fetching your prompt PromptLayer had the following error: ${data.warning}`\n );\n }\n return data as Promise<GetPromptTemplateResponse>;\n } catch (e) {\n console.warn(\n `WARNING: While fetching a prompt template PromptLayer had the following error: ${e}`\n );\n return null;\n }\n};\n\nconst publishPromptTemplate = async (\n apiKey: string,\n body: PublishPromptTemplate\n) => {\n try {\n const response = await fetch(\n `${URL_API_PROMPTLAYER}/rest/prompt-templates`,\n {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n \"X-API-KEY\": apiKey,\n },\n body: JSON.stringify({\n prompt_template: { ...body },\n prompt_version: { ...body },\n release_labels: body.release_labels ? body.release_labels : undefined,\n }),\n }\n );\n const data = await response.json();\n if (response.status === 400) {\n warnOnBadResponse(\n data,\n \"WARNING: While publishing a prompt template PromptLayer had the following error\"\n );\n }\n return data as Promise<PublishPromptTemplateResponse>;\n } catch (e) {\n console.warn(\n `WARNING: While publishing a prompt template PromptLayer had the following error: ${e}`\n );\n }\n};\n\nconst getAllPromptTemplates = async (\n apiKey: string,\n params?: Partial<Pagination>\n) => {\n try {\n const url = new URL(`${URL_API_PROMPTLAYER}/prompt-templates`);\n Object.entries(params || {}).forEach(([key, value]) =>\n url.searchParams.append(key, value.toString())\n );\n const response = await fetch(url, {\n headers: {\n \"Content-Type\": \"application/json\",\n \"X-API-KEY\": apiKey,\n },\n });\n const data = await response.json();\n if (response.status !== 200) {\n warnOnBadResponse(\n data,\n \"WARNING: While fetching all prompt templates PromptLayer had the following error\"\n );\n return null;\n }\n return (data.items ?? 
[]) as Promise<Array<ListPromptTemplatesResponse>>;\n } catch (e) {\n console.warn(\n `WARNING: While fetching all prompt templates PromptLayer had the following error: ${e}`\n );\n return null;\n }\n};\n\nexport const runWorkflowRequest = async ({\n workflow_name,\n input_variables,\n metadata = {},\n workflow_label_name = null,\n workflow_version_number = null,\n return_all_outputs = false,\n api_key,\n timeout = 3600000, // Default timeout is 1 hour in milliseconds\n}: RunWorkflowRequestParams): Promise<WorkflowResponse> => {\n const payload = {\n input_variables,\n metadata,\n workflow_label_name,\n workflow_version_number,\n return_all_outputs,\n };\n\n const headers = {\n \"X-API-KEY\": api_key,\n \"Content-Type\": \"application/json\",\n };\n\n try {\n // Start the workflow by making a POST request\n const response = await fetch(\n `${URL_API_PROMPTLAYER}/workflows/${encodeURIComponent(\n workflow_name\n )}/run`,\n {\n method: \"POST\",\n headers: headers,\n body: JSON.stringify(payload),\n }\n );\n\n if (response.status !== 201) {\n const errorData = await response.json().catch(() => ({}));\n return {\n success: false,\n message: `Failed to run workflow: ${\n errorData.error || response.statusText\n }`,\n };\n }\n\n const result = await response.json();\n if (result.warning) {\n console.warn(`WARNING: ${result.warning}`);\n }\n const execution_id = result.workflow_version_execution_id;\n if (!execution_id) {\n console.log(\"No execution ID returned from workflow run\");\n return { success: false, message: \"Failed to run workflow\" };\n }\n\n const channel_name = `workflow_updates:${execution_id}`;\n\n // Request a token to subscribe to the channel\n const ws_response = await fetch(\n `${URL_API_PROMPTLAYER}/ws-token-request-library?capability=${channel_name}`,\n {\n method: \"POST\",\n headers: headers,\n }\n );\n\n const ws_token_response = await ws_response.json();\n\n const ably_token = ws_token_response.token_details.token;\n\n // Initialize Ably client using the Promise-based client\n const ably = new Ably.Realtime({ token: ably_token });\n\n try {\n // Wait for the workflow to complete and get the final output\n const final_output = await waitForWorkflowCompletion(\n ably,\n channel_name,\n timeout\n );\n ably.close();\n return final_output;\n } finally {\n // Ensure the Ably client is closed in all cases\n ably.close();\n }\n } catch (error) {\n console.error(\n `Failed to run workflow: ${\n error instanceof Error ? 
error.message : error\n }`\n );\n throw error;\n }\n};\n\nasync function waitForWorkflowCompletion(\n ably: Ably.Realtime,\n channel_name: string,\n timeout: number\n): Promise<any> {\n const channel = ably.channels.get(channel_name);\n\n return new Promise(async (resolve, reject) => {\n let results: any = null;\n\n const messageListener = (message: Ably.Message) => {\n if (message.name === \"SET_WORKFLOW_COMPLETE\") {\n const message_data = JSON.parse(message.data as string);\n results = message_data.final_output;\n clearTimeout(timer);\n channel.unsubscribe(\"SET_WORKFLOW_COMPLETE\", messageListener);\n resolve(results);\n }\n };\n\n // Set up a timeout to reject the promise if no message is received in time\n const timer = setTimeout(() => {\n channel.unsubscribe(\"SET_WORKFLOW_COMPLETE\", messageListener);\n reject(new Error(\"Workflow execution did not complete properly (timeout)\"));\n }, timeout);\n\n try {\n // Subscribe to the channel to receive updates\n await channel.subscribe(\"SET_WORKFLOW_COMPLETE\", messageListener);\n } catch (err) {\n clearTimeout(timer);\n reject(err);\n }\n });\n}\n\nconst openaiStreamChat = (results: ChatCompletionChunk[]): ChatCompletion => {\n let content: ChatCompletion.Choice[\"message\"][\"content\"] = null;\n let functionCall: ChatCompletion.Choice[\"message\"][\"function_call\"] =\n undefined;\n const response: ChatCompletion = {\n id: \"\",\n choices: [],\n created: Date.now(),\n model: \"\",\n object: \"chat.completion\",\n };\n const lastResult = results.at(-1);\n if (!lastResult) return response;\n let toolCalls: ChatCompletion.Choice[\"message\"][\"tool_calls\"] = undefined;\n for (const result of results) {\n if (result.choices.length === 0) continue;\n const delta = result.choices[0].delta;\n\n if (delta.content) {\n content = `${content || \"\"}${delta.content || \"\"}`;\n }\n if (delta.function_call) {\n functionCall = {\n name: `${functionCall ? functionCall.name : \"\"}${\n delta.function_call.name || \"\"\n }`,\n arguments: `${functionCall ? functionCall.arguments : \"\"}${\n delta.function_call.arguments || \"\"\n }`,\n };\n }\n const toolCall = delta.tool_calls?.[0];\n if (toolCall) {\n toolCalls = toolCalls || [];\n const lastToolCall = toolCalls.at(-1);\n if (!lastToolCall || toolCall.id) {\n toolCalls.push({\n id: toolCall.id || \"\",\n type: toolCall.type || \"function\",\n function: {\n name: toolCall.function?.name || \"\",\n arguments: toolCall.function?.arguments || \"\",\n },\n });\n continue;\n }\n lastToolCall.function.name = `${lastToolCall.function.name}${\n toolCall.function?.name || \"\"\n }`;\n lastToolCall.function.arguments = `${lastToolCall.function.arguments}${\n toolCall.function?.arguments || \"\"\n }`;\n }\n }\n const firstChoice = results[0].choices.at(0);\n response.choices.push({\n finish_reason: firstChoice?.finish_reason ?? \"stop\",\n index: firstChoice?.index ?? 0,\n logprobs: firstChoice?.logprobs ?? null,\n message: {\n role: \"assistant\",\n content,\n function_call: functionCall ? functionCall : undefined,\n tool_calls: toolCalls ? toolCalls : undefined,\n refusal: firstChoice?.delta.refusal ?? 
null,\n },\n });\n response.id = lastResult.id;\n response.model = lastResult.model;\n response.created = lastResult.created;\n response.system_fingerprint = lastResult.system_fingerprint;\n response.usage = lastResult.usage;\n return response;\n};\n\nconst anthropicStreamMessage = (results: MessageStreamEvent[]): Message => {\n let response: Message = {\n id: \"\",\n model: \"\",\n content: [],\n role: \"assistant\",\n type: \"message\",\n stop_reason: \"stop_sequence\",\n stop_sequence: null,\n usage: {\n input_tokens: 0,\n output_tokens: 0,\n },\n };\n const lastResult = results.at(-1);\n if (!lastResult) return response;\n let content = \"\";\n for (const result of results) {\n switch (result.type) {\n case \"message_start\": {\n response = {\n ...result.message,\n };\n break;\n }\n case \"content_block_delta\": {\n if (result.delta.type === \"text_delta\")\n content = `${content}${result.delta.text}`;\n }\n case \"message_delta\": {\n if (\"usage\" in result)\n response.usage.output_tokens = result.usage.output_tokens;\n if (\"stop_reason\" in result.delta)\n response.stop_reason = result.delta.stop_reason;\n }\n default: {\n break;\n }\n }\n }\n response.content.push({\n type: \"text\",\n text: content,\n });\n return response;\n};\n\nconst cleaned_result = (\n results: any[],\n function_name = \"openai.chat.completions.create\"\n) => {\n if (\"completion\" in results[0]) {\n return results.reduce(\n (prev, current) => ({\n ...current,\n completion: `${prev.completion}${current.completion}`,\n }),\n {}\n );\n }\n\n if (function_name === \"anthropic.messages.create\")\n return anthropicStreamMessage(results);\n\n if (\"text\" in results[0].choices[0]) {\n let response = \"\";\n for (const result of results) {\n response = `${response}${result.choices[0].text}`;\n }\n const final_result = structuredClone(results.at(-1));\n final_result.choices[0].text = response;\n return final_result;\n }\n\n if (\"delta\" in results[0].choices[0]) {\n const response = openaiStreamChat(results);\n response.choices[0] = {\n ...response.choices[0],\n ...response.choices[0].message,\n };\n return response;\n }\n\n return \"\";\n};\n\nasync function* proxyGenerator<Item>(\n apiKey: string,\n generator: AsyncIterable<Item>,\n body: TrackRequest\n) {\n const results = [];\n for await (const value of generator) {\n yield body.return_pl_id ? 
[value, null] : value;\n results.push(value);\n }\n const request_response = cleaned_result(results, body.function_name);\n const response = await promptLayerApiRequest(apiKey, {\n ...body,\n request_response,\n request_end_time: new Date().toISOString(),\n });\n if (response) {\n if (body.return_pl_id) {\n const request_id = (response as any)[1];\n const lastResult = results.at(-1);\n yield [lastResult, request_id];\n }\n }\n}\n\nconst warnOnBadResponse = (request_response: any, main_message: string) => {\n try {\n console.warn(`${main_message}: ${request_response.message}`);\n } catch (e) {\n console.warn(`${main_message}: ${request_response}`);\n }\n};\n\nconst trackRequest = async (body: TrackRequest) => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/track-request`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify(body),\n });\n if (response.status !== 200)\n warnOnBadResponse(\n response,\n \"WARNING: While logging your request, PromptLayer experienced the following error:\"\n );\n return response.json();\n } catch (e) {\n console.warn(\n `WARNING: While logging your request PromptLayer had the following error: ${e}`\n );\n }\n return {};\n};\n\nconst openaiStreamCompletion = (results: Completion[]) => {\n const response: Completion = {\n id: \"\",\n choices: [\n {\n finish_reason: \"stop\",\n index: 0,\n text: \"\",\n logprobs: null,\n },\n ],\n created: Date.now(),\n model: \"\",\n object: \"text_completion\",\n };\n const lastResult = results.at(-1);\n if (!lastResult) return response;\n let text = \"\";\n for (const result of results) {\n if (result.choices.length > 0 && result.choices[0].text) {\n text = `${text}${result.choices[0].text}`;\n }\n }\n response.choices[0].text = text;\n response.id = lastResult.id;\n response.created = lastResult.created;\n response.model = lastResult.model;\n response.system_fingerprint = lastResult.system_fingerprint;\n response.usage = lastResult.usage;\n return response;\n};\n\nconst anthropicStreamCompletion = (results: AnthropicCompletion[]) => {\n const response: AnthropicCompletion = {\n completion: \"\",\n id: \"\",\n model: \"\",\n stop_reason: \"\",\n type: \"completion\",\n };\n const lastResult = results.at(-1);\n if (!lastResult) return response;\n let completion = \"\";\n for (const result of results) {\n completion = `${completion}${result.completion}`;\n }\n response.completion = completion;\n response.id = lastResult.id;\n response.model = lastResult.model;\n response.stop_reason = lastResult.stop_reason;\n return response;\n};\n\nasync function* streamResponse<Item>(\n generator: AsyncIterable<Item>,\n afterStream: (body: object) => any,\n mapResults: any\n) {\n const data: {\n request_id: number | null;\n raw_response: any;\n prompt_blueprint: any;\n } = {\n request_id: null,\n raw_response: null,\n prompt_blueprint: null,\n };\n const results = [];\n for await (const result of generator) {\n results.push(result);\n data.raw_response = result;\n yield data;\n }\n const request_response = mapResults(results);\n const response = await afterStream({ request_response });\n data.request_id = response.request_id;\n data.prompt_blueprint = response.prompt_blueprint;\n yield data;\n}\n\nconst openaiChatRequest = async (client: TypeOpenAI, kwargs: any) => {\n return client.chat.completions.create(kwargs);\n};\n\nconst openaiCompletionsRequest = async (client: TypeOpenAI, kwargs: any) => {\n return client.completions.create(kwargs);\n};\n\nconst 
MAP_TYPE_TO_OPENAI_FUNCTION = {\n chat: openaiChatRequest,\n completion: openaiCompletionsRequest,\n};\n\nconst openaiRequest = async (\n promptBlueprint: GetPromptTemplateResponse,\n kwargs: any\n) => {\n const OpenAI = require(\"openai\").default;\n const client = new OpenAI({\n baseURL: kwargs.baseURL,\n });\n const requestToMake =\n MAP_TYPE_TO_OPENAI_FUNCTION[promptBlueprint.prompt_template.type];\n return requestToMake(client, kwargs);\n};\n\nconst azureOpenAIRequest = async (\n promptBlueprint: GetPromptTemplateResponse,\n kwargs: any\n) => {\n const OpenAI = require(\"openai\").AzureOpenAI;\n const client = new OpenAI({\n endpoint: kwargs.baseURL,\n });\n delete kwargs?.baseURL;\n const requestToMake =\n MAP_TYPE_TO_OPENAI_FUNCTION[promptBlueprint.prompt_template.type];\n return requestToMake(client, kwargs);\n};\n\nconst anthropicChatRequest = async (client: TypeAnthropic, kwargs: any) => {\n return client.messages.create(kwargs);\n};\n\nconst anthropicCompletionsRequest = async (\n client: TypeAnthropic,\n kwargs: any\n) => {\n return client.completions.create(kwargs);\n};\n\nconst MAP_TYPE_TO_ANTHROPIC_FUNCTION = {\n chat: anthropicChatRequest,\n completion: anthropicCompletionsRequest,\n};\n\nconst anthropicRequest = async (\n promptBlueprint: GetPromptTemplateResponse,\n kwargs: any\n) => {\n const Anthropic = require(\"@anthropic-ai/sdk\").default;\n const client = new Anthropic({\n baseURL: kwargs.baseURL,\n });\n const requestToMake =\n MAP_TYPE_TO_ANTHROPIC_FUNCTION[promptBlueprint.prompt_template.type];\n return requestToMake(client, kwargs);\n};\n\nconst utilLogRequest = async (\n apiKey: string,\n body: LogRequest\n): Promise<RequestLog | null> => {\n try {\n const response = await fetch(`${URL_API_PROMPTLAYER}/log-request`, {\n method: \"POST\",\n headers: {\n \"X-API-KEY\": apiKey,\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify(body),\n });\n if (response.status !== 201) {\n warnOnBadResponse(\n response,\n \"WARNING: While logging your request PromptLayer had the following error\"\n );\n return null;\n }\n return response.json();\n } catch (e) {\n console.warn(\n `WARNING: While tracking your prompt PromptLayer had the following error: ${e}`\n );\n return null;\n }\n};\n\nexport {\n anthropicRequest,\n anthropicStreamCompletion,\n anthropicStreamMessage,\n azureOpenAIRequest,\n getAllPromptTemplates,\n getPromptTemplate,\n openaiRequest,\n openaiStreamChat,\n openaiStreamCompletion,\n promptlayerApiHandler,\n promptLayerApiRequest,\n promptLayerCreateGroup,\n promptLayerTrackGroup,\n promptLayerTrackMetadata,\n promptLayerTrackPrompt,\n promptLayerTrackScore,\n publishPromptTemplate,\n streamResponse,\n trackRequest,\n utilLogRequest,\n};\n","import { promptLayerCreateGroup } from \"@/utils\";\n\nexport class GroupManager {\n apiKey: string;\n\n constructor(apiKey: string) {\n this.apiKey = apiKey;\n }\n\n create = () => promptLayerCreateGroup(this.apiKey);\n}\n","import * as opentelemetry from '@opentelemetry/api';\nimport {SimpleSpanProcessor} from '@opentelemetry/sdk-trace-base';\nimport {NodeTracerProvider} from '@opentelemetry/sdk-trace-node';\nimport PromptLayerSpanExporter from '@/span-exporter';\n\nexport const getTracer = (name: string = 'promptlayer-tracer') => {\n return opentelemetry.trace.getTracer(name);\n}\n\nexport const setupTracing = (enableTracing: boolean, apiKey?: string) => {\n const provider = new NodeTracerProvider();\n const exporter = new PromptLayerSpanExporter(enableTracing, apiKey);\n const processor = new 
SimpleSpanProcessor(exporter);\n provider.addSpanProcessor(processor);\n provider.register();\n}\n","import {Attributes, SpanKind, SpanStatusCode} from '@opentelemetry/api';\nimport {ReadableSpan, SpanExporter} from '@opentelemetry/sdk-trace-base';\nimport {ExportResultCode} from '@opentelemetry/core';\nimport {URL_API_PROMPTLAYER} from '@/utils';\n\nclass PromptLayerSpanExporter implements SpanExporter {\n private apiKey: string | undefined;\n private enableTracing: boolean;\n private url: string;\n\n constructor(enableTracing: boolean, apiKey?: string) {\n this.apiKey = apiKey || process.env.PROMPTLAYER_API_KEY;\n this.enableTracing = enableTracing;\n this.url = `${URL_API_PROMPTLAYER}/spans-bulk`;\n }\n\n private attributesToObject(attributes: Attributes | undefined): Record<string, any> {\n if (!attributes) return {};\n return Object.fromEntries(Object.entries(attributes));\n }\n\n private spanKindToString(kind: SpanKind): string {\n const kindMap: Record<SpanKind, string> = {\n [SpanKind.INTERNAL]: 'SpanKind.INTERNAL',\n [SpanKind.SERVER]: 'SpanKind.SERVER',\n [SpanKind.CLIENT]: 'SpanKind.CLIENT',\n [SpanKind.PRODUCER]: 'SpanKind.PRODUCER',\n [SpanKind.CONSUMER]: 'SpanKind.CONSUMER',\n };\n return kindMap[kind] || 'SpanKind.INTERNAL';\n }\n\n private statusCodeToString(code: SpanStatusCode): string {\n const statusMap: Record<SpanStatusCode, string> = {\n [SpanStatusCode.ERROR]: 'StatusCode.ERROR',\n [SpanStatusCode.OK]: 'StatusCode.OK',\n [SpanStatusCode.UNSET]: 'StatusCode.UNSET',\n };\n return statusMap[code] || 'StatusCode.UNSET';\n }\n\n private toNanoseconds(time: [number, number]): string {\n return (BigInt(time[0]) * BigInt(1e9) + BigInt(time[1])).toString();\n };\n\n export(spans: ReadableSpan[]): Promise<ExportResultCode> {\n if (!this.enableTracing) {\n return Promise.resolve(ExportResultCode.SUCCESS);\n }\n\n const requestData = spans.map(span => ({\n name: span.name,\n context: {\n trace_id: span.spanContext().traceId,\n span_id: span.spanContext().spanId,\n trace_state: span.spanContext().traceState?.serialize() || '',\n },\n kind: this.spanKindToString(span.kind),\n parent_id: span.parentSpanId || null,\n start_time: this.toNanoseconds(span.startTime),\n end_time: this.toNanoseconds(span.endTime),\n status: {\n status_code: this.statusCodeToString(span.status.code),\n description: span.status.message,\n },\n attributes: this.attributesToObject(span.attributes),\n events: span.events.map(event => ({\n name: event.name,\n timestamp: this.toNanoseconds(event.time),\n attributes: this.attributesToObject(event.attributes),\n })),\n links: span.links.map(link => ({\n context: link.context,\n attributes: this.attributesToObject(link.attributes),\n })),\n resource: {\n attributes: {\n ...span.resource.attributes,\n \"service.name\": \"prompt-layer-js\",\n },\n schema_url: '',\n },\n }));\n\n return fetch(this.url, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'X-API-KEY': this.apiKey || '',\n },\n body: JSON.stringify({\n spans: requestData,\n }),\n })\n .then(response => {\n if (!response.ok) {\n console.error(`Error exporting spans\\nHTTP error! 
status: ${response.status}`);\n return ExportResultCode.FAILED;\n }\n return ExportResultCode.SUCCESS;\n })\n .catch((error) => {\n console.error('Error exporting spans:', error);\n return ExportResultCode.FAILED;\n });\n }\n\n shutdown(): Promise<void> {\n return Promise.resolve();\n }\n}\n\nexport default PromptLayerSpanExporter;\n","import {getTracer} from \"@/tracing\";\nimport {promptlayerApiHandler} from \"@/utils\";\n\nconst tracer = getTracer();\n\nexport const promptLayerBase = (\n apiKey: string,\n llm: object,\n function_name = \"\",\n provider = \"openai\"\n) => {\n const handler: ProxyHandler<any> = {\n construct: (target, args) => {\n const newTarget = Reflect.construct(target, args);\n Object.defineProperties(newTarget, {\n function_name: {\n value: function_name,\n writable: true,\n },\n provider: {\n value: provider,\n },\n });\n return new Proxy(newTarget, handler);\n },\n get: (target, prop, receiver) => {\n const value = target[prop];\n const function_name = `${Reflect.get(\n target,\n \"function_name\"\n )}.${prop.toString()}`;\n\n if (typeof value === \"object\") {\n Object.defineProperties(value, {\n function_name: {\n value: function_name,\n writable: true,\n },\n provider: {\n value: provider,\n },\n });\n return new Proxy(value, handler);\n }\n\n if (typeof value === \"function\") {\n return (...args: any[]) => {\n const request_start_time = new Date().toISOString();\n const provider_type = Reflect.get(target, \"provider\");\n const return_pl_id = args[0]?.return_pl_id;\n const pl_tags = args[0]?.pl_tags;\n delete args[0]?.return_pl_id;\n delete args[0]?.pl_tags;\n\n return tracer.startActiveSpan(`${provider_type}.${function_name}`, async (span: any) => {\n try {\n span.setAttribute('function_input', JSON.stringify(args));\n const response = Reflect.apply(value, target, args);\n const spanId = span.spanContext().spanId;\n\n if (response instanceof Promise) {\n return new Promise((resolve, reject) => {\n response\n .then(async (request_response) => {\n const response = await promptlayerApiHandler(apiKey, {\n api_key: apiKey,\n provider_type,\n function_name,\n request_start_time,\n request_end_time: new Date().toISOString(),\n request_response,\n kwargs: args[0],\n return_pl_id,\n tags: pl_tags,\n span_id: spanId,\n });\n\n span.setAttribute('function_output', JSON.stringify(response));\n span.setAttribute('response_status', 'success');\n span.end();\n resolve(response);\n })\n .catch((error) => {\n span.recordException(error);\n span.setAttribute('response_status', 'error');\n span.end();\n reject(error);\n });\n });\n }\n\n span.setAttribute('function_output', JSON.stringify(response));\n span.setAttribute('response_status', 'success');\n span.end();\n return response;\n } catch (error) {\n span.recordException(error);\n span.setAttribute('response_status', 'error');\n span.end();\n throw error;\n }\n });\n };\n }\n\n return Reflect.get(target, prop, receiver);\n },\n };\n\n return new Proxy(llm, handler);\n};\n","import * as opentelemetry from '@opentelemetry/api';\nimport { getTracer } from '@/tracing';\n\nexport const wrapWithSpan = (functionName: string, func: Function, attributes?: Record<string, any>) => {\n return function (...args: any[]) {\n const tracer = getTracer();\n\n const wrapperFunction = (span: opentelemetry.Span) => {\n try {\n if (attributes) {\n Object.entries(attributes).forEach(([key, value]) => {\n span.setAttribute(key, value);\n });\n }\n\n span.setAttribute('function_input', JSON.stringify(args));\n const result = func(...args);\n\n if 
(result instanceof Promise) {\n return result.then((resolvedResult) => {\n span.setAttribute('function_output', JSON.stringify(resolvedResult));\n span.setStatus({ code: opentelemetry.SpanStatusCode.OK });\n return resolvedResult;\n }).catch((error) => {\n handleError(span, error, args);\n throw error;\n }).finally(() => span.end());\n } else {\n span.setAttribute('function_output', JSON.stringify(result));\n span.setStatus({ code: opentelemetry.SpanStatusCode.OK });\n span.end();\n return result;\n }\n } catch (error) {\n handleError(span, error, args);\n throw error;\n }\n };\n\n return tracer.startActiveSpan(functionName, wrapperFunction);\n };\n};\n\nconst handleError = (span: opentelemetry.Span, error: any, args: any[]) => {\n span.setAttribute('function_input', JSON.stringify(args));\n span.setStatus({\n code: opentelemetry.SpanStatusCode.ERROR,\n message: error instanceof Error ? error.message : 'Unknown error',\n });\n span.end();\n}\n","import {\n GetPromptTemplateParams,\n Pagination,\n PublishPromptTemplate,\n} from \"@/types\";\nimport {\n getAllPromptTemplates,\n getPromptTemplate,\n publishPromptTemplate,\n} from \"@/utils\";\n\nexport class TemplateManager {\n apiKey: string;\n\n constructor(apiKey: string) {\n this.apiKey = apiKey;\n }\n\n get = (promptName: string, params?: Partial<GetPromptTemplateParams>) =>\n getPromptTemplate(this.apiKey, promptName, params);\n\n publish = (body: PublishPromptTemplate) =>\n publishPromptTemplate(this.apiKey, body);\n\n all = (params?: Pagination) => getAllPromptTemplates(this.apiKey, params);\n}\n","import { TrackGroup, TrackMetadata, TrackPrompt, TrackScore } from \"@/types\";\nimport {\n promptLayerTrackGroup,\n promptLayerTrackMetadata,\n promptLayerTrackPrompt,\n promptLayerTrackScore,\n} from \"@/utils\";\n\nconst metadata = (apiKey: string, body: TrackMetadata): Promise<boolean> => {\n if (!(body.metadata instanceof Object)) {\n throw new Error(\"Please provide a dictionary of metadata.\");\n }\n for (const [key, value] of Object.entries(body.metadata)) {\n if (typeof key !== \"string\" || typeof value !== \"string\") {\n throw new Error(\n \"Please provide a dictionary of metadata with key value pair of strings.\"\n );\n }\n }\n return promptLayerTrackMetadata(apiKey, body);\n};\n\nconst score = (apiKey: string, body: TrackScore): Promise<boolean> => {\n if (typeof body.score !== \"number\") {\n throw new Error(\"Score must be a number\");\n }\n if (body.score < 0 || body.score > 100) {\n throw new Error(\"Score must be a number between 0 and 100.\");\n }\n return promptLayerTrackScore(apiKey, body);\n};\n\nconst prompt = (apiKey: string, body: TrackPrompt): Promise<boolean> => {\n if (!(body.prompt_input_variables instanceof Object)) {\n throw new Error(\"Prompt template input variable dictionary not provided.\");\n }\n return promptLayerTrackPrompt(apiKey, body);\n};\n\nconst group = (apiKey: string, body: TrackGroup) =>\n promptLayerTrackGroup(apiKey, body);\n\nexport class TrackManager {\n apiKey: string;\n\n constructor(apiKey: string) {\n this.apiKey = apiKey;\n }\n\n group = (body: TrackGroup) => group(this.apiKey, body);\n\n metadata = (body: TrackMetadata) => metadata(this.apiKey, body);\n\n prompt = (body: TrackPrompt) => prompt(this.apiKey, body);\n\n score = (body: TrackScore) => score(this.apiKey, 
body);\n}\n"],"mappings":"+1DAAA,IAAAA,GAAA,GAAAC,GAAAD,GAAA,iBAAAE,IAAA,eAAAC,GAAAH,ICuBA,IAAAI,GAAiB,mBAQV,IAAMC,EACX,QAAQ,IAAI,qBAAuB,8BAE/BC,GAAwB,CAC5BC,EACAC,IAGGC,EAAA,wBAEH,OADoBD,EAAK,iBAAiB,OAAO,aAAa,IAAM,OAE3DE,GAAeH,EAAQC,EAAK,iBAAkBA,CAAI,EAEpD,MAAMG,GAAsBJ,EAAQC,CAAI,CACjD,GAEMG,GAAwB,CAAOJ,EAAgBC,IAAuBC,EAAA,wBAC1E,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,iBAAkB,CACnE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUG,CAAI,CAC3B,CAAC,EACKK,EAAO,MAAMD,EAAS,KAAK,EAOjC,GANIA,EAAS,SAAW,KACtBE,EACED,EACA,mFACF,EAEEA,GAAQL,EAAK,aACf,MAAO,CAACA,EAAK,iBAAkBK,EAAK,UAAU,CAElD,OAASE,EAAG,CACV,QAAQ,KACN,4EAA4EA,CAAC,EAC/E,CACF,CACA,OAAOP,EAAK,gBACd,GAEMQ,GAA2B,CAC/BT,EACAC,IACqBC,EAAA,wBACrB,GAAI,CACF,IAAMG,EAAW,MAAM,MACrB,GAAGP,CAAmB,0BACtB,CACE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUY,EAAAC,EAAA,GAChBV,GADgB,CAEnB,QAASD,CACX,EAAC,CACH,CACF,EACMM,EAAO,MAAMD,EAAS,KAAK,EACjC,GAAIA,EAAS,SAAW,IACtB,OAAAE,EACED,EACA,8FACF,EACO,EAEX,OAASE,EAAG,CACV,eAAQ,KACN,iGAAiGA,CAAC,EACpG,EACO,EACT,CACA,MAAO,EACT,GAEMI,GAAwB,CAC5BZ,EACAC,IACqBC,EAAA,wBACrB,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,uBAAwB,CACzE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUY,EAAAC,EAAA,GAChBV,GADgB,CAEnB,QAASD,CACX,EAAC,CACH,CAAC,EACKM,EAAO,MAAMD,EAAS,KAAK,EACjC,GAAIA,EAAS,SAAW,IACtB,OAAAE,EACED,EACA,kFACF,EACO,EAEX,OAASE,EAAG,CACV,eAAQ,KACN,qFAAqFA,CAAC,EACxF,EACO,EACT,CACA,MAAO,EACT,GAEMK,GAAyB,CAC7Bb,EACAC,IACqBC,EAAA,wBACrB,GAAI,CACF,IAAMG,EAAW,MAAM,MACrB,GAAGP,CAAmB,wBACtB,CACE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUY,EAAAC,EAAA,GAChBV,GADgB,CAEnB,QAASD,CACX,EAAC,CACH,CACF,EACMM,EAAO,MAAMD,EAAS,KAAK,EACjC,GAAIA,EAAS,SAAW,IACtB,OAAAE,EACED,EACA,6GACF,EACO,EAEX,OAASE,EAAG,CACV,eAAQ,KACN,gHAAgHA,CAAC,EACnH,EACO,EACT,CACA,MAAO,EACT,GAEMM,GAAwB,CAC5Bd,EACAC,IACqBC,EAAA,wBACrB,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,eAAgB,CACjE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUY,EAAAC,EAAA,GAChBV,GADgB,CAEnB,QAASD,CACX,EAAC,CACH,CAAC,EACKM,EAAO,MAAMD,EAAS,KAAK,EACjC,GAAIA,EAAS,SAAW,IACtB,OAAAE,EACED,EACA,mGACF,EACO,EAEX,OAASE,EAAG,CACV,eAAQ,KACN,sGAAsGA,CAAC,EACzG,EACO,EACT,CACA,MAAO,EACT,GAEMO,GACJf,GAC8BE,EAAA,wBAC9B,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,gBAAiB,CAClE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAU,CACnB,QAASE,CACX,CAAC,CACH,CAAC,EACKM,EAAO,MAAMD,EAAS,KAAK,EACjC,OAAIA,EAAS,SAAW,KACtBE,EACED,EACA,qEACF,EACO,IAEFA,EAAK,EACd,OAAS,EAAG,CACV,eAAQ,KACN,wEAAwE,CAAC,EAC3E,EACO,EACT,CACF,GAEMU,GAAoB,CACxBhB,EACAiB,EACAC,IACGhB,EAAA,wBACH,GAAI,CACF,IAAMiB,EAAM,IAAI,IACd,GAAGrB,CAAmB,qBAAqBmB,CAAU,EACvD,EACMZ,EAAW,MAAM,MAAMc,EAAK,CAChC,OAAQ,OACR,QAAS,CACP,eAAgB,mBAChB,YAAanB,CACf,EACA,KAAM,KAAK,UAAUkB,CAAM,CAC7B,CAAC,EACKZ,EAAO,MAAMD,EAAS,KAAK,EACjC,OAAIA,EAAS,SAAW,KACtBE,EACED,EACA,+EACF,EACO,OAELA,EAAK,SACP,QAAQ,KACN,4EAA4EA,EAAK,OAAO,EAC1F,EAEKA,EACT,OAASE,EAAG,CACV,eAAQ,KACN,kFAAkFA,CAAC,EACrF,EACO,IACT,CACF,GAEMY,GAAwB,CAC5BpB,EACAC,IACGC,EAAA,wBACH,GAAI,CACF,IAAMG,EAAW,MAAM,MACrB,GAAGP,CAAmB,yBACtB,CACE,OAAQ,OACR,QAAS,CACP,eAAgB,mBAChB,YAAaE,CACf,EACA,KAAM,KAAK,UAAU,CACnB,gBAAiBW,EAAA,GAAKV,GACtB,eAAgBU,EAAA,GAAKV,GACrB,eAAgBA,EAAK,eAAiBA,EAAK,eAAiB,MAC9D,CAAC,CACH,CACF,EACMK,EAAO,MAAMD,EAAS,KAAK,EACjC,OAAIA,EAAS,SAAW,KACtBE,EACED,EACA,iFACF,EAEKA,CACT,OAASE,EAAG,CACV,QAAQ,KACN,oFAAoFA,CAAC,EACvF,CACF,CACF,GAEMa,GAAwB,CAC5BrB,EACAkB,IACGhB,EAAA,wBA3TL,IAAAoB,EA4TE,GAAI,CACF,IAAMH,EAAM,IAAI,IAAI,GAAGrB,CAAmB,mBAAmB,EAC7D,OAAO,QAAQoB,GAAU,CAAC,CAAC,EAAE,QAAQ,CAAC,CAACK,EAAKC,CAAK,IAC/CL,EAAI,aAAa,OAAOI,EAAKC,EAAM,SAAS,CAAC,CAC/C,EACA,IAAMnB,EAAW,MAAM,MAAMc,EAA
K,CAChC,QAAS,CACP,eAAgB,mBAChB,YAAanB,CACf,CACF,CAAC,EACKM,EAAO,MAAMD,EAAS,KAAK,EACjC,OAAIA,EAAS,SAAW,KACtBE,EACED,EACA,kFACF,EACO,OAEDgB,EAAAhB,EAAK,QAAL,KAAAgB,EAAc,CAAC,CACzB,OAASd,EAAG,CACV,eAAQ,KACN,qFAAqFA,CAAC,EACxF,EACO,IACT,CACF,GAEaiB,GAA4BC,GASkBxB,EAAA,QATlBwB,GASkB,UATlB,CACvC,cAAAC,EACA,gBAAAC,EACA,SAAAC,EAAW,CAAC,EACZ,oBAAAC,EAAsB,KACtB,wBAAAC,EAA0B,KAC1B,mBAAAC,EAAqB,GACrB,QAAAC,EACA,QAAAC,EAAU,IACZ,EAA2D,CACzD,IAAMC,EAAU,CACd,gBAAAP,EACA,SAAAC,EACA,oBAAAC,EACA,wBAAAC,EACA,mBAAAC,CACF,EAEMI,EAAU,CACd,YAAaH,EACb,eAAgB,kBAClB,EAEA,GAAI,CAEF,IAAM5B,EAAW,MAAM,MACrB,GAAGP,CAAmB,cAAc,mBAClC6B,CACF,CAAC,OACD,CACE,OAAQ,OACR,QAASS,EACT,KAAM,KAAK,UAAUD,CAAO,CAC9B,CACF,EAEA,GAAI9B,EAAS,SAAW,IAEtB,MAAO,CACL,QAAS,GACT,QAAS,4BAHO,MAAMA,EAAS,KAAK,EAAE,MAAM,KAAO,CAAC,EAAE,GAI1C,OAASA,EAAS,UAC9B,EACF,EAGF,IAAMgC,EAAS,MAAMhC,EAAS,KAAK,EAC/BgC,EAAO,SACT,QAAQ,KAAK,YAAYA,EAAO,OAAO,EAAE,EAE3C,IAAMC,EAAeD,EAAO,8BAC5B,GAAI,CAACC,EACH,eAAQ,IAAI,4CAA4C,EACjD,CAAE,QAAS,GAAO,QAAS,wBAAyB,EAG7D,IAAMC,EAAe,oBAAoBD,CAAY,GAa/CE,GAFoB,MARN,MAAM,MACxB,GAAG1C,CAAmB,wCAAwCyC,CAAY,GAC1E,CACE,OAAQ,OACR,QAASH,CACX,CACF,GAE4C,KAAK,GAEZ,cAAc,MAG7CK,EAAO,IAAI,GAAAC,QAAK,SAAS,CAAE,MAAOF,CAAW,CAAC,EAEpD,GAAI,CAEF,IAAMG,EAAe,MAAMC,GACzBH,EACAF,EACAL,CACF,EACA,OAAAO,EAAK,MAAM,EACJE,CACT,QAAE,CAEAF,EAAK,MAAM,CACb,CACF,OAASI,EAAO,CACd,cAAQ,MACN,2BACEA,aAAiB,MAAQA,EAAM,QAAUA,CAC3C,EACF,EACMA,CACR,CACF,GAEA,SAAeD,GACbH,EACAF,EACAL,EACc,QAAAhC,EAAA,sBACd,IAAM4C,EAAUL,EAAK,SAAS,IAAIF,CAAY,EAE9C,OAAO,IAAI,QAAQ,CAAOQ,EAASC,IAAW9C,EAAA,sBAC5C,IAAI+C,EAAe,KAEbC,EAAmBC,GAA0B,CAC7CA,EAAQ,OAAS,0BAEnBF,EADqB,KAAK,MAAME,EAAQ,IAAc,EAC/B,aACvB,aAAaC,CAAK,EAClBN,EAAQ,YAAY,wBAAyBI,CAAe,EAC5DH,EAAQE,CAAO,EAEnB,EAGMG,EAAQ,WAAW,IAAM,CAC7BN,EAAQ,YAAY,wBAAyBI,CAAe,EAC5DF,EAAO,IAAI,MAAM,wDAAwD,CAAC,CAC5E,EAAGd,CAAO,EAEV,GAAI,CAEF,MAAMY,EAAQ,UAAU,wBAAyBI,CAAe,CAClE,OAASG,EAAK,CACZ,aAAaD,CAAK,EAClBJ,EAAOK,CAAG,CACZ,CACF,EAAC,CACH,GAEA,IAAMC,EAAoBL,GAAmD,CA7d7E,IAAA3B,EAAAiC,EAAAC,EAAAC,EAAAC,EAAAC,EAAAC,EAAAC,EAAAC,EA8dE,IAAIC,EAAuD,KACvDC,EAEE3D,EAA2B,CAC/B,GAAI,GACJ,QAAS,CAAC,EACV,QAAS,KAAK,IAAI,EAClB,MAAO,GACP,OAAQ,iBACV,EACM4D,EAAahB,EAAQ,GAAG,EAAE,EAChC,GAAI,CAACgB,EAAY,OAAO5D,EACxB,IAAI6D,EACJ,QAAW7B,KAAUY,EAAS,CAC5B,GAAIZ,EAAO,QAAQ,SAAW,EAAG,SACjC,IAAM8B,EAAQ9B,EAAO,QAAQ,CAAC,EAAE,MAE5B8B,EAAM,UACRJ,EAAU,GAAGA,GAAW,EAAE,GAAGI,EAAM,SAAW,EAAE,IAE9CA,EAAM,gBACRH,EAAe,CACb,KAAM,GAAGA,EAAeA,EAAa,KAAO,EAAE,GAC5CG,EAAM,cAAc,MAAQ,EAC9B,GACA,UAAW,GAAGH,EAAeA,EAAa,UAAY,EAAE,GACtDG,EAAM,cAAc,WAAa,EACnC,EACF,GAEF,IAAMC,GAAW9C,EAAA6C,EAAM,aAAN,YAAA7C,EAAmB,GACpC,GAAI8C,EAAU,CACZF,EAAYA,GAAa,CAAC,EAC1B,IAAMG,EAAeH,EAAU,GAAG,EAAE,EACpC,GAAI,CAACG,GAAgBD,EAAS,GAAI,CAChCF,EAAU,KAAK,CACb,GAAIE,EAAS,IAAM,GACnB,KAAMA,EAAS,MAAQ,WACvB,SAAU,CACR,OAAMb,EAAAa,EAAS,WAAT,YAAAb,EAAmB,OAAQ,GACjC,YAAWC,EAAAY,EAAS,WAAT,YAAAZ,EAAmB,YAAa,EAC7C,CACF,CAAC,EACD,QACF,CACAa,EAAa,SAAS,KAAO,GAAGA,EAAa,SAAS,IAAI,KACxDZ,EAAAW,EAAS,WAAT,YAAAX,EAAmB,OAAQ,EAC7B,GACAY,EAAa,SAAS,UAAY,GAAGA,EAAa,SAAS,SAAS,KAClEX,EAAAU,EAAS,WAAT,YAAAV,EAAmB,YAAa,EAClC,EACF,CACF,CACA,IAAMY,EAAcrB,EAAQ,CAAC,EAAE,QAAQ,GAAG,CAAC,EAC3C,OAAA5C,EAAS,QAAQ,KAAK,CACpB,eAAesD,EAAAW,GAAA,YAAAA,EAAa,gBAAb,KAAAX,EAA8B,OAC7C,OAAOC,EAAAU,GAAA,YAAAA,EAAa,QAAb,KAAAV,EAAsB,EAC7B,UAAUC,EAAAS,GAAA,YAAAA,EAAa,WAAb,KAAAT,EAAyB,KACnC,QAAS,CACP,KAAM,YACN,QAAAE,EACA,cAAeC,GAA8B,OAC7C,WAAYE,GAAwB,OACpC,SAASJ,EAAAQ,GAAA,YAAAA,EAAa,MAAM,UAAnB,KAAAR,EAA8B,IACzC,CACF,CAAC,EACDzD,EAAS,GAAK4D,EAAW,GACzB5D,EAAS,MAAQ4D,EAAW,MAC5B5D,EAAS,QAAU4D,EAAW,QAC9B5D,EAAS,mBAAqB4D,EAAW,mBACzC5D,EAAS,MAAQ4D,EAAW,MACrB5D,CACT,EAEMkE,EAA0BtB,GAA2C,CACzE
,IAAI5C,EAAoB,CACtB,GAAI,GACJ,MAAO,GACP,QAAS,CAAC,EACV,KAAM,YACN,KAAM,UACN,YAAa,gBACb,cAAe,KACf,MAAO,CACL,aAAc,EACd,cAAe,CACjB,CACF,EAEA,GAAI,CADe4C,EAAQ,GAAG,EAAE,EACf,OAAO5C,EACxB,IAAI0D,EAAU,GACd,QAAW1B,KAAUY,EACnB,OAAQZ,EAAO,KAAM,CACnB,IAAK,gBAAiB,CACpBhC,EAAWM,EAAA,GACN0B,EAAO,SAEZ,KACF,CACA,IAAK,sBACCA,EAAO,MAAM,OAAS,eACxB0B,EAAU,GAAGA,CAAO,GAAG1B,EAAO,MAAM,IAAI,IAE5C,IAAK,gBACC,UAAWA,IACbhC,EAAS,MAAM,cAAgBgC,EAAO,MAAM,eAC1C,gBAAiBA,EAAO,QAC1BhC,EAAS,YAAcgC,EAAO,MAAM,aAExC,QACE,KAEJ,CAEF,OAAAhC,EAAS,QAAQ,KAAK,CACpB,KAAM,OACN,KAAM0D,CACR,CAAC,EACM1D,CACT,EAEMmE,GAAiB,CACrBvB,EACAwB,EAAgB,mCACb,CACH,GAAI,eAAgBxB,EAAQ,CAAC,EAC3B,OAAOA,EAAQ,OACb,CAACyB,EAAMC,IAAajE,EAAAC,EAAA,GACfgE,GADe,CAElB,WAAY,GAAGD,EAAK,UAAU,GAAGC,EAAQ,UAAU,EACrD,GACA,CAAC,CACH,EAGF,GAAIF,IAAkB,4BACpB,OAAOF,EAAuBtB,CAAO,EAEvC,GAAI,SAAUA,EAAQ,CAAC,EAAE,QAAQ,CAAC,EAAG,CACnC,IAAI5C,EAAW,GACf,QAAWgC,KAAUY,EACnB5C,EAAW,GAAGA,CAAQ,GAAGgC,EAAO,QAAQ,CAAC,EAAE,IAAI,GAEjD,IAAMuC,EAAe,gBAAgB3B,EAAQ,GAAG,EAAE,CAAC,EACnD,OAAA2B,EAAa,QAAQ,CAAC,EAAE,KAAOvE,EACxBuE,CACT,CAEA,GAAI,UAAW3B,EAAQ,CAAC,EAAE,QAAQ,CAAC,EAAG,CACpC,IAAM5C,EAAWiD,EAAiBL,CAAO,EACzC,OAAA5C,EAAS,QAAQ,CAAC,EAAIM,IAAA,GACjBN,EAAS,QAAQ,CAAC,GAClBA,EAAS,QAAQ,CAAC,EAAE,SAElBA,CACT,CAEA,MAAO,EACT,EAEA,SAAgBF,GACdH,EACA6E,EACA5E,EACA,QAAA6E,EAAA,sBACA,IAAM7B,EAAU,CAAC,EACjB,YAAA8B,EAAAC,EAA0BH,GAA1BI,EAAAC,EAAArC,EAAAoC,EAAA,EAAAC,EAAA,UAAAC,EAAAJ,EAAA,cAAAE,EAAA,GACE,CADS,IAAMzD,EAAjB0D,EAAA,MACE,MAAMjF,EAAK,aAAe,CAACuB,EAAO,IAAI,EAAIA,EAC1CyB,EAAQ,KAAKzB,CAAK,SAFpB0D,EApoBF,CAooBErC,EAAA,CAAAqC,UAAA,KAAAD,IAAAC,EAAAH,EAAA,oBAAAI,EAAAD,EAAA,KAAAH,YAAA,IAAAlC,EAAA,MAAAA,EAAA,IAIA,IAAMuC,EAAmBZ,GAAevB,EAAShD,EAAK,aAAa,EAC7DI,EAAW,UAAA8E,EAAM/E,GAAsBJ,EAAQU,EAAAC,EAAA,GAChDV,GADgD,CAEnD,iBAAAmF,EACA,iBAAkB,IAAI,KAAK,EAAE,YAAY,CAC3C,EAAC,GACD,GAAI/E,GACEJ,EAAK,aAAc,CACrB,IAAMoF,EAAchF,EAAiB,CAAC,EAEtC,KAAM,CADa4C,EAAQ,GAAG,EAAE,EACboC,CAAU,CAC/B,CAEJ,GAEA,IAAM9E,EAAoB,CAAC6E,EAAuBE,IAAyB,CACzE,GAAI,CACF,QAAQ,KAAK,GAAGA,CAAY,KAAKF,EAAiB,OAAO,EAAE,CAC7D,OAAS5E,EAAG,CACV,QAAQ,KAAK,GAAG8E,CAAY,KAAKF,CAAgB,EAAE,CACrD,CACF,EAEMG,GAAsBtF,GAAuBC,EAAA,wBACjD,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,iBAAkB,CACnE,OAAQ,OACR,QAAS,CACP,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUG,CAAI,CAC3B,CAAC,EACD,OAAII,EAAS,SAAW,KACtBE,EACEF,EACA,mFACF,EACKA,EAAS,KAAK,CACvB,OAAS,EAAG,CACV,QAAQ,KACN,4EAA4E,CAAC,EAC/E,CACF,CACA,MAAO,CAAC,CACV,GAEMmF,EAA0BvC,GAA0B,CACxD,IAAM5C,EAAuB,CAC3B,GAAI,GACJ,QAAS,CACP,CACE,cAAe,OACf,MAAO,EACP,KAAM,GACN,SAAU,IACZ,CACF,EACA,QAAS,KAAK,IAAI,EAClB,MAAO,GACP,OAAQ,iBACV,EACM4D,EAAahB,EAAQ,GAAG,EAAE,EAChC,GAAI,CAACgB,EAAY,OAAO5D,EACxB,IAAIoF,EAAO,GACX,QAAWpD,KAAUY,EACfZ,EAAO,QAAQ,OAAS,GAAKA,EAAO,QAAQ,CAAC,EAAE,OACjDoD,EAAO,GAAGA,CAAI,GAAGpD,EAAO,QAAQ,CAAC,EAAE,IAAI,IAG3C,OAAAhC,EAAS,QAAQ,CAAC,EAAE,KAAOoF,EAC3BpF,EAAS,GAAK4D,EAAW,GACzB5D,EAAS,QAAU4D,EAAW,QAC9B5D,EAAS,MAAQ4D,EAAW,MAC5B5D,EAAS,mBAAqB4D,EAAW,mBACzC5D,EAAS,MAAQ4D,EAAW,MACrB5D,CACT,EAEMqF,GAA6BzC,GAAmC,CACpE,IAAM5C,EAAgC,CACpC,WAAY,GACZ,GAAI,GACJ,MAAO,GACP,YAAa,GACb,KAAM,YACR,EACM4D,EAAahB,EAAQ,GAAG,EAAE,EAChC,GAAI,CAACgB,EAAY,OAAO5D,EACxB,IAAIsF,EAAa,GACjB,QAAWtD,KAAUY,EACnB0C,EAAa,GAAGA,CAAU,GAAGtD,EAAO,UAAU,GAEhD,OAAAhC,EAAS,WAAasF,EACtBtF,EAAS,GAAK4D,EAAW,GACzB5D,EAAS,MAAQ4D,EAAW,MAC5B5D,EAAS,YAAc4D,EAAW,YAC3B5D,CACT,EAEA,SAAgBuF,GACdf,EACAgB,EACAC,EACA,QAAAhB,EAAA,sBACA,IAAMxE,EAIF,CACF,WAAY,KACZ,aAAc,KACd,iBAAkB,IACpB,EACM2C,EAAU,CAAC,EACjB,YAAA8B,EAAAC,EAA2BH,GAA3BI,EAAAC,EAAArC,EAAAoC,EAAA,EAAAC,EAAA,UAAAC,EAAAJ,EAAA,cAAAE,EAAA,GACE,CADS,IAAM5C,EAAjB6C,EAAA,MACEjC,EAAQ,KAAKZ,CAAM,EACnB/B,EAAK,aAAe+B,EACpB,MAA
M/B,SAHR4E,EA1vBF,CA0vBErC,EAAA,CAAAqC,UAAA,KAAAD,IAAAC,EAAAH,EAAA,oBAAAI,EAAAD,EAAA,KAAAH,YAAA,IAAAlC,EAAA,MAAAA,EAAA,IAKA,IAAMuC,EAAmBU,EAAW7C,CAAO,EACrC5C,EAAW,UAAA8E,EAAMU,EAAY,CAAE,iBAAAT,CAAiB,CAAC,GACvD9E,EAAK,WAAaD,EAAS,WAC3BC,EAAK,iBAAmBD,EAAS,iBACjC,MAAMC,CACR,GAEA,IAAMyF,GAAoB,CAAOC,EAAoBC,IAAgB/F,EAAA,wBACnE,OAAO8F,EAAO,KAAK,YAAY,OAAOC,CAAM,CAC9C,GAEMC,GAA2B,CAAOF,EAAoBC,IAAgB/F,EAAA,wBAC1E,OAAO8F,EAAO,YAAY,OAAOC,CAAM,CACzC,GAEME,GAA8B,CAClC,KAAMJ,GACN,WAAYG,EACd,EAEME,GAAgB,CACpBC,EACAJ,IACG/F,EAAA,wBACH,IAAMoG,EAAS,QAAQ,QAAQ,EAAE,QAC3BN,EAAS,IAAIM,EAAO,CACxB,QAASL,EAAO,OAClB,CAAC,EACKM,EACJJ,GAA4BE,EAAgB,gBAAgB,IAAI,EAClE,OAAOE,EAAcP,EAAQC,CAAM,CACrC,GAEMO,GAAqB,CACzBH,EACAJ,IACG/F,EAAA,wBACH,IAAMoG,EAAS,QAAQ,QAAQ,EAAE,YAC3BN,EAAS,IAAIM,EAAO,CACxB,SAAUL,EAAO,OACnB,CAAC,EACDA,GAAA,aAAAA,EAAe,QACf,IAAMM,EACJJ,GAA4BE,EAAgB,gBAAgB,IAAI,EAClE,OAAOE,EAAcP,EAAQC,CAAM,CACrC,GAEMQ,GAAuB,CAAOT,EAAuBC,IAAgB/F,EAAA,wBACzE,OAAO8F,EAAO,SAAS,OAAOC,CAAM,CACtC,GAEMS,GAA8B,CAClCV,EACAC,IACG/F,EAAA,wBACH,OAAO8F,EAAO,YAAY,OAAOC,CAAM,CACzC,GAEMU,GAAiC,CACrC,KAAMF,GACN,WAAYC,EACd,EAEME,GAAmB,CACvBP,EACAJ,IACG/F,EAAA,wBACH,IAAM2G,EAAY,QAAQ,mBAAmB,EAAE,QACzCb,EAAS,IAAIa,EAAU,CAC3B,QAASZ,EAAO,OAClB,CAAC,EACKM,EACJI,GAA+BN,EAAgB,gBAAgB,IAAI,EACrE,OAAOE,EAAcP,EAAQC,CAAM,CACrC,GAEMa,GAAiB,CACrB9G,EACAC,IAC+BC,EAAA,wBAC/B,GAAI,CACF,IAAMG,EAAW,MAAM,MAAM,GAAGP,CAAmB,eAAgB,CACjE,OAAQ,OACR,QAAS,CACP,YAAaE,EACb,eAAgB,kBAClB,EACA,KAAM,KAAK,UAAUC,CAAI,CAC3B,CAAC,EACD,OAAII,EAAS,SAAW,KACtBE,EACEF,EACA,yEACF,EACO,MAEFA,EAAS,KAAK,CACvB,OAASG,EAAG,CACV,eAAQ,KACN,4EAA4EA,CAAC,EAC/E,EACO,IACT,CACF,GCp2BO,IAAMuG,EAAN,KAAmB,CAGxB,YAAYC,EAAgB,CAI5B,YAAS,IAAMC,GAAuB,KAAK,MAAM,EAH/C,KAAK,OAASD,CAChB,CAGF,ECVA,IAAAE,GAA+B,iCAC/BC,GAAkC,yCAClCC,GAAiC,yCCFjC,IAAAC,EAAmD,8BAEnDC,EAA+B,+BAG/B,IAAMC,EAAN,KAAsD,CAKpD,YAAYC,EAAwBC,EAAiB,CACnD,KAAK,OAASA,GAAU,QAAQ,IAAI,oBACpC,KAAK,cAAgBD,EACrB,KAAK,IAAM,GAAGE,CAAmB,aACnC,CAEQ,mBAAmBC,EAAyD,CAClF,OAAKA,EACE,OAAO,YAAY,OAAO,QAAQA,CAAU,CAAC,EAD5B,CAAC,CAE3B,CAEQ,iBAAiBC,EAAwB,CAQ/C,MAP0C,CACxC,CAAC,WAAS,QAAQ,EAAG,oBACrB,CAAC,WAAS,MAAM,EAAG,kBACnB,CAAC,WAAS,MAAM,EAAG,kBACnB,CAAC,WAAS,QAAQ,EAAG,oBACrB,CAAC,WAAS,QAAQ,EAAG,mBACvB,EACeA,CAAI,GAAK,mBAC1B,CAEQ,mBAAmBC,EAA8B,CAMvD,MALkD,CAChD,CAAC,iBAAe,KAAK,EAAG,mBACxB,CAAC,iBAAe,EAAE,EAAG,gBACrB,CAAC,iBAAe,KAAK,EAAG,kBAC1B,EACiBA,CAAI,GAAK,kBAC5B,CAEQ,cAAcC,EAAgC,CACpD,OAAQ,OAAOA,EAAK,CAAC,CAAC,EAAI,OAAO,GAAG,EAAI,OAAOA,EAAK,CAAC,CAAC,GAAG,SAAS,CACpE,CAEA,OAAOC,EAAkD,CACvD,GAAI,CAAC,KAAK,cACR,OAAO,QAAQ,QAAQ,mBAAiB,OAAO,EAGjD,IAAMC,EAAcD,EAAM,IAAIE,GAAK,CAlDvC,IAAAC,EAkD2C,OACrC,KAAMD,EAAK,KACX,QAAS,CACP,SAAUA,EAAK,YAAY,EAAE,QAC7B,QAASA,EAAK,YAAY,EAAE,OAC5B,cAAaC,EAAAD,EAAK,YAAY,EAAE,aAAnB,YAAAC,EAA+B,cAAe,EAC7D,EACA,KAAM,KAAK,iBAAiBD,EAAK,IAAI,EACrC,UAAWA,EAAK,cAAgB,KAChC,WAAY,KAAK,cAAcA,EAAK,SAAS,EAC7C,SAAU,KAAK,cAAcA,EAAK,OAAO,EACzC,OAAQ,CACN,YAAa,KAAK,mBAAmBA,EAAK,OAAO,IAAI,EACrD,YAAaA,EAAK,OAAO,OAC3B,EACA,WAAY,KAAK,mBAAmBA,EAAK,UAAU,EACnD,OAAQA,EAAK,OAAO,IAAIE,IAAU,CAChC,KAAMA,EAAM,KACZ,UAAW,KAAK,cAAcA,EAAM,IAAI,EACxC,WAAY,KAAK,mBAAmBA,EAAM,UAAU,CACtD,EAAE,EACF,MAAOF,EAAK,MAAM,IAAIG,IAAS,CAC7B,QAASA,EAAK,QACd,WAAY,KAAK,mBAAmBA,EAAK,UAAU,CACrD,EAAE,EACF,SAAU,CACR,WAAYC,EAAAC,EAAA,GACPL,EAAK,SAAS,YADP,CAEV,eAAgB,iBAClB,GACA,WAAY,EACd,CACF,EAAE,EAEF,OAAO,MAAM,KAAK,IAAK,CACrB,OAAQ,OACR,QAAS,CACP,eAAgB,mBAChB,YAAa,KAAK,QAAU,EAC9B,EACA,KAAM,KAAK,UAAU,CACnB,MAAOD,CACT,CAAC,CACH,CAAC,EACE,KAAKO,GACCA,EAAS,GAIP,mBAAiB,SAHtB,QAAQ,MAAM;AAAA,sBAA8CA,EAAS,MAAM,EAAE,EACtE,mBAAiB,OAG3B,EACA,MAAOC,IACN,QAAQ,MAAM,yBAA0BA,CAAK,EACtC,mBAAiB,OACzB,CACL,CAEA
,UAA0B,CACxB,OAAO,QAAQ,QAAQ,CACzB,CACF,EAEOC,GAAQlB,ED3GR,IAAMmB,EAAY,CAACC,EAAe,uBAClB,SAAM,UAAUA,CAAI,EAG9BC,GAAe,CAACC,EAAwBC,IAAoB,CACvE,IAAMC,EAAW,IAAI,sBACfC,EAAW,IAAIC,GAAwBJ,EAAeC,CAAM,EAC5DI,EAAY,IAAI,uBAAoBF,CAAQ,EAClDD,EAAS,iBAAiBG,CAAS,EACnCH,EAAS,SAAS,CACpB,EEZA,IAAMI,GAASC,EAAU,EAEZC,EAAkB,CAC7BC,EACAC,EACAC,EAAgB,GAChBC,EAAW,WACR,CACH,IAAMC,EAA6B,CACjC,UAAW,CAACC,EAAQC,IAAS,CAC3B,IAAMC,EAAY,QAAQ,UAAUF,EAAQC,CAAI,EAChD,cAAO,iBAAiBC,EAAW,CACjC,cAAe,CACb,MAAOL,EACP,SAAU,EACZ,EACA,SAAU,CACR,MAAOC,CACT,CACF,CAAC,EACM,IAAI,MAAMI,EAAWH,CAAO,CACrC,EACA,IAAK,CAACC,EAAQG,EAAMC,IAAa,CAC/B,IAAMC,EAAQL,EAAOG,CAAI,EACnBN,EAAgB,GAAG,QAAQ,IAC/BG,EACA,eACF,CAAC,IAAIG,EAAK,SAAS,CAAC,GAEpB,OAAI,OAAOE,GAAU,UACnB,OAAO,iBAAiBA,EAAO,CAC7B,cAAe,CACb,MAAOR,EACP,SAAU,EACZ,EACA,SAAU,CACR,MAAOC,CACT,CACF,CAAC,EACM,IAAI,MAAMO,EAAON,CAAO,GAG7B,OAAOM,GAAU,WACZ,IAAIJ,IAAgB,CA9CnC,IAAAK,EAAAC,EAAAC,EAAAC,EA+CU,IAAMC,EAAqB,IAAI,KAAK,EAAE,YAAY,EAC5CC,EAAgB,QAAQ,IAAIX,EAAQ,UAAU,EAC9CY,GAAeN,EAAAL,EAAK,CAAC,IAAN,YAAAK,EAAS,aACxBO,GAAUN,EAAAN,EAAK,CAAC,IAAN,YAAAM,EAAS,QACzB,OAAAC,EAAOP,EAAK,CAAC,IAAb,aAAAO,EAAgB,cAChBC,EAAOR,EAAK,CAAC,IAAb,aAAAQ,EAAgB,QAETjB,GAAO,gBAAgB,GAAGmB,CAAa,IAAId,CAAa,GAAWiB,GAAcC,EAAA,wBACtF,GAAI,CACFD,EAAK,aAAa,iBAAkB,KAAK,UAAUb,CAAI,CAAC,EACxD,IAAMe,EAAW,QAAQ,MAAMX,EAAOL,EAAQC,CAAI,EAC5CgB,EAASH,EAAK,YAAY,EAAE,OAElC,OAAIE,aAAoB,QACf,IAAI,QAAQ,CAACE,EAASC,IAAW,CACtCH,EACG,KAAYI,GAAqBL,EAAA,wBAChC,IAAMC,EAAW,MAAMK,GAAsB1B,EAAQ,CACnD,QAASA,EACT,cAAAgB,EACA,cAAAd,EACA,mBAAAa,EACA,iBAAkB,IAAI,KAAK,EAAE,YAAY,EACzC,iBAAAU,EACA,OAAQnB,EAAK,CAAC,EACd,aAAAW,EACA,KAAMC,EACN,QAASI,CACX,CAAC,EAEDH,EAAK,aAAa,kBAAmB,KAAK,UAAUE,CAAQ,CAAC,EAC7DF,EAAK,aAAa,kBAAmB,SAAS,EAC9CA,EAAK,IAAI,EACTI,EAAQF,CAAQ,CAClB,EAAC,EACA,MAAOM,GAAU,CAChBR,EAAK,gBAAgBQ,CAAK,EAC1BR,EAAK,aAAa,kBAAmB,OAAO,EAC5CA,EAAK,IAAI,EACTK,EAAOG,CAAK,CACd,CAAC,CACL,CAAC,GAGHR,EAAK,aAAa,kBAAmB,KAAK,UAAUE,CAAQ,CAAC,EAC7DF,EAAK,aAAa,kBAAmB,SAAS,EAC9CA,EAAK,IAAI,EACFE,EACT,OAASM,EAAO,CACd,MAAAR,EAAK,gBAAgBQ,CAAK,EAC1BR,EAAK,aAAa,kBAAmB,OAAO,EAC5CA,EAAK,IAAI,EACHQ,CACR,CACF,EAAC,CACH,EAGK,QAAQ,IAAItB,EAAQG,EAAMC,CAAQ,CAC3C,CACF,EAEA,OAAO,IAAI,MAAMR,EAAKG,CAAO,CAC/B,EC9GA,IAAAwB,EAA+B,iCAGxB,IAAMC,GAAe,CAACC,EAAsBC,EAAgBC,IAC1D,YAAaC,EAAa,CAC/B,IAAMC,EAASC,EAAU,EAEnBC,EAAmBC,GAA6B,CACpD,GAAI,CACEL,GACF,OAAO,QAAQA,CAAU,EAAE,QAAQ,CAAC,CAACM,EAAKC,CAAK,IAAM,CACnDF,EAAK,aAAaC,EAAKC,CAAK,CAC9B,CAAC,EAGHF,EAAK,aAAa,iBAAkB,KAAK,UAAUJ,CAAI,CAAC,EACxD,IAAMO,EAAST,EAAK,GAAGE,CAAI,EAE3B,OAAIO,aAAkB,QACbA,EAAO,KAAMC,IAClBJ,EAAK,aAAa,kBAAmB,KAAK,UAAUI,CAAc,CAAC,EACnEJ,EAAK,UAAU,CAAE,KAAoB,iBAAe,EAAG,CAAC,EACjDI,EACR,EAAE,MAAOC,GAAU,CAClB,MAAAC,GAAYN,EAAMK,EAAOT,CAAI,EACvBS,CACR,CAAC,EAAE,QAAQ,IAAML,EAAK,IAAI,CAAC,GAE3BA,EAAK,aAAa,kBAAmB,KAAK,UAAUG,CAAM,CAAC,EAC3DH,EAAK,UAAU,CAAE,KAAoB,iBAAe,EAAG,CAAC,EACxDA,EAAK,IAAI,EACFG,EAEX,OAASE,EAAO,CACd,MAAAC,GAAYN,EAAMK,EAAOT,CAAI,EACvBS,CACR,CACF,EAEA,OAAOR,EAAO,gBAAgBJ,EAAcM,CAAe,CAC7D,EAGIO,GAAc,CAACN,EAA0BK,EAAYT,IAAgB,CACzEI,EAAK,aAAa,iBAAkB,KAAK,UAAUJ,CAAI,CAAC,EACxDI,EAAK,UAAU,CACb,KAAoB,iBAAe,MACnC,QAASK,aAAiB,MAAQA,EAAM,QAAU,eACpD,CAAC,EACDL,EAAK,IAAI,CACX,ECvCO,IAAMO,EAAN,KAAsB,CAG3B,YAAYC,EAAgB,CAI5B,SAAM,CAACC,EAAoBC,IACzBC,GAAkB,KAAK,OAAQF,EAAYC,CAAM,EAEnD,aAAWE,GACTC,GAAsB,KAAK,OAAQD,CAAI,EAEzC,SAAOF,GAAwBI,GAAsB,KAAK,OAAQJ,CAAM,EATtE,KAAK,OAASF,CAChB,CASF,ECjBA,IAAMO,GAAW,CAACC,EAAgBC,IAA0C,CAC1E,GAAI,EAAEA,EAAK,oBAAoB,QAC7B,MAAM,IAAI,MAAM,0CAA0C,EAE5D,OAAW,CAACC,EAAKC,CAAK,IAAK,OAAO,QAAQF,EAAK,QAAQ,EACrD,GAAI,OAAOC,GAAQ,UAAY,OAAOC,GAAU,SAC9C,MAAM,IAAI,MACR,yEACF,EAGJ,OAAOC,GAAyBJ,EAAQC,CAAI,CAC9C,EAEM
I,GAAQ,CAACL,EAAgBC,IAAuC,CACpE,GAAI,OAAOA,EAAK,OAAU,SACxB,MAAM,IAAI,MAAM,wBAAwB,EAE1C,GAAIA,EAAK,MAAQ,GAAKA,EAAK,MAAQ,IACjC,MAAM,IAAI,MAAM,2CAA2C,EAE7D,OAAOK,GAAsBN,EAAQC,CAAI,CAC3C,EAEMM,GAAS,CAACP,EAAgBC,IAAwC,CACtE,GAAI,EAAEA,EAAK,kCAAkC,QAC3C,MAAM,IAAI,MAAM,yDAAyD,EAE3E,OAAOO,GAAuBR,EAAQC,CAAI,CAC5C,EAEMQ,GAAQ,CAACT,EAAgBC,IAC7BS,GAAsBV,EAAQC,CAAI,EAEvBU,EAAN,KAAmB,CAGxB,YAAYX,EAAgB,CAI5B,WAASC,GAAqBQ,GAAM,KAAK,OAAQR,CAAI,EAErD,cAAYA,GAAwBF,GAAS,KAAK,OAAQE,CAAI,EAE9D,YAAUA,GAAsBM,GAAO,KAAK,OAAQN,CAAI,EAExD,WAASA,GAAqBI,GAAM,KAAK,OAAQJ,CAAI,EATnD,KAAK,OAASD,CAChB,CASF,ERpCA,IAAAY,GAA+B,iCAE/B,IAAMC,GAAgC,CACpC,OAAQ,CACN,KAAM,CACJ,cAAe,iCACf,gBAAiBC,CACnB,EACA,WAAY,CACV,cAAe,4BACf,gBAAiBC,CACnB,CACF,EACA,UAAW,CACT,KAAM,CACJ,cAAe,4BACf,gBAAiBC,CACnB,EACA,WAAY,CACV,cAAe,+BACf,gBAAiBC,EACnB,CACF,EACA,eAAgB,CACd,KAAM,CACJ,cAAe,6CACf,gBAAiBH,CACnB,EACA,WAAY,CACV,cAAe,wCACf,gBAAiBC,CACnB,CACF,CACF,EAEMG,GAAgD,CACpD,OAAQC,GACR,UAAWC,GACX,eAAgBC,EAClB,EAQMC,GAAyBC,GAAsB,CACnD,GAAI,CAACA,GAAO,OAAOA,GAAQ,UAAY,MAAM,QAAQA,CAAG,EACtD,MAAO,GAGT,IAAMC,EAAgB,CACpB,SACA,QACA,gBACA,oBACA,gBACF,EAGA,OAFe,OAAO,OAAOD,CAAG,EAElB,MAAOE,GACf,OAAOA,GAAQ,UAAYA,IAAQ,KAAa,GAC7CD,EAAc,MAAOE,GAAQA,KAAOD,CAAG,CAC/C,CACH,EAEaE,EAAN,KAAkB,CAQvB,YAAY,CACV,OAAAC,EAAS,QAAQ,IAAI,oBACrB,cAAAC,EAAgB,EAClB,EAAmB,CAAC,EAAG,CACrB,GAAID,IAAW,OACb,MAAM,IAAI,MACR,0HACF,EAGF,KAAK,OAASA,EACd,KAAK,cAAgBC,EACrB,KAAK,UAAY,IAAIC,EAAgBF,CAAM,EAC3C,KAAK,MAAQ,IAAIG,EAAaH,CAAM,EACpC,KAAK,MAAQ,IAAII,EAAaJ,CAAM,EACpC,KAAK,aAAeK,GAEhBJ,GACFK,GAAaL,EAAeD,CAAM,CAEtC,CAEA,IAAI,WAAY,CACd,GAAI,CACF,IAAMO,EAAS,QAAQ,mBAAmB,EAAE,QAC5C,OAAOC,EAAgB,KAAK,OAAQD,EAAQ,YAAa,WAAW,CACtE,OAAS,EAAG,CACV,QAAQ,MACN,8EACF,CACF,CACF,CAEA,IAAI,QAAS,CACX,GAAI,CACF,IAAMA,EAAS,QAAQ,QAAQ,EAAE,QACjC,OAAOC,EAAgB,KAAK,OAAQD,EAAQ,SAAU,QAAQ,CAChE,OAAS,EAAG,CACV,QAAQ,MACN,qEACF,CACF,CACF,CAEM,IAAIE,EAUK,QAAAC,EAAA,yBAVL,CACR,WAAAC,EACA,cAAAC,EACA,mBAAAC,EACA,eAAAC,EACA,KAAAC,EACA,SAAAC,EACA,QAAAC,EACA,wBAAAC,EACA,OAAAC,EAAS,EACX,EAAe,CAGb,OAFeC,EAAU,EAEX,gBAAgB,kBAA0BC,GAASX,EAAA,sBAC/D,GAAI,CACF,IAAMY,EAAgB,CACpB,WAAAX,EACA,cAAAC,EACA,mBAAAC,EACA,eAAAC,EACA,KAAAC,EACA,SAAAC,EACA,QAAAC,EACA,wBAAAC,EACA,OAAAC,CACF,EACAE,EAAK,aAAa,iBAAkB,KAAK,UAAUC,CAAa,CAAC,EAEjE,IAAMC,EAAyBT,EACzBU,EAA6C,CACjD,MAAOX,EACP,QAASD,EACT,iBAAkBI,CACpB,EACIF,IAAgBU,EAAkB,gBAAkBV,GAExD,IAAMW,EAAkB,MAAM,KAAK,UAAU,IAC3Cd,EACAa,CACF,EAEA,GAAI,CAACC,EAAiB,MAAM,IAAI,MAAM,kBAAkB,EAExD,IAAMC,EAAiBD,EAAgB,gBACvC,GAAI,CAACA,EAAgB,WACnB,MAAM,IAAI,MACR,WAAWd,CAAU,oDACvB,EAGF,IAAMgB,EAA0BF,EAAgB,SAChD,GAAI,CAACE,EACH,MAAM,IAAI,MACR,WAAWhB,CAAU,kDACvB,EAGF,IAAMiB,EAAuBD,EAAwB,MACrD,GAAI,CAACC,EACH,MAAM,IAAI,MACR,WAAWjB,CAAU,wDACvB,EAGF,IAAMkB,EAAgBD,EAAqB,SAErCE,EAAqB,IAAI,KAAK,EAAE,YAAY,EAC5CC,EAASC,IAAA,GACVP,EAAgB,YACfP,GAA2B,CAAC,GAE5Be,EACJhD,GACE4C,CACF,EAAEH,EAAe,IAAI,EACjBQ,EAAgBD,EAAO,cAEvBE,EAAkBF,EAAO,gBACzBG,GAAmB9C,GAAyBuC,CAAa,EACzDQ,EAAoBZ,EAAgB,kBACtCY,IACFN,EAAO,QAAaM,EAAkB,KAExCN,EAAO,OAAYZ,EACfA,GAAU,CAAC,SAAU,cAAc,EAAE,SAASU,CAAa,IAC7DE,EAAO,eAAoB,CAAE,cAAe,EAAK,GAGnD,IAAMO,EAAW,MAAMF,GAAiBX,EAAiBM,CAAM,EAEzDQ,EAAiBC,IAAiB,CACtC,IAAMC,GAAmB,IAAI,KAAK,EAAE,YAAY,EAChD,OAAOC,GAAaV,EAAA,CAClB,cAAAE,EACA,cAAAL,EACA,KAAM,CAAC,EACP,OAAAE,EACA,KAAAhB,EACA,mBAAAe,EACA,iBAAAW,GACA,QAAS,KAAK,OACd,SAAAzB,EACA,UAAWS,EAAgB,GAC3B,eAAgBA,EAAgB,QAChC,uBAAAF,EACA,SAAUN,EACV,wBAAyB,GACzB,QAASI,EAAK,YAAY,EAAE,QACzBmB,GACJ,CACH,EAEA,GAAIrB,EACF,OAAOwB,GAAeL,EAAUC,EAAeJ,CAAe,EAChE,IAAMS,EAAa,MAAML,EAAc,CAAE,iBAAkBD,CAAS,CAAC,EAE/DO,EAAiB,CACrB,WAAYD,EAAW,WACvB,aAAcN,EACd,iBAAkBM,EAAW,gBAC/B,EACA,OAAAvB,EAAK,aAAa,kBAA
mB,KAAK,UAAUwB,CAAc,CAAC,EAE5DA,CACT,OAASC,EAAO,CACd,MAAAzB,EAAK,UAAU,CACb,KAAoB,kBAAe,MACnC,QAASyB,aAAiB,MAAQA,EAAM,QAAU,eACpD,CAAC,EACKA,CACR,QAAE,CACAzB,EAAK,IAAI,CACX,CACF,EAAC,CACH,GAEM,YAAYZ,EAO6B,QAAAC,EAAA,yBAP7B,CAChB,aAAAqC,EACA,eAAAjC,EAAiB,CAAC,EAClB,SAAAE,EAAW,CAAC,EACZ,kBAAAgC,EAAoB,KACpB,gBAAAC,EAAkB,KAClB,iBAAAC,EAAmB,EACrB,EAA+C,CAC7C,GAAI,CACF,IAAMC,EAAS,MAAMC,GAAmB,CACtC,cAAeL,EACf,gBAAiBjC,EACjB,SAAAE,EACA,oBAAqBgC,EACrB,wBAAyBC,EACzB,mBAAoBC,EACpB,QAAS,KAAK,MAChB,CAAC,EAED,GAAI,CAACA,GACCxD,GAAsByD,CAAM,EAAG,CAGjC,IAAME,EAFa,OAAO,OAAOF,CAAM,EAER,OAC5BG,GAAcA,EAAK,iBAAmB,EACzC,EAEA,GAAID,EAAY,SAAW,EACzB,MAAM,IAAI,MAAM,KAAK,UAAUF,EAAQ,KAAM,CAAC,CAAC,EAMjD,GAAI,CAHqBE,EAAY,KAClCC,GAAcA,EAAK,SAAW,SACjC,EAEE,MAAM,IAAI,MAAM,KAAK,UAAUH,EAAQ,KAAM,CAAC,CAAC,CAEnD,CAGF,OAAOA,CACT,OAASL,EAAO,CACd,MAAIA,aAAiB,OACnB,QAAQ,MAAM,0BAA2BA,EAAM,OAAO,EAChD,IAAI,MAAM,2BAA2BA,EAAM,OAAO,EAAE,IAE1D,QAAQ,MAAM,kCAAmCA,CAAK,EAChD,IAAI,MAAM,gCAAgC,EAEpD,CACF,GAEM,WAAWN,EAAkB,QAAA9B,EAAA,sBACjC,OAAO6C,GAAe,KAAK,OAAQf,CAAI,CACzC,GACF","names":["src_exports","__export","PromptLayer","__toCommonJS","import_ably","URL_API_PROMPTLAYER","promptlayerApiHandler","apiKey","body","__async","proxyGenerator","promptLayerApiRequest","response","data","warnOnBadResponse","e","promptLayerTrackMetadata","__spreadProps","__spreadValues","promptLayerTrackScore","promptLayerTrackPrompt","promptLayerTrackGroup","promptLayerCreateGroup","getPromptTemplate","promptName","params","url","publishPromptTemplate","getAllPromptTemplates","_a","key","value","runWorkflowRequest","_0","workflow_name","input_variables","metadata","workflow_label_name","workflow_version_number","return_all_outputs","api_key","timeout","payload","headers","result","execution_id","channel_name","ably_token","ably","Ably","final_output","waitForWorkflowCompletion","error","channel","resolve","reject","results","messageListener","message","timer","err","openaiStreamChat","_b","_c","_d","_e","_f","_g","_h","_i","content","functionCall","lastResult","toolCalls","delta","toolCall","lastToolCall","firstChoice","anthropicStreamMessage","cleaned_result","function_name","prev","current","final_result","generator","__asyncGenerator","iter","__forAwait","more","temp","__await","request_response","request_id","main_message","trackRequest","openaiStreamCompletion","text","anthropicStreamCompletion","completion","streamResponse","afterStream","mapResults","openaiChatRequest","client","kwargs","openaiCompletionsRequest","MAP_TYPE_TO_OPENAI_FUNCTION","openaiRequest","promptBlueprint","OpenAI","requestToMake","azureOpenAIRequest","anthropicChatRequest","anthropicCompletionsRequest","MAP_TYPE_TO_ANTHROPIC_FUNCTION","anthropicRequest","Anthropic","utilLogRequest","GroupManager","apiKey","promptLayerCreateGroup","opentelemetry","import_sdk_trace_base","import_sdk_trace_node","import_api","import_core","PromptLayerSpanExporter","enableTracing","apiKey","URL_API_PROMPTLAYER","attributes","kind","code","time","spans","requestData","span","_a","event","link","__spreadProps","__spreadValues","response","error","span_exporter_default","getTracer","name","setupTracing","enableTracing","apiKey","provider","exporter","span_exporter_default","processor","tracer","getTracer","promptLayerBase","apiKey","llm","function_name","provider","handler","target","args","newTarget","prop","receiver","value","_a","_b","_c","_d","request_start_time","provider_type","return_pl_id","pl_tags","span","__async","response","spanId","resolve","reject","request_response","promptlayerApiHandler","erro
r","opentelemetry","wrapWithSpan","functionName","func","attributes","args","tracer","getTracer","wrapperFunction","span","key","value","result","resolvedResult","error","handleError","TemplateManager","apiKey","promptName","params","getPromptTemplate","body","publishPromptTemplate","getAllPromptTemplates","metadata","apiKey","body","key","value","promptLayerTrackMetadata","score","promptLayerTrackScore","prompt","promptLayerTrackPrompt","group","promptLayerTrackGroup","TrackManager","opentelemetry","MAP_PROVIDER_TO_FUNCTION_NAME","openaiStreamChat","openaiStreamCompletion","anthropicStreamMessage","anthropicStreamCompletion","MAP_PROVIDER_TO_FUNCTION","openaiRequest","anthropicRequest","azureOpenAIRequest","isWorkflowResultsDict","obj","REQUIRED_KEYS","val","key","PromptLayer","apiKey","enableTracing","TemplateManager","GroupManager","TrackManager","wrapWithSpan","setupTracing","module","promptLayerBase","_0","__async","promptName","promptVersion","promptReleaseLabel","inputVariables","tags","metadata","groupId","modelParameterOverrides","stream","getTracer","span","functionInput","prompt_input_variables","templateGetParams","promptBlueprint","promptTemplate","promptBlueprintMetadata","promptBlueprintModel","provider_type","request_start_time","kwargs","__spreadValues","config","function_name","stream_function","request_function","provider_base_url","response","_trackRequest","body","request_end_time","trackRequest","streamResponse","requestLog","functionOutput","error","workflowName","workflowLabelName","workflowVersion","returnAllOutputs","result","runWorkflowRequest","outputNodes","node","utilLogRequest"]}
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "promptlayer",
  "license": "MIT",
- "version": "1.0.21",
+ "version": "1.0.23",
  "main": "dist/index.js",
  "module": "dist/index.esm.js",
  "types": "dist/index.d.ts",
package/src/utils.ts CHANGED
@@ -263,7 +263,7 @@ const getPromptTemplate = async (
  }
  if (data.warning) {
  console.warn(
- `WARNING: While tracking your prompt PromptLayer had the following error: ${data.warning}`
+ `WARNING: While fetching your prompt PromptLayer had the following error: ${data.warning}`
  );
  }
  return data as Promise<GetPromptTemplateResponse>;
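This hunk only rewords the warning that getPromptTemplate logs when the API response carries a warning field ("tracking" → "fetching"); behavior is otherwise unchanged. Below is a hedged sketch of the affected code path, with a hypothetical prompt name and release label, assuming an async context:

```ts
import { PromptLayer } from "promptlayer";

const promptLayer = new PromptLayer({}); // reads PROMPTLAYER_API_KEY from the environment

// templates.get delegates to getPromptTemplate; if the response body includes
// { warning: "..." }, the SDK now logs:
// "WARNING: While fetching your prompt PromptLayer had the following error: ..."
const template = await promptLayer.templates.get("example-prompt", {
  label: "prod",
});
```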
@@ -350,7 +350,7 @@ export const runWorkflowRequest = async ({
  workflow_version_number = null,
  return_all_outputs = false,
  api_key,
- timeout = 120000, // Default timeout is 2 minutes in milliseconds
+ timeout = 3600000, // Default timeout is 1 hour in milliseconds
  }: RunWorkflowRequestParams): Promise<WorkflowResponse> => {
  const payload = {
  input_variables,