@transcend-io/cli 4.114.0 → 4.114.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -2390,7 +2390,7 @@ export async function main(): Promise<void> {
2390
2390
  ];
2391
2391
  const largeLanguageModel = {
2392
2392
  name: 'gpt-4',
2393
- client: 'openai',
2393
+ client: 'openai' as const,
2394
2394
  };
2395
2395
  const temperature = 1;
2396
2396
  const topP = 1;
@@ -2409,7 +2409,7 @@ export async function main(): Promise<void> {
2409
2409
  // report error upon failure
2410
2410
  await promptManager.reportPromptRunError('predictProductLine', {
2411
2411
  promptRunMessages: input,
2412
- duration: t1 - new Date().getTime(),
2412
+ duration: new Date().getTime() - t0,
2413
2413
  temperature,
2414
2414
  topP,
2415
2415
  error: err.message,
@@ -241,7 +241,7 @@ class TranscendPromptManager {
241
241
  privacy_types_1.ChatCompletionRole.Assistant) {
242
242
  throw new Error(`promptRunMessages[${options.promptRunMessages.length - 1}].role is expected to be = ${privacy_types_1.ChatCompletionRole.Assistant}`);
243
243
  }
244
- const response = options.promptRunMessages[options.promptRunMessages.length - 1].message;
244
+ const response = options.promptRunMessages[options.promptRunMessages.length - 1].content;
245
245
  // Look up the large language model being reported on
246
246
  const largeLanguageModel = this.getLargeLanguageModel(options.largeLanguageModel);
247
247
  let parsed;
@@ -8,7 +8,7 @@ export interface ReportPromptRunInput {
8
8
  /** Messages reported on */
9
9
  promptRunMessages: {
10
10
  /** Message reported */
11
- message: string;
11
+ content: string;
12
12
  /** Role of message */
13
13
  role: ChatCompletionRole;
14
14
  /** Template used if created from prompt */
@@ -29,7 +29,7 @@ export interface ReportPromptRunInput {
29
29
  /** TopP parameter used when running prompt */
30
30
  topP?: number;
31
31
  /** Max tokens to sample parameter used when running prompt */
32
- maxTokensToSample?: string;
32
+ maxTokensToSample?: number;
33
33
  /** The prompt group being reported */
34
34
  promptGroupId?: string;
35
35
  /** The LLM Id being reported on */
@@ -1 +1 @@
1
- {"version":3,"file":"reportPromptRun.d.ts","sourceRoot":"","sources":["../../src/graphql/reportPromptRun.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAGhD,OAAO,EACL,WAAW,EACX,kBAAkB,EAClB,oBAAoB,EACrB,MAAM,6BAA6B,CAAC;AAErC,MAAM,WAAW,oBAAoB;IACnC,kBAAkB;IAClB,IAAI,EAAE,MAAM,CAAC;IACb,iDAAiD;IACjD,WAAW,EAAE,oBAAoB,CAAC;IAClC,2BAA2B;IAC3B,iBAAiB,EAAE;QACjB,uBAAuB;QACvB,OAAO,EAAE,MAAM,CAAC;QAChB,sBAAsB;QACtB,IAAI,EAAE,kBAAkB,CAAC;QACzB,2CAA2C;QAC3C,QAAQ,CAAC,EAAE,MAAM,CAAC;KACnB,EAAE,CAAC;IACJ,gDAAgD;IAChD,QAAQ,EAAE,MAAM,CAAC;IACjB,oCAAoC;IACpC,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,4BAA4B;IAC5B,MAAM,CAAC,EAAE,WAAW,CAAC;IACrB,mDAAmD;IACnD,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B,0DAA0D;IAC1D,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,2CAA2C;IAC3C,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,8CAA8C;IAC9C,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,8DAA8D;IAC9D,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,sCAAsC;IACtC,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,mCAAmC;IACnC,oBAAoB,CAAC,EAAE,MAAM,CAAC;CAC/B;AAED;;;;;;GAMG;AACH,wBAAsB,eAAe,CACnC,MAAM,EAAE,aAAa,EACrB,KAAK,EAAE,oBAAoB,GAC1B,OAAO,CAAC,MAAM,CAAC,CAgBjB"}
1
+ {"version":3,"file":"reportPromptRun.d.ts","sourceRoot":"","sources":["../../src/graphql/reportPromptRun.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAGhD,OAAO,EACL,WAAW,EACX,kBAAkB,EAClB,oBAAoB,EACrB,MAAM,6BAA6B,CAAC;AAErC,MAAM,WAAW,oBAAoB;IACnC,kBAAkB;IAClB,IAAI,EAAE,MAAM,CAAC;IACb,iDAAiD;IACjD,WAAW,EAAE,oBAAoB,CAAC;IAClC,2BAA2B;IAC3B,iBAAiB,EAAE;QACjB,uBAAuB;QACvB,OAAO,EAAE,MAAM,CAAC;QAChB,sBAAsB;QACtB,IAAI,EAAE,kBAAkB,CAAC;QACzB,2CAA2C;QAC3C,QAAQ,CAAC,EAAE,MAAM,CAAC;KACnB,EAAE,CAAC;IACJ,gDAAgD;IAChD,QAAQ,EAAE,MAAM,CAAC;IACjB,oCAAoC;IACpC,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,4BAA4B;IAC5B,MAAM,CAAC,EAAE,WAAW,CAAC;IACrB,mDAAmD;IACnD,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B,0DAA0D;IAC1D,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,2CAA2C;IAC3C,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,8CAA8C;IAC9C,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,8DAA8D;IAC9D,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B,sCAAsC;IACtC,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,mCAAmC;IACnC,oBAAoB,CAAC,EAAE,MAAM,CAAC;CAC/B;AAED;;;;;;GAMG;AACH,wBAAsB,eAAe,CACnC,MAAM,EAAE,aAAa,EACrB,KAAK,EAAE,oBAAoB,GAC1B,OAAO,CAAC,MAAM,CAAC,CAwBjB"}
@@ -12,7 +12,13 @@ const makeGraphQLRequest_1 = require("./makeGraphQLRequest");
12
12
  */
13
13
  async function reportPromptRun(client, input) {
14
14
  const { reportPromptRun: { promptRun }, } = await (0, makeGraphQLRequest_1.makeGraphQLRequest)(client, gqls_1.CREATE_PROMPT, {
15
- input,
15
+ input: {
16
+ ...input,
17
+ promptRunMessages: input.promptRunMessages.map(({ content, ...rest }) => ({
18
+ ...rest,
19
+ message: content,
20
+ })),
21
+ },
16
22
  });
17
23
  return promptRun.id;
18
24
  }
@@ -1 +1 @@
1
- {"version":3,"file":"reportPromptRun.js","sourceRoot":"","sources":["../../src/graphql/reportPromptRun.ts"],"names":[],"mappings":";;;AACA,iCAAuC;AACvC,6DAA0D;AA2C1D;;;;;;GAMG;AACI,KAAK,UAAU,eAAe,CACnC,MAAqB,EACrB,KAA2B;IAE3B,MAAM,EACJ,eAAe,EAAE,EAAE,SAAS,EAAE,GAC/B,GAAG,MAAM,IAAA,uCAAkB,EASzB,MAAM,EAAE,oBAAa,EAAE;QACxB,KAAK;KACN,CAAC,CAAC;IACH,OAAO,SAAS,CAAC,EAAE,CAAC;AACtB,CAAC;AAnBD,0CAmBC"}
1
+ {"version":3,"file":"reportPromptRun.js","sourceRoot":"","sources":["../../src/graphql/reportPromptRun.ts"],"names":[],"mappings":";;;AACA,iCAAuC;AACvC,6DAA0D;AA2C1D;;;;;;GAMG;AACI,KAAK,UAAU,eAAe,CACnC,MAAqB,EACrB,KAA2B;IAE3B,MAAM,EACJ,eAAe,EAAE,EAAE,SAAS,EAAE,GAC/B,GAAG,MAAM,IAAA,uCAAkB,EASzB,MAAM,EAAE,oBAAa,EAAE;QACxB,KAAK,EAAE;YACL,GAAG,KAAK;YACR,iBAAiB,EAAE,KAAK,CAAC,iBAAiB,CAAC,GAAG,CAC5C,CAAC,EAAE,OAAO,EAAE,GAAG,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC;gBACzB,GAAG,IAAI;gBACP,OAAO,EAAE,OAAO;aACjB,CAAC,CACH;SACF;KACF,CAAC,CAAC;IACH,OAAO,SAAS,CAAC,EAAE,CAAC;AACtB,CAAC;AA3BD,0CA2BC"}