@promptbook/remote-client 0.100.0-26 → 0.100.0-28

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,5 @@
  import { BOOK_LANGUAGE_VERSION, PROMPTBOOK_ENGINE_VERSION } from '../version';
  import { VALUE_STRINGS } from '../config';
- import { MAX_TOKENS } from '../config';
  import { SMALL_NUMBER } from '../config';
  import { renderPromptbookMermaid } from '../conversion/prettify/renderPipelineMermaidOptions';
  import { deserializeError } from '../errors/utils/deserializeError';
@@ -86,7 +85,6 @@ import { isValidUrl } from '../utils/validators/url/isValidUrl';
  import { isValidUuid } from '../utils/validators/uuid/isValidUuid';
  export { BOOK_LANGUAGE_VERSION, PROMPTBOOK_ENGINE_VERSION };
  export { VALUE_STRINGS };
- export { MAX_TOKENS };
  export { SMALL_NUMBER };
  export { renderPromptbookMermaid };
  export { deserializeError };
@@ -129,12 +129,6 @@ export declare const VALUE_STRINGS: {
      readonly unserializable: "(unserializable value)";
      readonly circular: "(circular JSON)";
  };
- /**
-  * Default cap for the number of tokens in a single request to the LLM
-  *
-  * @public exported from `@promptbook/utils`
-  */
- export declare const MAX_TOKENS = 1048576;
  /**
   * Small number limit
   *
@@ -2,7 +2,6 @@ import Anthropic from '@anthropic-ai/sdk';
  import type { AvailableModel } from '../../execution/AvailableModel';
  import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
  import type { ChatPromptResult } from '../../execution/PromptResult';
- import type { CompletionPromptResult } from '../../execution/PromptResult';
  import type { Prompt } from '../../types/Prompt';
  import type { string_markdown } from '../../types/typeAliases';
  import type { string_markdown_text } from '../../types/typeAliases';
@@ -42,10 +41,6 @@ export declare class AnthropicClaudeExecutionTools implements LlmExecutionTools
     * Calls Anthropic Claude API to use a chat model.
     */
    callChatModel(prompt: Pick<Prompt, 'content' | 'parameters' | 'modelRequirements'>): Promise<ChatPromptResult>;
-   /**
-    * Calls Anthropic Claude API to use a completion model.
-    */
-   callCompletionModel(prompt: Pick<Prompt, 'content' | 'parameters' | 'modelRequirements'>): Promise<CompletionPromptResult>;
    /**
     * Get the model that should be used as default
     */
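
Since `callCompletionModel` is removed in this release, `callChatModel` is the remaining text-generation method on `AnthropicClaudeExecutionTools`. A minimal usage sketch follows; the construction of the `tools` instance, the import path for `Prompt`, and the exact `modelRequirements` shape are not shown in this diff and are assumptions here.

    // Hypothetical consumer code; `tools` is assumed to be an already-constructed
    // AnthropicClaudeExecutionTools instance exposing the `callChatModel` signature declared above.
    import type { Prompt } from '@promptbook/types'; // import path is an assumption

    const prompt: Pick<Prompt, 'content' | 'parameters' | 'modelRequirements'> = {
        content: 'Write a haiku about autumn',
        parameters: {},
        modelRequirements: {
            modelVariant: 'CHAT', // chat is the only model variant these tools still expose
            maxTokens: 1024, // remains optional, per CommonModelRequirements below
        },
    };

    const result = await tools.callChatModel(prompt); // resolves to ChatPromptResult
    console.info(result.content);
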
@@ -87,8 +87,6 @@ export type CommonModelRequirements = {
      readonly seed?: number_seed;
      /**
       * Maximum number of tokens that can be generated by the model
-      *
-      * Note: [🌾]
       */
      readonly maxTokens?: number;
  };
@@ -15,7 +15,7 @@ export declare const BOOK_LANGUAGE_VERSION: string_semantic_version;
  export declare const PROMPTBOOK_ENGINE_VERSION: string_promptbook_version;
  /**
   * Represents the version string of the Promptbook engine.
-  * It follows semantic versioning (e.g., `0.100.0-25`).
+  * It follows semantic versioning (e.g., `0.100.0-27`).
   *
   * @generated
   */
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
      "name": "@promptbook/remote-client",
-     "version": "0.100.0-26",
+     "version": "0.100.0-28",
      "description": "Promptbook: Run AI apps in plain human language across multiple models and platforms",
      "private": false,
      "sideEffects": false,
@@ -100,7 +100,7 @@
      "module": "./esm/index.es.js",
      "typings": "./esm/typings/src/_packages/remote-client.index.d.ts",
      "peerDependencies": {
-         "@promptbook/core": "0.100.0-26"
+         "@promptbook/core": "0.100.0-28"
      },
      "dependencies": {
          "crypto": "1.0.1",
package/umd/index.umd.js CHANGED
@@ -23,7 +23,7 @@
   * @generated
   * @see https://github.com/webgptorg/promptbook
   */
- const PROMPTBOOK_ENGINE_VERSION = '0.100.0-26';
+ const PROMPTBOOK_ENGINE_VERSION = '0.100.0-28';
  /**
   * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
   * Note: [💞] Ignore a discrepancy between file name and entity name