@promptbook/pdf 0.88.0 → 0.89.0-1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -0
- package/esm/index.es.js +11 -4
- package/esm/index.es.js.map +1 -1
- package/esm/typings/src/_packages/core.index.d.ts +2 -2
- package/esm/typings/src/_packages/types.index.d.ts +10 -0
- package/esm/typings/src/llm-providers/_common/utils/count-total-usage/LlmExecutionToolsWithTotalUsage.d.ts +7 -0
- package/esm/typings/src/llm-providers/_common/utils/count-total-usage/{countTotalUsage.d.ts → countUsage.d.ts} +1 -1
- package/esm/typings/src/playground/BrjappConnector.d.ts +64 -0
- package/esm/typings/src/playground/brjapp-api-schema.d.ts +12879 -0
- package/esm/typings/src/playground/playground.d.ts +5 -0
- package/esm/typings/src/remote-server/socket-types/_subtypes/PromptbookServer_Identification.d.ts +2 -1
- package/esm/typings/src/remote-server/types/RemoteServerOptions.d.ts +15 -3
- package/esm/typings/src/types/typeAliases.d.ts +2 -2
- package/package.json +2 -2
- package/umd/index.umd.js +11 -4
- package/umd/index.umd.js.map +1 -1
package/esm/typings/src/remote-server/socket-types/_subtypes/PromptbookServer_Identification.d.ts
CHANGED
```diff
@@ -37,7 +37,8 @@ export type PromptbookServer_AnonymousIdentification = {
     /**
      * Identifier of the end user
      *
-     * Note:
+     * Note: This can be either some id or email or any other identifier
+     * Note: In anonymous mode, this is passed to the certain model providers to identify misuse
      * Note: In anonymous mode, there is no need to identify yourself, nor does it change the actual configuration of LLM Tools (unlike in application mode)
      */
     readonly userId?: string_user_id;
```
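The new notes on `userId` say that any stable identifier is acceptable (an internal id or an e-mail address) and that, in anonymous mode, it is forwarded to certain model providers so they can detect misuse. A minimal sketch of what a caller might pass; only the `userId` field comes from the hunk above, everything else is omitted on purpose:

```ts
// Hedged sketch: only the `userId` field appears in the hunk above; the other
// fields of PromptbookServer_AnonymousIdentification are deliberately omitted.
const anonymousIdentification = {
    // Either an internal id or an e-mail address; in anonymous mode this value
    // is forwarded to certain model providers so they can detect misuse.
    userId: 'user@example.com',
};
```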
package/esm/typings/src/remote-server/types/RemoteServerOptions.d.ts
CHANGED
```diff
@@ -58,15 +58,27 @@ export type ApplicationRemoteServerOptions<TCustomOptions> = {
 };
 export type ApplicationRemoteServerClientOptions<TCustomOptions> = {
     /**
-     *
+     * Identifier of the application
+     *
+     * Note: This is usefull when you use Promptbook remote server for multiple apps/frontends, if its used just for single app, use here just "app" or "your-app-name"
+     * Note: This can be some id or some semantic name like "email-agent"
      */
     readonly appId: string_app_id | null;
     /**
-     *
+     * Identifier of the end user
+     *
+     * Note: This can be either some id or email or any other identifier
+     * Note: This is also passed to the certain model providers to identify misuse
      */
     readonly userId?: string_user_id;
     /**
-     *
+     * Token of the user to verify its identity
+     *
+     * Note: This is passed for example to `createLlmExecutionTools`
+     */
+    readonly userToken?: string_user_id;
+    /**
+     * Additional arbitrary options to identify the client or to pass custom metadata
      */
     readonly customOptions?: TCustomOptions;
 };
```
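`ApplicationRemoteServerClientOptions` gains documentation for `appId` and `userId` and two new members, `userToken` and `customOptions`. A hedged sketch of a client filling these in; the field names and doc notes come from the hunk above, while the concrete values and the custom-options shape are invented for illustration:

```ts
// Field names and doc notes come from the hunk above; all values are invented.
// The real type is ApplicationRemoteServerClientOptions<TCustomOptions>.
const clientOptions = {
    // A semantic name like "email-agent", or just "app" when the remote server
    // serves a single frontend:
    appId: 'email-agent',
    // An id or an e-mail; also forwarded to certain model providers to identify misuse:
    userId: 'user@example.com',
    // Verifies the user's identity; passed for example to `createLlmExecutionTools`:
    userToken: '<user-token>',
    // Arbitrary per-client metadata (the TCustomOptions generic):
    customOptions: { department: 'support' },
};
```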
package/esm/typings/src/types/typeAliases.d.ts
CHANGED
```diff
@@ -433,13 +433,13 @@ export type string_uuid = string & {
  *
  * @@@
  */
-export type string_app_id = id;
+export type string_app_id = id | 'app';
 /**
  * End user identifier
  *
  * @@@
  */
-export type string_user_id = id;
+export type string_user_id = id | string_email;
 /**
  * Semantic helper
  *
```
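Both widened aliases keep accepting plain ids but now also spell out the documented alternatives: the literal `'app'` for single-app setups and an e-mail address as the user identifier. A small sketch; the `id` and `string_email` aliases below are stand-ins, their real definitions live elsewhere in `typeAliases.d.ts` and are not part of this diff:

```ts
// Stand-ins for aliases defined elsewhere in typeAliases.d.ts (not part of this diff):
type id = string;
type string_email = string;

// The two aliases as changed in this version:
type string_app_id = id | 'app';
type string_user_id = id | string_email;

const singleAppId: string_app_id = 'app'; // single-frontend setup
const userByEmail: string_user_id = 'user@example.com'; // e-mail as the user identifier
```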
package/package.json
CHANGED
```diff
@@ -1,6 +1,6 @@
 {
     "name": "@promptbook/pdf",
-    "version": "0.88.0",
+    "version": "0.89.0-1",
     "description": "It's time for a paradigm shift. The future of software in plain English, French or Latin",
     "private": false,
     "sideEffects": false,
@@ -47,7 +47,7 @@
     "module": "./esm/index.es.js",
     "typings": "./esm/typings/src/_packages/pdf.index.d.ts",
     "peerDependencies": {
-        "@promptbook/core": "0.88.0"
+        "@promptbook/core": "0.89.0-1"
     },
     "dependencies": {
         "crypto": "^1.0.1",
```
package/umd/index.umd.js
CHANGED
```diff
@@ -25,7 +25,7 @@
      * @generated
      * @see https://github.com/webgptorg/promptbook
      */
-    const PROMPTBOOK_ENGINE_VERSION = '0.88.0';
+    const PROMPTBOOK_ENGINE_VERSION = '0.89.0-1';
     /**
      * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
      * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -2597,8 +2597,9 @@
      * @returns LLM tools with same functionality with added total cost counting
      * @public exported from `@promptbook/core`
      */
-    function countTotalUsage(llmTools) {
+    function countUsage(llmTools) {
         let totalUsage = ZERO_USAGE;
+        const spending = new rxjs.Subject();
         const proxyTools = {
             get title() {
                 // TODO: [🧠] Maybe put here some suffix
@@ -2608,12 +2609,15 @@
                 // TODO: [🧠] Maybe put here some suffix
                 return llmTools.description;
             },
-
+            checkConfiguration() {
                 return /* not await */ llmTools.checkConfiguration();
             },
             listModels() {
                 return /* not await */ llmTools.listModels();
             },
+            spending() {
+                return spending.asObservable();
+            },
             getTotalUsage() {
                 // <- Note: [🥫] Not using getter `get totalUsage` but `getTotalUsage` to allow this object to be proxied
                 return totalUsage;
@@ -2624,6 +2628,7 @@
             // console.info('[🚕] callChatModel through countTotalUsage');
             const promptResult = await llmTools.callChatModel(prompt);
             totalUsage = addUsage(totalUsage, promptResult.usage);
+            spending.next(promptResult.usage);
             return promptResult;
         };
     }
@@ -2632,6 +2637,7 @@
             // console.info('[🚕] callCompletionModel through countTotalUsage');
             const promptResult = await llmTools.callCompletionModel(prompt);
             totalUsage = addUsage(totalUsage, promptResult.usage);
+            spending.next(promptResult.usage);
             return promptResult;
         };
     }
@@ -2640,6 +2646,7 @@
             // console.info('[🚕] callEmbeddingModel through countTotalUsage');
             const promptResult = await llmTools.callEmbeddingModel(prompt);
             totalUsage = addUsage(totalUsage, promptResult.usage);
+            spending.next(promptResult.usage);
             return promptResult;
         };
     }
@@ -3536,7 +3543,7 @@
     // TODO: [🚐] Make arrayable LLMs -> single LLM DRY
     const _llms = arrayableToArray(tools.llm);
     const llmTools = _llms.length === 1 ? _llms[0] : joinLlmExecutionTools(..._llms);
-    const llmToolsWithUsage = countTotalUsage(llmTools);
+    const llmToolsWithUsage = countUsage(llmTools);
     // <- TODO: [🌯]
     /*
         TODO: [🧠][🪑][🔃] Should this be done or not
```
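In the bundle the usage-counting wrapper is renamed from `countTotalUsage` to `countUsage` (matching the renamed typings file in the list above) and now also publishes the usage of every single call through an RxJS `Subject` exposed as a `spending()` observable. A hedged consumption sketch; the import locations follow the `@public exported from @promptbook/core` annotation and the `@promptbook/types` package name, and `llmTools` stands for any configured provider tools:

```ts
import { countUsage } from '@promptbook/core';
import type { LlmExecutionTools } from '@promptbook/types';

// Any configured provider tools would do here; declared only to keep the sketch self-contained.
declare const llmTools: LlmExecutionTools;

const llmToolsWithUsage = countUsage(llmTools);

// Emits once per callChatModel / callCompletionModel / callEmbeddingModel,
// fed by `spending.next(promptResult.usage)` in the wrapper shown above:
const subscription = llmToolsWithUsage.spending().subscribe((usage) => {
    console.info('Usage of one prompt:', usage);
});

// The running total is still available as before:
console.info('Total usage so far:', llmToolsWithUsage.getTotalUsage());

subscription.unsubscribe();
```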