@adobe/spacecat-shared-gpt-client 1.4.2 → 1.4.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -0
- package/package.json +4 -4
- package/src/clients/index.d.ts +93 -0
package/CHANGELOG.md
CHANGED
|
@@ -1,3 +1,17 @@
|
|
|
1
|
+
# [@adobe/spacecat-shared-gpt-client-v1.4.4](https://github.com/adobe/spacecat-shared/compare/@adobe/spacecat-shared-gpt-client-v1.4.3...@adobe/spacecat-shared-gpt-client-v1.4.4) (2025-01-14)
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
### Bug Fixes
|
|
5
|
+
|
|
6
|
+
* firefall interface missing functions ([#533](https://github.com/adobe/spacecat-shared/issues/533)) ([d62db11](https://github.com/adobe/spacecat-shared/commit/d62db11aaab8ad04c561d03e79b2eb25a7d0c34b))
|
|
7
|
+
|
|
8
|
+
# [@adobe/spacecat-shared-gpt-client-v1.4.3](https://github.com/adobe/spacecat-shared/compare/@adobe/spacecat-shared-gpt-client-v1.4.2...@adobe/spacecat-shared-gpt-client-v1.4.3) (2025-01-12)
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
### Bug Fixes
|
|
12
|
+
|
|
13
|
+
* **deps:** update external fixes ([#538](https://github.com/adobe/spacecat-shared/issues/538)) ([a3bddf6](https://github.com/adobe/spacecat-shared/commit/a3bddf6cb2a9b60db8f8c3450e81205cd10c0b23))
|
|
14
|
+
|
|
1
15
|
# [@adobe/spacecat-shared-gpt-client-v1.4.2](https://github.com/adobe/spacecat-shared/compare/@adobe/spacecat-shared-gpt-client-v1.4.1...@adobe/spacecat-shared-gpt-client-v1.4.2) (2024-12-31)
|
|
2
16
|
|
|
3
17
|
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@adobe/spacecat-shared-gpt-client",
|
|
3
|
-
"version": "1.4.2",
|
|
3
|
+
"version": "1.4.4",
|
|
4
4
|
"description": "Shared modules of the Spacecat Services - GPT Client",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"engines": {
|
|
@@ -36,8 +36,8 @@
|
|
|
36
36
|
"dependencies": {
|
|
37
37
|
"@adobe/fetch": "4.1.11",
|
|
38
38
|
"@adobe/helix-universal": "5.0.8",
|
|
39
|
-
"@adobe/spacecat-shared-ims-client": "1.5.
|
|
40
|
-
"@adobe/spacecat-shared-utils": "1.
|
|
39
|
+
"@adobe/spacecat-shared-ims-client": "1.5.2",
|
|
40
|
+
"@adobe/spacecat-shared-utils": "1.26.1"
|
|
41
41
|
},
|
|
42
42
|
"devDependencies": {
|
|
43
43
|
"chai": "5.1.2",
|
|
@@ -45,6 +45,6 @@
|
|
|
45
45
|
"nock": "13.5.6",
|
|
46
46
|
"sinon": "19.0.2",
|
|
47
47
|
"sinon-chai": "4.0.0",
|
|
48
|
-
"typescript": "5.7.
|
|
48
|
+
"typescript": "5.7.3"
|
|
49
49
|
}
|
|
50
50
|
}
|
package/src/clients/index.d.ts
CHANGED
|
@@ -24,6 +24,99 @@ export class FirefallClient {
|
|
|
24
24
|
* Sends the given prompt to the Firefall GPT API and returns the response.
|
|
25
25
|
* @param {string} prompt The prompt to send to the Firefall GPT API.
|
|
26
26
|
* @returns {Promise<string>} The response from the Firefall GPT API.
|
|
27
|
+
* @deprecated since version 1.2.19. Use fetchCapabilityExecution instead.
|
|
27
28
|
*/
|
|
28
29
|
fetch(prompt: string): Promise<string>;
|
|
30
|
+
|
|
31
|
+
/**
|
|
32
|
+
* Fetches data from Firefall Chat Completion API.
|
|
33
|
+
*
|
|
34
|
+
* @param {string} prompt - The text prompt to provide to Firefall
|
|
35
|
+
* @param {object} [options] - The options for the call, with optional properties:
|
|
36
|
+
* - imageUrls: An array of URLs of the images to provide to Firefall
|
|
37
|
+
* - model: LLM Model to use (default: gpt-4-turbo).
|
|
38
|
+
* Use 'gpt-4-vision' with images.
|
|
39
|
+
* JSON mode is only currently supported with the following models: gpt-35-turbo-1106, gpt-4-turbo
|
|
40
|
+
* @returns {Promise<object>} A promise that resolves to an object containing the chat completion.
|
|
41
|
+
*
|
|
42
|
+
* The returned object has the following structure:
|
|
43
|
+
*
|
|
44
|
+
* @example
|
|
45
|
+
* {
|
|
46
|
+
* "conversation_identifier": string | null,
|
|
47
|
+
* "query_id": string | null,
|
|
48
|
+
* "model": string,
|
|
49
|
+
* "choices": [
|
|
50
|
+
* {
|
|
51
|
+
* "finish_reason": string,
|
|
52
|
+
* "index": number,
|
|
53
|
+
* "message": {
|
|
54
|
+
* "role": string,
|
|
55
|
+
* "content": string,
|
|
56
|
+
* "function_call": object | null
|
|
57
|
+
* },
|
|
58
|
+
* "content_filter_results": {
|
|
59
|
+
* "hate": {
|
|
60
|
+
* "filtered": boolean,
|
|
61
|
+
* "severity": string
|
|
62
|
+
* },
|
|
63
|
+
* "self_harm": {
|
|
64
|
+
* "filtered": boolean,
|
|
65
|
+
* "severity": string
|
|
66
|
+
* },
|
|
67
|
+
* "sexual": {
|
|
68
|
+
* "filtered": boolean,
|
|
69
|
+
* "severity": string
|
|
70
|
+
* },
|
|
71
|
+
* "violence": {
|
|
72
|
+
* "filtered": boolean,
|
|
73
|
+
* "severity": string
|
|
74
|
+
* }
|
|
75
|
+
* },
|
|
76
|
+
* "logprobs": object | null
|
|
77
|
+
* }
|
|
78
|
+
* ],
|
|
79
|
+
* "created_at": string,
|
|
80
|
+
* "usage": {
|
|
81
|
+
* "completion_tokens": number,
|
|
82
|
+
* "prompt_tokens": number,
|
|
83
|
+
* "total_tokens": number
|
|
84
|
+
* },
|
|
85
|
+
* "prompt_filter_results": [
|
|
86
|
+
* {
|
|
87
|
+
* "prompt_index": number,
|
|
88
|
+
* "content_filter_results": {
|
|
89
|
+
* "hate": {
|
|
90
|
+
* "filtered": boolean,
|
|
91
|
+
* "severity": string
|
|
92
|
+
* },
|
|
93
|
+
* "jailbreak": {
|
|
94
|
+
* "filtered": boolean,
|
|
95
|
+
* "detected": boolean
|
|
96
|
+
* },
|
|
97
|
+
* "self_harm": {
|
|
98
|
+
* "filtered": boolean,
|
|
99
|
+
* "severity": string
|
|
100
|
+
* },
|
|
101
|
+
* "sexual": {
|
|
102
|
+
* "filtered": boolean,
|
|
103
|
+
* "severity": string
|
|
104
|
+
* },
|
|
105
|
+
* "violence": {
|
|
106
|
+
* "filtered": boolean,
|
|
107
|
+
* "severity": string
|
|
108
|
+
* }
|
|
109
|
+
* }
|
|
110
|
+
* }
|
|
111
|
+
* ]
|
|
112
|
+
* }
|
|
113
|
+
*/
|
|
114
|
+
fetchChatCompletion(prompt: string, options?: object): Promise<object>;
|
|
115
|
+
|
|
116
|
+
/**
|
|
117
|
+
* Fetches data from Firefall API.
|
|
118
|
+
* @param prompt The text prompt to provide to Firefall
|
|
119
|
+
* @returns {Promise<string>} - AI response
|
|
120
|
+
*/
|
|
121
|
+
fetchCapabilityExecution(prompt: string): Promise<string>;
|
|
29
122
|
}
|