@automattic/jetpack-ai-client 0.1.16 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +5 -0
- package/build/ask-question/index.d.ts +32 -0
- package/build/ask-question/index.js +37 -0
- package/build/components/ai-control/index.d.ts +49 -0
- package/build/components/ai-control/index.js +53 -0
- package/build/components/ai-control/message.d.ts +30 -0
- package/build/components/ai-control/message.js +44 -0
- package/build/components/ai-status-indicator/index.d.ts +15 -0
- package/build/components/ai-status-indicator/index.js +22 -0
- package/build/components/audio-duration-display/index.d.ts +12 -0
- package/build/components/audio-duration-display/index.js +25 -0
- package/build/components/audio-duration-display/lib/media.d.ts +29 -0
- package/build/components/audio-duration-display/lib/media.js +52 -0
- package/build/components/index.d.ts +4 -0
- package/build/components/index.js +4 -0
- package/build/data-flow/context.d.ts +39 -0
- package/build/data-flow/context.js +22 -0
- package/build/data-flow/index.d.ts +3 -0
- package/build/data-flow/index.js +3 -0
- package/build/data-flow/use-ai-context.d.ts +24 -0
- package/build/data-flow/use-ai-context.js +46 -0
- package/build/data-flow/with-ai-assistant-data.d.ts +10 -0
- package/build/data-flow/with-ai-assistant-data.js +42 -0
- package/build/hooks/use-ai-suggestions/index.d.ts +48 -0
- package/build/hooks/use-ai-suggestions/index.js +214 -0
- package/build/hooks/use-media-recording/index.d.ts +42 -0
- package/build/hooks/use-media-recording/index.js +143 -0
- package/build/icons/ai-assistant.d.ts +2 -0
- package/build/icons/ai-assistant.js +7 -0
- package/build/icons/index.d.ts +7 -0
- package/build/icons/index.js +7 -0
- package/build/icons/mic.d.ts +2 -0
- package/build/icons/mic.js +7 -0
- package/build/icons/origami-plane.d.ts +2 -0
- package/build/icons/origami-plane.js +7 -0
- package/build/icons/player-pause.d.ts +2 -0
- package/build/icons/player-pause.js +7 -0
- package/build/icons/player-play.d.ts +2 -0
- package/build/icons/player-play.js +7 -0
- package/build/icons/player-stop.d.ts +2 -0
- package/build/icons/player-stop.js +7 -0
- package/build/icons/speak-tone.d.ts +2 -0
- package/build/icons/speak-tone.js +7 -0
- package/build/index.d.ts +9 -0
- package/build/index.js +27 -0
- package/build/jwt/index.d.ts +19 -0
- package/build/jwt/index.js +74 -0
- package/build/suggestions-event-source/index.d.ts +55 -0
- package/build/suggestions-event-source/index.js +282 -0
- package/build/types.d.ts +34 -0
- package/build/types.js +27 -0
- package/package.json +13 -6
- package/src/components/ai-control/index.tsx +21 -19
- package/src/data-flow/context.tsx +3 -3
- package/src/data-flow/with-ai-assistant-data.tsx +42 -39
- package/src/index.ts +32 -0
- package/src/types.ts +17 -0
- package/index.ts +0 -32
- package/src/global.d.ts +0 -9
|
/**
 * External dependencies
 */
import { isSimpleSite } from '@automattic/jetpack-shared-extension-utils';
import apiFetch from '@wordpress/api-fetch';
import debugFactory from 'debug';

const debug = debugFactory('jetpack-ai-client:jwt');

// localStorage key under which the token payload is cached between calls.
const JWT_TOKEN_ID = 'jetpack-ai-jwt';

// Default lifetime for a freshly issued token: 2 minutes.
const JWT_TOKEN_EXPIRATION_TIME = 2 * 60 * 1000;

/**
 * Request a JWT from the Jetpack site, reusing a locally cached token
 * while it has not expired yet.
 *
 * @param {RequestTokenOptions} options - Options
 * @returns {Promise<TokenDataProps>} The token and the blogId
 */
export default async function requestJwt({ apiNonce, siteId, expirationTime, } = {}) {
    // Fill in defaults from the Jetpack connection initial state.
    const nonce = apiNonce || window.JP_CONNECTION_INITIAL_STATE.apiNonce;
    const blog = siteId || window.JP_CONNECTION_INITIAL_STATE.siteSuffix;
    const lifetime = expirationTime || JWT_TOKEN_EXPIRATION_TIME;
    const isSimple = isSimpleSite();

    // Try to reuse a token previously cached in localStorage.
    const cachedValue = localStorage.getItem(JWT_TOKEN_ID);
    let cachedTokenData = null;
    if (cachedValue) {
        try {
            cachedTokenData = JSON.parse(cachedValue);
        }
        catch (err) {
            debug('Error parsing token', err);
        }
    }

    // Only reuse the cached token while its expiration lies in the future.
    if (cachedTokenData && cachedTokenData?.expire > Date.now()) {
        debug('Using cached token');
        return cachedTokenData;
    }

    /*
     * Acquire a fresh token. Non-Simple sites go through the endpoint
     * registered in the Jetpack plugin; Simple sites hit wpcom directly.
     * Probably we should move the Jetpack endpoint to another package,
     * but for now it's here.
     * issue: https://github.com/Automattic/jetpack/issues/31938
     */
    const data = !isSimple
        ? await apiFetch({
            path: '/jetpack/v4/jetpack-ai-jwt?_cacheBuster=' + Date.now(),
            credentials: 'same-origin',
            headers: {
                'X-WP-Nonce': nonce,
            },
            method: 'POST',
        })
        : await apiFetch({
            path: '/wpcom/v2/sites/' + blog + '/jetpack-openai-query/jwt',
            method: 'POST',
        });

    const newTokenData = {
        token: data.token,
        /**
         * TODO: make sure we return id from the .com token acquisition endpoint too
         */
        blogId: !isSimple ? data.blog_id : blog,
        // Expire the token after the requested lifetime (2 minutes by default).
        expire: Date.now() + lifetime,
    };

    // Store the token in localStorage
    debug('Storing new token');
    localStorage.setItem(JWT_TOKEN_ID, JSON.stringify(newTokenData));
    return newTokenData;
}
|
/**
 * External dependencies
 */
import { EventSourceMessage } from '@microsoft/fetch-event-source';
import type { AiModelTypeProp, PromptProp } from '../types';
// Arguments accepted by the SuggestionsEventSource constructor (and by
// initEventSource, which receives the same shape).
type SuggestionsEventSourceConstructorArgs = {
    url?: string;
    question: PromptProp;
    token?: string;
    options?: {
        postId?: number;
        feature?: 'ai-assistant-experimental' | string | undefined;
        fromCache?: boolean;
        functions?: Array<object>;
        model?: AiModelTypeProp;
    };
};
// Function-call payload accumulated from the streamed deltas.
type FunctionCallProps = {
    name?: string;
    arguments?: string;
};
/**
 * SuggestionsEventSource is a wrapper around EventTarget that emits
 * a 'chunk' event for each chunk of data received, and a 'done' event
 * when the stream is closed.
 * It also emits a 'suggestion' event with the full suggestion received so far
 *
 * @returns {SuggestionsEventSource} The event source
 * @fires SuggestionsEventSource#suggestion - The full suggestion has been received so far
 * @fires SuggestionsEventSource#message - A message has been received
 * @fires SuggestionsEventSource#chunk - A chunk of data has been received
 * @fires SuggestionsEventSource#done - The stream has been closed. No more data will be received
 * @fires SuggestionsEventSource#error - An error has occurred
 * @fires SuggestionsEventSource#error_network - The EventSource connection to the server returned some error
 * @fires SuggestionsEventSource#error_context_too_large - The server returned a 413 error
 * @fires SuggestionsEventSource#error_moderation - The server returned a 422 error
 * @fires SuggestionsEventSource#error_quota_exceeded - The server returned a 429 error
 * @fires SuggestionsEventSource#error_service_unavailable - The server returned a 503 error
 * @fires SuggestionsEventSource#error_unclear_prompt - The server returned a message starting with JETPACK_AI_ERROR
 */
export default class SuggestionsEventSource extends EventTarget {
    // Suggestion text accumulated from the streamed chunks so far.
    fullMessage: string;
    // Function-call name/arguments accumulated from the streamed deltas.
    fullFunctionCall: FunctionCallProps;
    // True once the message is known not to start with the JETPACK_AI_ERROR marker.
    isPromptClear: boolean;
    // Aborts the underlying streaming request (see close()).
    controller: AbortController;
    // Guards against dispatching the unclear-prompt error more than once per request.
    errorUnclearPromptTriggered: boolean;
    constructor(data: SuggestionsEventSourceConstructorArgs);
    // Opens the streaming connection; requests a JWT first when token is missing.
    initEventSource({ url, question, token, options, }: SuggestionsEventSourceConstructorArgs): Promise<void>;
    // Detects the JETPACK_AI_ERROR marker in the accumulated message.
    checkForUnclearPrompt(): void;
    // Aborts the in-flight request.
    close(): void;
    // Handles a single server-sent event from the stream.
    processEvent(e: EventSourceMessage): void;
    // Dispatches error events for a failed HTTP response.
    processConnectionError(response: any): void;
    // Dispatches error events for a thrown error.
    processErrorEvent(e: any): void;
}
export {};
|
/**
 * External dependencies
 */
import { fetchEventSource } from '@microsoft/fetch-event-source';
import debugFactory from 'debug';
/**
 * Internal dependencies
 */
import { getErrorData } from '../hooks/use-ai-suggestions';
import requestJwt from '../jwt';
/*
 * Types & constants
 */
import { ERROR_CONTEXT_TOO_LARGE, ERROR_MODERATION, ERROR_NETWORK, ERROR_QUOTA_EXCEEDED, ERROR_RESPONSE, ERROR_SERVICE_UNAVAILABLE, ERROR_UNCLEAR_PROMPT, } from '../types';
const debug = debugFactory('jetpack-ai-client:suggestions-event-source');
/**
 * SuggestionsEventSource is a wrapper around EventTarget that emits
 * a 'chunk' event for each chunk of data received, and a 'done' event
 * when the stream is closed.
 * It also emits a 'suggestion' event with the full suggestion received so far
 *
 * @returns {SuggestionsEventSource} The event source
 * @fires SuggestionsEventSource#suggestion - The full suggestion has been received so far
 * @fires SuggestionsEventSource#message - A message has been received
 * @fires SuggestionsEventSource#chunk - A chunk of data has been received
 * @fires SuggestionsEventSource#done - The stream has been closed. No more data will be received
 * @fires SuggestionsEventSource#error - An error has occurred
 * @fires SuggestionsEventSource#error_network - The EventSource connection to the server returned some error
 * @fires SuggestionsEventSource#error_context_too_large - The server returned a 413 error
 * @fires SuggestionsEventSource#error_moderation - The server returned a 422 error
 * @fires SuggestionsEventSource#error_quota_exceeded - The server returned a 429 error
 * @fires SuggestionsEventSource#error_service_unavailable - The server returned a 503 error
 * @fires SuggestionsEventSource#error_unclear_prompt - The server returned a message starting with JETPACK_AI_ERROR
 */
export default class SuggestionsEventSource extends EventTarget {
    // Suggestion text accumulated from all 'content' chunks received so far.
    fullMessage;
    // Function-call payload (name/arguments) accumulated from the stream.
    fullFunctionCall;
    // True once the message is known not to start with the JETPACK_AI_ERROR marker.
    isPromptClear;
    // Aborts the underlying fetchEventSource request (see close()).
    controller;
    // Flag to detect if the unclear prompt event was already dispatched
    errorUnclearPromptTriggered;
    constructor(data) {
        super();
        this.fullMessage = '';
        this.fullFunctionCall = {
            name: '',
            arguments: '',
        };
        this.isPromptClear = false;
        // The AbortController is used to close the fetchEventSource connection
        this.controller = new AbortController();
        // NOTE(review): initEventSource is async but not awaited here; failures
        // surface only through the dispatched error events.
        this.initEventSource(data);
    }
    // Opens the streaming connection, requesting a JWT first when none is given.
    async initEventSource({ url, question, token, options = {}, }) {
        // If the token is not provided, try to get one
        if (!token) {
            try {
                debug('Token was not provided, requesting one...');
                token = (await requestJwt()).token;
            }
            catch (err) {
                this.processErrorEvent(err);
                return;
            }
        }
        const bodyData = {};
        // Populate body data with post id
        if (options?.postId) {
            bodyData.post_id = options.postId;
        }
        // If the url is not provided, we use the default one
        if (!url) {
            const urlHandler = new URL('https://public-api.wordpress.com/wpcom/v2/jetpack-ai-query');
            // Support response from cache option
            if (options?.fromCache) {
                urlHandler.searchParams.append('stream_cache', 'true');
            }
            url = urlHandler.toString();
            debug('URL not provided, using default: %o', url);
        }
        // question can be a string or an array of PromptMessagesProp
        if (Array.isArray(question)) {
            bodyData.messages = question;
        }
        else {
            bodyData.question = question;
        }
        // Propagate the feature option
        if (options?.feature?.length) {
            debug('Feature: %o', options.feature);
            bodyData.feature = options.feature;
        }
        // Propagate the functions option
        if (options?.functions?.length) {
            debug('Functions: %o', options.functions);
            bodyData.functions = options.functions;
        }
        // Model
        if (options?.model?.length) {
            debug('Model: %o', options.model);
            bodyData.model = options.model;
        }
        // Clean the unclear prompt trigger flag
        this.errorUnclearPromptTriggered = false;
        await fetchEventSource(url, {
            // Keep the stream open even when the tab is in the background.
            openWhenHidden: true,
            method: 'POST',
            headers: {
                'Content-type': 'application/json',
                Authorization: 'Bearer ' + token,
            },
            body: JSON.stringify(bodyData),
            onclose: () => {
                debug('Stream closed');
            },
            onerror: err => {
                this.processErrorEvent(err);
                throw err; // rethrow to stop the operation otherwise it will retry forever
            },
            onmessage: ev => {
                this.processEvent(ev);
            },
            // Map non-OK HTTP responses to specific error events, then abort.
            onopen: async (response) => {
                if (response.ok) {
                    return;
                }
                let errorCode;
                // Generic 4xx/500 errors without a dedicated branch below.
                if (response.status >= 400 &&
                    response.status <= 500 &&
                    ![413, 422, 429].includes(response.status)) {
                    this.processConnectionError(response);
                }
                /*
                 * error code 503
                 * service unavailable
                 */
                if (response.status === 503) {
                    errorCode = ERROR_SERVICE_UNAVAILABLE;
                    this.dispatchEvent(new CustomEvent(ERROR_SERVICE_UNAVAILABLE));
                }
                /*
                 * error code 413
                 * request context too large
                 */
                if (response.status === 413) {
                    errorCode = ERROR_CONTEXT_TOO_LARGE;
                    this.dispatchEvent(new CustomEvent(ERROR_CONTEXT_TOO_LARGE));
                }
                /*
                 * error code 422
                 * request flagged by moderation system
                 */
                if (response.status === 422) {
                    errorCode = ERROR_MODERATION;
                    this.dispatchEvent(new CustomEvent(ERROR_MODERATION));
                }
                /*
                 * error code 429
                 * you exceeded your current quota please check your plan and billing details
                 */
                if (response.status === 429) {
                    errorCode = ERROR_QUOTA_EXCEEDED;
                    this.dispatchEvent(new CustomEvent(ERROR_QUOTA_EXCEEDED));
                }
                // Always dispatch a global ERROR_RESPONSE event
                // NOTE(review): errorCode remains undefined for statuses with no
                // branch above (e.g. plain 400/500), so getErrorData may receive
                // undefined here — confirm downstream handles that.
                this.dispatchEvent(new CustomEvent(ERROR_RESPONSE, {
                    detail: getErrorData(errorCode),
                }));
                // Throwing stops fetchEventSource from retrying the request.
                throw new Error();
            },
            signal: this.controller.signal,
        });
    }
    // Detects the JETPACK_AI_ERROR marker at the start of the streamed message
    // and dispatches the unclear-prompt error events (at most once per request).
    checkForUnclearPrompt() {
        if (this.isPromptClear) {
            return;
        }
        /*
         * Sometimes the first token of the message is not received,
         * so we check only for JETPACK_AI_ERROR, ignoring:
         * - the double underscores (italic markdown)
         * - the double asterisks (bold markdown)
         */
        const replacedMessage = this.fullMessage.replace(/__|(\*\*)/g, '');
        if (replacedMessage.startsWith('JETPACK_AI_ERROR')) {
            /*
             * Check if the unclear prompt event was already dispatched,
             * to ensure that it is dispatched only once per request.
             */
            if (this.errorUnclearPromptTriggered) {
                return;
            }
            this.errorUnclearPromptTriggered = true;
            // The unclear prompt marker was found, so we dispatch an error event
            this.dispatchEvent(new CustomEvent(ERROR_UNCLEAR_PROMPT));
            debug('Unclear error prompt dispatched');
            this.dispatchEvent(new CustomEvent(ERROR_RESPONSE, {
                detail: getErrorData(ERROR_UNCLEAR_PROMPT),
            }));
        }
        else if ('JETPACK_AI_ERROR'.startsWith(replacedMessage)) {
            // Partial unclear prompt marker was found, so we wait for more data and print a debug message without dispatching an event
            debug(this.fullMessage);
        }
        else {
            // Mark the prompt as clear
            this.isPromptClear = true;
        }
    }
    // Aborts the in-flight streaming request.
    close() {
        this.controller.abort();
    }
    // Handles one server-sent event: either the '[DONE]' sentinel or a
    // JSON-encoded chunk carrying content and/or function-call deltas.
    processEvent(e) {
        if (e.data === '[DONE]') {
            /*
             * Check if the unclear prompt event was already dispatched,
             * to ensure that it is dispatched only once per request.
             */
            if (this.errorUnclearPromptTriggered) {
                return;
            }
            if (this.fullMessage.length) {
                // Dispatch an event with the full content
                this.dispatchEvent(new CustomEvent('done', { detail: this.fullMessage }));
                debug('Done: %o', this.fullMessage);
                return;
            }
            if (this.fullFunctionCall.name.length) {
                this.dispatchEvent(new CustomEvent('function_done', { detail: this.fullFunctionCall }));
                debug('Done: %o', this.fullFunctionCall);
                return;
            }
        }
        let data;
        try {
            data = JSON.parse(e.data);
        }
        catch (err) {
            // Non-JSON payloads (including a '[DONE]' sentinel that reached this
            // point without accumulated data) are ignored.
            debug('Error parsing JSON', e, err);
            return;
        }
        // Extract the streamed delta; default to empty content/function_call.
        const { delta } = data?.choices?.[0] ?? { delta: { content: null, function_call: null } };
        const chunk = delta.content;
        const functionCallChunk = delta.function_call;
        if (chunk) {
            this.fullMessage += chunk;
            this.checkForUnclearPrompt();
            if (this.isPromptClear) {
                // Dispatch an event with the chunk
                this.dispatchEvent(new CustomEvent('chunk', { detail: chunk }));
                // Dispatch an event with the full message
                debug('suggestion: %o', this.fullMessage);
                this.dispatchEvent(new CustomEvent('suggestion', { detail: this.fullMessage }));
            }
        }
        if (functionCallChunk) {
            if (functionCallChunk.name != null) {
                this.fullFunctionCall.name += functionCallChunk.name;
            }
            if (functionCallChunk.arguments != null) {
                this.fullFunctionCall.arguments += functionCallChunk.arguments;
            }
            // Dispatch an event with the function call
            this.dispatchEvent(new CustomEvent('functionCallChunk', { detail: this.fullFunctionCall }));
        }
    }
    // Dispatches network + response error events for a failed HTTP response.
    processConnectionError(response) {
        debug('Connection error: %o', response);
        this.dispatchEvent(new CustomEvent(ERROR_NETWORK, { detail: response }));
        this.dispatchEvent(new CustomEvent(ERROR_RESPONSE, {
            detail: getErrorData(ERROR_NETWORK),
        }));
    }
    // Dispatches network + response error events for a thrown error.
    processErrorEvent(e) {
        debug('onerror: %o', e);
        // Dispatch a generic network error event
        this.dispatchEvent(new CustomEvent(ERROR_NETWORK, { detail: e }));
        this.dispatchEvent(new CustomEvent(ERROR_RESPONSE, {
            detail: getErrorData(ERROR_NETWORK),
        }));
    }
}
package/build/types.d.ts
ADDED
|
// Error codes for AI suggestion requests; each value doubles as the name of
// the CustomEvent dispatched by SuggestionsEventSource.
export declare const ERROR_SERVICE_UNAVAILABLE: "error_service_unavailable";
export declare const ERROR_QUOTA_EXCEEDED: "error_quota_exceeded";
export declare const ERROR_MODERATION: "error_moderation";
export declare const ERROR_CONTEXT_TOO_LARGE: "error_context_too_large";
export declare const ERROR_NETWORK: "error_network";
export declare const ERROR_UNCLEAR_PROMPT: "error_unclear_prompt";
export declare const ERROR_RESPONSE: "error_response";
// Union of all suggestion error codes above.
export type SuggestionErrorCode = typeof ERROR_SERVICE_UNAVAILABLE | typeof ERROR_QUOTA_EXCEEDED | typeof ERROR_MODERATION | typeof ERROR_CONTEXT_TOO_LARGE | typeof ERROR_NETWORK | typeof ERROR_UNCLEAR_PROMPT | typeof ERROR_RESPONSE;
// One chat-style prompt message.
export type PromptItemProps = {
    role: 'system' | 'user' | 'assistant' | 'jetpack-ai';
    content?: string;
    context?: object;
};
export type PromptMessagesProp = Array<PromptItemProps>;
// A prompt is either a plain question string or a list of messages.
export type PromptProp = PromptMessagesProp | string;
export type { UseAiContextOptions } from './data-flow/use-ai-context';
export type { RequestingErrorProps } from './hooks/use-ai-suggestions';
// Lifecycle states of a suggestion request.
export declare const REQUESTING_STATES: readonly ["init", "requesting", "suggesting", "done", "error"];
export type RequestingStateProp = (typeof REQUESTING_STATES)[number];
// Supported AI model identifiers.
export declare const AI_MODEL_GPT_3_5_Turbo_16K: "gpt-3.5-turbo-16k";
export declare const AI_MODEL_GPT_4: "gpt-4";
export type AiModelTypeProp = typeof AI_MODEL_GPT_3_5_Turbo_16K | typeof AI_MODEL_GPT_4;
// Shape of the global connection state read by the jwt module for its
// apiNonce/siteSuffix defaults.
interface JPConnectionInitialState {
    apiNonce: string;
    siteSuffix: string;
    connectionStatus: {
        isActive: boolean;
    };
}
declare global {
    interface Window {
        JP_CONNECTION_INITIAL_STATE: JPConnectionInitialState;
    }
}
package/build/types.js
ADDED
|
/*
 * Error codes for AI suggestion requests; each value is also the name of the
 * CustomEvent dispatched by the suggestions event source.
 */
export const ERROR_SERVICE_UNAVAILABLE = 'error_service_unavailable';
export const ERROR_QUOTA_EXCEEDED = 'error_quota_exceeded';
export const ERROR_MODERATION = 'error_moderation';
export const ERROR_CONTEXT_TOO_LARGE = 'error_context_too_large';
export const ERROR_NETWORK = 'error_network';
export const ERROR_UNCLEAR_PROMPT = 'error_unclear_prompt';
export const ERROR_RESPONSE = 'error_response';
/*
 * Requests types: the ordered lifecycle states of a suggestion request.
 */
export const REQUESTING_STATES = ['init', 'requesting', 'suggesting', 'done', 'error'];
/*
 * Model types and constants
 */
export const AI_MODEL_GPT_3_5_Turbo_16K = 'gpt-3.5-turbo-16k';
export const AI_MODEL_GPT_4 = 'gpt-4';
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"private": false,
|
|
3
3
|
"name": "@automattic/jetpack-ai-client",
|
|
4
|
-
"version": "0.
|
|
4
|
+
"version": "0.2.0",
|
|
5
5
|
"description": "A JS client for consuming Jetpack AI services",
|
|
6
6
|
"homepage": "https://github.com/Automattic/jetpack/tree/HEAD/projects/js-packages/ai-client/#readme",
|
|
7
7
|
"bugs": {
|
|
@@ -15,7 +15,8 @@
|
|
|
15
15
|
"license": "GPL-2.0-or-later",
|
|
16
16
|
"author": "Automattic",
|
|
17
17
|
"scripts": {
|
|
18
|
-
"build": "pnpm run compile-ts",
|
|
18
|
+
"build": "pnpm run clean && pnpm run compile-ts",
|
|
19
|
+
"clean": "rm -rf build/",
|
|
19
20
|
"compile-ts": "tsc --pretty",
|
|
20
21
|
"test": "NODE_OPTIONS=--experimental-vm-modules jest"
|
|
21
22
|
},
|
|
@@ -29,13 +30,19 @@
|
|
|
29
30
|
"typescript": "5.0.4"
|
|
30
31
|
},
|
|
31
32
|
"exports": {
|
|
32
|
-
".":
|
|
33
|
+
".": {
|
|
34
|
+
"types": "./build/index.d.ts",
|
|
35
|
+
"default": "./build/index.js"
|
|
36
|
+
}
|
|
33
37
|
},
|
|
38
|
+
"main": "./build/index.js",
|
|
39
|
+
"types": "./build/index.d.ts",
|
|
34
40
|
"dependencies": {
|
|
35
|
-
"@automattic/jetpack-base-styles": "^0.6.
|
|
36
|
-
"@automattic/jetpack-connection": "^0.30.
|
|
37
|
-
"@automattic/jetpack-shared-extension-utils": "^0.13.
|
|
41
|
+
"@automattic/jetpack-base-styles": "^0.6.13",
|
|
42
|
+
"@automattic/jetpack-connection": "^0.30.8",
|
|
43
|
+
"@automattic/jetpack-shared-extension-utils": "^0.13.2",
|
|
38
44
|
"@microsoft/fetch-event-source": "2.0.1",
|
|
45
|
+
"@types/react": "18.2.33",
|
|
39
46
|
"@wordpress/api-fetch": "6.42.0",
|
|
40
47
|
"@wordpress/block-editor": "12.13.0",
|
|
41
48
|
"@wordpress/components": "25.11.0",
|
|
@@ -19,6 +19,22 @@ import { GuidelineMessage } from './message';
|
|
|
19
19
|
* Types
|
|
20
20
|
*/
|
|
21
21
|
import type { RequestingStateProp } from '../../types';
|
|
22
|
+
type AIControlProps = {
|
|
23
|
+
disabled?: boolean;
|
|
24
|
+
value: string;
|
|
25
|
+
placeholder?: string;
|
|
26
|
+
showAccept?: boolean;
|
|
27
|
+
acceptLabel?: string;
|
|
28
|
+
showButtonLabels?: boolean;
|
|
29
|
+
isTransparent?: boolean;
|
|
30
|
+
state?: RequestingStateProp;
|
|
31
|
+
showClearButton?: boolean;
|
|
32
|
+
showGuideLine?: boolean;
|
|
33
|
+
onChange?: ( newValue: string ) => void;
|
|
34
|
+
onSend?: ( currentValue: string ) => void;
|
|
35
|
+
onStop?: () => void;
|
|
36
|
+
onAccept?: () => void;
|
|
37
|
+
};
|
|
22
38
|
|
|
23
39
|
// eslint-disable-next-line @typescript-eslint/no-empty-function
|
|
24
40
|
const noop = () => {};
|
|
@@ -26,23 +42,9 @@ const noop = () => {};
|
|
|
26
42
|
/**
|
|
27
43
|
* AI Control component.
|
|
28
44
|
*
|
|
29
|
-
* @param {
|
|
30
|
-
* @param {
|
|
31
|
-
* @
|
|
32
|
-
* @param {string} props.placeholder - The input placeholder
|
|
33
|
-
* @param {boolean} props.showAccept - Whether to show the accept button
|
|
34
|
-
* @param {string} props.acceptLabel - The accept button label
|
|
35
|
-
* @param {boolean} props.showButtonLabels - Whether to show the button labels
|
|
36
|
-
* @param {boolean} props.isTransparent - Whether the component has low opacity
|
|
37
|
-
* @param {string} props.state - The request state
|
|
38
|
-
* @param {boolean} props.showClearButton - Whether to show the clear button when the input has a value
|
|
39
|
-
* @param {boolean} props.showGuideLine - WHether to show the guideline message
|
|
40
|
-
* @param {Function} props.onChange - Input change handler
|
|
41
|
-
* @param {Function} props.onSend - Request send handler
|
|
42
|
-
* @param {Function} props.onStop - Request stop handler
|
|
43
|
-
* @param {Function} props.onAccept - Response accept handler
|
|
44
|
-
* @param {object} ref - Auto injected ref from react
|
|
45
|
-
* @returns {object} - AI Control component
|
|
45
|
+
* @param {AIControlProps} props - Component props.
|
|
46
|
+
* @param {React.MutableRefObject} ref - Ref to the component.
|
|
47
|
+
* @returns {React.ReactElement} Rendered component.
|
|
46
48
|
*/
|
|
47
49
|
export function AIControl(
|
|
48
50
|
{
|
|
@@ -76,8 +78,8 @@ export function AIControl(
|
|
|
76
78
|
onStop?: () => void;
|
|
77
79
|
onAccept?: () => void;
|
|
78
80
|
},
|
|
79
|
-
ref
|
|
80
|
-
) {
|
|
81
|
+
ref: React.MutableRefObject< null > // eslint-disable-line @typescript-eslint/ban-types
|
|
82
|
+
): React.ReactElement {
|
|
81
83
|
const promptUserInputRef = useRef( null );
|
|
82
84
|
const loading = state === 'requesting' || state === 'suggesting';
|
|
83
85
|
|
|
@@ -53,7 +53,7 @@ type AiDataContextProviderProps = {
|
|
|
53
53
|
/*
|
|
54
54
|
* Children
|
|
55
55
|
*/
|
|
56
|
-
children: React.
|
|
56
|
+
children: React.ReactElement;
|
|
57
57
|
};
|
|
58
58
|
|
|
59
59
|
/**
|
|
@@ -67,7 +67,7 @@ export const AiDataContext = createContext( {} as AiDataContextProps );
|
|
|
67
67
|
* AI Data Context Provider
|
|
68
68
|
*
|
|
69
69
|
* @param {AiDataContextProviderProps} props - Component props.
|
|
70
|
-
* @returns {React.
|
|
70
|
+
* @returns {React.ReactElement} Context provider.
|
|
71
71
|
* @example
|
|
72
72
|
* <AiDataContextProvider value={ value }>
|
|
73
73
|
* { children }
|
|
@@ -76,6 +76,6 @@ export const AiDataContext = createContext( {} as AiDataContextProps );
|
|
|
76
76
|
export const AiDataContextProvider = ( {
|
|
77
77
|
value,
|
|
78
78
|
children,
|
|
79
|
-
}: AiDataContextProviderProps ): React.
|
|
79
|
+
}: AiDataContextProviderProps ): React.ReactElement => (
|
|
80
80
|
<AiDataContext.Provider value={ value } children={ children } />
|
|
81
81
|
);
|