@botpress/cognitive 0.1.50 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +7 -7
- package/dist/index.cjs +139 -67
- package/dist/index.cjs.map +2 -2
- package/dist/index.d.ts +41 -5
- package/dist/index.mjs +139 -67
- package/dist/index.mjs.map +2 -2
- package/package.json +1 -1
- package/refresh-models.ts +0 -6
package/dist/index.d.ts
CHANGED
@@ -1896,7 +1896,7 @@ interface CreateEventResponse {
      * ID of the [Message](#schema_message) to link the event to.
      */
     messageId?: string;
-    status: "pending" | "processed" | "ignored" | "failed" | "scheduled";
+    status: "pending" | "processed" | "ignored" | "failed" | "scheduled" | "canceled";
     /**
      * Reason why the event failed to be processed
      */
@@ -1949,7 +1949,7 @@ interface GetEventResponse {
      * ID of the [Message](#schema_message) to link the event to.
      */
     messageId?: string;
-    status: "pending" | "processed" | "ignored" | "failed" | "scheduled";
+    status: "pending" | "processed" | "ignored" | "failed" | "scheduled" | "canceled";
     /**
      * Reason why the event failed to be processed
      */
@@ -2005,7 +2005,7 @@ interface ListEventsResponse {
      * ID of the [Message](#schema_message) to link the event to.
      */
     messageId?: string;
-    status: "pending" | "processed" | "ignored" | "failed" | "scheduled";
+    status: "pending" | "processed" | "ignored" | "failed" | "scheduled" | "canceled";
     /**
      * Reason why the event failed to be processed
      */
@@ -2019,6 +2019,19 @@ interface ListEventsResponse {
     };
 }
 
+interface CancelScheduledEventRequestHeaders {
+}
+interface CancelScheduledEventRequestQuery {
+}
+interface CancelScheduledEventRequestParams {
+    id: string;
+}
+interface CancelScheduledEventRequestBody {
+}
+type CancelScheduledEventInput = CancelScheduledEventRequestBody & CancelScheduledEventRequestHeaders & CancelScheduledEventRequestQuery & CancelScheduledEventRequestParams;
+interface CancelScheduledEventResponse {
+}
+
 interface CreateMessageRequestHeaders {
 }
 interface CreateMessageRequestQuery {
@@ -15987,6 +16000,12 @@ interface SearchFilesResponse {
      * MIME type of the file's content
      */
     contentType: string;
+    /**
+     * Metadata of the file as an object of key-value pairs.
+     */
+    metadata: {
+        [k: string]: any | null;
+    };
     /**
      * The tags of the file as an object of key-value pairs.
      */
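The file objects in SearchFilesResponse now carry a free-form metadata map alongside tags. A minimal consumption sketch; everything except the contentType, metadata, and tags fields is an assumed placeholder:

// Hypothetical file record; only contentType, metadata, and tags mirror the generated typings.
const file = {
  contentType: "application/pdf",
  metadata: { source: "upload", pages: 12 } as { [k: string]: any | null },
  tags: { category: "invoice" },
};

// metadata is an open key-value map, so narrow values before using them.
const pages = typeof file.metadata.pages === "number" ? file.metadata.pages : undefined;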
@@ -17916,6 +17935,7 @@ declare class Client$1 {
     readonly createEvent: (input: CreateEventInput) => Promise<CreateEventResponse>;
     readonly getEvent: (input: GetEventInput) => Promise<GetEventResponse>;
     readonly listEvents: (input: ListEventsInput) => Promise<ListEventsResponse>;
+    readonly cancelScheduledEvent: (input: CancelScheduledEventInput) => Promise<CancelScheduledEventResponse>;
     readonly createMessage: (input: CreateMessageInput) => Promise<CreateMessageResponse>;
     readonly getOrCreateMessage: (input: GetOrCreateMessageInput) => Promise<GetOrCreateMessageResponse>;
     readonly getMessage: (input: GetMessageInput) => Promise<GetMessageResponse>;
@@ -18155,7 +18175,7 @@ declare class Client extends Client$1 implements IClient {
         conversationId?: string;
         userId?: string;
         messageId?: string;
-        status: "pending" | "processed" | "ignored" | "failed" | "scheduled";
+        status: "pending" | "processed" | "ignored" | "failed" | "scheduled" | "canceled";
         failureReason: string | null;
     }>;
     messages: (props: {
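Taken together, the CancelScheduledEvent* types, the new cancelScheduledEvent method on the client, and the widened status union cover cancelling a scheduled event. A sketch under the assumption that a client instance is already available; the inline type narrows the generated Client to the single call used here:

// MinimalClient narrows the generated Client to the one call used here (an assumption for brevity).
type MinimalClient = {
  cancelScheduledEvent: (input: { id: string }) => Promise<{}>;
};

// CancelScheduledEventRequestParams only carries the event id and the response body is empty;
// afterwards the event's status should report the new "canceled" value.
async function cancelEvent(client: MinimalClient, eventId: string): Promise<void> {
  await client.cancelScheduledEvent({ id: eventId });
}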
@@ -19215,6 +19235,20 @@ type Model = {
     lifecycle: 'production' | 'preview' | 'deprecated' | 'discontinued';
 };
 
+type BetaEvents = {
+    request: (req: {
+        input: CognitiveRequest;
+    }) => void;
+    response: (req: {
+        input: CognitiveRequest;
+    }, res: CognitiveResponse) => void;
+    error: (req: {
+        input: CognitiveRequest;
+    }, error: any) => void;
+    retry: (req: {
+        input: CognitiveRequest;
+    }, error: any) => void;
+};
 type ClientProps = {
     apiUrl?: string;
     timeout?: number;
@@ -19236,8 +19270,10 @@ declare class CognitiveBeta {
     private readonly _withCredentials;
     private readonly _headers;
     private readonly _debug;
+    private _events;
     constructor(props: ClientProps);
     clone(): CognitiveBeta;
+    on<K extends keyof BetaEvents>(event: K, cb: BetaEvents[K]): Unsubscribe;
     generateText(input: CognitiveRequest, options?: RequestOptions): Promise<CognitiveResponse>;
     listModels(): Promise<Model[]>;
     generateTextStream(request: CognitiveRequest, options?: RequestOptions): AsyncGenerator<CognitiveStreamChunk, void, unknown>;
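CognitiveBeta now carries a private event emitter and a typed on() subscription that returns an Unsubscribe function. A usage sketch; ClientProps fields beyond apiUrl and timeout (authentication, headers) are omitted, and the response fields mirror the runtime code further down:

import { CognitiveBeta } from "@botpress/cognitive";

// Authentication-related ClientProps are omitted here.
const beta = new CognitiveBeta({ apiUrl: "https://api.botpress.cloud", timeout: 30000 });

// on() is typed against BetaEvents, so the callback parameters are inferred;
// it returns an Unsubscribe function.
const unsubscribe = beta.on("response", (req, res) => {
  console.debug("request", req.input, "answered with", res.metadata);
});

// Later, when the listener is no longer needed:
unsubscribe();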
@@ -19247,4 +19283,4 @@ declare class CognitiveBeta {
 }
 declare const getCognitiveV2Model: (model: string) => Model | undefined;
 
-export { type BotpressClientLike, Cognitive, CognitiveBeta, type CognitiveRequest, type CognitiveResponse, type CognitiveStreamChunk, type Events, type GenerateContentInput, type GenerateContentOutput, type Model$1 as Model, type ModelPreferences, ModelProvider, type Models, RemoteModelProvider, getCognitiveV2Model };
+export { type BetaEvents, type BotpressClientLike, Cognitive, CognitiveBeta, type CognitiveRequest, type CognitiveResponse, type CognitiveStreamChunk, type Events, type GenerateContentInput, type GenerateContentOutput, type Model$1 as Model, type ModelPreferences, ModelProvider, type Models, RemoteModelProvider, getCognitiveV2Model };
package/dist/index.mjs
CHANGED
@@ -1747,6 +1747,7 @@ var CognitiveBeta = class _CognitiveBeta {
   _withCredentials;
   _headers;
   _debug = false;
+  _events = createNanoEvents();
   constructor(props) {
     this._apiUrl = props.apiUrl || "https://api.botpress.cloud";
     this._timeout = props.timeout || 60001;
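The new _events field is created with createNanoEvents, presumably the nanoevents emitter bundled into this build. A sketch of that emitter pattern, which is what backs on()/Unsubscribe in the typings above:

import { createNanoEvents } from "nanoevents";

// Declare the event map up front; emit() and on() are then fully typed.
const emitter = createNanoEvents<{ tick: (n: number) => void }>();

// on() returns an unbind function, which maps onto the Unsubscribe type above.
const unbind = emitter.on("tick", (n) => console.log("tick", n));
emitter.emit("tick", 1);
unbind();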
@@ -1777,15 +1778,28 @@ var CognitiveBeta = class _CognitiveBeta {
       debug: this._debug
     });
   }
+  on(event, cb) {
+    return this._events.on(event, cb);
+  }
   async generateText(input, options = {}) {
     const signal = options.signal ?? AbortSignal.timeout(this._timeout);
-    const {
-
-
-
-
-
-
+    const req = { input };
+    this._events.emit("request", req);
+    try {
+      const { data } = await this._withServerRetry(
+        () => this._axiosClient.post("/v2/cognitive/generate-text", input, {
+          signal,
+          timeout: options.timeout ?? this._timeout
+        }),
+        options,
+        req
+      );
+      this._events.emit("response", req, data);
+      return data;
+    } catch (error) {
+      this._events.emit("error", req, error);
+      throw error;
+    }
   }
   async listModels() {
     const { data } = await this._withServerRetry(
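generateText now emits request before the HTTP call, response on success, and error on failure (retry is emitted from _withServerRetry, shown further down). A sketch of observing a single call; the request value is a placeholder, since CognitiveRequest's fields are defined elsewhere in the typings:

import { CognitiveBeta, type CognitiveRequest } from "@botpress/cognitive";

declare const request: CognitiveRequest; // placeholder; build a real request per the typings

const beta = new CognitiveBeta({ timeout: 30000 });
beta.on("request", ({ input }) => console.debug("sending", input));
beta.on("retry", (_req, error) => console.warn("retrying after a server error", error));
beta.on("error", (_req, error) => console.error("request failed", error));

const res = await beta.generateText(request);
console.log(res.output, res.metadata);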
@@ -1795,61 +1809,88 @@ var CognitiveBeta = class _CognitiveBeta {
   }
   async *generateTextStream(request, options = {}) {
     const signal = options.signal ?? AbortSignal.timeout(this._timeout);
-
-
-
-
-
-
-    }
-
-
-
-
-
-
-
-
-
+    const req = { input: request };
+    const chunks = [];
+    let lastChunk;
+    this._events.emit("request", req);
+    try {
+      if (isBrowser()) {
+        const res2 = await fetch(`${this._apiUrl}/v2/cognitive/generate-text-stream`, {
+          method: "POST",
+          headers: {
+            ...this._headers,
+            "Content-Type": "application/json"
+          },
+          credentials: this._withCredentials ? "include" : "omit",
+          body: JSON.stringify({ ...request, stream: true }),
+          signal
+        });
+        if (!res2.ok) {
+          const text = await res2.text().catch(() => "");
+          const err = new Error(`HTTP ${res2.status}: ${text || res2.statusText}`);
+          err.response = { status: res2.status, data: text };
+          throw err;
+        }
+        const body = res2.body;
+        if (!body) {
+          throw new Error("No response body received for streaming request");
+        }
+        const reader = body.getReader();
+        const iterable = (async function* () {
+          for (; ; ) {
+            const { value, done } = await reader.read();
+            if (done) {
+              break;
+            }
+            if (value) {
+              yield value;
+            }
+          }
+        })();
+        for await (const obj of this._ndjson(iterable)) {
+          chunks.push(obj);
+          lastChunk = obj;
+          yield obj;
+        }
+        if (lastChunk?.metadata) {
+          this._events.emit("response", req, {
+            output: chunks.map((c) => c.output || "").join(""),
+            metadata: lastChunk.metadata
+          });
+        }
+        return;
       }
-      const
-
+      const res = await this._withServerRetry(
+        () => this._axiosClient.post(
+          "/v2/cognitive/generate-text-stream",
+          { ...request, stream: true },
+          {
+            responseType: "stream",
+            signal,
+            timeout: options.timeout ?? this._timeout
+          }
+        ),
+        options,
+        req
+      );
+      const nodeStream = res.data;
+      if (!nodeStream) {
         throw new Error("No response body received for streaming request");
       }
-      const
-
-
-      const { value, done } = await reader.read();
-      if (done) {
-        break;
-      }
-      if (value) {
-        yield value;
-      }
-      }
-      })();
-      for await (const obj of this._ndjson(iterable)) {
+      for await (const obj of this._ndjson(nodeStream)) {
+        chunks.push(obj);
+        lastChunk = obj;
         yield obj;
       }
-
-
-
-
-
-
-
-
-
-      timeout: options.timeout ?? this._timeout
-      }
-      )
-      );
-      const nodeStream = res.data;
-      if (!nodeStream) {
-        throw new Error("No response body received for streaming request");
-      }
-      for await (const obj of this._ndjson(nodeStream)) {
-        yield obj;
+      if (lastChunk?.metadata) {
+        this._events.emit("response", req, {
+          output: chunks.map((c) => c.output || "").join(""),
+          metadata: lastChunk.metadata
+        });
+      }
+    } catch (error) {
+      this._events.emit("error", req, error);
+      throw error;
     }
   }
   async *_ndjson(stream) {
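Both the browser fetch branch and the Node axios branch now buffer chunks and emit a synthetic response event once the final chunk carries metadata. Consuming the stream itself is unchanged; a sketch:

import { CognitiveBeta, type CognitiveRequest } from "@botpress/cognitive";

// Concatenates the streamed chunks; once the last chunk (the one carrying metadata) has been
// yielded, the client also emits "response" with the joined output, as in the code above.
async function collect(beta: CognitiveBeta, request: CognitiveRequest): Promise<string> {
  let text = "";
  for await (const chunk of beta.generateTextStream(request)) {
    text += chunk.output ?? "";
  }
  return text;
}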
@@ -1891,14 +1932,30 @@ var CognitiveBeta = class _CognitiveBeta {
     }
     return false;
   }
-  async _withServerRetry(fn) {
-
-
-
-
-
-
-
+  async _withServerRetry(fn, options = {}, req) {
+    let attemptCount = 0;
+    return (0, import_exponential_backoff.backOff)(
+      async () => {
+        try {
+          const result = await fn();
+          attemptCount = 0;
+          return result;
+        } catch (error) {
+          if (attemptCount > 0 && req) {
+            this._events.emit("retry", req, error);
+          }
+          attemptCount++;
+          throw error;
+        }
+      },
+      {
+        numOfAttempts: 3,
+        startingDelay: 300,
+        timeMultiple: 2,
+        jitter: "full",
+        retry: (e) => !options.signal?.aborted && this._isRetryableServerError(e)
+      }
+    );
   }
 };
 var getCognitiveV2Model = (model) => {
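_withServerRetry now delegates to backOff from the exponential-backoff package: up to 3 attempts, a 300 ms starting delay doubled per attempt with full jitter, a retry predicate that stops once the signal is aborted or the error is not a retryable server error, and a retry event emitted only after the first failed attempt. A standalone sketch of the same policy; doRequest and isRetryableServerError are placeholders:

import { backOff } from "exponential-backoff";

// Placeholders standing in for the axios call and the client's private error check.
declare function doRequest(): Promise<unknown>;
declare function isRetryableServerError(error: unknown): boolean;

async function withServerRetry(signal?: AbortSignal): Promise<unknown> {
  return backOff(() => doRequest(), {
    numOfAttempts: 3,
    startingDelay: 300,
    timeMultiple: 2,
    jitter: "full",
    retry: (error) => !signal?.aborted && isRetryableServerError(error),
  });
}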
@@ -2281,8 +2338,21 @@ var Cognitive = class _Cognitive {
       delete input.systemPrompt;
     }
     const betaClient = new CognitiveBeta(this._client.config);
-    const
-
+    const props = { input };
+    betaClient.on("request", () => {
+      this._events.emit("request", props);
+    });
+    betaClient.on("error", (_req, error) => {
+      this._events.emit("error", props, error);
+    });
+    betaClient.on("retry", (_req, error) => {
+      this._events.emit("retry", props, error);
+    });
+    const response = await betaClient.generateText(input, {
+      signal: input.signal,
+      timeout: this._timeoutMs
+    });
+    const result = {
       output: {
         id: "beta-output",
         provider: response.metadata.provider,
@@ -2320,6 +2390,8 @@ var Cognitive = class _Cognitive {
         }
       }
     };
+    this._events.emit("response", props, result);
+    return result;
   }
   async _generateContent(input) {
     const start = Date.now();