@workers-community/workers-types 4.20250402.0 → 4.20250404.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/index.d.ts +170 -38
  2. package/index.ts +172 -38
  3. package/package.json +1 -1
package/index.d.ts CHANGED
@@ -1282,7 +1282,7 @@ declare class DigestStream extends WritableStream<
1282
1282
  ArrayBuffer | ArrayBufferView
1283
1283
  > {
1284
1284
  constructor(algorithm: string | SubtleCryptoHashAlgorithm);
1285
- get digest(): Promise<ArrayBuffer | ArrayBufferView>;
1285
+ readonly digest: Promise<ArrayBuffer>;
1286
1286
  get bytesWritten(): number | bigint;
1287
1287
  }
1288
1288
  /**
@@ -1291,7 +1291,7 @@ declare class DigestStream extends WritableStream<
1291
1291
  * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextDecoder)
1292
1292
  */
1293
1293
  declare class TextDecoder {
1294
- constructor(decoder?: string, options?: TextDecoderConstructorOptions);
1294
+ constructor(label?: string, options?: TextDecoderConstructorOptions);
1295
1295
  /**
1296
1296
  * Returns the result of running encoding's decoder. The method can be invoked zero or more times with options's stream set to true, and then once without options's stream (or set to false), to process a fragmented input. If the invocation without options's stream (or set to false) has no input, it's clearest to omit both arguments.
1297
1297
  *
@@ -3131,7 +3131,7 @@ interface GPUCommandEncoder {
3131
3131
  destinationOffset: number | bigint,
3132
3132
  size: number | bigint,
3133
3133
  ): void;
3134
- finish(param0?: GPUCommandBufferDescriptor): GPUCommandBuffer;
3134
+ finish(param1?: GPUCommandBufferDescriptor): GPUCommandBuffer;
3135
3135
  copyTextureToBuffer(
3136
3136
  source: GPUImageCopyTexture,
3137
3137
  destination: GPUImageCopyBuffer,
@@ -3554,6 +3554,28 @@ declare abstract class BaseAiImageToText {
3554
3554
  inputs: AiImageToTextInput;
3555
3555
  postProcessedOutputs: AiImageToTextOutput;
3556
3556
  }
3557
+ type AiImageTextToTextInput = {
3558
+ image: string;
3559
+ prompt?: string;
3560
+ max_tokens?: number;
3561
+ temperature?: number;
3562
+ ignore_eos?: boolean;
3563
+ top_p?: number;
3564
+ top_k?: number;
3565
+ seed?: number;
3566
+ repetition_penalty?: number;
3567
+ frequency_penalty?: number;
3568
+ presence_penalty?: number;
3569
+ raw?: boolean;
3570
+ messages?: RoleScopedChatInput[];
3571
+ };
3572
+ type AiImageTextToTextOutput = {
3573
+ description: string;
3574
+ };
3575
+ declare abstract class BaseAiImageTextToText {
3576
+ inputs: AiImageTextToTextInput;
3577
+ postProcessedOutputs: AiImageTextToTextOutput;
3578
+ }
3557
3579
  type AiObjectDetectionInput = {
3558
3580
  image: number[];
3559
3581
  };
@@ -3964,6 +3986,72 @@ declare abstract class Base_Ai_Cf_Openai_Whisper_Large_V3_Turbo {
3964
3986
  inputs: Ai_Cf_Openai_Whisper_Large_V3_Turbo_Input;
3965
3987
  postProcessedOutputs: Ai_Cf_Openai_Whisper_Large_V3_Turbo_Output;
3966
3988
  }
3989
+ type Ai_Cf_Baai_Bge_M3_Input = BGEM3InputQueryAndContexts | BGEM3InputEmbedding;
3990
+ interface BGEM3InputQueryAndContexts {
3991
+ /**
3992
+ * A query you wish to perform against the provided contexts. If no query is provided the model with respond with embeddings for contexts
3993
+ */
3994
+ query?: string;
3995
+ /**
3996
+ * List of provided contexts. Note that the index in this array is important, as the response will refer to it.
3997
+ */
3998
+ contexts: {
3999
+ /**
4000
+ * One of the provided context content
4001
+ */
4002
+ text?: string;
4003
+ }[];
4004
+ /**
4005
+ * When provided with too long context should the model error out or truncate the context to fit?
4006
+ */
4007
+ truncate_inputs?: boolean;
4008
+ }
4009
+ interface BGEM3InputEmbedding {
4010
+ text: string | string[];
4011
+ /**
4012
+ * When provided with too long context should the model error out or truncate the context to fit?
4013
+ */
4014
+ truncate_inputs?: boolean;
4015
+ }
4016
+ type Ai_Cf_Baai_Bge_M3_Output =
4017
+ | BGEM3OuputQuery
4018
+ | BGEM3OutputEmbeddingForContexts
4019
+ | BGEM3OuputEmbedding;
4020
+ interface BGEM3OuputQuery {
4021
+ response?: {
4022
+ /**
4023
+ * Index of the context in the request
4024
+ */
4025
+ id?: number;
4026
+ /**
4027
+ * Score of the context under the index.
4028
+ */
4029
+ score?: number;
4030
+ }[];
4031
+ }
4032
+ interface BGEM3OutputEmbeddingForContexts {
4033
+ response?: number[][];
4034
+ shape?: number[];
4035
+ /**
4036
+ * The pooling method used in the embedding process.
4037
+ */
4038
+ pooling?: "mean" | "cls";
4039
+ }
4040
+ interface BGEM3OuputEmbedding {
4041
+ shape?: number[];
4042
+ /**
4043
+ * Embeddings of the requested text values
4044
+ */
4045
+ data?: number[][];
4046
+ /**
4047
+ * The pooling method used in the embedding process.
4048
+ */
4049
+ pooling?: "mean" | "cls";
4050
+ }
4051
+ declare abstract class Base_Ai_Cf_Baai_Bge_M3 {
4052
+ inputs: Ai_Cf_Baai_Bge_M3_Input;
4053
+ postProcessedOutputs: Ai_Cf_Baai_Bge_M3_Output;
4054
+ }
3967
4055
  interface Ai_Cf_Black_Forest_Labs_Flux_1_Schnell_Input {
3968
4056
  /**
3969
4057
  * A text description of the image you want to generate.
@@ -4274,6 +4362,40 @@ declare abstract class Base_Ai_Cf_Meta_Llama_Guard_3_8B {
4274
4362
  inputs: Ai_Cf_Meta_Llama_Guard_3_8B_Input;
4275
4363
  postProcessedOutputs: Ai_Cf_Meta_Llama_Guard_3_8B_Output;
4276
4364
  }
4365
+ interface Ai_Cf_Baai_Bge_Reranker_Base_Input {
4366
+ /**
4367
+ * A query you wish to perform against the provided contexts.
4368
+ */
4369
+ /**
4370
+ * Number of returned results starting with the best score.
4371
+ */
4372
+ top_k?: number;
4373
+ /**
4374
+ * List of provided contexts. Note that the index in this array is important, as the response will refer to it.
4375
+ */
4376
+ contexts: {
4377
+ /**
4378
+ * One of the provided context content
4379
+ */
4380
+ text?: string;
4381
+ }[];
4382
+ }
4383
+ interface Ai_Cf_Baai_Bge_Reranker_Base_Output {
4384
+ response?: {
4385
+ /**
4386
+ * Index of the context in the request
4387
+ */
4388
+ id?: number;
4389
+ /**
4390
+ * Score of the context under the index.
4391
+ */
4392
+ score?: number;
4393
+ }[];
4394
+ }
4395
+ declare abstract class Base_Ai_Cf_Baai_Bge_Reranker_Base {
4396
+ inputs: Ai_Cf_Baai_Bge_Reranker_Base_Input;
4397
+ postProcessedOutputs: Ai_Cf_Baai_Bge_Reranker_Base_Output;
4398
+ }
4277
4399
  interface AiModels {
4278
4400
  "@cf/huggingface/distilbert-sst-2-int8": BaseAiTextClassification;
4279
4401
  "@cf/stabilityai/stable-diffusion-xl-base-1.0": BaseAiTextToImage;
@@ -4281,6 +4403,7 @@ interface AiModels {
4281
4403
  "@cf/runwayml/stable-diffusion-v1-5-img2img": BaseAiTextToImage;
4282
4404
  "@cf/lykon/dreamshaper-8-lcm": BaseAiTextToImage;
4283
4405
  "@cf/bytedance/stable-diffusion-xl-lightning": BaseAiTextToImage;
4406
+ "@cf/myshell-ai/melotts": BaseAiTextToSpeech;
4284
4407
  "@cf/baai/bge-base-en-v1.5": BaseAiTextEmbeddings;
4285
4408
  "@cf/baai/bge-small-en-v1.5": BaseAiTextEmbeddings;
4286
4409
  "@cf/baai/bge-large-en-v1.5": BaseAiTextEmbeddings;
@@ -4334,9 +4457,11 @@ interface AiModels {
4334
4457
  "@cf/unum/uform-gen2-qwen-500m": Base_Ai_Cf_Unum_Uform_Gen2_Qwen_500M;
4335
4458
  "@cf/openai/whisper-tiny-en": Base_Ai_Cf_Openai_Whisper_Tiny_En;
4336
4459
  "@cf/openai/whisper-large-v3-turbo": Base_Ai_Cf_Openai_Whisper_Large_V3_Turbo;
4460
+ "@cf/baai/bge-m3": Base_Ai_Cf_Baai_Bge_M3;
4337
4461
  "@cf/black-forest-labs/flux-1-schnell": Base_Ai_Cf_Black_Forest_Labs_Flux_1_Schnell;
4338
4462
  "@cf/meta/llama-3.2-11b-vision-instruct": Base_Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct;
4339
4463
  "@cf/meta/llama-guard-3-8b": Base_Ai_Cf_Meta_Llama_Guard_3_8B;
4464
+ "@cf/baai/bge-reranker-base": Base_Ai_Cf_Baai_Bge_Reranker_Base;
4340
4465
  }
4341
4466
  type AiOptions = {
4342
4467
  gateway?: GatewayOptions;
@@ -4394,8 +4519,8 @@ declare abstract class Ai<AiModelList extends AiModelListType = AiModels> {
4394
4519
  ? Response
4395
4520
  : AiModelList[Name]["postProcessedOutputs"]
4396
4521
  >;
4397
- public models(params?: AiModelsSearchParams): Promise<AiModelsSearchObject[]>;
4398
- public toMarkdown(
4522
+ models(params?: AiModelsSearchParams): Promise<AiModelsSearchObject[]>;
4523
+ toMarkdown(
4399
4524
  files: {
4400
4525
  name: string;
4401
4526
  blob: Blob;
@@ -4405,7 +4530,7 @@ declare abstract class Ai<AiModelList extends AiModelListType = AiModels> {
4405
4530
  extraHeaders?: object;
4406
4531
  },
4407
4532
  ): Promise<ConversionResponse[]>;
4408
- public toMarkdown(
4533
+ toMarkdown(
4409
4534
  files: {
4410
4535
  name: string;
4411
4536
  blob: Blob;
@@ -6085,19 +6210,7 @@ declare namespace Rpc {
6085
6210
  // serializable check as well. Otherwise, only types defined with the "type" keyword would pass.
6086
6211
  type Serializable<T> =
6087
6212
  // Structured cloneables
6088
- | void
6089
- | undefined
6090
- | null
6091
- | boolean
6092
- | number
6093
- | bigint
6094
- | string
6095
- | TypedArray
6096
- | ArrayBuffer
6097
- | DataView
6098
- | Date
6099
- | Error
6100
- | RegExp
6213
+ | BaseType
6101
6214
  // Structured cloneable composites
6102
6215
  | Map<
6103
6216
  T extends Map<infer U, unknown> ? Serializable<U> : never,
@@ -6109,11 +6222,6 @@ declare namespace Rpc {
6109
6222
  [K in keyof T]: K extends number | string ? Serializable<T[K]> : never;
6110
6223
  }
6111
6224
  // Special types
6112
- | ReadableStream<Uint8Array>
6113
- | WritableStream<Uint8Array>
6114
- | Request
6115
- | Response
6116
- | Headers
6117
6225
  | Stub<Stubable>
6118
6226
  // Serialized as stubs, see `Stubify`
6119
6227
  | Stubable;
@@ -6124,6 +6232,26 @@ declare namespace Rpc {
6124
6232
  dup(): this;
6125
6233
  }
6126
6234
  export type Stub<T extends Stubable> = Provider<T> & StubBase<T>;
6235
+ // This represents all the types that can be sent as-is over an RPC boundary
6236
+ type BaseType =
6237
+ | void
6238
+ | undefined
6239
+ | null
6240
+ | boolean
6241
+ | number
6242
+ | bigint
6243
+ | string
6244
+ | TypedArray
6245
+ | ArrayBuffer
6246
+ | DataView
6247
+ | Date
6248
+ | Error
6249
+ | RegExp
6250
+ | ReadableStream<Uint8Array>
6251
+ | WritableStream<Uint8Array>
6252
+ | Request
6253
+ | Response
6254
+ | Headers;
6127
6255
  // Recursively rewrite all `Stubable` types with `Stub`s
6128
6256
  type Stubify<T> = T extends Stubable
6129
6257
  ? Stub<T>
@@ -6135,13 +6263,15 @@ declare namespace Rpc {
6135
6263
  ? Array<Stubify<V>>
6136
6264
  : T extends ReadonlyArray<infer V>
6137
6265
  ? ReadonlyArray<Stubify<V>>
6138
- : T extends {
6139
- [key: string | number]: any;
6140
- }
6141
- ? {
6142
- [K in keyof T]: Stubify<T[K]>;
6143
- }
6144
- : T;
6266
+ : T extends BaseType
6267
+ ? T
6268
+ : T extends {
6269
+ [key: string | number]: any;
6270
+ }
6271
+ ? {
6272
+ [K in keyof T]: Stubify<T[K]>;
6273
+ }
6274
+ : T;
6145
6275
  // Recursively rewrite all `Stub<T>`s with the corresponding `T`s.
6146
6276
  // Note we use `StubBase` instead of `Stub` here to avoid circular dependencies:
6147
6277
  // `Stub` depends on `Provider`, which depends on `Unstubify`, which would depend on `Stub`.
@@ -6156,13 +6286,15 @@ declare namespace Rpc {
6156
6286
  ? Array<Unstubify<V>>
6157
6287
  : T extends ReadonlyArray<infer V>
6158
6288
  ? ReadonlyArray<Unstubify<V>>
6159
- : T extends {
6160
- [key: string | number]: unknown;
6161
- }
6162
- ? {
6163
- [K in keyof T]: Unstubify<T[K]>;
6164
- }
6165
- : T;
6289
+ : T extends BaseType
6290
+ ? T
6291
+ : T extends {
6292
+ [key: string | number]: unknown;
6293
+ }
6294
+ ? {
6295
+ [K in keyof T]: Unstubify<T[K]>;
6296
+ }
6297
+ : T;
6166
6298
  type UnstubifyAll<A extends any[]> = {
6167
6299
  [I in keyof A]: Unstubify<A[I]>;
6168
6300
  };
package/index.ts CHANGED
@@ -1287,7 +1287,7 @@ export declare class DigestStream extends WritableStream<
1287
1287
  ArrayBuffer | ArrayBufferView
1288
1288
  > {
1289
1289
  constructor(algorithm: string | SubtleCryptoHashAlgorithm);
1290
- get digest(): Promise<ArrayBuffer | ArrayBufferView>;
1290
+ readonly digest: Promise<ArrayBuffer>;
1291
1291
  get bytesWritten(): number | bigint;
1292
1292
  }
1293
1293
  /**
@@ -1296,7 +1296,7 @@ export declare class DigestStream extends WritableStream<
1296
1296
  * [MDN Reference](https://developer.mozilla.org/docs/Web/API/TextDecoder)
1297
1297
  */
1298
1298
  export declare class TextDecoder {
1299
- constructor(decoder?: string, options?: TextDecoderConstructorOptions);
1299
+ constructor(label?: string, options?: TextDecoderConstructorOptions);
1300
1300
  /**
1301
1301
  * Returns the result of running encoding's decoder. The method can be invoked zero or more times with options's stream set to true, and then once without options's stream (or set to false), to process a fragmented input. If the invocation without options's stream (or set to false) has no input, it's clearest to omit both arguments.
1302
1302
  *
@@ -3143,7 +3143,7 @@ export interface GPUCommandEncoder {
3143
3143
  destinationOffset: number | bigint,
3144
3144
  size: number | bigint,
3145
3145
  ): void;
3146
- finish(param0?: GPUCommandBufferDescriptor): GPUCommandBuffer;
3146
+ finish(param1?: GPUCommandBufferDescriptor): GPUCommandBuffer;
3147
3147
  copyTextureToBuffer(
3148
3148
  source: GPUImageCopyTexture,
3149
3149
  destination: GPUImageCopyBuffer,
@@ -3566,6 +3566,28 @@ export declare abstract class BaseAiImageToText {
3566
3566
  inputs: AiImageToTextInput;
3567
3567
  postProcessedOutputs: AiImageToTextOutput;
3568
3568
  }
3569
+ export type AiImageTextToTextInput = {
3570
+ image: string;
3571
+ prompt?: string;
3572
+ max_tokens?: number;
3573
+ temperature?: number;
3574
+ ignore_eos?: boolean;
3575
+ top_p?: number;
3576
+ top_k?: number;
3577
+ seed?: number;
3578
+ repetition_penalty?: number;
3579
+ frequency_penalty?: number;
3580
+ presence_penalty?: number;
3581
+ raw?: boolean;
3582
+ messages?: RoleScopedChatInput[];
3583
+ };
3584
+ export type AiImageTextToTextOutput = {
3585
+ description: string;
3586
+ };
3587
+ export declare abstract class BaseAiImageTextToText {
3588
+ inputs: AiImageTextToTextInput;
3589
+ postProcessedOutputs: AiImageTextToTextOutput;
3590
+ }
3569
3591
  export type AiObjectDetectionInput = {
3570
3592
  image: number[];
3571
3593
  };
@@ -3976,6 +3998,74 @@ export declare abstract class Base_Ai_Cf_Openai_Whisper_Large_V3_Turbo {
3976
3998
  inputs: Ai_Cf_Openai_Whisper_Large_V3_Turbo_Input;
3977
3999
  postProcessedOutputs: Ai_Cf_Openai_Whisper_Large_V3_Turbo_Output;
3978
4000
  }
4001
+ export type Ai_Cf_Baai_Bge_M3_Input =
4002
+ | BGEM3InputQueryAndContexts
4003
+ | BGEM3InputEmbedding;
4004
+ export interface BGEM3InputQueryAndContexts {
4005
+ /**
4006
+ * A query you wish to perform against the provided contexts. If no query is provided the model with respond with embeddings for contexts
4007
+ */
4008
+ query?: string;
4009
+ /**
4010
+ * List of provided contexts. Note that the index in this array is important, as the response will refer to it.
4011
+ */
4012
+ contexts: {
4013
+ /**
4014
+ * One of the provided context content
4015
+ */
4016
+ text?: string;
4017
+ }[];
4018
+ /**
4019
+ * When provided with too long context should the model error out or truncate the context to fit?
4020
+ */
4021
+ truncate_inputs?: boolean;
4022
+ }
4023
+ export interface BGEM3InputEmbedding {
4024
+ text: string | string[];
4025
+ /**
4026
+ * When provided with too long context should the model error out or truncate the context to fit?
4027
+ */
4028
+ truncate_inputs?: boolean;
4029
+ }
4030
+ export type Ai_Cf_Baai_Bge_M3_Output =
4031
+ | BGEM3OuputQuery
4032
+ | BGEM3OutputEmbeddingForContexts
4033
+ | BGEM3OuputEmbedding;
4034
+ export interface BGEM3OuputQuery {
4035
+ response?: {
4036
+ /**
4037
+ * Index of the context in the request
4038
+ */
4039
+ id?: number;
4040
+ /**
4041
+ * Score of the context under the index.
4042
+ */
4043
+ score?: number;
4044
+ }[];
4045
+ }
4046
+ export interface BGEM3OutputEmbeddingForContexts {
4047
+ response?: number[][];
4048
+ shape?: number[];
4049
+ /**
4050
+ * The pooling method used in the embedding process.
4051
+ */
4052
+ pooling?: "mean" | "cls";
4053
+ }
4054
+ export interface BGEM3OuputEmbedding {
4055
+ shape?: number[];
4056
+ /**
4057
+ * Embeddings of the requested text values
4058
+ */
4059
+ data?: number[][];
4060
+ /**
4061
+ * The pooling method used in the embedding process.
4062
+ */
4063
+ pooling?: "mean" | "cls";
4064
+ }
4065
+ export declare abstract class Base_Ai_Cf_Baai_Bge_M3 {
4066
+ inputs: Ai_Cf_Baai_Bge_M3_Input;
4067
+ postProcessedOutputs: Ai_Cf_Baai_Bge_M3_Output;
4068
+ }
3979
4069
  export interface Ai_Cf_Black_Forest_Labs_Flux_1_Schnell_Input {
3980
4070
  /**
3981
4071
  * A text description of the image you want to generate.
@@ -4286,6 +4376,40 @@ export declare abstract class Base_Ai_Cf_Meta_Llama_Guard_3_8B {
4286
4376
  inputs: Ai_Cf_Meta_Llama_Guard_3_8B_Input;
4287
4377
  postProcessedOutputs: Ai_Cf_Meta_Llama_Guard_3_8B_Output;
4288
4378
  }
4379
+ export interface Ai_Cf_Baai_Bge_Reranker_Base_Input {
4380
+ /**
4381
+ * A query you wish to perform against the provided contexts.
4382
+ */
4383
+ /**
4384
+ * Number of returned results starting with the best score.
4385
+ */
4386
+ top_k?: number;
4387
+ /**
4388
+ * List of provided contexts. Note that the index in this array is important, as the response will refer to it.
4389
+ */
4390
+ contexts: {
4391
+ /**
4392
+ * One of the provided context content
4393
+ */
4394
+ text?: string;
4395
+ }[];
4396
+ }
4397
+ export interface Ai_Cf_Baai_Bge_Reranker_Base_Output {
4398
+ response?: {
4399
+ /**
4400
+ * Index of the context in the request
4401
+ */
4402
+ id?: number;
4403
+ /**
4404
+ * Score of the context under the index.
4405
+ */
4406
+ score?: number;
4407
+ }[];
4408
+ }
4409
+ export declare abstract class Base_Ai_Cf_Baai_Bge_Reranker_Base {
4410
+ inputs: Ai_Cf_Baai_Bge_Reranker_Base_Input;
4411
+ postProcessedOutputs: Ai_Cf_Baai_Bge_Reranker_Base_Output;
4412
+ }
4289
4413
  export interface AiModels {
4290
4414
  "@cf/huggingface/distilbert-sst-2-int8": BaseAiTextClassification;
4291
4415
  "@cf/stabilityai/stable-diffusion-xl-base-1.0": BaseAiTextToImage;
@@ -4293,6 +4417,7 @@ export interface AiModels {
4293
4417
  "@cf/runwayml/stable-diffusion-v1-5-img2img": BaseAiTextToImage;
4294
4418
  "@cf/lykon/dreamshaper-8-lcm": BaseAiTextToImage;
4295
4419
  "@cf/bytedance/stable-diffusion-xl-lightning": BaseAiTextToImage;
4420
+ "@cf/myshell-ai/melotts": BaseAiTextToSpeech;
4296
4421
  "@cf/baai/bge-base-en-v1.5": BaseAiTextEmbeddings;
4297
4422
  "@cf/baai/bge-small-en-v1.5": BaseAiTextEmbeddings;
4298
4423
  "@cf/baai/bge-large-en-v1.5": BaseAiTextEmbeddings;
@@ -4346,9 +4471,11 @@ export interface AiModels {
4346
4471
  "@cf/unum/uform-gen2-qwen-500m": Base_Ai_Cf_Unum_Uform_Gen2_Qwen_500M;
4347
4472
  "@cf/openai/whisper-tiny-en": Base_Ai_Cf_Openai_Whisper_Tiny_En;
4348
4473
  "@cf/openai/whisper-large-v3-turbo": Base_Ai_Cf_Openai_Whisper_Large_V3_Turbo;
4474
+ "@cf/baai/bge-m3": Base_Ai_Cf_Baai_Bge_M3;
4349
4475
  "@cf/black-forest-labs/flux-1-schnell": Base_Ai_Cf_Black_Forest_Labs_Flux_1_Schnell;
4350
4476
  "@cf/meta/llama-3.2-11b-vision-instruct": Base_Ai_Cf_Meta_Llama_3_2_11B_Vision_Instruct;
4351
4477
  "@cf/meta/llama-guard-3-8b": Base_Ai_Cf_Meta_Llama_Guard_3_8B;
4478
+ "@cf/baai/bge-reranker-base": Base_Ai_Cf_Baai_Bge_Reranker_Base;
4352
4479
  }
4353
4480
  export type AiOptions = {
4354
4481
  gateway?: GatewayOptions;
@@ -4408,8 +4535,8 @@ export declare abstract class Ai<
4408
4535
  ? Response
4409
4536
  : AiModelList[Name]["postProcessedOutputs"]
4410
4537
  >;
4411
- public models(params?: AiModelsSearchParams): Promise<AiModelsSearchObject[]>;
4412
- public toMarkdown(
4538
+ models(params?: AiModelsSearchParams): Promise<AiModelsSearchObject[]>;
4539
+ toMarkdown(
4413
4540
  files: {
4414
4541
  name: string;
4415
4542
  blob: Blob;
@@ -4419,7 +4546,7 @@ export declare abstract class Ai<
4419
4546
  extraHeaders?: object;
4420
4547
  },
4421
4548
  ): Promise<ConversionResponse[]>;
4422
- public toMarkdown(
4549
+ toMarkdown(
4423
4550
  files: {
4424
4551
  name: string;
4425
4552
  blob: Blob;
@@ -6066,19 +6193,7 @@ export declare namespace Rpc {
6066
6193
  // serializable check as well. Otherwise, only types defined with the "type" keyword would pass.
6067
6194
  type Serializable<T> =
6068
6195
  // Structured cloneables
6069
- | void
6070
- | undefined
6071
- | null
6072
- | boolean
6073
- | number
6074
- | bigint
6075
- | string
6076
- | TypedArray
6077
- | ArrayBuffer
6078
- | DataView
6079
- | Date
6080
- | Error
6081
- | RegExp
6196
+ | BaseType
6082
6197
  // Structured cloneable composites
6083
6198
  | Map<
6084
6199
  T extends Map<infer U, unknown> ? Serializable<U> : never,
@@ -6090,11 +6205,6 @@ export declare namespace Rpc {
6090
6205
  [K in keyof T]: K extends number | string ? Serializable<T[K]> : never;
6091
6206
  }
6092
6207
  // Special types
6093
- | ReadableStream<Uint8Array>
6094
- | WritableStream<Uint8Array>
6095
- | Request
6096
- | Response
6097
- | Headers
6098
6208
  | Stub<Stubable>
6099
6209
  // Serialized as stubs, see `Stubify`
6100
6210
  | Stubable;
@@ -6105,6 +6215,26 @@ export declare namespace Rpc {
6105
6215
  dup(): this;
6106
6216
  }
6107
6217
  export type Stub<T extends Stubable> = Provider<T> & StubBase<T>;
6218
+ // This represents all the types that can be sent as-is over an RPC boundary
6219
+ type BaseType =
6220
+ | void
6221
+ | undefined
6222
+ | null
6223
+ | boolean
6224
+ | number
6225
+ | bigint
6226
+ | string
6227
+ | TypedArray
6228
+ | ArrayBuffer
6229
+ | DataView
6230
+ | Date
6231
+ | Error
6232
+ | RegExp
6233
+ | ReadableStream<Uint8Array>
6234
+ | WritableStream<Uint8Array>
6235
+ | Request
6236
+ | Response
6237
+ | Headers;
6108
6238
  // Recursively rewrite all `Stubable` types with `Stub`s
6109
6239
  type Stubify<T> = T extends Stubable
6110
6240
  ? Stub<T>
@@ -6116,13 +6246,15 @@ export declare namespace Rpc {
6116
6246
  ? Array<Stubify<V>>
6117
6247
  : T extends ReadonlyArray<infer V>
6118
6248
  ? ReadonlyArray<Stubify<V>>
6119
- : T extends {
6120
- [key: string | number]: any;
6121
- }
6122
- ? {
6123
- [K in keyof T]: Stubify<T[K]>;
6124
- }
6125
- : T;
6249
+ : T extends BaseType
6250
+ ? T
6251
+ : T extends {
6252
+ [key: string | number]: any;
6253
+ }
6254
+ ? {
6255
+ [K in keyof T]: Stubify<T[K]>;
6256
+ }
6257
+ : T;
6126
6258
  // Recursively rewrite all `Stub<T>`s with the corresponding `T`s.
6127
6259
  // Note we use `StubBase` instead of `Stub` here to avoid circular dependencies:
6128
6260
  // `Stub` depends on `Provider`, which depends on `Unstubify`, which would depend on `Stub`.
@@ -6137,13 +6269,15 @@ export declare namespace Rpc {
6137
6269
  ? Array<Unstubify<V>>
6138
6270
  : T extends ReadonlyArray<infer V>
6139
6271
  ? ReadonlyArray<Unstubify<V>>
6140
- : T extends {
6141
- [key: string | number]: unknown;
6142
- }
6143
- ? {
6144
- [K in keyof T]: Unstubify<T[K]>;
6145
- }
6146
- : T;
6272
+ : T extends BaseType
6273
+ ? T
6274
+ : T extends {
6275
+ [key: string | number]: unknown;
6276
+ }
6277
+ ? {
6278
+ [K in keyof T]: Unstubify<T[K]>;
6279
+ }
6280
+ : T;
6147
6281
  type UnstubifyAll<A extends any[]> = {
6148
6282
  [I in keyof A]: Unstubify<A[I]>;
6149
6283
  };
package/package.json CHANGED
@@ -7,7 +7,7 @@
7
7
  },
8
8
  "author": "Workers Community",
9
9
  "license": "MIT OR Apache-2.0",
10
- "version": "4.20250402.0",
10
+ "version": "4.20250404.0",
11
11
  "exports": {
12
12
  ".": {
13
13
  "types": "./index.d.ts",