@huggingface/tasks 0.12.9 → 0.12.11
This diff compares the contents of two publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registry.
- package/dist/index.cjs +151 -8
- package/dist/index.js +151 -8
- package/dist/src/model-libraries-snippets.d.ts +1 -0
- package/dist/src/model-libraries-snippets.d.ts.map +1 -1
- package/dist/src/model-libraries.d.ts +9 -2
- package/dist/src/model-libraries.d.ts.map +1 -1
- package/dist/src/snippets/curl.d.ts +1 -0
- package/dist/src/snippets/curl.d.ts.map +1 -1
- package/dist/src/snippets/js.d.ts +1 -0
- package/dist/src/snippets/js.d.ts.map +1 -1
- package/dist/src/snippets/python.d.ts +1 -0
- package/dist/src/snippets/python.d.ts.map +1 -1
- package/dist/src/snippets/types.d.ts +1 -1
- package/dist/src/snippets/types.d.ts.map +1 -1
- package/dist/src/tasks/automatic-speech-recognition/inference.d.ts +1 -1
- package/dist/src/tasks/automatic-speech-recognition/inference.d.ts.map +1 -1
- package/dist/src/tasks/image-to-text/inference.d.ts +1 -1
- package/dist/src/tasks/image-to-text/inference.d.ts.map +1 -1
- package/dist/src/tasks/index.d.ts.map +1 -1
- package/dist/src/tasks/text-to-audio/inference.d.ts +1 -1
- package/dist/src/tasks/text-to-audio/inference.d.ts.map +1 -1
- package/dist/src/tasks/text-to-speech/inference.d.ts +1 -1
- package/dist/src/tasks/text-to-speech/inference.d.ts.map +1 -1
- package/package.json +1 -1
- package/src/model-libraries-snippets.ts +6 -0
- package/src/model-libraries.ts +7 -0
- package/src/snippets/curl.ts +28 -1
- package/src/snippets/js.ts +31 -1
- package/src/snippets/python.ts +29 -5
- package/src/snippets/types.ts +4 -1
- package/src/tasks/automatic-speech-recognition/inference.ts +1 -1
- package/src/tasks/automatic-speech-recognition/spec/input.json +1 -1
- package/src/tasks/image-to-text/inference.ts +1 -1
- package/src/tasks/image-to-text/spec/input.json +1 -1
- package/src/tasks/index.ts +2 -1
- package/src/tasks/text-to-audio/inference.ts +1 -1
- package/src/tasks/text-to-audio/spec/input.json +1 -1
- package/src/tasks/text-to-speech/inference.ts +1 -1
- package/src/tasks/text-to-speech/spec/input.json +1 -1
package/dist/index.cjs
CHANGED
@@ -4307,6 +4307,61 @@ var taskData39 = {
 };
 var data_default39 = taskData39;
 
+// src/tasks/video-text-to-text/data.ts
+var taskData40 = {
+  datasets: [
+    {
+      description: "Multiple-choice questions and answers about videos.",
+      id: "lmms-lab/Video-MME"
+    },
+    {
+      description: "A dataset of instructions and question-answer pairs about videos.",
+      id: "lmms-lab/VideoChatGPT"
+    }
+  ],
+  demo: {
+    inputs: [
+      {
+        filename: "video-text-to-text-input.gif",
+        type: "img"
+      },
+      {
+        label: "Text Prompt",
+        content: "What is happening in this video?",
+        type: "text"
+      }
+    ],
+    outputs: [
+      {
+        label: "Answer",
+        content: "The video shows a series of images showing a fountain with water jets and a variety of colorful flowers and butterflies in the background.",
+        type: "text"
+      }
+    ]
+  },
+  metrics: [],
+  models: [
+    {
+      description: "A robust video-text-to-text model that can take in image and video inputs.",
+      id: "llava-hf/llava-onevision-qwen2-72b-ov-hf"
+    },
+    {
+      description: "Large and powerful video-text-to-text model that can take in image and video inputs.",
+      id: "llava-hf/LLaVA-NeXT-Video-34B-hf"
+    }
+  ],
+  spaces: [
+    {
+      description: "An application to chat with a video-text-to-text model.",
+      id: "llava-hf/video-llava"
+    }
+  ],
+  summary: "Video-text-to-text models take in a video and a text prompt and output text. These models are also called video-language models.",
+  widgetModels: [""],
+  youtubeId: ""
+};
+var data_default40 = taskData40;
+
 // src/tasks/index.ts
 var TASKS_MODEL_LIBRARIES = {
   "audio-classification": ["speechbrain", "transformers", "transformers.js"],
@@ -4423,7 +4478,7 @@ var TASKS_DATA = {
   "token-classification": getData("token-classification", data_default26),
   translation: getData("translation", data_default27),
   "unconditional-image-generation": getData("unconditional-image-generation", data_default31),
-  "video-text-to-text": getData("video-text-to-text",
+  "video-text-to-text": getData("video-text-to-text", data_default40),
   "visual-question-answering": getData("visual-question-answering", data_default33),
   "voice-activity-detection": void 0,
   "zero-shot-classification": getData("zero-shot-classification", data_default34),
@@ -5276,6 +5331,11 @@ var mlxim = (model) => [
 
 model = create_model(${model.id})`
 ];
+var model2vec = (model) => [
+  `from model2vec import StaticModel
+
+model = StaticModel.from_pretrained("${model.id}")`
+];
 var nemo = (model) => {
   let command = void 0;
   if (model.tags.includes("automatic-speech-recognition")) {
@@ -5693,6 +5753,13 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
     filter: false,
     countDownloads: `path:"mlc-chat-config.json"`
   },
+  model2vec: {
+    prettyLabel: "Model2Vec",
+    repoName: "model2vec",
+    repoUrl: "https://github.com/MinishLab/model2vec",
+    snippets: model2vec,
+    filter: false
+  },
   moshi: {
     prettyLabel: "Moshi",
     repoName: "Moshi",
@@ -6137,6 +6204,7 @@ __export(curl_exports, {
   hasCurlInferenceSnippet: () => hasCurlInferenceSnippet,
   snippetBasic: () => snippetBasic,
   snippetFile: () => snippetFile,
+  snippetImageTextToTextGeneration: () => snippetImageTextToTextGeneration,
   snippetTextGeneration: () => snippetTextGeneration,
   snippetZeroShotClassification: () => snippetZeroShotClassification
 });
@@ -6146,7 +6214,7 @@ var snippetBasic = (model, accessToken) => `curl https://api-inference.huggingfa
 -H 'Content-Type: application/json' \\
 -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"`;
 var snippetTextGeneration = (model, accessToken) => {
-  if (model.
+  if (model.tags.includes("conversational")) {
     return `curl 'https://api-inference.huggingface.co/models/${model.id}/v1/chat/completions' \\
 -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}" \\
 -H 'Content-Type: application/json' \\
@@ -6161,6 +6229,30 @@ var snippetTextGeneration = (model, accessToken) => {
     return snippetBasic(model, accessToken);
   }
 };
+var snippetImageTextToTextGeneration = (model, accessToken) => {
+  if (model.tags.includes("conversational")) {
+    return `curl 'https://api-inference.huggingface.co/models/${model.id}/v1/chat/completions' \\
+-H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}" \\
+-H 'Content-Type: application/json' \\
+-d '{
+    "model": "${model.id}",
+    "messages": [
+        {
+            "role": "user",
+            "content": [
+                {"type": "image_url", "image_url": {"url": "https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg"}},
+                {"type": "text", "text": "Describe this image in one sentence."}
+            ]
+        }
+    ],
+    "max_tokens": 500,
+    "stream": false
+}'
+`;
+  } else {
+    return snippetBasic(model, accessToken);
+  }
+};
 var snippetZeroShotClassification = (model, accessToken) => `curl https://api-inference.huggingface.co/models/${model.id} \\
 -X POST \\
 -d '{"inputs": ${getModelInputSnippet(model, true)}, "parameters": {"candidate_labels": ["refund", "legal", "faq"]}}' \\
@@ -6181,6 +6273,7 @@ var curlSnippets = {
   summarization: snippetBasic,
   "feature-extraction": snippetBasic,
   "text-generation": snippetTextGeneration,
+  "image-text-to-text": snippetImageTextToTextGeneration,
   "text2text-generation": snippetBasic,
   "fill-mask": snippetBasic,
   "sentence-similarity": snippetBasic,
@@ -6210,6 +6303,7 @@ __export(python_exports, {
   pythonSnippets: () => pythonSnippets,
   snippetBasic: () => snippetBasic2,
   snippetConversational: () => snippetConversational,
+  snippetConversationalWithImage: () => snippetConversationalWithImage,
   snippetDocumentQuestionAnswering: () => snippetDocumentQuestionAnswering,
   snippetFile: () => snippetFile2,
   snippetTabular: () => snippetTabular,
@@ -6220,17 +6314,36 @@ __export(python_exports, {
 });
 var snippetConversational = (model, accessToken) => `from huggingface_hub import InferenceClient
 
-client = InferenceClient(
-    "${model.id}",
-    token="${accessToken || "{API_TOKEN}"}",
-)
+client = InferenceClient(api_key="${accessToken || "{API_TOKEN}"}")
 
 for message in client.chat_completion(
+    model="${model.id}",
     messages=[{"role": "user", "content": "What is the capital of France?"}],
     max_tokens=500,
    	stream=True,
 ):
     print(message.choices[0].delta.content, end="")`;
+var snippetConversationalWithImage = (model, accessToken) => `from huggingface_hub import InferenceClient
+
+client = InferenceClient(api_key="${accessToken || "{API_TOKEN}"}")
+
+image_url = "https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg"
+
+for message in client.chat_completion(
+    model="${model.id}",
+    messages=[
+        {
+            "role": "user",
+            "content": [
+                {"type": "image_url", "image_url": {"url": image_url}},
+                {"type": "text", "text": "Describe this image in one sentence."},
+            ],
+        }
+    ],
+    max_tokens=500,
+    stream=True,
+):
+    print(message.choices[0].delta.content, end="")`;
 var snippetZeroShotClassification2 = (model) => `def query(payload):
     response = requests.post(API_URL, headers=headers, json=payload)
     return response.json()
@@ -6348,8 +6461,10 @@ var pythonSnippets = {
   "zero-shot-image-classification": snippetZeroShotImageClassification
 };
 function getPythonInferenceSnippet(model, accessToken) {
-  if (model.pipeline_tag === "text-generation" && model.
+  if (model.pipeline_tag === "text-generation" && model.tags.includes("conversational")) {
     return snippetConversational(model, accessToken);
+  } else if (model.pipeline_tag === "image-text-to-text" && model.tags.includes("conversational")) {
+    return snippetConversationalWithImage(model, accessToken);
   } else {
     const body = model.pipeline_tag && model.pipeline_tag in pythonSnippets ? pythonSnippets[model.pipeline_tag]?.(model, accessToken) ?? "" : "";
     return `import requests
@@ -6372,6 +6487,7 @@ __export(js_exports, {
   jsSnippets: () => jsSnippets,
   snippetBasic: () => snippetBasic3,
   snippetFile: () => snippetFile3,
+  snippetImageTextToTextGeneration: () => snippetImageTextToTextGeneration2,
   snippetTextGeneration: () => snippetTextGeneration2,
   snippetTextToAudio: () => snippetTextToAudio2,
   snippetTextToImage: () => snippetTextToImage2,
@@ -6397,7 +6513,7 @@ query({"inputs": ${getModelInputSnippet(model)}}).then((response) => {
     console.log(JSON.stringify(response));
 });`;
 var snippetTextGeneration2 = (model, accessToken) => {
-  if (model.
+  if (model.tags.includes("conversational")) {
     return `import { HfInference } from "@huggingface/inference";
 
 const inference = new HfInference("${accessToken || `{API_TOKEN}`}");
@@ -6413,6 +6529,32 @@ for await (const chunk of inference.chatCompletionStream({
     return snippetBasic3(model, accessToken);
   }
 };
+var snippetImageTextToTextGeneration2 = (model, accessToken) => {
+  if (model.tags.includes("conversational")) {
+    return `import { HfInference } from "@huggingface/inference";
+
+const inference = new HfInference("${accessToken || `{API_TOKEN}`}");
+const imageUrl = "https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg";
+
+for await (const chunk of inference.chatCompletionStream({
+    model: "${model.id}",
+    messages: [
+        {
+            "role": "user",
+            "content": [
+                {"type": "image_url", "image_url": {"url": imageUrl}},
+                {"type": "text", "text": "Describe this image in one sentence."},
+            ],
+        }
+    ],
+    max_tokens: 500,
+})) {
+    process.stdout.write(chunk.choices[0]?.delta?.content || "");
+}`;
+  } else {
+    return snippetBasic3(model, accessToken);
+  }
+};
 var snippetZeroShotClassification3 = (model, accessToken) => `async function query(data) {
     const response = await fetch(
         "https://api-inference.huggingface.co/models/${model.id}",
@@ -6515,6 +6657,7 @@ var jsSnippets = {
   summarization: snippetBasic3,
   "feature-extraction": snippetBasic3,
   "text-generation": snippetTextGeneration2,
+  "image-text-to-text": snippetImageTextToTextGeneration2,
   "text2text-generation": snippetBasic3,
   "fill-mask": snippetBasic3,
   "sentence-similarity": snippetBasic3,
package/dist/index.js
CHANGED
The ESM bundle receives the same edits as package/dist/index.cjs above; the hunk bodies are identical and only the line offsets differ:
@@ -4269,6 +4269,61 @@ var taskData39 = {
@@ -4385,7 +4440,7 @@ var TASKS_DATA = {
@@ -5238,6 +5293,11 @@ var mlxim = (model) => [
@@ -5655,6 +5715,13 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
@@ -6099,6 +6166,7 @@ __export(curl_exports, {
@@ -6108,7 +6176,7 @@ var snippetBasic = (model, accessToken) => `curl https://api-inference.huggingfa
@@ -6123,6 +6191,30 @@ var snippetTextGeneration = (model, accessToken) => {
@@ -6143,6 +6235,7 @@ var curlSnippets = {
@@ -6172,6 +6265,7 @@ __export(python_exports, {
@@ -6182,17 +6276,36 @@ __export(python_exports, {
@@ -6310,8 +6423,10 @@ var pythonSnippets = {
@@ -6334,6 +6449,7 @@ __export(js_exports, {
@@ -6359,7 +6475,7 @@ query({"inputs": ${getModelInputSnippet(model)}}).then((response) => {
@@ -6375,6 +6491,32 @@ for await (const chunk of inference.chatCompletionStream({
@@ -6477,6 +6619,7 @@ var jsSnippets = {
package/dist/src/model-libraries-snippets.d.ts
CHANGED
@@ -58,6 +58,7 @@ export declare const yolov10: (model: ModelData) => string[];
 export declare const birefnet: (model: ModelData) => string[];
 export declare const mlx: (model: ModelData) => string[];
 export declare const mlxim: (model: ModelData) => string[];
+export declare const model2vec: (model: ModelData) => string[];
 export declare const nemo: (model: ModelData) => string[];
 export declare const pythae: (model: ModelData) => string[];
 export declare const audiocraft: (model: ModelData) => string[];
package/dist/src/model-libraries-snippets.d.ts.map
CHANGED
@@ -1 +1 @@
(generated sourcemap: single-line "mappings" string regenerated for the new model2vec export)
package/dist/src/model-libraries.d.ts
CHANGED
@@ -400,6 +400,13 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
         filter: false;
         countDownloads: string;
     };
+    model2vec: {
+        prettyLabel: string;
+        repoName: string;
+        repoUrl: string;
+        snippets: (model: ModelData) => string[];
+        filter: false;
+    };
     moshi: {
         prettyLabel: string;
         repoName: string;
@@ -711,6 +718,6 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
     };
 };
 export type ModelLibraryKey = keyof typeof MODEL_LIBRARIES_UI_ELEMENTS;
-export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "deepforest" | "depth-anything-v2" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "moshi" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sapiens" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "ssr-speech" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "soloaudio" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "vfi-mamba" | "voicecraft" | "yolov10" | "whisperkit" | "3dtopia-xl")[];
-export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "deepforest" | "depth-anything-v2" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "moshi" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sapiens" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "ssr-speech" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "soloaudio" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "vfi-mamba" | "voicecraft" | "yolov10" | "whisperkit" | "3dtopia-xl")[];
+export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "deepforest" | "depth-anything-v2" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "model2vec" | "moshi" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sapiens" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "ssr-speech" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "soloaudio" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "vfi-mamba" | "voicecraft" | "yolov10" | "whisperkit" | "3dtopia-xl")[];
+export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "deepforest" | "depth-anything-v2" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "model2vec" | "moshi" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sapiens" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "ssr-speech" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "soloaudio" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "vfi-mamba" | "voicecraft" | "yolov10" | "whisperkit" | "3dtopia-xl")[];
 //# sourceMappingURL=model-libraries.d.ts.map
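
Because the new model2vec entry satisfies the same interface as the other library integrations, its snippet can be produced through the public MODEL_LIBRARIES_UI_ELEMENTS map. A sketch, using an example model id and a minimal cast (ModelData carries more fields than matter here):

import { MODEL_LIBRARIES_UI_ELEMENTS, type ModelData } from "@huggingface/tasks";

const model2vecLib = MODEL_LIBRARIES_UI_ELEMENTS.model2vec;
// snippets returns string[]; for model2vec it is the single Python block from the bundle diff.
const [snippet] = model2vecLib.snippets({ id: "MinishLab/M2V_base_output" } as ModelData);
console.log(snippet);
// from model2vec import StaticModel
//
// model = StaticModel.from_pretrained("MinishLab/M2V_base_output")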
package/dist/src/model-libraries.d.ts.map
CHANGED
@@ -1 +1 @@
(generated sourcemap: single-line "mappings" string regenerated for the updated declarations)
package/dist/src/snippets/curl.d.ts
CHANGED
@@ -2,6 +2,7 @@ import type { PipelineType } from "../pipelines.js";
 import type { ModelDataMinimal } from "./types.js";
 export declare const snippetBasic: (model: ModelDataMinimal, accessToken: string) => string;
 export declare const snippetTextGeneration: (model: ModelDataMinimal, accessToken: string) => string;
+export declare const snippetImageTextToTextGeneration: (model: ModelDataMinimal, accessToken: string) => string;
 export declare const snippetZeroShotClassification: (model: ModelDataMinimal, accessToken: string) => string;
 export declare const snippetFile: (model: ModelDataMinimal, accessToken: string) => string;
 export declare const curlSnippets: Partial<Record<PipelineType, (model: ModelDataMinimal, accessToken: string) => string>>;
package/dist/src/snippets/curl.d.ts.map
CHANGED
@@ -1 +1 @@
(generated sourcemap: single-line "mappings" string regenerated for the new snippetImageTextToTextGeneration declaration)
package/dist/src/snippets/js.d.ts
CHANGED
@@ -2,6 +2,7 @@ import type { PipelineType } from "../pipelines.js";
 import type { ModelDataMinimal } from "./types.js";
 export declare const snippetBasic: (model: ModelDataMinimal, accessToken: string) => string;
 export declare const snippetTextGeneration: (model: ModelDataMinimal, accessToken: string) => string;
+export declare const snippetImageTextToTextGeneration: (model: ModelDataMinimal, accessToken: string) => string;
 export declare const snippetZeroShotClassification: (model: ModelDataMinimal, accessToken: string) => string;
 export declare const snippetTextToImage: (model: ModelDataMinimal, accessToken: string) => string;
 export declare const snippetTextToAudio: (model: ModelDataMinimal, accessToken: string) => string;
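
For reference, when a conversational image-text-to-text model flows through the new JS builder, the emitted snippet has the shape below. This is the template from the index.cjs hunk with the interpolations filled in; the model id is an example substitution and the token is left as the placeholder:

import { HfInference } from "@huggingface/inference";

const inference = new HfInference("{API_TOKEN}");
const imageUrl = "https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg";

// Streams the chat completion; each chunk carries a delta of the generated answer.
for await (const chunk of inference.chatCompletionStream({
  model: "llava-hf/llava-onevision-qwen2-72b-ov-hf",
  messages: [
    {
      role: "user",
      content: [
        { type: "image_url", image_url: { url: imageUrl } },
        { type: "text", text: "Describe this image in one sentence." },
      ],
    },
  ],
  max_tokens: 500,
})) {
  process.stdout.write(chunk.choices[0]?.delta?.content || "");
}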
package/dist/src/snippets/js.d.ts.map
CHANGED
@@ -1 +1 @@
(generated sourcemap: single-line "mappings" string regenerated for the new snippetImageTextToTextGeneration declaration)
package/dist/src/snippets/python.d.ts
CHANGED
@@ -1,6 +1,7 @@
 import type { PipelineType } from "../pipelines.js";
 import type { ModelDataMinimal } from "./types.js";
 export declare const snippetConversational: (model: ModelDataMinimal, accessToken: string) => string;
+export declare const snippetConversationalWithImage: (model: ModelDataMinimal, accessToken: string) => string;
 export declare const snippetZeroShotClassification: (model: ModelDataMinimal) => string;
 export declare const snippetZeroShotImageClassification: (model: ModelDataMinimal) => string;
 export declare const snippetBasic: (model: ModelDataMinimal) => string;
package/dist/src/snippets/python.d.ts.map
CHANGED
@@ -1 +1 @@
(generated sourcemap: single-line "mappings" string regenerated for the new snippetConversationalWithImage declaration)
package/dist/src/snippets/types.d.ts
CHANGED
@@ -4,5 +4,5 @@ import type { ModelData } from "../model-data";
  *
  * Add more fields as needed.
  */
-export type ModelDataMinimal = Pick<ModelData, "id" | "pipeline_tag" | "mask_token" | "library_name" | "config">;
+export type ModelDataMinimal = Pick<ModelData, "id" | "pipeline_tag" | "mask_token" | "library_name" | "config" | "tags">;
 //# sourceMappingURL=types.d.ts.map
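
Adding "tags" here is what lets the snippet builders call model.tags.includes("conversational") without widening their parameter type. A sketch of a conforming value; the import path from the package root is an assumption, and the id and tags are illustrative:

import type { ModelDataMinimal } from "@huggingface/tasks";

const model: ModelDataMinimal = {
  id: "llava-hf/llava-onevision-qwen2-72b-ov-hf", // example id
  pipeline_tag: "image-text-to-text",
  tags: ["conversational"], // newly part of the minimal shape
};
// With tags present, getPythonInferenceSnippet routes this model to
// snippetConversationalWithImage (see the index.cjs hunk above).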
package/dist/src/snippets/types.d.ts.map
CHANGED
@@ -1 +1 @@
(generated sourcemap: single-line "mappings" string regenerated for the widened ModelDataMinimal type)
package/dist/src/tasks/automatic-speech-recognition/inference.d.ts
CHANGED
@@ -27,7 +27,7 @@ export interface AutomaticSpeechRecognitionParameters {
     /**
      * Parametrization of the text generation process
      */
-
+    generation_parameters?: GenerationParameters;
     /**
      * Whether to output corresponding timestamps with the generated text
      */
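
A sketch of the renamed field in use, assuming the shared GenerationParameters spec (fields such as max_new_tokens and temperature) and that the parameters type is re-exported from the package root; the other field name follows the published ASR task spec:

import type { AutomaticSpeechRecognitionParameters } from "@huggingface/tasks";

const params: AutomaticSpeechRecognitionParameters = {
  return_timestamps: true, // whether to output timestamps with the generated text
  generation_parameters: {
    max_new_tokens: 100,
    temperature: 0.7,
  },
};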
package/dist/src/tasks/automatic-speech-recognition/inference.d.ts.map
CHANGED
@@ -1 +1 @@
(generated sourcemap: single-line "mappings" string regenerated for the generation_parameters field)
package/dist/src/tasks/image-to-text/inference.d.ts.map
CHANGED
@@ -1 +1 @@
(generated sourcemap: single-line "mappings" string regenerated for the generation_parameters field)
package/dist/src/tasks/index.d.ts.map
CHANGED
@@ -1 +1 @@
(generated sourcemap: single-line "mappings" string regenerated for the updated task exports)
package/dist/src/tasks/text-to-audio/inference.d.ts.map
CHANGED
@@ -1 +1 @@
(single-line minified source map regenerated; mappings not shown)

package/dist/src/tasks/text-to-speech/inference.d.ts.map
CHANGED
@@ -1 +1 @@
(single-line minified source map regenerated; mappings not shown)

package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@huggingface/tasks",
   "packageManager": "pnpm@8.10.5",
-  "version": "0.12.9",
+  "version": "0.12.11",
   "description": "List of ML tasks for huggingface.co/tasks",
   "repository": "https://github.com/huggingface/huggingface.js.git",
   "publishConfig": {

package/src/model-libraries-snippets.ts
CHANGED
@@ -951,6 +951,12 @@ export const mlxim = (model: ModelData): string[] => [
 model = create_model(${model.id})`,
 ];

+export const model2vec = (model: ModelData): string[] => [
+  `from model2vec import StaticModel
+
+model = StaticModel.from_pretrained("${model.id}")`,
+];
+
 export const nemo = (model: ModelData): string[] => {
   let command: string[] | undefined = undefined;
   // Resolve the tag to a nemo domain/sub-domain
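
The new `model2vec` factory mirrors the other snippet helpers in this file: it takes a `ModelData` and returns an array of ready-to-paste Python strings. A minimal sketch of calling it (the repo id is a placeholder, and the cast elides `ModelData`'s other fields):

// Sketch only: illustrative repo id; not part of the package.
import type { ModelData } from "./model-data";
import { model2vec } from "./model-libraries-snippets";

const [pythonSnippet] = model2vec({ id: "my-org/my-static-model" } as ModelData);
console.log(pythonSnippet);
// from model2vec import StaticModel
//
// model = StaticModel.from_pretrained("my-org/my-static-model")
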
package/src/model-libraries.ts
CHANGED
@@ -406,6 +406,13 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
     filter: false,
     countDownloads: `path:"mlc-chat-config.json"`,
   },
+  model2vec: {
+    prettyLabel: "Model2Vec",
+    repoName: "model2vec",
+    repoUrl: "https://github.com/MinishLab/model2vec",
+    snippets: snippets.model2vec,
+    filter: false,
+  },
   moshi: {
     prettyLabel: "Moshi",
     repoName: "Moshi",
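
With this entry registered, Model2Vec repos get a display label, a repo link, and the snippet above; `filter: false` keeps the library out of the filterable list. A rough lookup sketch, assuming the map is re-exported from the package root as other consumers use it:

// Sketch only: import path assumes the package-root re-export.
import { MODEL_LIBRARIES_UI_ELEMENTS } from "@huggingface/tasks";

const lib = MODEL_LIBRARIES_UI_ELEMENTS.model2vec;
console.log(lib.prettyLabel); // "Model2Vec"
console.log(lib.repoUrl); // "https://github.com/MinishLab/model2vec"
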
package/src/snippets/curl.ts
CHANGED
@@ -10,7 +10,7 @@ export const snippetBasic = (model: ModelDataMinimal, accessToken: string): stri
   -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"`;

 export const snippetTextGeneration = (model: ModelDataMinimal, accessToken: string): string => {
-  if (model.
+  if (model.tags.includes("conversational")) {
     // Conversational model detected, so we display a code snippet that features the Messages API
     return `curl 'https://api-inference.huggingface.co/models/${model.id}/v1/chat/completions' \\
 -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}" \\
@@ -27,6 +27,32 @@ export const snippetTextGeneration = (model: ModelDataMinimal, accessToken: stri
   }
 };

+export const snippetImageTextToTextGeneration = (model: ModelDataMinimal, accessToken: string): string => {
+  if (model.tags.includes("conversational")) {
+    // Conversational model detected, so we display a code snippet that features the Messages API
+    return `curl 'https://api-inference.huggingface.co/models/${model.id}/v1/chat/completions' \\
+-H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}" \\
+-H 'Content-Type: application/json' \\
+-d '{
+    "model": "${model.id}",
+    "messages": [
+        {
+            "role": "user",
+            "content": [
+                {"type": "image_url", "image_url": {"url": "https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg"}},
+                {"type": "text", "text": "Describe this image in one sentence."}
+            ]
+        }
+    ],
+    "max_tokens": 500,
+    "stream": false
+}'
+`;
+  } else {
+    return snippetBasic(model, accessToken);
+  }
+};
+
 export const snippetZeroShotClassification = (model: ModelDataMinimal, accessToken: string): string =>
   `curl https://api-inference.huggingface.co/models/${model.id} \\
   -X POST \\
@@ -51,6 +77,7 @@ export const curlSnippets: Partial<Record<PipelineType, (model: ModelDataMinimal
   summarization: snippetBasic,
   "feature-extraction": snippetBasic,
   "text-generation": snippetTextGeneration,
+  "image-text-to-text": snippetImageTextToTextGeneration,
   "text2text-generation": snippetBasic,
   "fill-mask": snippetBasic,
   "sentence-similarity": snippetBasic,
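
The `else` branch keeps non-conversational image-text-to-text models on the generic `snippetBasic` path. A sketch of invoking the new factory directly (model id, tags, and token are placeholders):

// Sketch only: a hand-built ModelDataMinimal for illustration.
import type { ModelDataMinimal } from "./types.js";
import { snippetImageTextToTextGeneration } from "./curl.js";

const model = {
  id: "my-org/vision-chat-model",
  pipeline_tag: "image-text-to-text",
  tags: ["conversational"],
} as ModelDataMinimal;

const curlCommand = snippetImageTextToTextGeneration(model, "hf_xxx");
// curl 'https://api-inference.huggingface.co/models/my-org/vision-chat-model/v1/chat/completions' ...
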
package/src/snippets/js.ts
CHANGED
@@ -24,7 +24,7 @@ query({"inputs": ${getModelInputSnippet(model)}}).then((response) => {
 });`;

 export const snippetTextGeneration = (model: ModelDataMinimal, accessToken: string): string => {
-  if (model.
+  if (model.tags.includes("conversational")) {
     // Conversational model detected, so we display a code snippet that features the Messages API
     return `import { HfInference } from "@huggingface/inference";

@@ -41,6 +41,35 @@ for await (const chunk of inference.chatCompletionStream({
     return snippetBasic(model, accessToken);
   }
 };
+
+export const snippetImageTextToTextGeneration = (model: ModelDataMinimal, accessToken: string): string => {
+  if (model.tags.includes("conversational")) {
+    // Conversational model detected, so we display a code snippet that features the Messages API
+    return `import { HfInference } from "@huggingface/inference";
+
+const inference = new HfInference("${accessToken || `{API_TOKEN}`}");
+const imageUrl = "https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg";
+
+for await (const chunk of inference.chatCompletionStream({
+  model: "${model.id}",
+  messages: [
+    {
+      "role": "user",
+      "content": [
+        {"type": "image_url", "image_url": {"url": imageUrl}},
+        {"type": "text", "text": "Describe this image in one sentence."},
+      ],
+    }
+  ],
+  max_tokens: 500,
+})) {
+  process.stdout.write(chunk.choices[0]?.delta?.content || "");
+}`;
+  } else {
+    return snippetBasic(model, accessToken);
+  }
+};
+
 export const snippetZeroShotClassification = (model: ModelDataMinimal, accessToken: string): string =>
   `async function query(data) {
   const response = await fetch(
@@ -156,6 +185,7 @@ export const jsSnippets: Partial<Record<PipelineType, (model: ModelDataMinimal,
   summarization: snippetBasic,
   "feature-extraction": snippetBasic,
   "text-generation": snippetTextGeneration,
+  "image-text-to-text": snippetImageTextToTextGeneration,
   "text2text-generation": snippetBasic,
   "fill-mask": snippetBasic,
   "sentence-similarity": snippetBasic,
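
As in curl.ts, the handler only fires for models tagged `conversational`; everything else falls through to `snippetBasic`. The table makes pipeline-type dispatch a one-liner, sketched here under the assumption that `pipeline_tag` is set:

// Sketch only: resolve and render the snippet for a given model.
const renderJsSnippet = (model: ModelDataMinimal, accessToken: string): string | undefined =>
  model.pipeline_tag ? jsSnippets[model.pipeline_tag]?.(model, accessToken) : undefined;
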
package/src/snippets/python.ts
CHANGED
@@ -5,18 +5,39 @@ import type { ModelDataMinimal } from "./types.js";
 export const snippetConversational = (model: ModelDataMinimal, accessToken: string): string =>
   `from huggingface_hub import InferenceClient

-client = InferenceClient(
-    "${model.id}",
-    token="${accessToken || "{API_TOKEN}"}",
-)
+client = InferenceClient(api_key="${accessToken || "{API_TOKEN}"}")

 for message in client.chat_completion(
+    model="${model.id}",
     messages=[{"role": "user", "content": "What is the capital of France?"}],
     max_tokens=500,
     stream=True,
 ):
     print(message.choices[0].delta.content, end="")`;

+export const snippetConversationalWithImage = (model: ModelDataMinimal, accessToken: string): string =>
+  `from huggingface_hub import InferenceClient
+
+client = InferenceClient(api_key="${accessToken || "{API_TOKEN}"}")
+
+image_url = "https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg"
+
+for message in client.chat_completion(
+    model="${model.id}",
+    messages=[
+        {
+            "role": "user",
+            "content": [
+                {"type": "image_url", "image_url": {"url": image_url}},
+                {"type": "text", "text": "Describe this image in one sentence."},
+            ],
+        }
+    ],
+    max_tokens=500,
+    stream=True,
+):
+    print(message.choices[0].delta.content, end="")`;
+
 export const snippetZeroShotClassification = (model: ModelDataMinimal): string =>
   `def query(payload):
     response = requests.post(API_URL, headers=headers, json=payload)
@@ -153,9 +174,12 @@ export const pythonSnippets: Partial<Record<PipelineType, (model: ModelDataMinim
 };

 export function getPythonInferenceSnippet(model: ModelDataMinimal, accessToken: string): string {
-  if (model.pipeline_tag === "text-generation" && model.
+  if (model.pipeline_tag === "text-generation" && model.tags.includes("conversational")) {
     // Conversational model detected, so we display a code snippet that features the Messages API
     return snippetConversational(model, accessToken);
+  } else if (model.pipeline_tag === "image-text-to-text" && model.tags.includes("conversational")) {
+    // Example sending an image to the Message API
+    return snippetConversationalWithImage(model, accessToken);
   } else {
     const body =
       model.pipeline_tag && model.pipeline_tag in pythonSnippets
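
Two behavioural changes land here: the generated Python now authenticates with `InferenceClient(api_key=...)` and passes the model id to `chat_completion(...)` instead of the constructor, and conversational image-text-to-text models get a dedicated snippet. A dispatch sketch with placeholder values:

// Sketch only: id and token are placeholders.
const snippet = getPythonInferenceSnippet(
  { id: "my-org/vision-chat-model", pipeline_tag: "image-text-to-text", tags: ["conversational"] } as ModelDataMinimal,
  "hf_xxx"
);
// The returned string begins:
// from huggingface_hub import InferenceClient
//
// client = InferenceClient(api_key="hf_xxx")
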
package/src/snippets/types.ts
CHANGED
@@ -5,4 +5,7 @@ import type { ModelData } from "../model-data";
  *
  * Add more fields as needed.
  */
-export type ModelDataMinimal = Pick<
+export type ModelDataMinimal = Pick<
+  ModelData,
+  "id" | "pipeline_tag" | "mask_token" | "library_name" | "config" | "tags"
+>;
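
Adding `"tags"` to the Pick is what lets the snippet modules test `model.tags.includes("conversational")` without pulling in the full `ModelData`. A minimal literal that satisfies the widened type, assuming the other picked fields stay optional on `ModelData`:

const minimal: ModelDataMinimal = {
  id: "my-org/my-model", // placeholder id
  pipeline_tag: "text-generation",
  tags: ["conversational"],
};
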
package/src/tasks/automatic-speech-recognition/inference.ts
CHANGED
@@ -29,7 +29,7 @@ export interface AutomaticSpeechRecognitionParameters {
   /**
    * Parametrization of the text generation process
    */
-
+  generation_parameters?: GenerationParameters;
   /**
    * Whether to output corresponding timestamps with the generated text
    */
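
The renamed `generation_parameters` field nests the shared `GenerationParameters` options, matching the spec updates below. A payload sketch (member names such as `temperature` and `max_new_tokens` are assumed from the common GenerationParameters definition; values are illustrative):

// Sketch only: all fields of AutomaticSpeechRecognitionParameters are optional.
const parameters: AutomaticSpeechRecognitionParameters = {
  generation_parameters: { temperature: 0.7, max_new_tokens: 128 },
};
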
package/src/tasks/automatic-speech-recognition/spec/input.json
CHANGED
@@ -24,7 +24,7 @@
       "type": "boolean",
       "description": "Whether to output corresponding timestamps with the generated text"
     },
-    "
+    "generation_parameters": {
       "description": "Parametrization of the text generation process",
       "$ref": "/inference/schemas/common-definitions.json#/definitions/GenerationParameters"
     }

package/src/tasks/image-to-text/spec/input.json
CHANGED
@@ -23,7 +23,7 @@
       "type": "integer",
       "description": "The amount of maximum tokens to generate."
     },
-    "
+    "generation_parameters": {
       "description": "Parametrization of the text generation process",
       "$ref": "/inference/schemas/common-definitions.json#/definitions/GenerationParameters"
     }

package/src/tasks/index.ts
CHANGED
@@ -40,6 +40,7 @@ import zeroShotObjectDetection from "./zero-shot-object-detection/data";
 import imageTo3D from "./image-to-3d/data";
 import textTo3D from "./text-to-3d/data";
 import keypointDetection from "./keypoint-detection/data";
+import videoTextToText from "./video-text-to-text/data";

 export type * from "./audio-classification/inference";
 export type * from "./automatic-speech-recognition/inference";
@@ -237,7 +238,7 @@ export const TASKS_DATA: Record<PipelineType, TaskData | undefined> = {
   "token-classification": getData("token-classification", tokenClassification),
   translation: getData("translation", translation),
   "unconditional-image-generation": getData("unconditional-image-generation", unconditionalImageGeneration),
-  "video-text-to-text": getData("video-text-to-text",
+  "video-text-to-text": getData("video-text-to-text", videoTextToText),
   "visual-question-answering": getData("visual-question-answering", visualQuestionAnswering),
   "voice-activity-detection": undefined,
   "zero-shot-classification": getData("zero-shot-classification", zeroShotClassification),
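
With the data module wired in, `video-text-to-text` is no longer an empty placeholder in `TASKS_DATA`. A quick check, assuming the table is consumed from the package root:

import { TASKS_DATA } from "@huggingface/tasks";

const vtt = TASKS_DATA["video-text-to-text"];
console.log(vtt?.id); // "video-text-to-text"
console.log(vtt?.label); // human-readable task label
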
package/src/tasks/text-to-audio/spec/input.json
CHANGED
@@ -20,7 +20,7 @@
     "description": "Additional inference parameters for Text To Audio",
     "type": "object",
     "properties": {
-      "
+      "generation_parameters": {
         "description": "Parametrization of the text generation process",
         "$ref": "/inference/schemas/common-definitions.json#/definitions/GenerationParameters"
       }

package/src/tasks/text-to-speech/spec/input.json
CHANGED
@@ -20,7 +20,7 @@
     "description": "Additional inference parameters for Text To Speech",
     "type": "object",
     "properties": {
-      "
+      "generation_parameters": {
         "description": "Parametrization of the text generation process",
         "$ref": "/inference/schemas/common-definitions.json#/definitions/GenerationParameters"
       }