@huggingface/tasks 0.19.26 → 0.19.28
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commonjs/model-libraries-snippets.d.ts.map +1 -1
- package/dist/commonjs/model-libraries-snippets.js +80 -19
- package/dist/commonjs/model-libraries.d.ts +8 -7
- package/dist/commonjs/model-libraries.d.ts.map +1 -1
- package/dist/commonjs/model-libraries.js +7 -6
- package/dist/esm/model-libraries-snippets.d.ts.map +1 -1
- package/dist/esm/model-libraries-snippets.js +80 -19
- package/dist/esm/model-libraries.d.ts +8 -7
- package/dist/esm/model-libraries.d.ts.map +1 -1
- package/dist/esm/model-libraries.js +7 -6
- package/package.json +1 -1
- package/src/model-libraries-snippets.ts +101 -14
- package/src/model-libraries.ts +7 -6
package/dist/commonjs/model-libraries-snippets.d.ts.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAkBjD,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAIhD,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAaF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAY7C,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAenC,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAsDnC,CAAC;AAEF,eAAO,MAAM,cAAc,QAAO,MAAM,EAcvC,CAAC;AAEF,eAAO,MAAM,iBAAiB,GAAI,OAAO,SAAS,KAAG,MAAM,EA6C1D,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAwBlD,CAAC;AAEF,eAAO,MAAM,eAAe,QAAO,MAAM,EAoBxC,CAAC;AAEF,eAAO,MAAM,GAAG,GAAI,OAAO,SAAS,KAAG,MAAM,EAS5C,CAAC;AAEF,eAAO,MAAM,iBAAiB,GAAI,OAAO,SAAS,KAAG,MAAM,EAY1D,CAAC;
+
{"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAkBjD,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAIhD,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAaF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAY7C,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAenC,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAsDnC,CAAC;AAEF,eAAO,MAAM,cAAc,QAAO,MAAM,EAcvC,CAAC;AAEF,eAAO,MAAM,iBAAiB,GAAI,OAAO,SAAS,KAAG,MAAM,EA6C1D,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAwBlD,CAAC;AAEF,eAAO,MAAM,eAAe,QAAO,MAAM,EAoBxC,CAAC;AAEF,eAAO,MAAM,GAAG,GAAI,OAAO,SAAS,KAAG,MAAM,EAS5C,CAAC;AAEF,eAAO,MAAM,iBAAiB,GAAI,OAAO,SAAS,KAAG,MAAM,EAY1D,CAAC;AAkKF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EA6BlD,CAAC;AAEF,eAAO,MAAM,YAAY,GAAI,OAAO,SAAS,KAAG,MAAM,EAwCrD,CAAC;AAEF,eAAO,MAAM,gBAAgB,GAAI,OAAO,SAAS,KAAG,MAAM,EAgBzD,CAAC;AAEF,eAAO,MAAM,YAAY,GAAI,OAAO,SAAS,KAAG,MAAM,EAmBrD,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAgB/C,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAMlD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EASlD,CAAC;AAIF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAMhD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAgBjD,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAS9C,CAAC;AA4EF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,UAAU,GAAI,OAAO,SAAS,KAAG,MAAM,EA+BnD,CAAC;AAEF,eAAO,MAAM,YAAY,GAAI,OAAO,SAAS,KAAG,MAAM,EA+BrD,CAAC;AAEF,eAAO,MAAM,gBAAgB,GAAI,OAAO,SAAS,KAAG,MAAM,EA4BzD,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAuChD,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAOjD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAIlD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAQlD,CAAC;AAEF,eAAO,MAAM,aAAa,QAAO,MAAM,EAQtC,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EASlD,CAAC;AAEF,eAAO,MAAM,kBAAkB,GAAI,OAAO,SAAS,KAAG,MAAM,EAgB3D,CAAC;AACF,eAAO,MAAM,WAAW,GAAI,OAAO,SAAS,KAAG,MAAM,EAUpD,CAAC;AAEF,eAAO,MAAM,uBAAuB,GAAI,OAAO,SAAS,KAAG,MAAM,EAehE,CAAC;AAiBF,eAAO,MAAM,cAAc,GAAI,OAAO,SAAS,KAAG,MAAM,EAKvD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,YAAY,GAAI,OAAO,SAAS,KAAG,MAAM,EAKrD,CAAC;AAyBF,eAAO,MAAM,aAAa,GAAI,OAAO,SAAS,KAAG,MAAM,EAOtD,CAAC;AAEF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAI7C,CAAC;AAEF,eAAO,MAAM,OAAO,QAA6B,MAAM,EAQtD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAanC,CAAC;AAsCF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,kBAAkB,GAAI,OAAO,SAAS,KAAG,MAAM,EAmC3D,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EA2B7C,CAAC;AAEF,eAAO,MAAM,aAAa,GAAI,OAAO,SAAS,KAAG,MAAM,EAEtD,CAAC;AASF,eAAO,MAAM,oBAAoB,GAAI,OAAO,SAAS,KAAG,MAAM,EAuC7D,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAU9C,CAAC;AAEF,eAA
O,MAAM,WAAW,GAAI,OAAO,SAAS,KAAG,MAAM,EAIpD,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAK/C,CAAC;AAkBF,eAAO,MAAM,WAAW,GAAI,OAAO,SAAS,KAAG,MAAM,EAkBpD,CAAC;AAEF,eAAO,MAAM,UAAU,GAAI,OAAO,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,YAAY,GAAI,OAAO,SAAS,KAAG,MAAM,EAyGrD,CAAC;AAEF,eAAO,MAAM,cAAc,GAAI,OAAO,SAAS,KAAG,MAAM,EAcvD,CAAC;AAiBF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAiB7C,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,gBAAgB,GAAI,OAAO,SAAS,KAAG,MAAM,EAMzD,CAAC;AAgBF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAEjD,CAAC;AAEF,eAAO,MAAM,MAAM,QAA6B,MAAM,EAMrD,CAAC;AAEF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAkB7C,CAAC;AAEF,eAAO,MAAM,UAAU,GAAI,OAAO,SAAS,KAAG,MAAM,EAWnD,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,UAAU,GAAI,OAAO,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,GAAG,QAAO,MAAM,EAgB5B,CAAC;AAEF,eAAO,MAAM,OAAO,QAAO,MAAM,EAYhC,CAAC;AAEF,eAAO,MAAM,WAAW,GAAI,OAAO,SAAS,KAAG,MAAM,EAiBpD,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAYjD,CAAC;AAEF,eAAO,MAAM,WAAW,GAAI,OAAO,SAAS,KAAG,MAAM,EAKpD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAW9C,CAAC;AAkEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,GAAG,GAAI,OAAO,SAAS,KAAG,MAAM,EAY5C,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAIlD,CAAC;AAEF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAQ7C,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAuBhD,CAAC;AAEF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAI7C,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAI/C,CAAC;AA4BF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,UAAU,GAAI,OAAO,SAAS,KAAG,MAAM,EAUnD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAYnC,CAAC;AAEF,eAAO,MAAM,cAAc,GAAI,OAAO,SAAS,KAAG,MAAM,EAKvD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAiB9C,CAAC"}

package/dist/commonjs/model-libraries-snippets.js
CHANGED

@@ -408,8 +408,60 @@ const diffusers_textual_inversion = (model) => [
 pipe = DiffusionPipeline.from_pretrained("${get_base_diffusers_model(model)}")
 pipe.load_textual_inversion("${model.id}")`,
 ];
+const diffusers_flux_fill = (model) => [
+    `import torch
+from diffusers import FluxFillPipeline
+from diffusers.utils import load_image
+
+image = load_image("https://huggingface.co/datasets/diffusers/diffusers-images-docs/resolve/main/cup.png")
+mask = load_image("https://huggingface.co/datasets/diffusers/diffusers-images-docs/resolve/main/cup_mask.png")
+
+pipe = FluxFillPipeline.from_pretrained("${model.id}", torch_dtype=torch.bfloat16).to("cuda")
+image = pipe(
+    prompt="a white paper cup",
+    image=image,
+    mask_image=mask,
+    height=1632,
+    width=1232,
+    guidance_scale=30,
+    num_inference_steps=50,
+    max_sequence_length=512,
+    generator=torch.Generator("cpu").manual_seed(0)
+).images[0]
+image.save(f"flux-fill-dev.png")`,
+];
+const diffusers_inpainting = (model) => [
+    `import torch
+from diffusers import AutoPipelineForInpainting
+from diffusers.utils import load_image
+
+pipe = AutoPipelineForInpainting.from_pretrained("${model.id}", torch_dtype=torch.float16, variant="fp16").to("cuda")
+
+img_url = "https://raw.githubusercontent.com/CompVis/latent-diffusion/main/data/inpainting_examples/overture-creations-5sI6fQgYIuo.png"
+mask_url = "https://raw.githubusercontent.com/CompVis/latent-diffusion/main/data/inpainting_examples/overture-creations-5sI6fQgYIuo_mask.png"
+
+image = load_image(img_url).resize((1024, 1024))
+mask_image = load_image(mask_url).resize((1024, 1024))
+
+prompt = "a tiger sitting on a park bench"
+generator = torch.Generator(device="cuda").manual_seed(0)
+
+image = pipe(
+    prompt=prompt,
+    image=image,
+    mask_image=mask_image,
+    guidance_scale=8.0,
+    num_inference_steps=20, # steps between 15 and 30 work well for us
+    strength=0.99, # make sure to use \`strength\` below 1.0
+    generator=generator,
+).images[0]`,
+];
 const diffusers = (model) => {
-    if (model.tags.includes("
+    if (model.tags.includes("StableDiffusionInpaintPipeline") ||
+        model.tags.includes("StableDiffusionXLInpaintPipeline")) {
+        return diffusers_inpainting(model);
+    }
+    else if (model.tags.includes("controlnet")) {
         return diffusers_controlnet(model);
     }
     else if (model.tags.includes("lora")) {
@@ -429,6 +481,9 @@ const diffusers = (model) => {
     else if (model.tags.includes("textual_inversion")) {
         return diffusers_textual_inversion(model);
     }
+    else if (model.tags.includes("FluxFillPipeline")) {
+        return diffusers_flux_fill(model);
+    }
     else if (model.pipeline_tag === "image-to-video") {
         return diffusers_image_to_video(model);
     }
@@ -1328,27 +1383,34 @@ const transformers = (model) => {
         return [`# ⚠️ Type of model unknown`];
     }
     const remote_code_snippet = model.tags.includes(TAG_CUSTOM_CODE) ? ", trust_remote_code=True" : "";
-
+    const autoSnippet = [];
     if (info.processor) {
-        const
+        const processorVarName = info.processor === "AutoTokenizer"
            ? "tokenizer"
            : info.processor === "AutoFeatureExtractor"
                ? "extractor"
                : "processor";
-        autoSnippet =
-
-
-
-
-
-
+        autoSnippet.push("# Load model directly", `from transformers import ${info.processor}, ${info.auto_model}`, "", `${processorVarName} = ${info.processor}.from_pretrained("${model.id}"` + remote_code_snippet + ")", `model = ${info.auto_model}.from_pretrained("${model.id}"` + remote_code_snippet + ")");
+        if (model.tags.includes("conversational")) {
+            if (model.tags.includes("image-text-to-text")) {
+                autoSnippet.push("messages = [", [
+                    " {",
+                    ' "role": "user",',
+                    ' "content": [',
+                    ' {"type": "image", "url": "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/p-blog/candy.JPG"},',
+                    ' {"type": "text", "text": "What animal is on the candy?"}',
+                    " ]",
+                    " },",
+                ].join("\n"), "]");
+            }
+            else {
+                autoSnippet.push("messages = [", ' {"role": "user", "content": "Who are you?"},', "]");
+            }
+            autoSnippet.push("inputs = ${processorVarName}.apply_chat_template(", " messages,", " add_generation_prompt=True,", " tokenize=True,", " return_dict=True,", ' return_tensors="pt",', ").to(model.device)", "", "outputs = model.generate(**inputs, max_new_tokens=40)", 'print(${processorVarName}.decode(outputs[0][inputs["input_ids"].shape[-1]:]))');
+        }
     }
     else {
-        autoSnippet =
-        "# Load model directly",
-        `from transformers import ${info.auto_model}`,
-        `model = ${info.auto_model}.from_pretrained("${model.id}"` + remote_code_snippet + ")",
-        ].join("\n");
+        autoSnippet.push("# Load model directly", `from transformers import ${info.auto_model}`, `model = ${info.auto_model}.from_pretrained("${model.id}"` + remote_code_snippet + ', torch_dtype="auto"),');
     }
     if (model.pipeline_tag && library_to_tasks_js_1.LIBRARY_TASK_MAPPING.transformers?.includes(model.pipeline_tag)) {
         const pipelineSnippet = [
@@ -1381,9 +1443,9 @@ const transformers = (model) => {
         else if (model.pipeline_tag === "image-classification") {
             pipelineSnippet.push('pipe("https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/hub/parrots.png")');
         }
-        return [pipelineSnippet.join("\n"), autoSnippet];
+        return [pipelineSnippet.join("\n"), autoSnippet.join("\n")];
     }
-    return [autoSnippet];
+    return [autoSnippet.join("\n")];
 };
 exports.transformers = transformers;
 const transformersJS = (model) => {
@@ -1495,10 +1557,9 @@ exports.sana = sana;
 const videoprism = (model) => [
     `# Install from https://github.com/google-deepmind/videoprism
 import jax
-import jax.numpy as jnp
 from videoprism import models as vp
 
-flax_model = vp.
+flax_model = vp.get_model("${model.id}")
 loaded_state = vp.load_pretrained_weights("${model.id}")
 
 @jax.jit

package/dist/commonjs/model-libraries.d.ts
CHANGED

@@ -460,12 +460,6 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
         repoUrl: string;
         countDownloads: string;
     };
-    "hunyuan3d-2": {
-        prettyLabel: string;
-        repoName: string;
-        repoUrl: string;
-        countDownloads: string;
-    };
     imstoucan: {
         prettyLabel: string;
         repoName: string;
@@ -518,6 +512,13 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
         snippets: (model: ModelData) => string[];
         filter: false;
     };
+    kronos: {
+        prettyLabel: string;
+        repoName: string;
+        repoUrl: string;
+        filter: false;
+        countDownloads: string;
+    };
     k2: {
         prettyLabel: string;
         repoName: string;
@@ -1189,5 +1190,5 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
 };
 export type ModelLibraryKey = keyof typeof MODEL_LIBRARIES_UI_ELEMENTS;
 export declare const ALL_MODEL_LIBRARY_KEYS: ModelLibraryKey[];
-
export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("acestep" | "adapter-transformers" | "allennlp" | "anemoi" | "araclip" | "asteroid" | "audiocraft" | "audioseal" | "bagel-mot" | "ben2" | "bertopic" | "big_vision" | "birder" | "birefnet" | "bm25s" | "champ" | "chatterbox" | "chat_tts" | "colpali" | "comet" | "contexttab" | "cosmos" | "cxr-foundation" | "deepforest" | "depth-anything-v2" | "depth-pro" | "derm-foundation" | "describe-anything" | "dia-tts" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "clipscope" | "cosyvoice" | "cotracker" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "fme" | "gemma.cpp" | "geometry-crafter" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hermes" | "hezar" | "htrflow" | "hunyuan-dit" | "
+
export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("acestep" | "adapter-transformers" | "allennlp" | "anemoi" | "araclip" | "asteroid" | "audiocraft" | "audioseal" | "bagel-mot" | "ben2" | "bertopic" | "big_vision" | "birder" | "birefnet" | "bm25s" | "champ" | "chatterbox" | "chat_tts" | "colpali" | "comet" | "contexttab" | "cosmos" | "cxr-foundation" | "deepforest" | "depth-anything-v2" | "depth-pro" | "derm-foundation" | "describe-anything" | "dia-tts" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "clipscope" | "cosyvoice" | "cotracker" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "fme" | "gemma.cpp" | "geometry-crafter" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hermes" | "hezar" | "htrflow" | "hunyuan-dit" | "imstoucan" | "index-tts" | "infinite-you" | "keras" | "tf-keras" | "keras-hub" | "kimi-audio" | "kronos" | "k2" | "lightning-ir" | "litert-lm" | "lerobot" | "liveportrait" | "llama-cpp-python" | "mini-omni2" | "mindspore" | "magi-1" | "magenta-realtime" | "mamba-ssm" | "mars5-tts" | "matanyone" | "mesh-anything" | "merlin" | "medvae" | "mitie" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "model2vec" | "moshi" | "mtvcraft" | "nemo" | "open-oasis" | "open_clip" | "open-sora" | "outetts" | "paddlenlp" | "PaddleOCR" | "peft" | "perception-encoder" | "phantom-wan" | "pxia" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "renderformer" | "reverb" | "saelens" | "sam2" | "sample-factory" | "sapiens" | "seedvr" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "ssr-speech" | "stable-audio-tools" | "monkeyocr" | "diffusion-single-file" | "seed-story" | "soloaudio" | "stable-baselines3" | "stanza" | "swarmformer" | "f5-tts" | "genmo" | "tencent-song-generation" | "tensorflowtts" | "tabpfn" | "terratorch" | "tic-clip" | "timesfm" | "timm" | "tirex" | "torchgeo" | "transformers" | "transformers.js" | "trellis" | "ultralytics" | "univa" | "uni-3dar" | "unity-sentis" | "sana" | "videoprism" | "vfi-mamba" | "voicecraft" | "vui" | "wham" | "whisperkit" | "yolov10" | "zonos" | "3dtopia-xl")[];
 //# sourceMappingURL=model-libraries.d.ts.map

package/dist/commonjs/model-libraries.d.ts.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AACjD,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,gCAAgC,CAAC;AAEzE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B
+
{"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AACjD,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,gCAAgC,CAAC;AAEzE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAonCI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,EAA+C,eAAe,EAAE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,6lEAQ1B,CAAC"}

package/dist/commonjs/model-libraries.js
CHANGED

@@ -457,12 +457,6 @@ exports.MODEL_LIBRARIES_UI_ELEMENTS = {
         repoUrl: "https://github.com/Tencent/HunyuanDiT",
         countDownloads: `path:"pytorch_model_ema.pt" OR path:"pytorch_model_distill.pt"`,
     },
-    "hunyuan3d-2": {
-        prettyLabel: "Hunyuan3D-2",
-        repoName: "Hunyuan3D-2",
-        repoUrl: "https://github.com/Tencent/Hunyuan3D-2",
-        countDownloads: `path_filename:"model_index" OR path_filename:"config"`,
-    },
     imstoucan: {
         prettyLabel: "IMS Toucan",
         repoName: "IMS-Toucan",
@@ -516,6 +510,13 @@ exports.MODEL_LIBRARIES_UI_ELEMENTS = {
         snippets: snippets.kimi_audio,
         filter: false,
     },
+    kronos: {
+        prettyLabel: "KRONOS",
+        repoName: "KRONOS",
+        repoUrl: "https://github.com/mahmoodlab/KRONOS",
+        filter: false,
+        countDownloads: `path_extension:"pt"`,
+    },
     k2: {
         prettyLabel: "K2",
         repoName: "k2",

package/dist/esm/model-libraries-snippets.d.ts.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAkBjD,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAIhD,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAaF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAY7C,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAenC,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAsDnC,CAAC;AAEF,eAAO,MAAM,cAAc,QAAO,MAAM,EAcvC,CAAC;AAEF,eAAO,MAAM,iBAAiB,GAAI,OAAO,SAAS,KAAG,MAAM,EA6C1D,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAwBlD,CAAC;AAEF,eAAO,MAAM,eAAe,QAAO,MAAM,EAoBxC,CAAC;AAEF,eAAO,MAAM,GAAG,GAAI,OAAO,SAAS,KAAG,MAAM,EAS5C,CAAC;AAEF,eAAO,MAAM,iBAAiB,GAAI,OAAO,SAAS,KAAG,MAAM,EAY1D,CAAC;
+
{"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAkBjD,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAIhD,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAaF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAY7C,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAenC,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAsDnC,CAAC;AAEF,eAAO,MAAM,cAAc,QAAO,MAAM,EAcvC,CAAC;AAEF,eAAO,MAAM,iBAAiB,GAAI,OAAO,SAAS,KAAG,MAAM,EA6C1D,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAwBlD,CAAC;AAEF,eAAO,MAAM,eAAe,QAAO,MAAM,EAoBxC,CAAC;AAEF,eAAO,MAAM,GAAG,GAAI,OAAO,SAAS,KAAG,MAAM,EAS5C,CAAC;AAEF,eAAO,MAAM,iBAAiB,GAAI,OAAO,SAAS,KAAG,MAAM,EAY1D,CAAC;AAkKF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EA6BlD,CAAC;AAEF,eAAO,MAAM,YAAY,GAAI,OAAO,SAAS,KAAG,MAAM,EAwCrD,CAAC;AAEF,eAAO,MAAM,gBAAgB,GAAI,OAAO,SAAS,KAAG,MAAM,EAgBzD,CAAC;AAEF,eAAO,MAAM,YAAY,GAAI,OAAO,SAAS,KAAG,MAAM,EAmBrD,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAgB/C,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAMlD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EASlD,CAAC;AAIF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAMhD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAgBjD,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAS9C,CAAC;AA4EF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,UAAU,GAAI,OAAO,SAAS,KAAG,MAAM,EA+BnD,CAAC;AAEF,eAAO,MAAM,YAAY,GAAI,OAAO,SAAS,KAAG,MAAM,EA+BrD,CAAC;AAEF,eAAO,MAAM,gBAAgB,GAAI,OAAO,SAAS,KAAG,MAAM,EA4BzD,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAuChD,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAOjD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAIlD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAQlD,CAAC;AAEF,eAAO,MAAM,aAAa,QAAO,MAAM,EAQtC,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EASlD,CAAC;AAEF,eAAO,MAAM,kBAAkB,GAAI,OAAO,SAAS,KAAG,MAAM,EAgB3D,CAAC;AACF,eAAO,MAAM,WAAW,GAAI,OAAO,SAAS,KAAG,MAAM,EAUpD,CAAC;AAEF,eAAO,MAAM,uBAAuB,GAAI,OAAO,SAAS,KAAG,MAAM,EAehE,CAAC;AAiBF,eAAO,MAAM,cAAc,GAAI,OAAO,SAAS,KAAG,MAAM,EAKvD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,YAAY,GAAI,OAAO,SAAS,KAAG,MAAM,EAKrD,CAAC;AAyBF,eAAO,MAAM,aAAa,GAAI,OAAO,SAAS,KAAG,MAAM,EAOtD,CAAC;AAEF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAI7C,CAAC;AAEF,eAAO,MAAM,OAAO,QAA6B,MAAM,EAQtD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAanC,CAAC;AAsCF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,kBAAkB,GAAI,OAAO,SAAS,KAAG,MAAM,EAmC3D,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EA2B7C,CAAC;AAEF,eAAO,MAAM,aAAa,GAAI,OAAO,SAAS,KAAG,MAAM,EAEtD,CAAC;AASF,eAAO,MAAM,oBAAoB,GAAI,OAAO,SAAS,KAAG,MAAM,EAuC7D,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAU9C,CAAC;AAEF,eAA
O,MAAM,WAAW,GAAI,OAAO,SAAS,KAAG,MAAM,EAIpD,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAK/C,CAAC;AAkBF,eAAO,MAAM,WAAW,GAAI,OAAO,SAAS,KAAG,MAAM,EAkBpD,CAAC;AAEF,eAAO,MAAM,UAAU,GAAI,OAAO,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,YAAY,GAAI,OAAO,SAAS,KAAG,MAAM,EAyGrD,CAAC;AAEF,eAAO,MAAM,cAAc,GAAI,OAAO,SAAS,KAAG,MAAM,EAcvD,CAAC;AAiBF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAiB7C,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,gBAAgB,GAAI,OAAO,SAAS,KAAG,MAAM,EAMzD,CAAC;AAgBF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAEjD,CAAC;AAEF,eAAO,MAAM,MAAM,QAA6B,MAAM,EAMrD,CAAC;AAEF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAkB7C,CAAC;AAEF,eAAO,MAAM,UAAU,GAAI,OAAO,SAAS,KAAG,MAAM,EAWnD,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,UAAU,GAAI,OAAO,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,GAAG,QAAO,MAAM,EAgB5B,CAAC;AAEF,eAAO,MAAM,OAAO,QAAO,MAAM,EAYhC,CAAC;AAEF,eAAO,MAAM,WAAW,GAAI,OAAO,SAAS,KAAG,MAAM,EAiBpD,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAYjD,CAAC;AAEF,eAAO,MAAM,WAAW,GAAI,OAAO,SAAS,KAAG,MAAM,EAKpD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAW9C,CAAC;AAkEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,GAAG,GAAI,OAAO,SAAS,KAAG,MAAM,EAY5C,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAIlD,CAAC;AAEF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAQ7C,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAuBhD,CAAC;AAEF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAI7C,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAI/C,CAAC;AA4BF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,UAAU,GAAI,OAAO,SAAS,KAAG,MAAM,EAUnD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAYnC,CAAC;AAEF,eAAO,MAAM,cAAc,GAAI,OAAO,SAAS,KAAG,MAAM,EAKvD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAiB9C,CAAC"}

package/dist/esm/model-libraries-snippets.js
CHANGED

@@ -388,8 +388,60 @@ const diffusers_textual_inversion = (model) => [
 pipe = DiffusionPipeline.from_pretrained("${get_base_diffusers_model(model)}")
 pipe.load_textual_inversion("${model.id}")`,
 ];
+const diffusers_flux_fill = (model) => [
+    `import torch
+from diffusers import FluxFillPipeline
+from diffusers.utils import load_image
+
+image = load_image("https://huggingface.co/datasets/diffusers/diffusers-images-docs/resolve/main/cup.png")
+mask = load_image("https://huggingface.co/datasets/diffusers/diffusers-images-docs/resolve/main/cup_mask.png")
+
+pipe = FluxFillPipeline.from_pretrained("${model.id}", torch_dtype=torch.bfloat16).to("cuda")
+image = pipe(
+    prompt="a white paper cup",
+    image=image,
+    mask_image=mask,
+    height=1632,
+    width=1232,
+    guidance_scale=30,
+    num_inference_steps=50,
+    max_sequence_length=512,
+    generator=torch.Generator("cpu").manual_seed(0)
+).images[0]
+image.save(f"flux-fill-dev.png")`,
+];
+const diffusers_inpainting = (model) => [
+    `import torch
+from diffusers import AutoPipelineForInpainting
+from diffusers.utils import load_image
+
+pipe = AutoPipelineForInpainting.from_pretrained("${model.id}", torch_dtype=torch.float16, variant="fp16").to("cuda")
+
+img_url = "https://raw.githubusercontent.com/CompVis/latent-diffusion/main/data/inpainting_examples/overture-creations-5sI6fQgYIuo.png"
+mask_url = "https://raw.githubusercontent.com/CompVis/latent-diffusion/main/data/inpainting_examples/overture-creations-5sI6fQgYIuo_mask.png"
+
+image = load_image(img_url).resize((1024, 1024))
+mask_image = load_image(mask_url).resize((1024, 1024))
+
+prompt = "a tiger sitting on a park bench"
+generator = torch.Generator(device="cuda").manual_seed(0)
+
+image = pipe(
+    prompt=prompt,
+    image=image,
+    mask_image=mask_image,
+    guidance_scale=8.0,
+    num_inference_steps=20, # steps between 15 and 30 work well for us
+    strength=0.99, # make sure to use \`strength\` below 1.0
+    generator=generator,
+).images[0]`,
+];
 export const diffusers = (model) => {
-    if (model.tags.includes("
+    if (model.tags.includes("StableDiffusionInpaintPipeline") ||
+        model.tags.includes("StableDiffusionXLInpaintPipeline")) {
+        return diffusers_inpainting(model);
+    }
+    else if (model.tags.includes("controlnet")) {
         return diffusers_controlnet(model);
     }
     else if (model.tags.includes("lora")) {
@@ -409,6 +461,9 @@ export const diffusers = (model) => {
     else if (model.tags.includes("textual_inversion")) {
         return diffusers_textual_inversion(model);
     }
+    else if (model.tags.includes("FluxFillPipeline")) {
+        return diffusers_flux_fill(model);
+    }
    else if (model.pipeline_tag === "image-to-video") {
         return diffusers_image_to_video(model);
     }
@@ -1259,27 +1314,34 @@ export const transformers = (model) => {
         return [`# ⚠️ Type of model unknown`];
     }
     const remote_code_snippet = model.tags.includes(TAG_CUSTOM_CODE) ? ", trust_remote_code=True" : "";
-
+    const autoSnippet = [];
     if (info.processor) {
-        const
+        const processorVarName = info.processor === "AutoTokenizer"
            ? "tokenizer"
            : info.processor === "AutoFeatureExtractor"
                ? "extractor"
                : "processor";
-        autoSnippet =
-
-
-
-
-
-
+        autoSnippet.push("# Load model directly", `from transformers import ${info.processor}, ${info.auto_model}`, "", `${processorVarName} = ${info.processor}.from_pretrained("${model.id}"` + remote_code_snippet + ")", `model = ${info.auto_model}.from_pretrained("${model.id}"` + remote_code_snippet + ")");
+        if (model.tags.includes("conversational")) {
+            if (model.tags.includes("image-text-to-text")) {
+                autoSnippet.push("messages = [", [
+                    " {",
+                    ' "role": "user",',
+                    ' "content": [',
+                    ' {"type": "image", "url": "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/p-blog/candy.JPG"},',
+                    ' {"type": "text", "text": "What animal is on the candy?"}',
+                    " ]",
+                    " },",
+                ].join("\n"), "]");
+            }
+            else {
+                autoSnippet.push("messages = [", ' {"role": "user", "content": "Who are you?"},', "]");
+            }
+            autoSnippet.push("inputs = ${processorVarName}.apply_chat_template(", " messages,", " add_generation_prompt=True,", " tokenize=True,", " return_dict=True,", ' return_tensors="pt",', ").to(model.device)", "", "outputs = model.generate(**inputs, max_new_tokens=40)", 'print(${processorVarName}.decode(outputs[0][inputs["input_ids"].shape[-1]:]))');
+        }
     }
     else {
-        autoSnippet =
-        "# Load model directly",
-        `from transformers import ${info.auto_model}`,
-        `model = ${info.auto_model}.from_pretrained("${model.id}"` + remote_code_snippet + ")",
-        ].join("\n");
+        autoSnippet.push("# Load model directly", `from transformers import ${info.auto_model}`, `model = ${info.auto_model}.from_pretrained("${model.id}"` + remote_code_snippet + ', torch_dtype="auto"),');
     }
     if (model.pipeline_tag && LIBRARY_TASK_MAPPING.transformers?.includes(model.pipeline_tag)) {
         const pipelineSnippet = [
@@ -1312,9 +1374,9 @@ export const transformers = (model) => {
         else if (model.pipeline_tag === "image-classification") {
             pipelineSnippet.push('pipe("https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/hub/parrots.png")');
         }
-        return [pipelineSnippet.join("\n"), autoSnippet];
+        return [pipelineSnippet.join("\n"), autoSnippet.join("\n")];
     }
-    return [autoSnippet];
+    return [autoSnippet.join("\n")];
 };
 export const transformersJS = (model) => {
     if (!model.pipeline_tag) {
@@ -1418,10 +1480,9 @@ image = sana(
 export const videoprism = (model) => [
     `# Install from https://github.com/google-deepmind/videoprism
 import jax
-import jax.numpy as jnp
 from videoprism import models as vp
 
-flax_model = vp.
+flax_model = vp.get_model("${model.id}")
 loaded_state = vp.load_pretrained_weights("${model.id}")
 
 @jax.jit

package/dist/esm/model-libraries.d.ts
CHANGED

@@ -460,12 +460,6 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
         repoUrl: string;
         countDownloads: string;
     };
-    "hunyuan3d-2": {
-        prettyLabel: string;
-        repoName: string;
-        repoUrl: string;
-        countDownloads: string;
-    };
     imstoucan: {
         prettyLabel: string;
         repoName: string;
@@ -518,6 +512,13 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
         snippets: (model: ModelData) => string[];
         filter: false;
     };
+    kronos: {
+        prettyLabel: string;
+        repoName: string;
+        repoUrl: string;
+        filter: false;
+        countDownloads: string;
+    };
     k2: {
         prettyLabel: string;
         repoName: string;
@@ -1189,5 +1190,5 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
 };
 export type ModelLibraryKey = keyof typeof MODEL_LIBRARIES_UI_ELEMENTS;
 export declare const ALL_MODEL_LIBRARY_KEYS: ModelLibraryKey[];
-
export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("acestep" | "adapter-transformers" | "allennlp" | "anemoi" | "araclip" | "asteroid" | "audiocraft" | "audioseal" | "bagel-mot" | "ben2" | "bertopic" | "big_vision" | "birder" | "birefnet" | "bm25s" | "champ" | "chatterbox" | "chat_tts" | "colpali" | "comet" | "contexttab" | "cosmos" | "cxr-foundation" | "deepforest" | "depth-anything-v2" | "depth-pro" | "derm-foundation" | "describe-anything" | "dia-tts" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "clipscope" | "cosyvoice" | "cotracker" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "fme" | "gemma.cpp" | "geometry-crafter" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hermes" | "hezar" | "htrflow" | "hunyuan-dit" | "
+
export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("acestep" | "adapter-transformers" | "allennlp" | "anemoi" | "araclip" | "asteroid" | "audiocraft" | "audioseal" | "bagel-mot" | "ben2" | "bertopic" | "big_vision" | "birder" | "birefnet" | "bm25s" | "champ" | "chatterbox" | "chat_tts" | "colpali" | "comet" | "contexttab" | "cosmos" | "cxr-foundation" | "deepforest" | "depth-anything-v2" | "depth-pro" | "derm-foundation" | "describe-anything" | "dia-tts" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "clipscope" | "cosyvoice" | "cotracker" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "fme" | "gemma.cpp" | "geometry-crafter" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hermes" | "hezar" | "htrflow" | "hunyuan-dit" | "imstoucan" | "index-tts" | "infinite-you" | "keras" | "tf-keras" | "keras-hub" | "kimi-audio" | "kronos" | "k2" | "lightning-ir" | "litert-lm" | "lerobot" | "liveportrait" | "llama-cpp-python" | "mini-omni2" | "mindspore" | "magi-1" | "magenta-realtime" | "mamba-ssm" | "mars5-tts" | "matanyone" | "mesh-anything" | "merlin" | "medvae" | "mitie" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "model2vec" | "moshi" | "mtvcraft" | "nemo" | "open-oasis" | "open_clip" | "open-sora" | "outetts" | "paddlenlp" | "PaddleOCR" | "peft" | "perception-encoder" | "phantom-wan" | "pxia" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "renderformer" | "reverb" | "saelens" | "sam2" | "sample-factory" | "sapiens" | "seedvr" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "ssr-speech" | "stable-audio-tools" | "monkeyocr" | "diffusion-single-file" | "seed-story" | "soloaudio" | "stable-baselines3" | "stanza" | "swarmformer" | "f5-tts" | "genmo" | "tencent-song-generation" | "tensorflowtts" | "tabpfn" | "terratorch" | "tic-clip" | "timesfm" | "timm" | "tirex" | "torchgeo" | "transformers" | "transformers.js" | "trellis" | "ultralytics" | "univa" | "uni-3dar" | "unity-sentis" | "sana" | "videoprism" | "vfi-mamba" | "voicecraft" | "vui" | "wham" | "whisperkit" | "yolov10" | "zonos" | "3dtopia-xl")[];
 //# sourceMappingURL=model-libraries.d.ts.map

package/dist/esm/model-libraries.d.ts.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AACjD,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,gCAAgC,CAAC;AAEzE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B
+
{"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AACjD,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,gCAAgC,CAAC;AAEzE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAonCI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,EAA+C,eAAe,EAAE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,6lEAQ1B,CAAC"}

package/dist/esm/model-libraries.js
CHANGED

@@ -421,12 +421,6 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
         repoUrl: "https://github.com/Tencent/HunyuanDiT",
         countDownloads: `path:"pytorch_model_ema.pt" OR path:"pytorch_model_distill.pt"`,
     },
-    "hunyuan3d-2": {
-        prettyLabel: "Hunyuan3D-2",
-        repoName: "Hunyuan3D-2",
-        repoUrl: "https://github.com/Tencent/Hunyuan3D-2",
-        countDownloads: `path_filename:"model_index" OR path_filename:"config"`,
-    },
     imstoucan: {
         prettyLabel: "IMS Toucan",
         repoName: "IMS-Toucan",
@@ -480,6 +474,13 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
         snippets: snippets.kimi_audio,
         filter: false,
     },
+    kronos: {
+        prettyLabel: "KRONOS",
+        repoName: "KRONOS",
+        repoUrl: "https://github.com/mahmoodlab/KRONOS",
+        filter: false,
+        countDownloads: `path_extension:"pt"`,
+    },
     k2: {
         prettyLabel: "K2",
         repoName: "k2",
package/package.json
CHANGED

package/src/model-libraries-snippets.ts
CHANGED
@@ -434,8 +434,63 @@ pipe = DiffusionPipeline.from_pretrained("${get_base_diffusers_model(model)}")
 pipe.load_textual_inversion("${model.id}")`,
 ];
 
+const diffusers_flux_fill = (model: ModelData) => [
+    `import torch
+from diffusers import FluxFillPipeline
+from diffusers.utils import load_image
+
+image = load_image("https://huggingface.co/datasets/diffusers/diffusers-images-docs/resolve/main/cup.png")
+mask = load_image("https://huggingface.co/datasets/diffusers/diffusers-images-docs/resolve/main/cup_mask.png")
+
+pipe = FluxFillPipeline.from_pretrained("${model.id}", torch_dtype=torch.bfloat16).to("cuda")
+image = pipe(
+    prompt="a white paper cup",
+    image=image,
+    mask_image=mask,
+    height=1632,
+    width=1232,
+    guidance_scale=30,
+    num_inference_steps=50,
+    max_sequence_length=512,
+    generator=torch.Generator("cpu").manual_seed(0)
+).images[0]
+image.save(f"flux-fill-dev.png")`,
+];
+
+const diffusers_inpainting = (model: ModelData) => [
+    `import torch
+from diffusers import AutoPipelineForInpainting
+from diffusers.utils import load_image
+
+pipe = AutoPipelineForInpainting.from_pretrained("${model.id}", torch_dtype=torch.float16, variant="fp16").to("cuda")
+
+img_url = "https://raw.githubusercontent.com/CompVis/latent-diffusion/main/data/inpainting_examples/overture-creations-5sI6fQgYIuo.png"
+mask_url = "https://raw.githubusercontent.com/CompVis/latent-diffusion/main/data/inpainting_examples/overture-creations-5sI6fQgYIuo_mask.png"
+
+image = load_image(img_url).resize((1024, 1024))
+mask_image = load_image(mask_url).resize((1024, 1024))
+
+prompt = "a tiger sitting on a park bench"
+generator = torch.Generator(device="cuda").manual_seed(0)
+
+image = pipe(
+    prompt=prompt,
+    image=image,
+    mask_image=mask_image,
+    guidance_scale=8.0,
+    num_inference_steps=20, # steps between 15 and 30 work well for us
+    strength=0.99, # make sure to use \`strength\` below 1.0
+    generator=generator,
+).images[0]`,
+];
+
 export const diffusers = (model: ModelData): string[] => {
-    if (
+    if (
+        model.tags.includes("StableDiffusionInpaintPipeline") ||
+        model.tags.includes("StableDiffusionXLInpaintPipeline")
+    ) {
+        return diffusers_inpainting(model);
+    } else if (model.tags.includes("controlnet")) {
         return diffusers_controlnet(model);
     } else if (model.tags.includes("lora")) {
         if (model.pipeline_tag === "image-to-image") {
@@ -449,6 +504,8 @@ export const diffusers = (model: ModelData): string[] => {
         }
     } else if (model.tags.includes("textual_inversion")) {
         return diffusers_textual_inversion(model);
+    } else if (model.tags.includes("FluxFillPipeline")) {
+        return diffusers_flux_fill(model);
     } else if (model.pipeline_tag === "image-to-video") {
         return diffusers_image_to_video(model);
     } else if (model.pipeline_tag === "image-to-image") {
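For context, a minimal sketch (not part of the published diff) of how the updated dispatcher is reached from the library registry. It assumes the package root re-exports MODEL_LIBRARIES_UI_ELEMENTS and the ModelData type, and it uses a hypothetical model id; only `id` and `tags` matter for the branches shown above.

    import type { ModelData } from "@huggingface/tasks";
    import { MODEL_LIBRARIES_UI_ELEMENTS } from "@huggingface/tasks";

    // Hypothetical model metadata; only `id` and `tags` drive the dispatch above.
    const model = {
        id: "some-org/some-flux-fill-finetune",
        tags: ["diffusers", "FluxFillPipeline"],
    } as unknown as ModelData;

    // The registry entry's `snippets` field points at the diffusers dispatcher.
    // With a "FluxFillPipeline" tag (and no inpainting/controlnet/lora/textual_inversion
    // tags), the new diffusers_flux_fill branch is selected.
    const [snippet] = MODEL_LIBRARIES_UI_ELEMENTS.diffusers.snippets?.(model) ?? [];
    console.log(snippet?.startsWith("import torch")); // true for the FluxFill snippet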
@@ -1373,27 +1430,58 @@ export const transformers = (model: ModelData): string[] => {
     }
     const remote_code_snippet = model.tags.includes(TAG_CUSTOM_CODE) ? ", trust_remote_code=True" : "";
 
-
+    const autoSnippet = [];
     if (info.processor) {
-        const
+        const processorVarName =
            info.processor === "AutoTokenizer"
                ? "tokenizer"
                : info.processor === "AutoFeatureExtractor"
                    ? "extractor"
                    : "processor";
-        autoSnippet
+        autoSnippet.push(
            "# Load model directly",
            `from transformers import ${info.processor}, ${info.auto_model}`,
            "",
-            `${
-            `model = ${info.auto_model}.from_pretrained("${model.id}"` + remote_code_snippet + ")"
-
+            `${processorVarName} = ${info.processor}.from_pretrained("${model.id}"` + remote_code_snippet + ")",
+            `model = ${info.auto_model}.from_pretrained("${model.id}"` + remote_code_snippet + ")"
+        );
+        if (model.tags.includes("conversational")) {
+            if (model.tags.includes("image-text-to-text")) {
+                autoSnippet.push(
+                    "messages = [",
+                    [
+                        " {",
+                        ' "role": "user",',
+                        ' "content": [',
+                        ' {"type": "image", "url": "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/p-blog/candy.JPG"},',
+                        ' {"type": "text", "text": "What animal is on the candy?"}',
+                        " ]",
+                        " },",
+                    ].join("\n"),
+                    "]"
+                );
+            } else {
+                autoSnippet.push("messages = [", ' {"role": "user", "content": "Who are you?"},', "]");
+            }
+            autoSnippet.push(
+                "inputs = ${processorVarName}.apply_chat_template(",
+                " messages,",
+                " add_generation_prompt=True,",
+                " tokenize=True,",
+                " return_dict=True,",
+                ' return_tensors="pt",',
+                ").to(model.device)",
+                "",
+                "outputs = model.generate(**inputs, max_new_tokens=40)",
+                'print(${processorVarName}.decode(outputs[0][inputs["input_ids"].shape[-1]:]))'
+            );
+        }
     } else {
-        autoSnippet
+        autoSnippet.push(
            "# Load model directly",
            `from transformers import ${info.auto_model}`,
-            `model = ${info.auto_model}.from_pretrained("${model.id}"` + remote_code_snippet + ")
-
+            `model = ${info.auto_model}.from_pretrained("${model.id}"` + remote_code_snippet + ', torch_dtype="auto"),'
+        );
     }
 
     if (model.pipeline_tag && LIBRARY_TASK_MAPPING.transformers?.includes(model.pipeline_tag)) {
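The net effect of the change above: autoSnippet is now accumulated as an array of lines and joined once on return, which lets the conversational chat-template block be appended only when the relevant tags are present. A simplified, self-contained sketch of that pattern (illustrative only; not the actual implementation, which also handles processors and remote code):

    // Build the "Load model directly" snippet line by line.
    const autoSnippet: string[] = [];
    autoSnippet.push(
        "# Load model directly",
        "from transformers import AutoModelForCausalLM",
        'model = AutoModelForCausalLM.from_pretrained("org/repo", torch_dtype="auto")',
    );

    // Conversational models get an extra chat-template example appended to the same array.
    const isConversational = true; // stand-in for model.tags.includes("conversational")
    if (isConversational) {
        autoSnippet.push("messages = [", '    {"role": "user", "content": "Who are you?"},', "]");
    }

    // A single join at the end produces the final snippet string.
    console.log(autoSnippet.join("\n"));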
@@ -1437,9 +1525,9 @@ export const transformers = (model: ModelData): string[] => {
            );
        }
 
-        return [pipelineSnippet.join("\n"), autoSnippet];
+        return [pipelineSnippet.join("\n"), autoSnippet.join("\n")];
     }
-    return [autoSnippet];
+    return [autoSnippet.join("\n")];
 };
 
 export const transformersJS = (model: ModelData): string[] => {
@@ -1556,10 +1644,9 @@ image = sana(
 export const videoprism = (model: ModelData): string[] => [
     `# Install from https://github.com/google-deepmind/videoprism
 import jax
-import jax.numpy as jnp
 from videoprism import models as vp
 
-flax_model = vp.
+flax_model = vp.get_model("${model.id}")
 loaded_state = vp.load_pretrained_weights("${model.id}")
 
 @jax.jit
package/src/model-libraries.ts
CHANGED
@@ -465,12 +465,6 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
         repoUrl: "https://github.com/Tencent/HunyuanDiT",
         countDownloads: `path:"pytorch_model_ema.pt" OR path:"pytorch_model_distill.pt"`,
     },
-    "hunyuan3d-2": {
-        prettyLabel: "Hunyuan3D-2",
-        repoName: "Hunyuan3D-2",
-        repoUrl: "https://github.com/Tencent/Hunyuan3D-2",
-        countDownloads: `path_filename:"model_index" OR path_filename:"config"`,
-    },
     imstoucan: {
         prettyLabel: "IMS Toucan",
         repoName: "IMS-Toucan",
@@ -524,6 +518,13 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
         snippets: snippets.kimi_audio,
         filter: false,
     },
+    kronos: {
+        prettyLabel: "KRONOS",
+        repoName: "KRONOS",
+        repoUrl: "https://github.com/mahmoodlab/KRONOS",
+        filter: false,
+        countDownloads: `path_extension:"pt"`,
+    },
     k2: {
         prettyLabel: "K2",
         repoName: "k2",