@huggingface/tasks 0.19.27 → 0.19.29
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commonjs/model-libraries-snippets.d.ts.map +1 -1
- package/dist/commonjs/model-libraries-snippets.js +102 -8
- package/dist/commonjs/model-libraries.d.ts +1 -7
- package/dist/commonjs/model-libraries.d.ts.map +1 -1
- package/dist/commonjs/model-libraries.js +0 -6
- package/dist/esm/model-libraries-snippets.d.ts.map +1 -1
- package/dist/esm/model-libraries-snippets.js +102 -8
- package/dist/esm/model-libraries.d.ts +1 -7
- package/dist/esm/model-libraries.d.ts.map +1 -1
- package/dist/esm/model-libraries.js +0 -6
- package/package.json +1 -1
- package/src/model-libraries-snippets.ts +107 -8
- package/src/model-libraries.ts +0 -6
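The files below all feed the per-library code-snippet registry. For reference, a minimal sketch (not part of this diff) of how the snippets are typically consumed, assuming the package's usual top-level exports (MODEL_LIBRARIES_UI_ELEMENTS, ModelData) and a hypothetical model payload:

import { MODEL_LIBRARIES_UI_ELEMENTS, type ModelData } from "@huggingface/tasks";

// Hypothetical model payload: only the fields the snippet helpers read are filled in.
const model = {
	id: "black-forest-labs/FLUX.1-Fill-dev",
	tags: ["diffusers", "FluxFillPipeline"],
	pipeline_tag: "image-to-image",
} as unknown as ModelData;

// Each registry entry may expose a snippets() function returning ready-to-paste code strings.
const snippets = MODEL_LIBRARIES_UI_ELEMENTS.diffusers.snippets?.(model) ?? [];
console.log(snippets[0]); // with 0.19.29: the FluxFillPipeline example shown in the hunks below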

package/dist/commonjs/model-libraries-snippets.d.ts.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAkBjD,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAIhD,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAaF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAY7C,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAenC,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAsDnC,CAAC;AAEF,eAAO,MAAM,cAAc,QAAO,MAAM,EAcvC,CAAC;AAEF,eAAO,MAAM,iBAAiB,GAAI,OAAO,SAAS,KAAG,MAAM,EA6C1D,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAwBlD,CAAC;AAEF,eAAO,MAAM,eAAe,QAAO,MAAM,EAoBxC,CAAC;AAEF,eAAO,MAAM,GAAG,GAAI,OAAO,SAAS,KAAG,MAAM,EAS5C,CAAC;AAEF,eAAO,MAAM,iBAAiB,GAAI,OAAO,SAAS,KAAG,MAAM,EAY1D,CAAC;
+
{"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAkBjD,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAIhD,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAaF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAY7C,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAenC,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAsDnC,CAAC;AAEF,eAAO,MAAM,cAAc,QAAO,MAAM,EAcvC,CAAC;AAEF,eAAO,MAAM,iBAAiB,GAAI,OAAO,SAAS,KAAG,MAAM,EA6C1D,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAwBlD,CAAC;AAEF,eAAO,MAAM,eAAe,QAAO,MAAM,EAoBxC,CAAC;AAEF,eAAO,MAAM,GAAG,GAAI,OAAO,SAAS,KAAG,MAAM,EAS5C,CAAC;AAEF,eAAO,MAAM,iBAAiB,GAAI,OAAO,SAAS,KAAG,MAAM,EAY1D,CAAC;AAkKF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EA6BlD,CAAC;AAEF,eAAO,MAAM,YAAY,GAAI,OAAO,SAAS,KAAG,MAAM,EAwCrD,CAAC;AAEF,eAAO,MAAM,gBAAgB,GAAI,OAAO,SAAS,KAAG,MAAM,EAgBzD,CAAC;AAEF,eAAO,MAAM,YAAY,GAAI,OAAO,SAAS,KAAG,MAAM,EAmBrD,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAgB/C,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAMlD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EASlD,CAAC;AAIF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAMhD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAgBjD,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAS9C,CAAC;AA4EF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,UAAU,GAAI,OAAO,SAAS,KAAG,MAAM,EA+BnD,CAAC;AAEF,eAAO,MAAM,YAAY,GAAI,OAAO,SAAS,KAAG,MAAM,EA+BrD,CAAC;AAEF,eAAO,MAAM,gBAAgB,GAAI,OAAO,SAAS,KAAG,MAAM,EA4BzD,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAuChD,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAOjD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAIlD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAQlD,CAAC;AAEF,eAAO,MAAM,aAAa,QAAO,MAAM,EAQtC,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAoDlD,CAAC;AAEF,eAAO,MAAM,kBAAkB,GAAI,OAAO,SAAS,KAAG,MAAM,EAgB3D,CAAC;AACF,eAAO,MAAM,WAAW,GAAI,OAAO,SAAS,KAAG,MAAM,EAUpD,CAAC;AAEF,eAAO,MAAM,uBAAuB,GAAI,OAAO,SAAS,KAAG,MAAM,EAehE,CAAC;AAiBF,eAAO,MAAM,cAAc,GAAI,OAAO,SAAS,KAAG,MAAM,EAKvD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,YAAY,GAAI,OAAO,SAAS,KAAG,MAAM,EAKrD,CAAC;AAyBF,eAAO,MAAM,aAAa,GAAI,OAAO,SAAS,KAAG,MAAM,EAOtD,CAAC;AAEF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAI7C,CAAC;AAEF,eAAO,MAAM,OAAO,QAA6B,MAAM,EAQtD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAanC,CAAC;AAsCF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,kBAAkB,GAAI,OAAO,SAAS,KAAG,MAAM,EAmC3D,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EA2B7C,CAAC;AAEF,eAAO,MAAM,aAAa,GAAI,OAAO,SAAS,KAAG,MAAM,EAEtD,CAAC;AASF,eAAO,MAAM,oBAAoB,GAAI,OAAO,SAAS,KAAG,MAAM,EAuC7D,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAU9C,CAAC;AAEF,eA
AO,MAAM,WAAW,GAAI,OAAO,SAAS,KAAG,MAAM,EAIpD,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAK/C,CAAC;AAkBF,eAAO,MAAM,WAAW,GAAI,OAAO,SAAS,KAAG,MAAM,EAkBpD,CAAC;AAEF,eAAO,MAAM,UAAU,GAAI,OAAO,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,YAAY,GAAI,OAAO,SAAS,KAAG,MAAM,EAyGrD,CAAC;AAEF,eAAO,MAAM,cAAc,GAAI,OAAO,SAAS,KAAG,MAAM,EAcvD,CAAC;AAiBF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAiB7C,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,gBAAgB,GAAI,OAAO,SAAS,KAAG,MAAM,EAMzD,CAAC;AAgBF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAEjD,CAAC;AAEF,eAAO,MAAM,MAAM,QAA6B,MAAM,EAMrD,CAAC;AAEF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAkB7C,CAAC;AAEF,eAAO,MAAM,UAAU,GAAI,OAAO,SAAS,KAAG,MAAM,EAWnD,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,UAAU,GAAI,OAAO,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,GAAG,QAAO,MAAM,EAgB5B,CAAC;AAEF,eAAO,MAAM,OAAO,QAAO,MAAM,EAYhC,CAAC;AAEF,eAAO,MAAM,WAAW,GAAI,OAAO,SAAS,KAAG,MAAM,EAiBpD,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAYjD,CAAC;AAEF,eAAO,MAAM,WAAW,GAAI,OAAO,SAAS,KAAG,MAAM,EAKpD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAW9C,CAAC;AAkEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,GAAG,GAAI,OAAO,SAAS,KAAG,MAAM,EAY5C,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAIlD,CAAC;AAEF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAQ7C,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAuBhD,CAAC;AAEF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAI7C,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAI/C,CAAC;AA4BF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,UAAU,GAAI,OAAO,SAAS,KAAG,MAAM,EAUnD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAYnC,CAAC;AAEF,eAAO,MAAM,cAAc,GAAI,OAAO,SAAS,KAAG,MAAM,EAKvD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAiB9C,CAAC"}

package/dist/commonjs/model-libraries-snippets.js
CHANGED

@@ -408,8 +408,60 @@ const diffusers_textual_inversion = (model) => [
 pipe = DiffusionPipeline.from_pretrained("${get_base_diffusers_model(model)}")
 pipe.load_textual_inversion("${model.id}")`,
 ];
+const diffusers_flux_fill = (model) => [
+`import torch
+from diffusers import FluxFillPipeline
+from diffusers.utils import load_image
+
+image = load_image("https://huggingface.co/datasets/diffusers/diffusers-images-docs/resolve/main/cup.png")
+mask = load_image("https://huggingface.co/datasets/diffusers/diffusers-images-docs/resolve/main/cup_mask.png")
+
+pipe = FluxFillPipeline.from_pretrained("${model.id}", torch_dtype=torch.bfloat16).to("cuda")
+image = pipe(
+prompt="a white paper cup",
+image=image,
+mask_image=mask,
+height=1632,
+width=1232,
+guidance_scale=30,
+num_inference_steps=50,
+max_sequence_length=512,
+generator=torch.Generator("cpu").manual_seed(0)
+).images[0]
+image.save(f"flux-fill-dev.png")`,
+];
+const diffusers_inpainting = (model) => [
+`import torch
+from diffusers import AutoPipelineForInpainting
+from diffusers.utils import load_image
+
+pipe = AutoPipelineForInpainting.from_pretrained("${model.id}", torch_dtype=torch.float16, variant="fp16").to("cuda")
+
+img_url = "https://raw.githubusercontent.com/CompVis/latent-diffusion/main/data/inpainting_examples/overture-creations-5sI6fQgYIuo.png"
+mask_url = "https://raw.githubusercontent.com/CompVis/latent-diffusion/main/data/inpainting_examples/overture-creations-5sI6fQgYIuo_mask.png"
+
+image = load_image(img_url).resize((1024, 1024))
+mask_image = load_image(mask_url).resize((1024, 1024))
+
+prompt = "a tiger sitting on a park bench"
+generator = torch.Generator(device="cuda").manual_seed(0)
+
+image = pipe(
+prompt=prompt,
+image=image,
+mask_image=mask_image,
+guidance_scale=8.0,
+num_inference_steps=20, # steps between 15 and 30 work well for us
+strength=0.99, # make sure to use \`strength\` below 1.0
+generator=generator,
+).images[0]`,
+];
 const diffusers = (model) => {
-if (model.tags.includes("
+if (model.tags.includes("StableDiffusionInpaintPipeline") ||
+model.tags.includes("StableDiffusionXLInpaintPipeline")) {
+return diffusers_inpainting(model);
+}
+else if (model.tags.includes("controlnet")) {
 return diffusers_controlnet(model);
 }
 else if (model.tags.includes("lora")) {
@@ -429,6 +481,9 @@ const diffusers = (model) => {
 else if (model.tags.includes("textual_inversion")) {
 return diffusers_textual_inversion(model);
 }
+else if (model.tags.includes("FluxFillPipeline")) {
+return diffusers_flux_fill(model);
+}
 else if (model.pipeline_tag === "image-to-video") {
 return diffusers_image_to_video(model);
 }
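For reference, a small usage sketch (not part of the package) of the dispatch above; diffusers refers to the function defined in this file, and the model object is a hypothetical stand-in carrying only the fields the snippet code reads:

// Hypothetical inpainting checkpoint: the new first branch matches on the pipeline-class tag.
const sdxlInpaint = {
	id: "diffusers/stable-diffusion-xl-1.0-inpainting-0.1",
	tags: ["diffusers", "StableDiffusionXLInpaintPipeline"],
	pipeline_tag: "image-to-image",
};

// Returns the AutoPipelineForInpainting example built by diffusers_inpainting()
// instead of falling through to the generic image-to-image branch.
const [snippet] = diffusers(sdxlInpaint);
console.log(snippet.includes("AutoPipelineForInpainting")); // true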
@@ -928,16 +983,56 @@ const paddlenlp = (model) => {
 }
 };
 exports.paddlenlp = paddlenlp;
-const paddleocr = (model) =>
-
-
-
+const paddleocr = (model) => {
+const mapping = {
+textline_detection: { className: "TextDetection" },
+textline_recognition: { className: "TextRecognition" },
+seal_text_detection: { className: "SealTextDetection" },
+doc_img_unwarping: { className: "TextImageUnwarping" },
+doc_img_orientation_classification: { className: "DocImgOrientationClassification" },
+textline_orientation_classification: { className: "TextLineOrientationClassification" },
+chart_parsing: { className: "ChartParsing" },
+formula_recognition: { className: "FormulaRecognition" },
+layout_detection: { className: "LayoutDetection" },
+table_cells_detection: { className: "TableCellsDetection" },
+wired_table_classification: { className: "TableClassification" },
+table_structure_recognition: { className: "TableStructureRecognition" },
+};
+if (model.tags.includes("doc_vlm")) {
+return [
+`# pip install paddleocr
+from paddleocr import DocVLM
+model = DocVLM(model_name="${model.id}")
+output = model.predict(
+input={"image": "path/to/image.png", "query": "Parsing this image and output the content in Markdown format."},
+batch_size=1
+)
+for res in output:
+res.print()
+res.save_to_img(save_path="./output/")
+res.save_to_json(save_path="./output/res.json")`,
+];
+}
+for (const tag of model.tags) {
+if (tag in mapping) {
+const { className } = mapping[tag];
+return [
+`# pip install paddleocr
+from paddleocr import ${className}
+model = ${className}(model_name="${model.id}")
 output = model.predict(input="path/to/image.png", batch_size=1)
 for res in output:
 res.print()
 res.save_to_img(save_path="./output/")
 res.save_to_json(save_path="./output/res.json")`,
-];
+];
+}
+}
+return [
+`# Please refer to the document for information on how to use the model.
+# https://paddlepaddle.github.io/PaddleOCR/latest/en/version3.x/module_usage/module_overview.html`,
+];
+};
 exports.paddleocr = paddleocr;
 const perception_encoder = (model) => {
 const clip_model = `# Use PE-Core models as CLIP models
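A similar sketch for the rebuilt paddleocr helper; the model id and tags are hypothetical:

// The "doc_vlm" tag selects the DocVLM branch above; other recognized tags go through the mapping table.
const docVlm = { id: "PaddlePaddle/PP-DocBee-2B", tags: ["doc_vlm"] };
const [ocrSnippet] = paddleocr(docVlm);
console.log(ocrSnippet.includes('DocVLM(model_name="PaddlePaddle/PP-DocBee-2B")')); // true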
@@ -1502,10 +1597,9 @@ exports.sana = sana;
 const videoprism = (model) => [
 `# Install from https://github.com/google-deepmind/videoprism
 import jax
-import jax.numpy as jnp
 from videoprism import models as vp
 
-flax_model = vp.
+flax_model = vp.get_model("${model.id}")
 loaded_state = vp.load_pretrained_weights("${model.id}")
 
 @jax.jit

package/dist/commonjs/model-libraries.d.ts
CHANGED

@@ -460,12 +460,6 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
 repoUrl: string;
 countDownloads: string;
 };
-"hunyuan3d-2": {
-prettyLabel: string;
-repoName: string;
-repoUrl: string;
-countDownloads: string;
-};
 imstoucan: {
 prettyLabel: string;
 repoName: string;
@@ -1196,5 +1190,5 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
 };
 export type ModelLibraryKey = keyof typeof MODEL_LIBRARIES_UI_ELEMENTS;
 export declare const ALL_MODEL_LIBRARY_KEYS: ModelLibraryKey[];
-
export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("acestep" | "adapter-transformers" | "allennlp" | "anemoi" | "araclip" | "asteroid" | "audiocraft" | "audioseal" | "bagel-mot" | "ben2" | "bertopic" | "big_vision" | "birder" | "birefnet" | "bm25s" | "champ" | "chatterbox" | "chat_tts" | "colpali" | "comet" | "contexttab" | "cosmos" | "cxr-foundation" | "deepforest" | "depth-anything-v2" | "depth-pro" | "derm-foundation" | "describe-anything" | "dia-tts" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "clipscope" | "cosyvoice" | "cotracker" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "fme" | "gemma.cpp" | "geometry-crafter" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hermes" | "hezar" | "htrflow" | "hunyuan-dit" | "
+
export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("acestep" | "adapter-transformers" | "allennlp" | "anemoi" | "araclip" | "asteroid" | "audiocraft" | "audioseal" | "bagel-mot" | "ben2" | "bertopic" | "big_vision" | "birder" | "birefnet" | "bm25s" | "champ" | "chatterbox" | "chat_tts" | "colpali" | "comet" | "contexttab" | "cosmos" | "cxr-foundation" | "deepforest" | "depth-anything-v2" | "depth-pro" | "derm-foundation" | "describe-anything" | "dia-tts" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "clipscope" | "cosyvoice" | "cotracker" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "fme" | "gemma.cpp" | "geometry-crafter" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hermes" | "hezar" | "htrflow" | "hunyuan-dit" | "imstoucan" | "index-tts" | "infinite-you" | "keras" | "tf-keras" | "keras-hub" | "kimi-audio" | "kronos" | "k2" | "lightning-ir" | "litert-lm" | "lerobot" | "liveportrait" | "llama-cpp-python" | "mini-omni2" | "mindspore" | "magi-1" | "magenta-realtime" | "mamba-ssm" | "mars5-tts" | "matanyone" | "mesh-anything" | "merlin" | "medvae" | "mitie" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "model2vec" | "moshi" | "mtvcraft" | "nemo" | "open-oasis" | "open_clip" | "open-sora" | "outetts" | "paddlenlp" | "PaddleOCR" | "peft" | "perception-encoder" | "phantom-wan" | "pxia" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "renderformer" | "reverb" | "saelens" | "sam2" | "sample-factory" | "sapiens" | "seedvr" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "ssr-speech" | "stable-audio-tools" | "monkeyocr" | "diffusion-single-file" | "seed-story" | "soloaudio" | "stable-baselines3" | "stanza" | "swarmformer" | "f5-tts" | "genmo" | "tencent-song-generation" | "tensorflowtts" | "tabpfn" | "terratorch" | "tic-clip" | "timesfm" | "timm" | "tirex" | "torchgeo" | "transformers" | "transformers.js" | "trellis" | "ultralytics" | "univa" | "uni-3dar" | "unity-sentis" | "sana" | "videoprism" | "vfi-mamba" | "voicecraft" | "vui" | "wham" | "whisperkit" | "yolov10" | "zonos" | "3dtopia-xl")[];
 //# sourceMappingURL=model-libraries.d.ts.map

package/dist/commonjs/model-libraries.d.ts.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AACjD,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,gCAAgC,CAAC;AAEzE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B
+
{"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AACjD,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,gCAAgC,CAAC;AAEzE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAonCI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,EAA+C,eAAe,EAAE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,6lEAQ1B,CAAC"}

package/dist/commonjs/model-libraries.js
CHANGED

@@ -457,12 +457,6 @@ exports.MODEL_LIBRARIES_UI_ELEMENTS = {
 repoUrl: "https://github.com/Tencent/HunyuanDiT",
 countDownloads: `path:"pytorch_model_ema.pt" OR path:"pytorch_model_distill.pt"`,
 },
-"hunyuan3d-2": {
-prettyLabel: "Hunyuan3D-2",
-repoName: "Hunyuan3D-2",
-repoUrl: "https://github.com/Tencent/Hunyuan3D-2",
-countDownloads: `path_filename:"model_index" OR path_filename:"config"`,
-},
 imstoucan: {
 prettyLabel: "IMS Toucan",
 repoName: "IMS-Toucan",

package/dist/esm/model-libraries-snippets.d.ts.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAkBjD,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAIhD,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAaF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAY7C,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAenC,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAsDnC,CAAC;AAEF,eAAO,MAAM,cAAc,QAAO,MAAM,EAcvC,CAAC;AAEF,eAAO,MAAM,iBAAiB,GAAI,OAAO,SAAS,KAAG,MAAM,EA6C1D,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAwBlD,CAAC;AAEF,eAAO,MAAM,eAAe,QAAO,MAAM,EAoBxC,CAAC;AAEF,eAAO,MAAM,GAAG,GAAI,OAAO,SAAS,KAAG,MAAM,EAS5C,CAAC;AAEF,eAAO,MAAM,iBAAiB,GAAI,OAAO,SAAS,KAAG,MAAM,EAY1D,CAAC;
+
{"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAkBjD,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAIhD,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAaF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAY7C,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAenC,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAsDnC,CAAC;AAEF,eAAO,MAAM,cAAc,QAAO,MAAM,EAcvC,CAAC;AAEF,eAAO,MAAM,iBAAiB,GAAI,OAAO,SAAS,KAAG,MAAM,EA6C1D,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAwBlD,CAAC;AAEF,eAAO,MAAM,eAAe,QAAO,MAAM,EAoBxC,CAAC;AAEF,eAAO,MAAM,GAAG,GAAI,OAAO,SAAS,KAAG,MAAM,EAS5C,CAAC;AAEF,eAAO,MAAM,iBAAiB,GAAI,OAAO,SAAS,KAAG,MAAM,EAY1D,CAAC;AAkKF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EA6BlD,CAAC;AAEF,eAAO,MAAM,YAAY,GAAI,OAAO,SAAS,KAAG,MAAM,EAwCrD,CAAC;AAEF,eAAO,MAAM,gBAAgB,GAAI,OAAO,SAAS,KAAG,MAAM,EAgBzD,CAAC;AAEF,eAAO,MAAM,YAAY,GAAI,OAAO,SAAS,KAAG,MAAM,EAmBrD,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAgB/C,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAMlD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EASlD,CAAC;AAIF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAMhD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAgBjD,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAS9C,CAAC;AA4EF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,UAAU,GAAI,OAAO,SAAS,KAAG,MAAM,EA+BnD,CAAC;AAEF,eAAO,MAAM,YAAY,GAAI,OAAO,SAAS,KAAG,MAAM,EA+BrD,CAAC;AAEF,eAAO,MAAM,gBAAgB,GAAI,OAAO,SAAS,KAAG,MAAM,EA4BzD,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAuChD,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAOjD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAIlD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAQlD,CAAC;AAEF,eAAO,MAAM,aAAa,QAAO,MAAM,EAQtC,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAoDlD,CAAC;AAEF,eAAO,MAAM,kBAAkB,GAAI,OAAO,SAAS,KAAG,MAAM,EAgB3D,CAAC;AACF,eAAO,MAAM,WAAW,GAAI,OAAO,SAAS,KAAG,MAAM,EAUpD,CAAC;AAEF,eAAO,MAAM,uBAAuB,GAAI,OAAO,SAAS,KAAG,MAAM,EAehE,CAAC;AAiBF,eAAO,MAAM,cAAc,GAAI,OAAO,SAAS,KAAG,MAAM,EAKvD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,YAAY,GAAI,OAAO,SAAS,KAAG,MAAM,EAKrD,CAAC;AAyBF,eAAO,MAAM,aAAa,GAAI,OAAO,SAAS,KAAG,MAAM,EAOtD,CAAC;AAEF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAI7C,CAAC;AAEF,eAAO,MAAM,OAAO,QAA6B,MAAM,EAQtD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAanC,CAAC;AAsCF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,kBAAkB,GAAI,OAAO,SAAS,KAAG,MAAM,EAmC3D,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EA2B7C,CAAC;AAEF,eAAO,MAAM,aAAa,GAAI,OAAO,SAAS,KAAG,MAAM,EAEtD,CAAC;AASF,eAAO,MAAM,oBAAoB,GAAI,OAAO,SAAS,KAAG,MAAM,EAuC7D,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAU9C,CAAC;AAEF,eA
AO,MAAM,WAAW,GAAI,OAAO,SAAS,KAAG,MAAM,EAIpD,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAK/C,CAAC;AAkBF,eAAO,MAAM,WAAW,GAAI,OAAO,SAAS,KAAG,MAAM,EAkBpD,CAAC;AAEF,eAAO,MAAM,UAAU,GAAI,OAAO,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,YAAY,GAAI,OAAO,SAAS,KAAG,MAAM,EAyGrD,CAAC;AAEF,eAAO,MAAM,cAAc,GAAI,OAAO,SAAS,KAAG,MAAM,EAcvD,CAAC;AAiBF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAiB7C,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,gBAAgB,GAAI,OAAO,SAAS,KAAG,MAAM,EAMzD,CAAC;AAgBF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAEjD,CAAC;AAEF,eAAO,MAAM,MAAM,QAA6B,MAAM,EAMrD,CAAC;AAEF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAkB7C,CAAC;AAEF,eAAO,MAAM,UAAU,GAAI,OAAO,SAAS,KAAG,MAAM,EAWnD,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,UAAU,GAAI,OAAO,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,GAAG,QAAO,MAAM,EAgB5B,CAAC;AAEF,eAAO,MAAM,OAAO,QAAO,MAAM,EAYhC,CAAC;AAEF,eAAO,MAAM,WAAW,GAAI,OAAO,SAAS,KAAG,MAAM,EAiBpD,CAAC;AAEF,eAAO,MAAM,QAAQ,GAAI,OAAO,SAAS,KAAG,MAAM,EAYjD,CAAC;AAEF,eAAO,MAAM,WAAW,GAAI,OAAO,SAAS,KAAG,MAAM,EAKpD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAW9C,CAAC;AAkEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,GAAG,GAAI,OAAO,SAAS,KAAG,MAAM,EAY5C,CAAC;AAEF,eAAO,MAAM,SAAS,GAAI,OAAO,SAAS,KAAG,MAAM,EAIlD,CAAC;AAEF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAQ7C,CAAC;AAEF,eAAO,MAAM,OAAO,GAAI,OAAO,SAAS,KAAG,MAAM,EAuBhD,CAAC;AAEF,eAAO,MAAM,IAAI,GAAI,OAAO,SAAS,KAAG,MAAM,EAI7C,CAAC;AAEF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAI/C,CAAC;AA4BF,eAAO,MAAM,MAAM,GAAI,OAAO,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,UAAU,GAAI,OAAO,SAAS,KAAG,MAAM,EAUnD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAYnC,CAAC;AAEF,eAAO,MAAM,cAAc,GAAI,OAAO,SAAS,KAAG,MAAM,EAKvD,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,KAAK,GAAI,OAAO,SAAS,KAAG,MAAM,EAiB9C,CAAC"}

package/dist/esm/model-libraries-snippets.js
CHANGED

@@ -388,8 +388,60 @@ const diffusers_textual_inversion = (model) => [
 pipe = DiffusionPipeline.from_pretrained("${get_base_diffusers_model(model)}")
 pipe.load_textual_inversion("${model.id}")`,
 ];
+const diffusers_flux_fill = (model) => [
+`import torch
+from diffusers import FluxFillPipeline
+from diffusers.utils import load_image
+
+image = load_image("https://huggingface.co/datasets/diffusers/diffusers-images-docs/resolve/main/cup.png")
+mask = load_image("https://huggingface.co/datasets/diffusers/diffusers-images-docs/resolve/main/cup_mask.png")
+
+pipe = FluxFillPipeline.from_pretrained("${model.id}", torch_dtype=torch.bfloat16).to("cuda")
+image = pipe(
+prompt="a white paper cup",
+image=image,
+mask_image=mask,
+height=1632,
+width=1232,
+guidance_scale=30,
+num_inference_steps=50,
+max_sequence_length=512,
+generator=torch.Generator("cpu").manual_seed(0)
+).images[0]
+image.save(f"flux-fill-dev.png")`,
+];
+const diffusers_inpainting = (model) => [
+`import torch
+from diffusers import AutoPipelineForInpainting
+from diffusers.utils import load_image
+
+pipe = AutoPipelineForInpainting.from_pretrained("${model.id}", torch_dtype=torch.float16, variant="fp16").to("cuda")
+
+img_url = "https://raw.githubusercontent.com/CompVis/latent-diffusion/main/data/inpainting_examples/overture-creations-5sI6fQgYIuo.png"
+mask_url = "https://raw.githubusercontent.com/CompVis/latent-diffusion/main/data/inpainting_examples/overture-creations-5sI6fQgYIuo_mask.png"
+
+image = load_image(img_url).resize((1024, 1024))
+mask_image = load_image(mask_url).resize((1024, 1024))
+
+prompt = "a tiger sitting on a park bench"
+generator = torch.Generator(device="cuda").manual_seed(0)
+
+image = pipe(
+prompt=prompt,
+image=image,
+mask_image=mask_image,
+guidance_scale=8.0,
+num_inference_steps=20, # steps between 15 and 30 work well for us
+strength=0.99, # make sure to use \`strength\` below 1.0
+generator=generator,
+).images[0]`,
+];
 export const diffusers = (model) => {
-if (model.tags.includes("
+if (model.tags.includes("StableDiffusionInpaintPipeline") ||
+model.tags.includes("StableDiffusionXLInpaintPipeline")) {
+return diffusers_inpainting(model);
+}
+else if (model.tags.includes("controlnet")) {
 return diffusers_controlnet(model);
 }
 else if (model.tags.includes("lora")) {
@@ -409,6 +461,9 @@ export const diffusers = (model) => {
 else if (model.tags.includes("textual_inversion")) {
 return diffusers_textual_inversion(model);
 }
+else if (model.tags.includes("FluxFillPipeline")) {
+return diffusers_flux_fill(model);
+}
 else if (model.pipeline_tag === "image-to-video") {
 return diffusers_image_to_video(model);
 }
@@ -882,16 +937,56 @@ export const paddlenlp = (model) => {
 ];
 }
 };
-export const paddleocr = (model) =>
-
-
-
+export const paddleocr = (model) => {
+const mapping = {
+textline_detection: { className: "TextDetection" },
+textline_recognition: { className: "TextRecognition" },
+seal_text_detection: { className: "SealTextDetection" },
+doc_img_unwarping: { className: "TextImageUnwarping" },
+doc_img_orientation_classification: { className: "DocImgOrientationClassification" },
+textline_orientation_classification: { className: "TextLineOrientationClassification" },
+chart_parsing: { className: "ChartParsing" },
+formula_recognition: { className: "FormulaRecognition" },
+layout_detection: { className: "LayoutDetection" },
+table_cells_detection: { className: "TableCellsDetection" },
+wired_table_classification: { className: "TableClassification" },
+table_structure_recognition: { className: "TableStructureRecognition" },
+};
+if (model.tags.includes("doc_vlm")) {
+return [
+`# pip install paddleocr
+from paddleocr import DocVLM
+model = DocVLM(model_name="${model.id}")
+output = model.predict(
+input={"image": "path/to/image.png", "query": "Parsing this image and output the content in Markdown format."},
+batch_size=1
+)
+for res in output:
+res.print()
+res.save_to_img(save_path="./output/")
+res.save_to_json(save_path="./output/res.json")`,
+];
+}
+for (const tag of model.tags) {
+if (tag in mapping) {
+const { className } = mapping[tag];
+return [
+`# pip install paddleocr
+from paddleocr import ${className}
+model = ${className}(model_name="${model.id}")
 output = model.predict(input="path/to/image.png", batch_size=1)
 for res in output:
 res.print()
 res.save_to_img(save_path="./output/")
 res.save_to_json(save_path="./output/res.json")`,
-];
+];
+}
+}
+return [
+`# Please refer to the document for information on how to use the model.
+# https://paddlepaddle.github.io/PaddleOCR/latest/en/version3.x/module_usage/module_overview.html`,
+];
+};
 export const perception_encoder = (model) => {
 const clip_model = `# Use PE-Core models as CLIP models
 import core.vision_encoder.pe as pe
@@ -1425,10 +1520,9 @@ image = sana(
 export const videoprism = (model) => [
 `# Install from https://github.com/google-deepmind/videoprism
 import jax
-import jax.numpy as jnp
 from videoprism import models as vp
 
-flax_model = vp.
+flax_model = vp.get_model("${model.id}")
 loaded_state = vp.load_pretrained_weights("${model.id}")
 
 @jax.jit

package/dist/esm/model-libraries.d.ts
CHANGED

@@ -460,12 +460,6 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
 repoUrl: string;
 countDownloads: string;
 };
-"hunyuan3d-2": {
-prettyLabel: string;
-repoName: string;
-repoUrl: string;
-countDownloads: string;
-};
 imstoucan: {
 prettyLabel: string;
 repoName: string;
@@ -1196,5 +1190,5 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
 };
 export type ModelLibraryKey = keyof typeof MODEL_LIBRARIES_UI_ELEMENTS;
 export declare const ALL_MODEL_LIBRARY_KEYS: ModelLibraryKey[];
-
export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("acestep" | "adapter-transformers" | "allennlp" | "anemoi" | "araclip" | "asteroid" | "audiocraft" | "audioseal" | "bagel-mot" | "ben2" | "bertopic" | "big_vision" | "birder" | "birefnet" | "bm25s" | "champ" | "chatterbox" | "chat_tts" | "colpali" | "comet" | "contexttab" | "cosmos" | "cxr-foundation" | "deepforest" | "depth-anything-v2" | "depth-pro" | "derm-foundation" | "describe-anything" | "dia-tts" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "clipscope" | "cosyvoice" | "cotracker" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "fme" | "gemma.cpp" | "geometry-crafter" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hermes" | "hezar" | "htrflow" | "hunyuan-dit" | "
+
export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("acestep" | "adapter-transformers" | "allennlp" | "anemoi" | "araclip" | "asteroid" | "audiocraft" | "audioseal" | "bagel-mot" | "ben2" | "bertopic" | "big_vision" | "birder" | "birefnet" | "bm25s" | "champ" | "chatterbox" | "chat_tts" | "colpali" | "comet" | "contexttab" | "cosmos" | "cxr-foundation" | "deepforest" | "depth-anything-v2" | "depth-pro" | "derm-foundation" | "describe-anything" | "dia-tts" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "clipscope" | "cosyvoice" | "cotracker" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "fme" | "gemma.cpp" | "geometry-crafter" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hermes" | "hezar" | "htrflow" | "hunyuan-dit" | "imstoucan" | "index-tts" | "infinite-you" | "keras" | "tf-keras" | "keras-hub" | "kimi-audio" | "kronos" | "k2" | "lightning-ir" | "litert-lm" | "lerobot" | "liveportrait" | "llama-cpp-python" | "mini-omni2" | "mindspore" | "magi-1" | "magenta-realtime" | "mamba-ssm" | "mars5-tts" | "matanyone" | "mesh-anything" | "merlin" | "medvae" | "mitie" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "model2vec" | "moshi" | "mtvcraft" | "nemo" | "open-oasis" | "open_clip" | "open-sora" | "outetts" | "paddlenlp" | "PaddleOCR" | "peft" | "perception-encoder" | "phantom-wan" | "pxia" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "renderformer" | "reverb" | "saelens" | "sam2" | "sample-factory" | "sapiens" | "seedvr" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "ssr-speech" | "stable-audio-tools" | "monkeyocr" | "diffusion-single-file" | "seed-story" | "soloaudio" | "stable-baselines3" | "stanza" | "swarmformer" | "f5-tts" | "genmo" | "tencent-song-generation" | "tensorflowtts" | "tabpfn" | "terratorch" | "tic-clip" | "timesfm" | "timm" | "tirex" | "torchgeo" | "transformers" | "transformers.js" | "trellis" | "ultralytics" | "univa" | "uni-3dar" | "unity-sentis" | "sana" | "videoprism" | "vfi-mamba" | "voicecraft" | "vui" | "wham" | "whisperkit" | "yolov10" | "zonos" | "3dtopia-xl")[];
 //# sourceMappingURL=model-libraries.d.ts.map

package/dist/esm/model-libraries.d.ts.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AACjD,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,gCAAgC,CAAC;AAEzE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B
+
{"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AACjD,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,gCAAgC,CAAC;AAEzE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAonCI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,EAA+C,eAAe,EAAE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,6lEAQ1B,CAAC"}

package/dist/esm/model-libraries.js
CHANGED

@@ -421,12 +421,6 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
 repoUrl: "https://github.com/Tencent/HunyuanDiT",
 countDownloads: `path:"pytorch_model_ema.pt" OR path:"pytorch_model_distill.pt"`,
 },
-"hunyuan3d-2": {
-prettyLabel: "Hunyuan3D-2",
-repoName: "Hunyuan3D-2",
-repoUrl: "https://github.com/Tencent/Hunyuan3D-2",
-countDownloads: `path_filename:"model_index" OR path_filename:"config"`,
-},
 imstoucan: {
 prettyLabel: "IMS Toucan",
 repoName: "IMS-Toucan",
package/package.json
CHANGED

package/src/model-libraries-snippets.ts
CHANGED
@@ -434,8 +434,63 @@ pipe = DiffusionPipeline.from_pretrained("${get_base_diffusers_model(model)}")
 pipe.load_textual_inversion("${model.id}")`,
 ];
 
+const diffusers_flux_fill = (model: ModelData) => [
+`import torch
+from diffusers import FluxFillPipeline
+from diffusers.utils import load_image
+
+image = load_image("https://huggingface.co/datasets/diffusers/diffusers-images-docs/resolve/main/cup.png")
+mask = load_image("https://huggingface.co/datasets/diffusers/diffusers-images-docs/resolve/main/cup_mask.png")
+
+pipe = FluxFillPipeline.from_pretrained("${model.id}", torch_dtype=torch.bfloat16).to("cuda")
+image = pipe(
+prompt="a white paper cup",
+image=image,
+mask_image=mask,
+height=1632,
+width=1232,
+guidance_scale=30,
+num_inference_steps=50,
+max_sequence_length=512,
+generator=torch.Generator("cpu").manual_seed(0)
+).images[0]
+image.save(f"flux-fill-dev.png")`,
+];
+
+const diffusers_inpainting = (model: ModelData) => [
+`import torch
+from diffusers import AutoPipelineForInpainting
+from diffusers.utils import load_image
+
+pipe = AutoPipelineForInpainting.from_pretrained("${model.id}", torch_dtype=torch.float16, variant="fp16").to("cuda")
+
+img_url = "https://raw.githubusercontent.com/CompVis/latent-diffusion/main/data/inpainting_examples/overture-creations-5sI6fQgYIuo.png"
+mask_url = "https://raw.githubusercontent.com/CompVis/latent-diffusion/main/data/inpainting_examples/overture-creations-5sI6fQgYIuo_mask.png"
+
+image = load_image(img_url).resize((1024, 1024))
+mask_image = load_image(mask_url).resize((1024, 1024))
+
+prompt = "a tiger sitting on a park bench"
+generator = torch.Generator(device="cuda").manual_seed(0)
+
+image = pipe(
+prompt=prompt,
+image=image,
+mask_image=mask_image,
+guidance_scale=8.0,
+num_inference_steps=20, # steps between 15 and 30 work well for us
+strength=0.99, # make sure to use \`strength\` below 1.0
+generator=generator,
+).images[0]`,
+];
+
 export const diffusers = (model: ModelData): string[] => {
-if (
+if (
+model.tags.includes("StableDiffusionInpaintPipeline") ||
+model.tags.includes("StableDiffusionXLInpaintPipeline")
+) {
+return diffusers_inpainting(model);
+} else if (model.tags.includes("controlnet")) {
 return diffusers_controlnet(model);
 } else if (model.tags.includes("lora")) {
 if (model.pipeline_tag === "image-to-image") {
@@ -449,6 +504,8 @@ export const diffusers = (model: ModelData): string[] => {
 }
 } else if (model.tags.includes("textual_inversion")) {
 return diffusers_textual_inversion(model);
+} else if (model.tags.includes("FluxFillPipeline")) {
+return diffusers_flux_fill(model);
 } else if (model.pipeline_tag === "image-to-video") {
 return diffusers_image_to_video(model);
 } else if (model.pipeline_tag === "image-to-image") {
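A minimal sketch of the new FluxFillPipeline branch in the TypeScript source, using hypothetical ModelData-shaped literals (only the fields the function reads are populated):

// The "FluxFillPipeline" tag now routes to diffusers_flux_fill().
const fluxFill = {
	id: "black-forest-labs/FLUX.1-Fill-dev",
	tags: ["diffusers", "FluxFillPipeline"],
	pipeline_tag: "image-to-image",
} as unknown as ModelData;
console.log(diffusers(fluxFill)[0].includes("FluxFillPipeline.from_pretrained")); // true

// Branch order still matters: tags checked earlier (e.g. "lora") take precedence,
// so a model carrying both tags does not get the FluxFill snippet.
const fluxFillLora = {
	id: "user/flux-fill-lora",
	tags: ["diffusers", "lora", "FluxFillPipeline"],
	pipeline_tag: "image-to-image",
} as unknown as ModelData;
console.log(diffusers(fluxFillLora)[0].includes("FluxFillPipeline")); // false, the earlier "lora" branch wins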
@@ -961,16 +1018,59 @@ export const paddlenlp = (model: ModelData): string[] => {
 }
 };
 
-export const paddleocr = (model: ModelData): string[] =>
-
-
-
+export const paddleocr = (model: ModelData): string[] => {
+const mapping: Record<string, { className: string }> = {
+textline_detection: { className: "TextDetection" },
+textline_recognition: { className: "TextRecognition" },
+seal_text_detection: { className: "SealTextDetection" },
+doc_img_unwarping: { className: "TextImageUnwarping" },
+doc_img_orientation_classification: { className: "DocImgOrientationClassification" },
+textline_orientation_classification: { className: "TextLineOrientationClassification" },
+chart_parsing: { className: "ChartParsing" },
+formula_recognition: { className: "FormulaRecognition" },
+layout_detection: { className: "LayoutDetection" },
+table_cells_detection: { className: "TableCellsDetection" },
+wired_table_classification: { className: "TableClassification" },
+table_structure_recognition: { className: "TableStructureRecognition" },
+};
+
+if (model.tags.includes("doc_vlm")) {
+return [
+`# pip install paddleocr
+from paddleocr import DocVLM
+model = DocVLM(model_name="${model.id}")
+output = model.predict(
+input={"image": "path/to/image.png", "query": "Parsing this image and output the content in Markdown format."},
+batch_size=1
+)
+for res in output:
+res.print()
+res.save_to_img(save_path="./output/")
+res.save_to_json(save_path="./output/res.json")`,
+];
+}
+
+for (const tag of model.tags) {
+if (tag in mapping) {
+const { className } = mapping[tag];
+return [
+`# pip install paddleocr
+from paddleocr import ${className}
+model = ${className}(model_name="${model.id}")
 output = model.predict(input="path/to/image.png", batch_size=1)
 for res in output:
 res.print()
 res.save_to_img(save_path="./output/")
 res.save_to_json(save_path="./output/res.json")`,
-];
+];
+}
+}
+
+return [
+`# Please refer to the document for information on how to use the model.
+# https://paddlepaddle.github.io/PaddleOCR/latest/en/version3.x/module_usage/module_overview.html`,
+];
+};
 
 export const perception_encoder = (model: ModelData): string[] => {
 const clip_model = `# Use PE-Core models as CLIP models
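And a sketch of how the tag-to-class mapping resolves, with hypothetical model objects; unrecognized tags fall back to the documentation pointer:

// "layout_detection" maps to the LayoutDetection class, so the generated Python
// imports and instantiates that class with the model id.
const layoutModel = {
	id: "PaddlePaddle/PP-DocLayout-L",
	tags: ["layout_detection"],
} as unknown as ModelData;
console.log(paddleocr(layoutModel)[0].includes("from paddleocr import LayoutDetection")); // true

// No recognized tag: only the pointer to the PaddleOCR module overview is returned.
const unknownOcr = { id: "user/some-paddleocr-model", tags: [] } as unknown as ModelData;
console.log(paddleocr(unknownOcr)[0].startsWith("# Please refer to the document")); // true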
@@ -1587,10 +1687,9 @@ image = sana(
 export const videoprism = (model: ModelData): string[] => [
 `# Install from https://github.com/google-deepmind/videoprism
 import jax
-import jax.numpy as jnp
 from videoprism import models as vp
 
-flax_model = vp.
+flax_model = vp.get_model("${model.id}")
 loaded_state = vp.load_pretrained_weights("${model.id}")
 
 @jax.jit
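The videoprism snippet now loads the Flax model by repo id instead of the previously truncated vp. expression; a quick sketch with a placeholder id:

// Placeholder id: any VideoPrism checkpoint id would be substituted the same way.
const vpModel = { id: "google/videoprism-base" } as unknown as ModelData;
console.log(videoprism(vpModel)[0].includes('vp.get_model("google/videoprism-base")')); // true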
package/src/model-libraries.ts
CHANGED
@@ -465,12 +465,6 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
 repoUrl: "https://github.com/Tencent/HunyuanDiT",
 countDownloads: `path:"pytorch_model_ema.pt" OR path:"pytorch_model_distill.pt"`,
 },
-"hunyuan3d-2": {
-prettyLabel: "Hunyuan3D-2",
-repoName: "Hunyuan3D-2",
-repoUrl: "https://github.com/Tencent/Hunyuan3D-2",
-countDownloads: `path_filename:"model_index" OR path_filename:"config"`,
-},
 imstoucan: {
 prettyLabel: "IMS Toucan",
 repoName: "IMS-Toucan",