optimum-rbln 0.7.3a5__tar.gz → 0.7.3.post1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- optimum_rbln-0.7.3.post1/.github/version.yaml +1 -0
- optimum_rbln-0.7.3.post1/.github/workflows/deploy-on-tag.yaml +34 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/.github/workflows/rbln_optimum_pytest.yaml +24 -4
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/PKG-INFO +1 -1
- optimum_rbln-0.7.3.post1/examples/kandinsky2_2/run_kandinsky2_2.py +57 -0
- optimum_rbln-0.7.3.post1/examples/kandinsky2_2/run_kandinsky2_2_combined.py +31 -0
- optimum_rbln-0.7.3.post1/examples/kandinsky2_2/run_kandinsky2_2_img2img.py +61 -0
- optimum_rbln-0.7.3.post1/examples/kandinsky2_2/run_kandinsky2_2_img2img_combined.py +46 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/kandinsky2_2/run_kandinsky2_2_inpaint.py +6 -2
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/kandinsky2_2/run_kandinsky2_2_inpaint_combined.py +13 -8
- optimum_rbln-0.7.3.post1/examples/kandinsky2_2/run_kandinsky2_2_prior_interpolate.py +64 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/__init__.py +8 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/__version__.py +2 -2
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/__init__.py +8 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/modeling_diffusers.py +99 -111
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/models/autoencoders/vq_model.py +11 -3
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/models/unets/unet_2d_condition.py +15 -8
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/__init__.py +8 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/__init__.py +7 -1
- optimum_rbln-0.7.3.post1/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2.py +25 -0
- optimum_rbln-0.7.3.post1/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_combined.py +189 -0
- optimum_rbln-0.7.3.post1/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_img2img.py +25 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_inpaint.py +3 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/modeling_base.py +0 -11
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/bart/modeling_bart.py +2 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/decoderonly/decoderonly_architecture.py +12 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/decoderonly/modeling_decoderonly.py +40 -41
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/seq2seq/modeling_seq2seq.py +19 -13
- optimum_rbln-0.7.3.post1/src/optimum/rbln/transformers/models/t5/modeling_t5.py +210 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/utils/import_utils.py +7 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/tests/test_diffusers.py +39 -10
- optimum_rbln-0.7.3a5/.github/version.yaml +0 -1
- optimum_rbln-0.7.3a5/.github/workflows/deploy-on-tag.yaml +0 -14
- optimum_rbln-0.7.3a5/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_combined.py +0 -83
- optimum_rbln-0.7.3a5/src/optimum/rbln/transformers/models/t5/modeling_t5.py +0 -417
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/.github/ISSUE_TEMPLATE/config.yml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/.github/ISSUE_TEMPLATE/model_request.md +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/.github/pull_request_template.md +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/.github/scripts/auto_code_review.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/.github/scripts/validate_pr_checklist.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/.github/workflows/auto_code_review.yml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/.github/workflows/check_code_quality.yml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/.github/workflows/deploy.yaml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/.github/workflows/pr-title-check.yaml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/.github/workflows/pr_checklist_validator.yml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/.github/workflows/rbln_check_compiler.yaml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/.github/workflows/rbln_dispatch_pytest.yaml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/.github/workflows/rbln_optimum_inference_test.yaml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/.github/workflows/rbln_scheduled_test.yaml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/.github/workflows/rbln_trigger_on_pr.yaml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/.gitignore +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/CODE_OF_CONDUCT.md +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/CONTRIBUTING.md +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/LICENSE +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/README.md +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/assets/rbln_logo.png +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/audio-classification/run_ast_audio_classification.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/depth-estimation/run_dpt.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/image-classification/run_image_classification.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/image-classification/run_vit_image_classification.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/image-to-text/run_llava_next_image_to_text.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/question-answering/run_question_answering.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/speech-recognition/run_wav2vec2.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/speech-recognition/run_whisper.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/stable-diffusion/run_stable_diffusion.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/stable-diffusion/run_stable_diffusion_controlnet.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/stable-diffusion/run_stable_diffusion_img2img.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/stable-diffusion/run_stable_diffusion_img2img_controlnet.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/stable-diffusion/run_stable_diffusion_inpaint.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/stable-diffusion/run_stable_diffusion_lora.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/stable-diffusion/run_stable_diffusion_multicontrolnet.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/text-classification/run_bge_m3_text_classification.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/text-classification/run_bge_reranker_v2_m3_text_classification.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/text-classification/run_secureBERT.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/text-classification/run_t5_classification.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/text-classification/run_twitter_roberta_text_classification.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/text2text-generation/run_bart_text2text_generation.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/text2text-generation/run_llama_peft.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/text2text-generation/run_llama_text2text_generation.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/pyproject.toml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/scripts/uv-lock.sh +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/scripts/uv-sync.sh +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/models/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/models/autoencoders/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/models/autoencoders/autoencoder_kl.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/models/autoencoders/vae.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/models/controlnet.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/models/transformers/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/models/transformers/prior_transformer.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/models/transformers/transformer_sd3.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/models/unets/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/controlnet/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/controlnet/multicontrolnet.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_img2img.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_sd_xl.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_sd_xl_img2img.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_prior.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/stable_diffusion/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_img2img.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_inpaint.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_3/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3_img2img.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3_inpaint.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_xl/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl_img2img.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl_inpaint.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/modeling.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/modeling_config.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/ops/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/ops/attn.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/ops/flash_attn.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/ops/kv_cache_update.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/modeling_alias.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/modeling_generic.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/modeling_rope_utils.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/auto/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/auto/auto_factory.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/auto/modeling_auto.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/bart/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/bart/bart_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/bert/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/bert/modeling_bert.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/clip/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/clip/modeling_clip.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/decoderonly/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/dpt/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/dpt/modeling_dpt.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/exaone/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/exaone/exaone_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/exaone/modeling_exaone.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/gemma/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/gemma/gemma_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/gemma/modeling_gemma.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/gpt2/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/gpt2/gpt2_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/gpt2/modeling_gpt2.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/llama/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/llama/llama_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/llama/modeling_llama.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/llava_next/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/llava_next/modeling_llava_next.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/midm/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/midm/midm_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/midm/modeling_midm.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/mistral/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/mistral/mistral_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/mistral/modeling_mistral.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/phi/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/phi/modeling_phi.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/phi/phi_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/qwen2/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/qwen2/modeling_qwen2.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/qwen2/qwen2_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/seq2seq/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/seq2seq/seq2seq_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/t5/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/t5/t5_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/wav2vec2/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/wav2vec2/modeling_wav2vec2.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/whisper/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/whisper/generation_whisper.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/whisper/modeling_whisper.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/whisper/whisper_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/xlm_roberta/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/models/xlm_roberta/modeling_xlm_roberta.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/utils/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/transformers/utils/rbln_quantization.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/utils/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/utils/decorator_utils.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/utils/hub.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/utils/logging.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/utils/model_utils.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/utils/runtime_utils.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/utils/save_utils.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/utils/submodule.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/tests/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/tests/psnr.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/tests/requirements_sdxl.txt +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/tests/run_stable_diffusion_xl_base.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/tests/test_base.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/tests/test_llm.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/tests/test_transformers.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/uv.lock +0 -0
optimum_rbln-0.7.3.post1/.github/version.yaml
ADDED
@@ -0,0 +1 @@
+rebel_compiler_version: 0.7.3
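The new `.github/version.yaml` pins the `rebel_compiler_version` that CI installs alongside this package. A minimal sketch, assuming PyYAML is available, of reading that pin from a local script (the file name and key come from the diff above; the snippet itself is illustrative, not part of the package):

```python
# Illustrative only: read the compiler version pinned in .github/version.yaml.
import yaml  # PyYAML, assumed to be installed

with open(".github/version.yaml") as f:
    pinned = yaml.safe_load(f)

print(pinned["rebel_compiler_version"])  # prints: 0.7.3
```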
optimum_rbln-0.7.3.post1/.github/workflows/deploy-on-tag.yaml
ADDED
@@ -0,0 +1,34 @@
+name: Deploy package on tag released
+
+on:
+  push:
+    tags:
+      - 'v*'
+  workflow_dispatch:
+
+jobs:
+  deploy-on-tag:
+    uses: ./.github/workflows/deploy.yaml
+    with:
+      version: ${{ github.ref_name }}
+    secrets: inherit
+
+  trigger-vllm-model-compilation:
+    name: trigger vllm model compile and generation ci
+    needs: [deploy-on-tag]
+    runs-on: rebel-k8s-runner
+    steps:
+      - uses: actions/github-script@v6
+        with:
+          github-token: ${{ secrets.GIT_PAT }}
+          script: |
+            const result = await github.rest.actions.createWorkflowDispatch({
+              owner: 'rebellions-sw',
+              repo: 'rebel_compiler',
+              workflow_id: 'rebel_dispatch_model_generation_for_vllm.yaml',
+              ref: 'dev',
+              inputs: {
+                optimum_rbln_version: ${{ github.ref_name }},
+              }
+            })
+            console.log(result)
{optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/.github/workflows/rbln_optimum_pytest.yaml
RENAMED
@@ -38,6 +38,13 @@ jobs:
           submodules: recursive
           fetch-depth: 0

+      - name: Get commit message if not provided
+        id: get_commit_message
+        if: ${{ inputs.commit_message == '' }}
+        run: |
+          COMMIT_MESSAGE=$(git log -1 --pretty=%B)
+          echo "message=$COMMIT_MESSAGE" >> $GITHUB_OUTPUT
+
       - name: Setup uv
         uses: astral-sh/setup-uv@v3
         with:
@@ -55,21 +62,34 @@ jobs:
         run: |
           PYPI_URL=$(echo ${{ env.REBEL_PYPI_ENDPOINT }} | sed "s/\/\//\0${{ env.REBEL_PYPI_USERNAME}}:${{ env.REBEL_PYPI_PASSWORD}}@/")
           uv pip install --extra-index-url $PYPI_URL rebel-compiler==${{ inputs.rebel_compiler_version }}
-
+
       - name: Run pytest (transformers)
         env:
           OPTIMUM_RBLN_TEST_LEVEL: ${{ inputs.test_level }}
         run: |
-
+          echo
+          if ${{ !contains( steps.get_commit_message.outputs.message , '[skip-transformers]') }}; then
+            uv run --no-sync pytest tests/test_transformers.py
+          else
+            echo "Found [skip-transformers] in commit message, skipping CI"
+          fi

       - name: Run pytest (diffusers)
         env:
           OPTIMUM_RBLN_TEST_LEVEL: ${{ inputs.test_level }}
         run: |
-
+          if ${{ !contains( steps.get_commit_message.outputs.message , '[skip-diffusers]') }}; then
+            uv run --no-sync pytest tests/test_diffusers.py
+          else
+            echo "Found [skip-diffusers] in commit message, skipping CI"
+          fi

       - name: Run pytest (llm)
         env:
           OPTIMUM_RBLN_TEST_LEVEL: ${{ inputs.test_level }}
         run: |
-
+          if ${{ !contains( steps.get_commit_message.outputs.message , '[skip-llms]') }}; then
+            uv run --no-sync pytest tests/test_llm.py
+          else
+            echo "Found [skip-llms] in commit message, skipping CI"
+          fi
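The reworked test workflow above reads the latest commit message (when one is not passed in) and skips each pytest group if the matching marker appears in it. A rough local equivalent of that gate, assuming it is run inside a git checkout (the marker strings and pytest targets are taken from the diff; the helper itself is illustrative):

```python
# Illustrative only: mirror the [skip-*] commit-message gate added to the workflow.
import subprocess

SKIP_MARKERS = {
    "tests/test_transformers.py": "[skip-transformers]",
    "tests/test_diffusers.py": "[skip-diffusers]",
    "tests/test_llm.py": "[skip-llms]",
}

# Same source of truth as the workflow step: the last commit message.
message = subprocess.run(
    ["git", "log", "-1", "--pretty=%B"], capture_output=True, text=True, check=True
).stdout

for test_file, marker in SKIP_MARKERS.items():
    if marker in message:
        print(f"Found {marker} in commit message, skipping {test_file}")
    else:
        print(f"Would run: uv run --no-sync pytest {test_file}")
```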
{optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: optimum-rbln
-Version: 0.7.3a5
+Version: 0.7.3.post1
 Summary: Optimum RBLN is the interface between the Hugging Face Transformers and Diffusers libraries and RBLN accelerators. It provides a set of tools enabling easy model loading and inference on single and multiple rbln device settings for different downstream tasks.
 Project-URL: Homepage, https://rebellions.ai
 Project-URL: Documentation, https://docs.rbln.ai
optimum_rbln-0.7.3.post1/examples/kandinsky2_2/run_kandinsky2_2.py
ADDED
@@ -0,0 +1,57 @@
+import os
+
+import fire
+import torch
+
+from optimum.rbln import RBLNKandinskyV22Pipeline, RBLNKandinskyV22PriorPipeline
+
+
+def main(
+    prior_model_id: str = "kandinsky-community/kandinsky-2-2-prior",
+    inpaint_model_id: str = "kandinsky-community/kandinsky-2-2-decoder",
+    from_diffusers: bool = False,
+    prompt: str = "red cat, 4k photo",
+):
+    if from_diffusers:
+        prior_pipe = RBLNKandinskyV22PriorPipeline.from_pretrained(
+            model_id=prior_model_id,
+            export=True,
+        )
+        prior_pipe.save_pretrained(os.path.basename(prior_model_id))
+
+        pipe = RBLNKandinskyV22Pipeline.from_pretrained(
+            model_id=inpaint_model_id,
+            export=True,
+            rbln_img_height=768,
+            rbln_img_width=768,
+        )
+        pipe.save_pretrained(os.path.basename(inpaint_model_id))
+    else:
+        prior_pipe = RBLNKandinskyV22PriorPipeline.from_pretrained(
+            model_id=os.path.basename(prior_model_id),
+            export=False,
+        )
+        pipe = RBLNKandinskyV22Pipeline.from_pretrained(
+            model_id=os.path.basename(inpaint_model_id),
+            export=False,
+        )
+
+    generator = torch.manual_seed(42)
+    out = prior_pipe(prompt, generator=generator)
+    image_emb = out.image_embeds
+    zero_image_emb = out.negative_image_embeds
+
+    out = pipe(
+        image_embeds=image_emb,
+        negative_image_embeds=zero_image_emb,
+        height=768,
+        width=768,
+        num_inference_steps=50,
+        generator=generator,
+    )
+    image = out.images[0]
+    image.save(f"{prompt}.png")
+
+
+if __name__ == "__main__":
+    fire.Fire(main)
optimum_rbln-0.7.3.post1/examples/kandinsky2_2/run_kandinsky2_2_combined.py
ADDED
@@ -0,0 +1,31 @@
+import os
+
+import fire
+import torch
+
+from optimum.rbln import RBLNKandinskyV22CombinedPipeline
+
+
+def main(
+    model_id: str = "kandinsky-community/kandinsky-2-2-decoder",
+    from_diffusers: bool = False,
+    prompt: str = "red cat, 4k photo",
+):
+    if from_diffusers:
+        pipe = RBLNKandinskyV22CombinedPipeline.from_pretrained(
+            model_id=model_id,
+            export=True,
+            rbln_img_height=768,
+            rbln_img_width=768,
+        )
+        pipe.save_pretrained(os.path.basename(model_id))
+    else:
+        pipe = RBLNKandinskyV22CombinedPipeline.from_pretrained(model_id=os.path.basename(model_id), export=False)
+
+    generator = torch.manual_seed(42)
+    image = pipe(prompt, height=768, width=768, num_inference_steps=50, generator=generator).images[0]
+    image.save(f"{prompt}.png")
+
+
+if __name__ == "__main__":
+    fire.Fire(main)
optimum_rbln-0.7.3.post1/examples/kandinsky2_2/run_kandinsky2_2_img2img.py
ADDED
@@ -0,0 +1,61 @@
+import os
+
+import fire
+import torch
+from diffusers.utils import load_image
+
+from optimum.rbln import RBLNKandinskyV22Img2ImgPipeline, RBLNKandinskyV22PriorPipeline
+
+
+def main(
+    prior_model_id: str = "kandinsky-community/kandinsky-2-2-prior",
+    inpaint_model_id: str = "kandinsky-community/kandinsky-2-2-decoder",
+    from_diffusers: bool = False,
+    prompt: str = "A red cartoon frog, 4k",
+):
+    img_url = "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/kandinsky/frog.png"
+    init_image = load_image(img_url)
+
+    if from_diffusers:
+        prior_pipe = RBLNKandinskyV22PriorPipeline.from_pretrained(
+            model_id=prior_model_id,
+            export=True,
+        )
+        prior_pipe.save_pretrained(os.path.basename(prior_model_id))
+
+        pipe = RBLNKandinskyV22Img2ImgPipeline.from_pretrained(
+            model_id=inpaint_model_id,
+            export=True,
+            rbln_img_height=768,
+            rbln_img_width=768,
+        )
+        pipe.save_pretrained(os.path.basename(inpaint_model_id))
+    else:
+        prior_pipe = RBLNKandinskyV22PriorPipeline.from_pretrained(
+            model_id=os.path.basename(prior_model_id),
+            export=False,
+        )
+        pipe = RBLNKandinskyV22Img2ImgPipeline.from_pretrained(
+            model_id=os.path.basename(inpaint_model_id),
+            export=False,
+        )
+
+    generator = torch.manual_seed(42)
+    image_emb, zero_image_emb = prior_pipe(prompt, generator=generator, return_dict=False)
+
+    out = pipe(
+        image=init_image,
+        image_embeds=image_emb,
+        negative_image_embeds=zero_image_emb,
+        height=768,
+        width=768,
+        num_inference_steps=100,
+        strength=0.2,
+        generator=generator,
+    )
+    image = out.images[0]
+    image.save(f"{prompt}.png")
+
+
+if __name__ == "__main__":
+    fire.Fire(main)
optimum_rbln-0.7.3.post1/examples/kandinsky2_2/run_kandinsky2_2_img2img_combined.py
ADDED
@@ -0,0 +1,46 @@
+import os
+
+import fire
+import torch
+from diffusers.utils import load_image
+
+from optimum.rbln import RBLNKandinskyV22Img2ImgCombinedPipeline
+
+
+def main(
+    model_id: str = "kandinsky-community/kandinsky-2-2-decoder",
+    from_diffusers: bool = False,
+    prompt: str = "A red cartoon frog, 4k",
+):
+    img_url = "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/kandinsky/frog.png"
+    init_image = load_image(img_url)
+
+    if from_diffusers:
+        pipe = RBLNKandinskyV22Img2ImgCombinedPipeline.from_pretrained(
+            model_id=model_id,
+            export=True,
+            rbln_img_height=768,
+            rbln_img_width=768,
+        )
+        pipe.save_pretrained(os.path.basename(model_id))
+    else:
+        pipe = RBLNKandinskyV22Img2ImgCombinedPipeline.from_pretrained(
+            model_id=os.path.basename(model_id), export=False
+        )
+
+    generator = torch.manual_seed(42)
+
+    image = pipe(
+        prompt=prompt,
+        image=init_image,
+        height=768,
+        width=768,
+        num_inference_steps=100,
+        strength=0.2,
+        generator=generator,
+    ).images[0]
+    image.save(f"{prompt}.png")
+
+
+if __name__ == "__main__":
+    fire.Fire(main)
{optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/kandinsky2_2/run_kandinsky2_2_inpaint.py
RENAMED
@@ -27,6 +27,8 @@ def main(
         pipe = RBLNKandinskyV22InpaintPipeline.from_pretrained(
             model_id=inpaint_model_id,
             export=True,
+            rbln_img_width=768,
+            rbln_img_height=768,
         )
         pipe.save_pretrained(os.path.basename(inpaint_model_id))
     else:
@@ -42,8 +44,10 @@ def main(
     generator = torch.manual_seed(42)
     image_emb, zero_image_emb = prior_pipe(prompt, generator=generator, return_dict=False)

-
-
+    # Mask out the desired area to inpaint
+    # In this example, we will draw a hat on the cat's head
+    mask = np.zeros((768, 768), dtype=np.float32)
+    mask[:250, 250:-250] = 1

     out = pipe(
         image=init_image,
{optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/examples/kandinsky2_2/run_kandinsky2_2_inpaint_combined.py
RENAMED
@@ -1,6 +1,7 @@
 import os

 import fire
+import numpy as np
 import torch
 from diffusers.utils import load_image

@@ -10,19 +11,17 @@ from optimum.rbln import RBLNKandinskyV22InpaintCombinedPipeline
 def main(
     model_id: str = "kandinsky-community/kandinsky-2-2-decoder-inpaint",
     from_diffusers: bool = False,
-    prompt: str = "
+    prompt: str = "a hat",
 ):
-    img_url = "https://huggingface.co/datasets/
-
-        "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/diffusers/inpaint_mask.png"
-    )
-    source = load_image(img_url)
-    mask = load_image(mask_url)
+    img_url = "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/kandinsky/cat.png"
+    init_image = load_image(img_url)

     if from_diffusers:
         pipe = RBLNKandinskyV22InpaintCombinedPipeline.from_pretrained(
             model_id=model_id,
             export=True,
+            rbln_img_height=768,
+            rbln_img_width=768,
         )
         pipe.save_pretrained(os.path.basename(model_id))
     else:
@@ -30,7 +29,13 @@ def main(
             model_id=os.path.basename(model_id), export=False
         )

-
+    generator = torch.manual_seed(42)
+    # Mask out the desired area to inpaint
+    # In this example, we will draw a hat on the cat's head
+    mask = np.zeros((768, 768), dtype=np.float32)
+    mask[:250, 250:-250] = 1
+
+    image = pipe(prompt, image=init_image, mask_image=mask, generator=generator).images[0]
     image.save(f"{prompt}.png")

optimum_rbln-0.7.3.post1/examples/kandinsky2_2/run_kandinsky2_2_prior_interpolate.py
ADDED
@@ -0,0 +1,64 @@
+import os
+
+import fire
+import torch
+from diffusers.utils import load_image
+
+from optimum.rbln import RBLNKandinskyV22Pipeline, RBLNKandinskyV22PriorPipeline
+
+
+def main(
+    prior_model_id: str = "kandinsky-community/kandinsky-2-2-prior",
+    inpaint_model_id: str = "kandinsky-community/kandinsky-2-2-decoder",
+    from_diffusers: bool = False,
+):
+    img1 = load_image(
+        "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/kandinsky/cat.png"
+    )
+    img2 = load_image(
+        "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/kandinsky/starry_night.jpeg"
+    )
+
+    if from_diffusers:
+        prior_pipe = RBLNKandinskyV22PriorPipeline.from_pretrained(
+            model_id=prior_model_id,
+            export=True,
+        )
+        prior_pipe.save_pretrained(os.path.basename(prior_model_id))
+
+        pipe = RBLNKandinskyV22Pipeline.from_pretrained(
+            model_id=inpaint_model_id,
+            export=True,
+            rbln_img_height=768,
+            rbln_img_width=768,
+        )
+        pipe.save_pretrained(os.path.basename(inpaint_model_id))
+    else:
+        prior_pipe = RBLNKandinskyV22PriorPipeline.from_pretrained(
+            model_id=os.path.basename(prior_model_id),
+            export=False,
+        )
+        pipe = RBLNKandinskyV22Pipeline.from_pretrained(
+            model_id=os.path.basename(inpaint_model_id),
+            export=False,
+        )
+
+    images_texts = ["a cat", img1, img2]
+    weights = [0.3, 0.3, 0.4]
+    generator = torch.manual_seed(42)
+    out = prior_pipe.interpolate(images_texts, weights, generator=generator)
+    image_emb = out.image_embeds
+    zero_image_emb = out.negative_image_embeds
+
+    out = pipe(
+        image_embeds=image_emb,
+        negative_image_embeds=zero_image_emb,
+        num_inference_steps=50,
+        generator=generator,
+    )
+    image = out.images[0]
+    image.save("starry_cat.png")
+
+
+if __name__ == "__main__":
+    fire.Fire(main)
{optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/__init__.py
RENAMED
@@ -78,9 +78,13 @@ _import_structure = {
         "RBLNAutoencoderKL",
         "RBLNControlNetModel",
         "RBLNPriorTransformer",
+        "RBLNKandinskyV22CombinedPipeline",
+        "RBLNKandinskyV22Img2ImgCombinedPipeline",
         "RBLNKandinskyV22InpaintCombinedPipeline",
         "RBLNKandinskyV22InpaintPipeline",
+        "RBLNKandinskyV22Img2ImgPipeline",
         "RBLNKandinskyV22PriorPipeline",
+        "RBLNKandinskyV22Pipeline",
         "RBLNStableDiffusionPipeline",
         "RBLNStableDiffusionXLPipeline",
         "RBLNUNet2DConditionModel",
@@ -107,8 +111,12 @@ if TYPE_CHECKING:
         RBLNAutoencoderKL,
         RBLNControlNetModel,
         RBLNDiffusionMixin,
+        RBLNKandinskyV22CombinedPipeline,
+        RBLNKandinskyV22Img2ImgCombinedPipeline,
+        RBLNKandinskyV22Img2ImgPipeline,
         RBLNKandinskyV22InpaintCombinedPipeline,
         RBLNKandinskyV22InpaintPipeline,
+        RBLNKandinskyV22Pipeline,
         RBLNKandinskyV22PriorPipeline,
         RBLNMultiControlNetModel,
         RBLNPriorTransformer,
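With the lazy-import table and the TYPE_CHECKING block extended as above, the new Kandinsky 2.2 text-to-image, image-to-image, and combined pipelines sit next to the existing inpaint and prior exports at the package root. A small smoke-test sketch, assuming optimum-rbln 0.7.3.post1 is installed (the class names are taken from the diff; the check itself is illustrative):

```python
# Illustrative only: confirm the newly exported Kandinsky 2.2 pipeline classes resolve.
from optimum.rbln import (
    RBLNKandinskyV22CombinedPipeline,
    RBLNKandinskyV22Img2ImgCombinedPipeline,
    RBLNKandinskyV22Img2ImgPipeline,
    RBLNKandinskyV22InpaintCombinedPipeline,
    RBLNKandinskyV22InpaintPipeline,
    RBLNKandinskyV22Pipeline,
    RBLNKandinskyV22PriorPipeline,
)

for cls in (
    RBLNKandinskyV22CombinedPipeline,
    RBLNKandinskyV22Img2ImgCombinedPipeline,
    RBLNKandinskyV22Img2ImgPipeline,
    RBLNKandinskyV22InpaintCombinedPipeline,
    RBLNKandinskyV22InpaintPipeline,
    RBLNKandinskyV22Pipeline,
    RBLNKandinskyV22PriorPipeline,
):
    print(cls.__name__)  # each name should print without an ImportError
```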
{optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/__version__.py
RENAMED
@@ -17,5 +17,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE

-__version__ = version = '0.7.3a5'
-__version_tuple__ = version_tuple = (0, 7, 3
+__version__ = version = '0.7.3.post1'
+__version_tuple__ = version_tuple = (0, 7, 3)
{optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3.post1}/src/optimum/rbln/diffusers/__init__.py
RENAMED
@@ -24,9 +24,13 @@ ALL_IMPORTABLE_CLASSES.update(LOADABLE_CLASSES["optimum.rbln"])

 _import_structure = {
     "pipelines": [
+        "RBLNKandinskyV22CombinedPipeline",
+        "RBLNKandinskyV22Img2ImgCombinedPipeline",
         "RBLNKandinskyV22InpaintCombinedPipeline",
         "RBLNKandinskyV22InpaintPipeline",
+        "RBLNKandinskyV22Img2ImgPipeline",
         "RBLNKandinskyV22PriorPipeline",
+        "RBLNKandinskyV22Pipeline",
         "RBLNStableDiffusionPipeline",
         "RBLNStableDiffusionXLPipeline",
         "RBLNStableDiffusionImg2ImgPipeline",
@@ -66,8 +70,12 @@ if TYPE_CHECKING:
         RBLNVQModel,
     )
     from .pipelines import (
+        RBLNKandinskyV22CombinedPipeline,
+        RBLNKandinskyV22Img2ImgCombinedPipeline,
+        RBLNKandinskyV22Img2ImgPipeline,
         RBLNKandinskyV22InpaintCombinedPipeline,
         RBLNKandinskyV22InpaintPipeline,
+        RBLNKandinskyV22Pipeline,
         RBLNKandinskyV22PriorPipeline,
         RBLNMultiControlNetModel,
         RBLNStableDiffusion3Img2ImgPipeline,