optimum-rbln 0.7.3a5__tar.gz → 0.7.3a6__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- optimum_rbln-0.7.3a6/.github/version.yaml +1 -0
- optimum_rbln-0.7.3a6/.github/workflows/deploy-on-tag.yaml +34 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/PKG-INFO +1 -1
- optimum_rbln-0.7.3a6/examples/kandinsky2_2/run_kandinsky2_2.py +57 -0
- optimum_rbln-0.7.3a6/examples/kandinsky2_2/run_kandinsky2_2_combined.py +31 -0
- optimum_rbln-0.7.3a6/examples/kandinsky2_2/run_kandinsky2_2_img2img.py +61 -0
- optimum_rbln-0.7.3a6/examples/kandinsky2_2/run_kandinsky2_2_img2img_combined.py +46 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/kandinsky2_2/run_kandinsky2_2_inpaint.py +6 -2
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/kandinsky2_2/run_kandinsky2_2_inpaint_combined.py +13 -8
- optimum_rbln-0.7.3a6/examples/kandinsky2_2/run_kandinsky2_2_prior_interpolate.py +64 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/__init__.py +8 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/__version__.py +2 -2
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/__init__.py +8 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/models/autoencoders/vq_model.py +11 -3
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/models/unets/unet_2d_condition.py +15 -8
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/__init__.py +8 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/__init__.py +7 -1
- optimum_rbln-0.7.3a6/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2.py +25 -0
- optimum_rbln-0.7.3a6/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_combined.py +189 -0
- optimum_rbln-0.7.3a6/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_img2img.py +25 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_inpaint.py +3 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/modeling_base.py +0 -11
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/decoderonly/modeling_decoderonly.py +40 -41
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/tests/test_diffusers.py +8 -10
- optimum_rbln-0.7.3a5/.github/version.yaml +0 -1
- optimum_rbln-0.7.3a5/.github/workflows/deploy-on-tag.yaml +0 -14
- optimum_rbln-0.7.3a5/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_combined.py +0 -83
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/.github/ISSUE_TEMPLATE/config.yml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/.github/ISSUE_TEMPLATE/model_request.md +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/.github/pull_request_template.md +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/.github/scripts/auto_code_review.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/.github/scripts/validate_pr_checklist.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/.github/workflows/auto_code_review.yml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/.github/workflows/check_code_quality.yml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/.github/workflows/deploy.yaml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/.github/workflows/pr-title-check.yaml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/.github/workflows/pr_checklist_validator.yml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/.github/workflows/rbln_check_compiler.yaml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/.github/workflows/rbln_dispatch_pytest.yaml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/.github/workflows/rbln_optimum_inference_test.yaml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/.github/workflows/rbln_optimum_pytest.yaml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/.github/workflows/rbln_scheduled_test.yaml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/.github/workflows/rbln_trigger_on_pr.yaml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/.gitignore +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/CODE_OF_CONDUCT.md +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/CONTRIBUTING.md +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/LICENSE +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/README.md +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/assets/rbln_logo.png +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/audio-classification/run_ast_audio_classification.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/depth-estimation/run_dpt.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/image-classification/run_image_classification.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/image-classification/run_vit_image_classification.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/image-to-text/run_llava_next_image_to_text.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/question-answering/run_question_answering.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/speech-recognition/run_wav2vec2.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/speech-recognition/run_whisper.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/stable-diffusion/run_stable_diffusion.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/stable-diffusion/run_stable_diffusion_controlnet.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/stable-diffusion/run_stable_diffusion_img2img.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/stable-diffusion/run_stable_diffusion_img2img_controlnet.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/stable-diffusion/run_stable_diffusion_inpaint.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/stable-diffusion/run_stable_diffusion_lora.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/stable-diffusion/run_stable_diffusion_multicontrolnet.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/text-classification/run_bge_m3_text_classification.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/text-classification/run_bge_reranker_v2_m3_text_classification.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/text-classification/run_secureBERT.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/text-classification/run_t5_classification.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/text-classification/run_twitter_roberta_text_classification.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/text2text-generation/run_bart_text2text_generation.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/text2text-generation/run_llama_peft.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/text2text-generation/run_llama_text2text_generation.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/pyproject.toml +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/scripts/uv-lock.sh +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/scripts/uv-sync.sh +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/modeling_diffusers.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/models/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/models/autoencoders/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/models/autoencoders/autoencoder_kl.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/models/autoencoders/vae.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/models/controlnet.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/models/transformers/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/models/transformers/prior_transformer.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/models/transformers/transformer_sd3.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/models/unets/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/controlnet/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/controlnet/multicontrolnet.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_img2img.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_sd_xl.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_sd_xl_img2img.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_prior.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/stable_diffusion/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_img2img.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_inpaint.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_3/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3_img2img.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3_inpaint.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_xl/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl_img2img.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl_inpaint.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/modeling.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/modeling_config.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/ops/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/ops/attn.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/ops/flash_attn.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/ops/kv_cache_update.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/modeling_alias.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/modeling_generic.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/modeling_rope_utils.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/auto/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/auto/auto_factory.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/auto/modeling_auto.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/bart/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/bart/bart_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/bart/modeling_bart.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/bert/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/bert/modeling_bert.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/clip/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/clip/modeling_clip.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/decoderonly/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/decoderonly/decoderonly_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/dpt/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/dpt/modeling_dpt.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/exaone/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/exaone/exaone_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/exaone/modeling_exaone.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/gemma/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/gemma/gemma_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/gemma/modeling_gemma.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/gpt2/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/gpt2/gpt2_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/gpt2/modeling_gpt2.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/llama/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/llama/llama_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/llama/modeling_llama.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/llava_next/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/llava_next/modeling_llava_next.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/midm/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/midm/midm_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/midm/modeling_midm.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/mistral/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/mistral/mistral_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/mistral/modeling_mistral.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/phi/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/phi/modeling_phi.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/phi/phi_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/qwen2/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/qwen2/modeling_qwen2.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/qwen2/qwen2_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/seq2seq/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/seq2seq/modeling_seq2seq.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/seq2seq/seq2seq_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/t5/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/t5/modeling_t5.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/t5/t5_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/wav2vec2/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/wav2vec2/modeling_wav2vec2.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/whisper/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/whisper/generation_whisper.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/whisper/modeling_whisper.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/whisper/whisper_architecture.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/xlm_roberta/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/models/xlm_roberta/modeling_xlm_roberta.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/utils/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/transformers/utils/rbln_quantization.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/utils/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/utils/decorator_utils.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/utils/hub.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/utils/import_utils.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/utils/logging.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/utils/model_utils.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/utils/runtime_utils.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/utils/save_utils.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/utils/submodule.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/tests/__init__.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/tests/psnr.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/tests/requirements_sdxl.txt +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/tests/run_stable_diffusion_xl_base.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/tests/test_base.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/tests/test_llm.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/tests/test_transformers.py +0 -0
- {optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/uv.lock +0 -0
optimum_rbln-0.7.3a6/.github/version.yaml
ADDED
@@ -0,0 +1 @@
+rebel_compiler_version: 0.7.3.dev166+g05e9b26d
optimum_rbln-0.7.3a6/.github/workflows/deploy-on-tag.yaml
ADDED
@@ -0,0 +1,34 @@
+name: Deploy package on tag released
+
+on:
+  push:
+    tags:
+      - 'v*'
+  workflow_dispatch:
+
+jobs:
+  deploy-on-tag:
+    uses: ./.github/workflows/deploy.yaml
+    with:
+      version: ${{ github.ref_name }}
+    secrets: inherit
+
+  trigger-vllm-model-compilation:
+    name: trigger vllm model compile and generation ci
+    needs: [deploy-on-tag]
+    runs-on: rebel-k8s-runner
+    steps:
+      - uses: actions/github-script@v6
+        with:
+          github-token: ${{ secrets.GIT_PAT }}
+          script: |
+            const result = await github.rest.actions.createWorkflowDispatch({
+              owner: 'rebellions-sw',
+              repo: 'rebel_compiler',
+              workflow_id: 'rebel_dispatch_model_generation_for_vllm.yaml',
+              ref: 'dev',
+              inputs: {
+                optimum_rbln_version: ${{ github.ref_name }},
+              }
+            })
+            console.log(result)
{optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: optimum-rbln
-Version: 0.7.3a5
+Version: 0.7.3a6
 Summary: Optimum RBLN is the interface between the Hugging Face Transformers and Diffusers libraries and RBLN accelerators. It provides a set of tools enabling easy model loading and inference on single and multiple rbln device settings for different downstream tasks.
 Project-URL: Homepage, https://rebellions.ai
 Project-URL: Documentation, https://docs.rbln.ai
optimum_rbln-0.7.3a6/examples/kandinsky2_2/run_kandinsky2_2.py
ADDED
@@ -0,0 +1,57 @@
+import os
+
+import fire
+import torch
+
+from optimum.rbln import RBLNKandinskyV22Pipeline, RBLNKandinskyV22PriorPipeline
+
+
+def main(
+    prior_model_id: str = "kandinsky-community/kandinsky-2-2-prior",
+    inpaint_model_id: str = "kandinsky-community/kandinsky-2-2-decoder",
+    from_diffusers: bool = False,
+    prompt: str = "red cat, 4k photo",
+):
+    if from_diffusers:
+        prior_pipe = RBLNKandinskyV22PriorPipeline.from_pretrained(
+            model_id=prior_model_id,
+            export=True,
+        )
+        prior_pipe.save_pretrained(os.path.basename(prior_model_id))
+
+        pipe = RBLNKandinskyV22Pipeline.from_pretrained(
+            model_id=inpaint_model_id,
+            export=True,
+            rbln_img_height=768,
+            rbln_img_width=768,
+        )
+        pipe.save_pretrained(os.path.basename(inpaint_model_id))
+    else:
+        prior_pipe = RBLNKandinskyV22PriorPipeline.from_pretrained(
+            model_id=os.path.basename(prior_model_id),
+            export=False,
+        )
+        pipe = RBLNKandinskyV22Pipeline.from_pretrained(
+            model_id=os.path.basename(inpaint_model_id),
+            export=False,
+        )
+
+    generator = torch.manual_seed(42)
+    out = prior_pipe(prompt, generator=generator)
+    image_emb = out.image_embeds
+    zero_image_emb = out.negative_image_embeds
+
+    out = pipe(
+        image_embeds=image_emb,
+        negative_image_embeds=zero_image_emb,
+        height=768,
+        width=768,
+        num_inference_steps=50,
+        generator=generator,
+    )
+    image = out.images[0]
+    image.save(f"{prompt}.png")
+
+
+if __name__ == "__main__":
+    fire.Fire(main)
optimum_rbln-0.7.3a6/examples/kandinsky2_2/run_kandinsky2_2_combined.py
ADDED
@@ -0,0 +1,31 @@
+import os
+
+import fire
+import torch
+
+from optimum.rbln import RBLNKandinskyV22CombinedPipeline
+
+
+def main(
+    model_id: str = "kandinsky-community/kandinsky-2-2-decoder",
+    from_diffusers: bool = False,
+    prompt: str = "red cat, 4k photo",
+):
+    if from_diffusers:
+        pipe = RBLNKandinskyV22CombinedPipeline.from_pretrained(
+            model_id=model_id,
+            export=True,
+            rbln_img_height=768,
+            rbln_img_width=768,
+        )
+        pipe.save_pretrained(os.path.basename(model_id))
+    else:
+        pipe = RBLNKandinskyV22CombinedPipeline.from_pretrained(model_id=os.path.basename(model_id), export=False)
+
+    generator = torch.manual_seed(42)
+    image = pipe(prompt, height=768, width=768, num_inference_steps=50, generator=generator).images[0]
+    image.save(f"{prompt}.png")
+
+
+if __name__ == "__main__":
+    fire.Fire(main)
optimum_rbln-0.7.3a6/examples/kandinsky2_2/run_kandinsky2_2_img2img.py
ADDED
@@ -0,0 +1,61 @@
+import os
+
+import fire
+import torch
+from diffusers.utils import load_image
+
+from optimum.rbln import RBLNKandinskyV22Img2ImgPipeline, RBLNKandinskyV22PriorPipeline
+
+
+def main(
+    prior_model_id: str = "kandinsky-community/kandinsky-2-2-prior",
+    inpaint_model_id: str = "kandinsky-community/kandinsky-2-2-decoder",
+    from_diffusers: bool = False,
+    prompt: str = "A red cartoon frog, 4k",
+):
+    img_url = "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/kandinsky/frog.png"
+    init_image = load_image(img_url)
+
+    if from_diffusers:
+        prior_pipe = RBLNKandinskyV22PriorPipeline.from_pretrained(
+            model_id=prior_model_id,
+            export=True,
+        )
+        prior_pipe.save_pretrained(os.path.basename(prior_model_id))
+
+        pipe = RBLNKandinskyV22Img2ImgPipeline.from_pretrained(
+            model_id=inpaint_model_id,
+            export=True,
+            rbln_img_height=768,
+            rbln_img_width=768,
+        )
+        pipe.save_pretrained(os.path.basename(inpaint_model_id))
+    else:
+        prior_pipe = RBLNKandinskyV22PriorPipeline.from_pretrained(
+            model_id=os.path.basename(prior_model_id),
+            export=False,
+        )
+        pipe = RBLNKandinskyV22Img2ImgPipeline.from_pretrained(
+            model_id=os.path.basename(inpaint_model_id),
+            export=False,
+        )
+
+    generator = torch.manual_seed(42)
+    image_emb, zero_image_emb = prior_pipe(prompt, generator=generator, return_dict=False)
+
+    out = pipe(
+        image=init_image,
+        image_embeds=image_emb,
+        negative_image_embeds=zero_image_emb,
+        height=768,
+        width=768,
+        num_inference_steps=100,
+        strength=0.2,
+        generator=generator,
+    )
+    image = out.images[0]
+    image.save(f"{prompt}.png")
+
+
+if __name__ == "__main__":
+    fire.Fire(main)
optimum_rbln-0.7.3a6/examples/kandinsky2_2/run_kandinsky2_2_img2img_combined.py
ADDED
@@ -0,0 +1,46 @@
+import os
+
+import fire
+import torch
+from diffusers.utils import load_image
+
+from optimum.rbln import RBLNKandinskyV22Img2ImgCombinedPipeline
+
+
+def main(
+    model_id: str = "kandinsky-community/kandinsky-2-2-decoder",
+    from_diffusers: bool = False,
+    prompt: str = "A red cartoon frog, 4k",
+):
+    img_url = "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/kandinsky/frog.png"
+    init_image = load_image(img_url)
+
+    if from_diffusers:
+        pipe = RBLNKandinskyV22Img2ImgCombinedPipeline.from_pretrained(
+            model_id=model_id,
+            export=True,
+            rbln_img_height=768,
+            rbln_img_width=768,
+        )
+        pipe.save_pretrained(os.path.basename(model_id))
+    else:
+        pipe = RBLNKandinskyV22Img2ImgCombinedPipeline.from_pretrained(
+            model_id=os.path.basename(model_id), export=False
+        )
+
+    generator = torch.manual_seed(42)
+
+    image = pipe(
+        prompt=prompt,
+        image=init_image,
+        height=768,
+        width=768,
+        num_inference_steps=100,
+        strength=0.2,
+        generator=generator,
+    ).images[0]
+    image.save(f"{prompt}.png")
+
+
+if __name__ == "__main__":
+    fire.Fire(main)
{optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/kandinsky2_2/run_kandinsky2_2_inpaint.py
RENAMED
@@ -27,6 +27,8 @@ def main(
         pipe = RBLNKandinskyV22InpaintPipeline.from_pretrained(
             model_id=inpaint_model_id,
             export=True,
+            rbln_img_width=768,
+            rbln_img_height=768,
         )
         pipe.save_pretrained(os.path.basename(inpaint_model_id))
     else:
@@ -42,8 +44,10 @@ def main(
     generator = torch.manual_seed(42)
     image_emb, zero_image_emb = prior_pipe(prompt, generator=generator, return_dict=False)
 
-
-
+    # Mask out the desired area to inpaint
+    # In this example, we will draw a hat on the cat's head
+    mask = np.zeros((768, 768), dtype=np.float32)
+    mask[:250, 250:-250] = 1
 
     out = pipe(
         image=init_image,
{optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/examples/kandinsky2_2/run_kandinsky2_2_inpaint_combined.py
RENAMED
@@ -1,6 +1,7 @@
 import os
 
 import fire
+import numpy as np
 import torch
 from diffusers.utils import load_image
 
@@ -10,19 +11,17 @@ from optimum.rbln import RBLNKandinskyV22InpaintCombinedPipeline
 def main(
     model_id: str = "kandinsky-community/kandinsky-2-2-decoder-inpaint",
     from_diffusers: bool = False,
-    prompt: str = "
+    prompt: str = "a hat",
 ):
-    img_url = "https://huggingface.co/datasets/
-    mask_url = (
-        "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/diffusers/inpaint_mask.png"
-    )
-    source = load_image(img_url)
-    mask = load_image(mask_url)
+    img_url = "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/kandinsky/cat.png"
+    init_image = load_image(img_url)
 
     if from_diffusers:
         pipe = RBLNKandinskyV22InpaintCombinedPipeline.from_pretrained(
             model_id=model_id,
             export=True,
+            rbln_img_height=768,
+            rbln_img_width=768,
         )
         pipe.save_pretrained(os.path.basename(model_id))
     else:
@@ -30,7 +29,13 @@ def main(
             model_id=os.path.basename(model_id), export=False
         )
 
-
+    generator = torch.manual_seed(42)
+    # Mask out the desired area to inpaint
+    # In this example, we will draw a hat on the cat's head
+    mask = np.zeros((768, 768), dtype=np.float32)
+    mask[:250, 250:-250] = 1
+
+    image = pipe(prompt, image=init_image, mask_image=mask, generator=generator).images[0]
     image.save(f"{prompt}.png")
 
 
optimum_rbln-0.7.3a6/examples/kandinsky2_2/run_kandinsky2_2_prior_interpolate.py
ADDED
@@ -0,0 +1,64 @@
+import os
+
+import fire
+import torch
+from diffusers.utils import load_image
+
+from optimum.rbln import RBLNKandinskyV22Pipeline, RBLNKandinskyV22PriorPipeline
+
+
+def main(
+    prior_model_id: str = "kandinsky-community/kandinsky-2-2-prior",
+    inpaint_model_id: str = "kandinsky-community/kandinsky-2-2-decoder",
+    from_diffusers: bool = False,
+):
+    img1 = load_image(
+        "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/kandinsky/cat.png"
+    )
+    img2 = load_image(
+        "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/kandinsky/starry_night.jpeg"
+    )
+
+    if from_diffusers:
+        prior_pipe = RBLNKandinskyV22PriorPipeline.from_pretrained(
+            model_id=prior_model_id,
+            export=True,
+        )
+        prior_pipe.save_pretrained(os.path.basename(prior_model_id))
+
+        pipe = RBLNKandinskyV22Pipeline.from_pretrained(
+            model_id=inpaint_model_id,
+            export=True,
+            rbln_img_height=768,
+            rbln_img_width=768,
+        )
+        pipe.save_pretrained(os.path.basename(inpaint_model_id))
+    else:
+        prior_pipe = RBLNKandinskyV22PriorPipeline.from_pretrained(
+            model_id=os.path.basename(prior_model_id),
+            export=False,
+        )
+        pipe = RBLNKandinskyV22Pipeline.from_pretrained(
+            model_id=os.path.basename(inpaint_model_id),
+            export=False,
+        )
+
+    images_texts = ["a cat", img1, img2]
+    weights = [0.3, 0.3, 0.4]
+    generator = torch.manual_seed(42)
+    out = prior_pipe.interpolate(images_texts, weights, generator=generator)
+    image_emb = out.image_embeds
+    zero_image_emb = out.negative_image_embeds
+
+    out = pipe(
+        image_embeds=image_emb,
+        negative_image_embeds=zero_image_emb,
+        num_inference_steps=50,
+        generator=generator,
+    )
+    image = out.images[0]
+    image.save("starry_cat.png")
+
+
+if __name__ == "__main__":
+    fire.Fire(main)
{optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/__init__.py
RENAMED
@@ -78,9 +78,13 @@ _import_structure = {
         "RBLNAutoencoderKL",
         "RBLNControlNetModel",
         "RBLNPriorTransformer",
+        "RBLNKandinskyV22CombinedPipeline",
+        "RBLNKandinskyV22Img2ImgCombinedPipeline",
         "RBLNKandinskyV22InpaintCombinedPipeline",
         "RBLNKandinskyV22InpaintPipeline",
+        "RBLNKandinskyV22Img2ImgPipeline",
         "RBLNKandinskyV22PriorPipeline",
+        "RBLNKandinskyV22Pipeline",
         "RBLNStableDiffusionPipeline",
         "RBLNStableDiffusionXLPipeline",
         "RBLNUNet2DConditionModel",
@@ -107,8 +111,12 @@ if TYPE_CHECKING:
         RBLNAutoencoderKL,
         RBLNControlNetModel,
         RBLNDiffusionMixin,
+        RBLNKandinskyV22CombinedPipeline,
+        RBLNKandinskyV22Img2ImgCombinedPipeline,
+        RBLNKandinskyV22Img2ImgPipeline,
         RBLNKandinskyV22InpaintCombinedPipeline,
         RBLNKandinskyV22InpaintPipeline,
+        RBLNKandinskyV22Pipeline,
         RBLNKandinskyV22PriorPipeline,
         RBLNMultiControlNetModel,
         RBLNPriorTransformer,
{optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/__version__.py
RENAMED
@@ -17,5 +17,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '0.7.3a5'
-__version_tuple__ = version_tuple = (0, 7, 3, 'a5')
+__version__ = version = '0.7.3a6'
+__version_tuple__ = version_tuple = (0, 7, 3, 'a6')
{optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/__init__.py
RENAMED
@@ -24,9 +24,13 @@ ALL_IMPORTABLE_CLASSES.update(LOADABLE_CLASSES["optimum.rbln"])
 
 _import_structure = {
     "pipelines": [
+        "RBLNKandinskyV22CombinedPipeline",
+        "RBLNKandinskyV22Img2ImgCombinedPipeline",
         "RBLNKandinskyV22InpaintCombinedPipeline",
         "RBLNKandinskyV22InpaintPipeline",
+        "RBLNKandinskyV22Img2ImgPipeline",
         "RBLNKandinskyV22PriorPipeline",
+        "RBLNKandinskyV22Pipeline",
         "RBLNStableDiffusionPipeline",
         "RBLNStableDiffusionXLPipeline",
         "RBLNStableDiffusionImg2ImgPipeline",
@@ -66,8 +70,12 @@ if TYPE_CHECKING:
         RBLNVQModel,
     )
     from .pipelines import (
+        RBLNKandinskyV22CombinedPipeline,
+        RBLNKandinskyV22Img2ImgCombinedPipeline,
+        RBLNKandinskyV22Img2ImgPipeline,
         RBLNKandinskyV22InpaintCombinedPipeline,
         RBLNKandinskyV22InpaintPipeline,
+        RBLNKandinskyV22Pipeline,
         RBLNKandinskyV22PriorPipeline,
         RBLNMultiControlNetModel,
         RBLNStableDiffusion3Img2ImgPipeline,
{optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/models/autoencoders/vq_model.py
RENAMED
@@ -90,9 +90,17 @@ class RBLNVQModel(RBLNModel):
         model_config: "PretrainedConfig",
         rbln_kwargs: Dict[str, Any] = {},
     ) -> RBLNConfig:
-        batch_size = rbln_kwargs.get("batch_size")
-
-
+        batch_size = rbln_kwargs.get("batch_size")
+        if batch_size is None:
+            batch_size = 1
+
+        height = rbln_kwargs.get("img_height")
+        if height is None:
+            height = 512
+
+        width = rbln_kwargs.get("img_width")
+        if width is None:
+            width = 512
 
         if hasattr(model_config, "block_out_channels"):
             scale_factor = 2 ** (len(model_config.block_out_channels) - 1)
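For readability, the defaulting the hunk above introduces can be summarized as a minimal standalone sketch (resolve_vq_defaults is a hypothetical helper for illustration, not part of the optimum-rbln API):

from typing import Any, Dict


def resolve_vq_defaults(rbln_kwargs: Dict[str, Any]) -> Dict[str, int]:
    # Mirrors the change above: unspecified rbln kwargs fall back to
    # batch_size=1 and a 512x512 image size for the VQ model config.
    batch_size = rbln_kwargs.get("batch_size")
    if batch_size is None:
        batch_size = 1
    height = rbln_kwargs.get("img_height")
    if height is None:
        height = 512
    width = rbln_kwargs.get("img_width")
    if width is None:
        width = 512
    return {"batch_size": batch_size, "img_height": height, "img_width": width}


assert resolve_vq_defaults({}) == {"batch_size": 1, "img_height": 512, "img_width": 512}
assert resolve_vq_defaults({"img_height": 768, "img_width": 768})["img_height"] == 768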
{optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/models/unets/unet_2d_condition.py
RENAMED
@@ -176,15 +176,22 @@ class RBLNUNet2DConditionModel(RBLNModel):
             raise ValueError("Both image height and image width must be given or not given")
         elif image_size[0] is None and image_size[1] is None:
             if rbln_config["img2img_pipeline"]:
-
-
-
-
-
-
-
-
+                if hasattr(pipe, "vae"):
+                    # In case of img2img, sample size of unet is determined by vae encoder.
+                    vae_sample_size = pipe.vae.config.sample_size
+                    if isinstance(vae_sample_size, int):
+                        sample_size = vae_sample_size // scale_factor
+                    else:
+                        sample_size = (
+                            vae_sample_size[0] // scale_factor,
+                            vae_sample_size[1] // scale_factor,
+                        )
+                elif hasattr(pipe, "movq"):
+                    logger.warning(
+                        "RBLN config 'img_height' and 'img_width' should have been provided for this pipeline. "
+                        "Both variable will be set 512 by default."
                     )
+                    sample_size = (512 // scale_factor, 512 // scale_factor)
             else:
                 sample_size = pipe.unet.config.sample_size
         else:
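Since the removed lines of this hunk are not shown, the intent of the new branch is easier to follow as a sketch. The following hypothetical standalone function assumes `pipe` is the host pipeline object and `scale_factor` the VAE/MoVQ downscaling factor used in the surrounding method; it is illustrative only, not the library implementation:

def derive_img2img_sample_size(pipe, scale_factor: int):
    # Stable-Diffusion-style pipelines expose a VAE whose sample_size
    # determines the UNet sample size for img2img compilation.
    if hasattr(pipe, "vae"):
        vae_sample_size = pipe.vae.config.sample_size
        if isinstance(vae_sample_size, int):
            return vae_sample_size // scale_factor
        return (vae_sample_size[0] // scale_factor, vae_sample_size[1] // scale_factor)
    # Kandinsky-style pipelines carry a MoVQ model instead of a VAE; without
    # explicit rbln_img_height/rbln_img_width the code warns and falls back
    # to a 512x512 compile-time resolution.
    if hasattr(pipe, "movq"):
        return (512 // scale_factor, 512 // scale_factor)
    return None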
{optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/__init__.py
RENAMED
@@ -26,9 +26,13 @@ _import_structure = {
         "RBLNStableDiffusionXLControlNetPipeline",
     ],
     "kandinsky2_2": [
+        "RBLNKandinskyV22CombinedPipeline",
+        "RBLNKandinskyV22Img2ImgCombinedPipeline",
         "RBLNKandinskyV22InpaintCombinedPipeline",
         "RBLNKandinskyV22InpaintPipeline",
+        "RBLNKandinskyV22Img2ImgPipeline",
         "RBLNKandinskyV22PriorPipeline",
+        "RBLNKandinskyV22Pipeline",
     ],
     "stable_diffusion": [
         "RBLNStableDiffusionImg2ImgPipeline",
@@ -55,8 +59,12 @@ if TYPE_CHECKING:
         RBLNStableDiffusionXLControlNetPipeline,
     )
     from .kandinsky2_2 import (
+        RBLNKandinskyV22CombinedPipeline,
+        RBLNKandinskyV22Img2ImgCombinedPipeline,
+        RBLNKandinskyV22Img2ImgPipeline,
         RBLNKandinskyV22InpaintCombinedPipeline,
         RBLNKandinskyV22InpaintPipeline,
+        RBLNKandinskyV22Pipeline,
         RBLNKandinskyV22PriorPipeline,
     )
     from .stable_diffusion import (
{optimum_rbln-0.7.3a5 → optimum_rbln-0.7.3a6}/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/__init__.py
RENAMED
@@ -12,6 +12,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from .
+from .pipeline_kandinsky2_2 import RBLNKandinskyV22Pipeline
+from .pipeline_kandinsky2_2_combined import (
+    RBLNKandinskyV22CombinedPipeline,
+    RBLNKandinskyV22Img2ImgCombinedPipeline,
+    RBLNKandinskyV22InpaintCombinedPipeline,
+)
+from .pipeline_kandinsky2_2_img2img import RBLNKandinskyV22Img2ImgPipeline
 from .pipeline_kandinsky2_2_inpaint import RBLNKandinskyV22InpaintPipeline
 from .pipeline_kandinsky2_2_prior import RBLNKandinskyV22PriorPipeline
optimum_rbln-0.7.3a6/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2.py
ADDED
@@ -0,0 +1,25 @@
+# Copyright 2024 Rebellions Inc.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from diffusers import KandinskyV22Pipeline
+
+from ...modeling_diffusers import RBLNDiffusionMixin
+
+
+class RBLNKandinskyV22Pipeline(RBLNDiffusionMixin, KandinskyV22Pipeline):
+    original_class = KandinskyV22Pipeline
+    _submodules = ["unet", "movq"]
+
+    def get_compiled_image_size(self):
+        return self.movq.image_size