optimum-rbln 0.7.2rc1__tar.gz → 0.7.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/.github/scripts/auto_code_review.py +0 -51
- optimum_rbln-0.7.3/.github/version.yaml +1 -0
- optimum_rbln-0.7.3/.github/workflows/auto_code_review.yml +72 -0
- optimum_rbln-0.7.3/.github/workflows/deploy-on-tag.yaml +34 -0
- optimum_rbln-0.7.3/.github/workflows/rbln_check_compiler.yaml +61 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/.github/workflows/rbln_optimum_pytest.yaml +32 -7
- optimum_rbln-0.7.3/.github/workflows/rbln_scheduled_test.yaml +51 -0
- optimum_rbln-0.7.3/.github/workflows/rbln_trigger_on_pr.yaml +94 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/PKG-INFO +1 -1
- optimum_rbln-0.7.3/examples/kandinsky2_2/run_kandinsky2_2.py +57 -0
- optimum_rbln-0.7.3/examples/kandinsky2_2/run_kandinsky2_2_combined.py +31 -0
- optimum_rbln-0.7.3/examples/kandinsky2_2/run_kandinsky2_2_img2img.py +61 -0
- optimum_rbln-0.7.3/examples/kandinsky2_2/run_kandinsky2_2_img2img_combined.py +46 -0
- optimum_rbln-0.7.3/examples/kandinsky2_2/run_kandinsky2_2_inpaint.py +64 -0
- optimum_rbln-0.7.2rc1/examples/kandinsky2_2/run_kandinsky2_2_inpaint.py → optimum_rbln-0.7.3/examples/kandinsky2_2/run_kandinsky2_2_inpaint_combined.py +13 -8
- optimum_rbln-0.7.3/examples/kandinsky2_2/run_kandinsky2_2_prior_interpolate.py +64 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/__init__.py +8 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/__version__.py +9 -4
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/__init__.py +8 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/modeling_diffusers.py +103 -109
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/models/autoencoders/vq_model.py +11 -3
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/models/unets/unet_2d_condition.py +15 -8
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/__init__.py +8 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/__init__.py +7 -1
- optimum_rbln-0.7.3/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2.py +25 -0
- optimum_rbln-0.7.3/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_combined.py +189 -0
- optimum_rbln-0.7.3/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_img2img.py +25 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_inpaint.py +3 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/modeling.py +4 -1
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/modeling_base.py +16 -3
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/ops/__init__.py +6 -2
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/ops/attn.py +94 -85
- optimum_rbln-0.7.3/src/optimum/rbln/ops/flash_attn.py +82 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/ops/kv_cache_update.py +4 -4
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/modeling_generic.py +3 -3
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/bart/bart_architecture.py +10 -6
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/bart/modeling_bart.py +6 -2
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/bert/modeling_bert.py +1 -1
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/decoderonly/decoderonly_architecture.py +264 -133
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/decoderonly/modeling_decoderonly.py +276 -29
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/exaone/exaone_architecture.py +11 -4
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/gemma/gemma_architecture.py +11 -4
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/gpt2/gpt2_architecture.py +5 -3
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/midm/midm_architecture.py +5 -3
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/phi/phi_architecture.py +9 -7
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/seq2seq/modeling_seq2seq.py +50 -13
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/seq2seq/seq2seq_architecture.py +60 -36
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/t5/modeling_t5.py +3 -1
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/t5/t5_architecture.py +65 -3
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/whisper/whisper_architecture.py +26 -36
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/xlm_roberta/modeling_xlm_roberta.py +1 -14
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/utils/import_utils.py +7 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/tests/test_base.py +26 -31
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/tests/test_diffusers.py +39 -10
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/tests/test_llm.py +34 -35
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/tests/test_transformers.py +3 -3
- optimum_rbln-0.7.2rc1/.github/workflows/auto_code_review.yml +0 -33
- optimum_rbln-0.7.2rc1/.github/workflows/deploy-on-tag.yaml +0 -14
- optimum_rbln-0.7.2rc1/.github/workflows/rbln_trigger_on_pr.yaml +0 -96
- optimum_rbln-0.7.2rc1/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_combined.py +0 -83
- optimum_rbln-0.7.2rc1/src/optimum/rbln/ops/flash_attn.py +0 -61
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/.github/ISSUE_TEMPLATE/config.yml +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/.github/ISSUE_TEMPLATE/model_request.md +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/.github/pull_request_template.md +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/.github/scripts/validate_pr_checklist.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/.github/workflows/check_code_quality.yml +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/.github/workflows/deploy.yaml +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/.github/workflows/pr-title-check.yaml +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/.github/workflows/pr_checklist_validator.yml +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/.github/workflows/rbln_dispatch_pytest.yaml +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/.github/workflows/rbln_optimum_inference_test.yaml +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/.gitignore +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/CODE_OF_CONDUCT.md +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/CONTRIBUTING.md +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/LICENSE +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/README.md +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/assets/rbln_logo.png +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/audio-classification/run_ast_audio_classification.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/depth-estimation/run_dpt.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/image-classification/run_image_classification.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/image-classification/run_vit_image_classification.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/image-to-text/run_llava_next_image_to_text.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/question-answering/run_question_answering.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/speech-recognition/run_wav2vec2.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/speech-recognition/run_whisper.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/stable-diffusion/run_stable_diffusion.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/stable-diffusion/run_stable_diffusion_controlnet.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/stable-diffusion/run_stable_diffusion_img2img.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/stable-diffusion/run_stable_diffusion_img2img_controlnet.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/stable-diffusion/run_stable_diffusion_inpaint.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/stable-diffusion/run_stable_diffusion_lora.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/stable-diffusion/run_stable_diffusion_multicontrolnet.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/text-classification/run_bge_m3_text_classification.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/text-classification/run_bge_reranker_v2_m3_text_classification.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/text-classification/run_secureBERT.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/text-classification/run_t5_classification.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/text-classification/run_twitter_roberta_text_classification.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/text2text-generation/run_bart_text2text_generation.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/text2text-generation/run_llama_peft.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/examples/text2text-generation/run_llama_text2text_generation.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/pyproject.toml +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/scripts/uv-lock.sh +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/scripts/uv-sync.sh +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/models/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/models/autoencoders/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/models/autoencoders/autoencoder_kl.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/models/autoencoders/vae.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/models/controlnet.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/models/transformers/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/models/transformers/prior_transformer.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/models/transformers/transformer_sd3.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/models/unets/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/controlnet/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/controlnet/multicontrolnet.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_img2img.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_sd_xl.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/controlnet/pipeline_controlnet_sd_xl_img2img.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/kandinsky2_2/pipeline_kandinsky2_2_prior.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/stable_diffusion/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_img2img.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_inpaint.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_3/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3_img2img.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_3/pipeline_stable_diffusion_3_inpaint.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_xl/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl_img2img.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/diffusers/pipelines/stable_diffusion_xl/pipeline_stable_diffusion_xl_inpaint.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/modeling_config.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/modeling_alias.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/modeling_rope_utils.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/auto/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/auto/auto_factory.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/auto/modeling_auto.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/bart/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/bert/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/clip/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/clip/modeling_clip.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/decoderonly/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/dpt/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/dpt/modeling_dpt.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/exaone/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/exaone/modeling_exaone.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/gemma/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/gemma/modeling_gemma.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/gpt2/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/gpt2/modeling_gpt2.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/llama/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/llama/llama_architecture.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/llama/modeling_llama.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/llava_next/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/llava_next/modeling_llava_next.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/midm/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/midm/modeling_midm.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/mistral/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/mistral/mistral_architecture.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/mistral/modeling_mistral.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/phi/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/phi/modeling_phi.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/qwen2/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/qwen2/modeling_qwen2.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/qwen2/qwen2_architecture.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/seq2seq/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/t5/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/wav2vec2/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/wav2vec2/modeling_wav2vec2.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/whisper/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/whisper/generation_whisper.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/whisper/modeling_whisper.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/models/xlm_roberta/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/utils/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/transformers/utils/rbln_quantization.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/utils/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/utils/decorator_utils.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/utils/hub.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/utils/logging.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/utils/model_utils.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/utils/runtime_utils.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/utils/save_utils.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/src/optimum/rbln/utils/submodule.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/tests/__init__.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/tests/psnr.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/tests/requirements_sdxl.txt +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/tests/run_stable_diffusion_xl_base.py +0 -0
- {optimum_rbln-0.7.2rc1 → optimum_rbln-0.7.3}/uv.lock +0 -0
@@ -97,50 +97,6 @@ def remove_file_from_diff(diff_content, file_to_remove):
|
|
97
97
|
return "\n".join(result)
|
98
98
|
|
99
99
|
|
100
|
-
def skip_bot(pr):
|
101
|
-
global force_review
|
102
|
-
"""
|
103
|
-
Review if
|
104
|
-
1. last commit messages starts with "[autoreview]"
|
105
|
-
2. last comment contains "/autoreview"
|
106
|
-
"""
|
107
|
-
|
108
|
-
# Check commit message
|
109
|
-
commits = list(pr.get_commits())
|
110
|
-
if len(commits) == 0:
|
111
|
-
return True
|
112
|
-
|
113
|
-
last_commit = commits[-1]
|
114
|
-
try:
|
115
|
-
commit_message = last_commit.raw_data["commit"]["message"]
|
116
|
-
except KeyError:
|
117
|
-
commit_message = ""
|
118
|
-
|
119
|
-
if commit_message.startswith("[autoreview]"):
|
120
|
-
return False
|
121
|
-
|
122
|
-
# Check the last comment
|
123
|
-
comments = list(pr.get_issue_comments())
|
124
|
-
if len(comments) == 0:
|
125
|
-
return True
|
126
|
-
|
127
|
-
last = comments[-1]
|
128
|
-
if last.user.login.find("github-actions") != -1:
|
129
|
-
return True
|
130
|
-
|
131
|
-
if last.body.find("/autoreview") == -1:
|
132
|
-
return True
|
133
|
-
|
134
|
-
if last.reactions["heart"] > 0:
|
135
|
-
return True
|
136
|
-
|
137
|
-
if last.body.find("force") != -1:
|
138
|
-
force_review = True
|
139
|
-
|
140
|
-
last.create_reaction("heart")
|
141
|
-
return False
|
142
|
-
|
143
|
-
|
144
100
|
def main():
|
145
101
|
github_token = os.getenv("GITHUB_TOKEN")
|
146
102
|
pr_number = os.getenv("PR_NUMBER")
|
@@ -155,13 +111,6 @@ def main():
|
|
155
111
|
repo = g.get_repo(os.getenv("GITHUB_REPOSITORY"))
|
156
112
|
pr = repo.get_pull(int(pr_number))
|
157
113
|
|
158
|
-
if skip_bot(pr):
|
159
|
-
print(
|
160
|
-
"To invoke review, Write '/autoreview' and re-run github actions,"
|
161
|
-
" or start the commit message with '[autoreview]'. "
|
162
|
-
)
|
163
|
-
sys.exit(0)
|
164
|
-
|
165
114
|
# Get PR diff
|
166
115
|
diff = get_pr_diff()
|
167
116
|
diff = remove_file_from_diff(diff, "uv.lock")
|
@@ -0,0 +1 @@
|
|
1
|
+
rebel_compiler_version: 0.7.3.dev166+g05e9b26d
|
@@ -0,0 +1,72 @@
|
|
1
|
+
name: Auto Code Review
|
2
|
+
|
3
|
+
on:
|
4
|
+
pull_request:
|
5
|
+
issue_comment:
|
6
|
+
types: [created]
|
7
|
+
push:
|
8
|
+
branches:
|
9
|
+
- '**'
|
10
|
+
|
11
|
+
env:
|
12
|
+
GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
|
13
|
+
GOOGLE_MODEL_ID: ${{ vars.GOOGLE_MODEL_ID }}
|
14
|
+
|
15
|
+
jobs:
|
16
|
+
auto-review:
|
17
|
+
runs-on: ubuntu-latest
|
18
|
+
steps:
|
19
|
+
- name: Checkout repository
|
20
|
+
uses: actions/checkout@v3
|
21
|
+
with:
|
22
|
+
fetch-depth: 2
|
23
|
+
|
24
|
+
- name: Check if review should run
|
25
|
+
id: check
|
26
|
+
run: |
|
27
|
+
PR_NUMBER=""
|
28
|
+
SHOULD_RUN="false"
|
29
|
+
|
30
|
+
# For push events, check commit message
|
31
|
+
if [[ "${{ github.event_name }}" == "push" ]]; then
|
32
|
+
if [[ "${{ contains(github.event.head_commit.message, '[autoreview]') }}" == "true" ]]; then
|
33
|
+
SHOULD_RUN="true"
|
34
|
+
# Use GitHub CLI to find PR associated with this commit
|
35
|
+
PR_NUMBER=$(gh pr list --head ${{ github.ref_name }} --json number --jq '.[0].number')
|
36
|
+
fi
|
37
|
+
|
38
|
+
# For PR events
|
39
|
+
elif [[ "${{ github.event_name }}" == "pull_request" ]]; then
|
40
|
+
PR_NUMBER="${{ github.event.pull_request.number }}"
|
41
|
+
|
42
|
+
# For comment events, check if it's "/autoreview"
|
43
|
+
elif [[ "${{ github.event_name }}" == "issue_comment" ]]; then
|
44
|
+
if [[ "${{ github.event.issue.pull_request != null }}" == "true" && "${{ contains(github.event.comment.body, '/autoreview') }}" == "true" ]]; then
|
45
|
+
SHOULD_RUN="true"
|
46
|
+
PR_NUMBER="${{ github.event.issue.number }}"
|
47
|
+
fi
|
48
|
+
fi
|
49
|
+
|
50
|
+
echo "should_run=$SHOULD_RUN" >> $GITHUB_OUTPUT
|
51
|
+
echo "pr_number=$PR_NUMBER" >> $GITHUB_OUTPUT
|
52
|
+
env:
|
53
|
+
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
54
|
+
|
55
|
+
- name: Set up Python
|
56
|
+
if: steps.check.outputs.should_run == 'true' && steps.check.outputs.pr_number != ''
|
57
|
+
uses: actions/setup-python@v4
|
58
|
+
with:
|
59
|
+
python-version: '3.x'
|
60
|
+
|
61
|
+
- name: Install dependencies
|
62
|
+
if: steps.check.outputs.should_run == 'true' && steps.check.outputs.pr_number != ''
|
63
|
+
run: |
|
64
|
+
python -m pip install --upgrade pip
|
65
|
+
pip install google-generativeai PyGithub
|
66
|
+
|
67
|
+
- name: Run Auto Code Review
|
68
|
+
if: steps.check.outputs.should_run == 'true' && steps.check.outputs.pr_number != ''
|
69
|
+
env:
|
70
|
+
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
71
|
+
PR_NUMBER: ${{ steps.check.outputs.pr_number }}
|
72
|
+
run: python .github/scripts/auto_code_review.py
|
@@ -0,0 +1,34 @@
|
|
1
|
+
name: Deploy package on tag released
|
2
|
+
|
3
|
+
on:
|
4
|
+
push:
|
5
|
+
tags:
|
6
|
+
- 'v*'
|
7
|
+
workflow_dispatch:
|
8
|
+
|
9
|
+
jobs:
|
10
|
+
deploy-on-tag:
|
11
|
+
uses: ./.github/workflows/deploy.yaml
|
12
|
+
with:
|
13
|
+
version: ${{ github.ref_name }}
|
14
|
+
secrets: inherit
|
15
|
+
|
16
|
+
trigger-vllm-model-compilation:
|
17
|
+
name: trigger vllm model compile and generation ci
|
18
|
+
needs: [deploy-on-tag]
|
19
|
+
runs-on: rebel-k8s-runner
|
20
|
+
steps:
|
21
|
+
- uses: actions/github-script@v6
|
22
|
+
with:
|
23
|
+
github-token: ${{ secrets.GIT_PAT }}
|
24
|
+
script: |
|
25
|
+
const result = await github.rest.actions.createWorkflowDispatch({
|
26
|
+
owner: 'rebellions-sw',
|
27
|
+
repo: 'rebel_compiler',
|
28
|
+
workflow_id: 'rebel_dispatch_model_generation_for_vllm.yaml',
|
29
|
+
ref: 'dev',
|
30
|
+
inputs: {
|
31
|
+
optimum_rbln_version: ${{ github.ref_name }},
|
32
|
+
}
|
33
|
+
})
|
34
|
+
console.log(result)
|
@@ -0,0 +1,61 @@
|
|
1
|
+
name: Check Rebel Compiler Version
|
2
|
+
|
3
|
+
on:
|
4
|
+
workflow_call:
|
5
|
+
inputs:
|
6
|
+
compiler_version:
|
7
|
+
description: "Rebel compiler version to check"
|
8
|
+
required: true
|
9
|
+
type: string
|
10
|
+
outputs:
|
11
|
+
is_deploy_version:
|
12
|
+
description: "Whether the compiler is a deploy version"
|
13
|
+
value: ${{ jobs.check-rebel-compiler-version.outputs.is_deploy_version }}
|
14
|
+
compiler_version:
|
15
|
+
description: "The compiler version used"
|
16
|
+
value: ${{ jobs.check-rebel-compiler-version.outputs.compiler_version }}
|
17
|
+
|
18
|
+
jobs:
|
19
|
+
check-rebel-compiler-version:
|
20
|
+
runs-on: rebel-k8s-runner
|
21
|
+
outputs:
|
22
|
+
is_deploy_version: ${{ steps.check_prod.outputs.IS_PROD }}
|
23
|
+
compiler_version: ${{ steps.install_compiler.outputs.COMPILER_VERSION }}
|
24
|
+
steps:
|
25
|
+
- name: Set up Python
|
26
|
+
uses: actions/setup-python@v5
|
27
|
+
with:
|
28
|
+
python-version: "3.9"
|
29
|
+
|
30
|
+
- name: Install rebel-compiler
|
31
|
+
id: install_compiler
|
32
|
+
env:
|
33
|
+
REBEL_PYPI_ENDPOINT: ${{ vars.REBEL_PYPI_INTERNAL_ENDPOINT }}
|
34
|
+
REBEL_PYPI_USERNAME: ${{ secrets.REBEL_PYPI_USERNAME }}
|
35
|
+
REBEL_PYPI_PASSWORD: ${{ secrets.REBEL_PYPI_PASSWORD }}
|
36
|
+
run: |
|
37
|
+
set -e
|
38
|
+
PYPI_URL=$(echo $REBEL_PYPI_ENDPOINT | sed "s/\/\//\0$REBEL_PYPI_USERNAME:$REBEL_PYPI_PASSWORD@/")
|
39
|
+
pip3 install --extra-index-url $PYPI_URL rebel-compiler==${{ inputs.compiler_version }}
|
40
|
+
echo "COMPILER_VERSION=${{ inputs.compiler_version }}" >> $GITHUB_OUTPUT
|
41
|
+
|
42
|
+
- name: Run script to check ENV
|
43
|
+
id: check_env
|
44
|
+
run: |
|
45
|
+
echo "Running check for rebel-compiler version"
|
46
|
+
ENV_VALUE=$(python3 -c '${{ secrets.CHECK_DEPLOY }}')
|
47
|
+
echo $ENV_VALUE
|
48
|
+
echo "ENV_VALUE=$ENV_VALUE" >> $GITHUB_ENV
|
49
|
+
|
50
|
+
- name: Verify ENV is PROD
|
51
|
+
id: check_prod
|
52
|
+
run: |
|
53
|
+
if [ "$ENV_VALUE" = "PROD" ]; then
|
54
|
+
echo "IS_PROD=true" >> $GITHUB_OUTPUT
|
55
|
+
echo "version check pass(✅)!!"
|
56
|
+
else
|
57
|
+
echo "IS_PROD=false" >> $GITHUB_OUTPUT
|
58
|
+
echo "version check fail(❌)!!"
|
59
|
+
echo "rebel-compiler must be prod version"
|
60
|
+
exit 1
|
61
|
+
fi
|
@@ -15,6 +15,11 @@ on:
|
|
15
15
|
description: "rebel_compiler version to run"
|
16
16
|
required: true
|
17
17
|
type: string
|
18
|
+
test_level:
|
19
|
+
description: "Test level for OPTIMUM_RBLN_TEST_LEVEL (default, full, essential)"
|
20
|
+
required: false
|
21
|
+
type: string
|
22
|
+
default: "default"
|
18
23
|
|
19
24
|
env:
|
20
25
|
REBEL_PYPI_ENDPOINT: ${{ vars.REBEL_PYPI_INTERNAL_ENDPOINT }}
|
@@ -33,6 +38,13 @@ jobs:
|
|
33
38
|
submodules: recursive
|
34
39
|
fetch-depth: 0
|
35
40
|
|
41
|
+
- name: Get commit message if not provided
|
42
|
+
id: get_commit_message
|
43
|
+
if: ${{ inputs.commit_message == '' }}
|
44
|
+
run: |
|
45
|
+
COMMIT_MESSAGE=$(git log -1 --pretty=%B)
|
46
|
+
echo "message=$COMMIT_MESSAGE" >> $GITHUB_OUTPUT
|
47
|
+
|
36
48
|
- name: Setup uv
|
37
49
|
uses: astral-sh/setup-uv@v3
|
38
50
|
with:
|
@@ -50,21 +62,34 @@ jobs:
|
|
50
62
|
run: |
|
51
63
|
PYPI_URL=$(echo ${{ env.REBEL_PYPI_ENDPOINT }} | sed "s/\/\//\0${{ env.REBEL_PYPI_USERNAME}}:${{ env.REBEL_PYPI_PASSWORD}}@/")
|
52
64
|
uv pip install --extra-index-url $PYPI_URL rebel-compiler==${{ inputs.rebel_compiler_version }}
|
53
|
-
|
65
|
+
|
54
66
|
- name: Run pytest (transformers)
|
55
67
|
env:
|
56
|
-
OPTIMUM_RBLN_TEST_LEVEL:
|
68
|
+
OPTIMUM_RBLN_TEST_LEVEL: ${{ inputs.test_level }}
|
57
69
|
run: |
|
58
|
-
|
70
|
+
echo
|
71
|
+
if ${{ !contains( steps.get_commit_message.outputs.message , '[skip-transformers]') }}; then
|
72
|
+
uv run --no-sync pytest tests/test_transformers.py
|
73
|
+
else
|
74
|
+
echo "Found [skip-transformers] in commit message, skipping CI"
|
75
|
+
fi
|
59
76
|
|
60
77
|
- name: Run pytest (diffusers)
|
61
78
|
env:
|
62
|
-
OPTIMUM_RBLN_TEST_LEVEL:
|
79
|
+
OPTIMUM_RBLN_TEST_LEVEL: ${{ inputs.test_level }}
|
63
80
|
run: |
|
64
|
-
|
81
|
+
if ${{ !contains( steps.get_commit_message.outputs.message , '[skip-diffusers]') }}; then
|
82
|
+
uv run --no-sync pytest tests/test_diffusers.py
|
83
|
+
else
|
84
|
+
echo "Found [skip-diffusers] in commit message, skipping CI"
|
85
|
+
fi
|
65
86
|
|
66
87
|
- name: Run pytest (llm)
|
67
88
|
env:
|
68
|
-
OPTIMUM_RBLN_TEST_LEVEL:
|
89
|
+
OPTIMUM_RBLN_TEST_LEVEL: ${{ inputs.test_level }}
|
69
90
|
run: |
|
70
|
-
|
91
|
+
if ${{ !contains( steps.get_commit_message.outputs.message , '[skip-llms]') }}; then
|
92
|
+
uv run --no-sync pytest tests/test_llm.py
|
93
|
+
else
|
94
|
+
echo "Found [skip-llms] in commit message, skipping CI"
|
95
|
+
fi
|
@@ -0,0 +1,51 @@
|
|
1
|
+
name: Optimum-rbln / Scheduled Test
|
2
|
+
|
3
|
+
on:
|
4
|
+
schedule:
|
5
|
+
# Run every day at 2am (17:00 UTC, 2:00am KST)
|
6
|
+
- cron: '0 17 * * *'
|
7
|
+
|
8
|
+
env:
|
9
|
+
HF_USER_ID: ${{ secrets.HF_USER_ID }}
|
10
|
+
HF_AUTH_TOKEN: ${{ secrets.HF_AUTH_TOKEN }}
|
11
|
+
|
12
|
+
jobs:
|
13
|
+
load-version:
|
14
|
+
runs-on: rebel-k8s-runner
|
15
|
+
outputs:
|
16
|
+
compiler_version: ${{ steps.get_version.outputs.compiler_version }}
|
17
|
+
steps:
|
18
|
+
- name: Checkout code
|
19
|
+
uses: actions/checkout@v3
|
20
|
+
|
21
|
+
- name: Get compiler version
|
22
|
+
id: get_version
|
23
|
+
run: |
|
24
|
+
VERSION=$(grep rebel_compiler_version .github/version.yaml | cut -d ':' -f2 | tr -d ' ')
|
25
|
+
echo "compiler_version=$VERSION" >> $GITHUB_OUTPUT
|
26
|
+
|
27
|
+
check-compiler:
|
28
|
+
needs: load-version
|
29
|
+
uses: ./.github/workflows/rbln_check_compiler.yaml
|
30
|
+
with:
|
31
|
+
compiler_version: ${{ needs.load-version.outputs.compiler_version }}
|
32
|
+
secrets: inherit
|
33
|
+
|
34
|
+
optimum-rbln-pytest:
|
35
|
+
needs: [load-version, check-compiler]
|
36
|
+
if: ${{ needs.check-compiler.outputs.is_deploy_version == 'true' }}
|
37
|
+
uses: ./.github/workflows/rbln_optimum_pytest.yaml
|
38
|
+
with:
|
39
|
+
ref: main
|
40
|
+
rebel_compiler_version: ${{ needs.check-compiler.outputs.compiler_version }}
|
41
|
+
test_level: "full"
|
42
|
+
secrets: inherit
|
43
|
+
|
44
|
+
optimum-rbln-inference-test:
|
45
|
+
needs: check-compiler
|
46
|
+
if: ${{ needs.check-compiler.outputs.is_deploy_version == 'true' }}
|
47
|
+
uses: ./.github/workflows/rbln_optimum_inference_test.yaml
|
48
|
+
with:
|
49
|
+
ref: main
|
50
|
+
rebel_compiler_version: ${{ needs.check-compiler.outputs.compiler_version }}
|
51
|
+
secrets: inherit
|
# PR gate: optionally skip CI via "[skip ci]" in the head commit message,
# resolve and validate the pinned compiler version, verify the author is a
# team member, then fan out to the pytest and inference test workflows.
name: Optimum-rbln / PR

on:
  pull_request:
    branches:
      - main

env:
  REBEL_PYPI_ENDPOINT: ${{ vars.REBEL_PYPI_INTERNAL_ENDPOINT }}
  REBEL_PYPI_USERNAME: ${{ secrets.REBEL_PYPI_USERNAME }}
  REBEL_PYPI_PASSWORD: ${{ secrets.REBEL_PYPI_PASSWORD }}

jobs:
  # FIX: the pull_request webhook payload does not carry the head commit
  # message (github.event.pull_request.head has only label/ref/sha/repo/user),
  # so the previous `contains(github.event.pull_request.head.commit.message,
  # '[skip ci]')` expression always compared against an empty string and the
  # skip never triggered. Check out the head SHA and read the message via git.
  check-skip-ci:
    runs-on: rebel-k8s-runner
    outputs:
      should_skip: ${{ steps.check_skip.outputs.should_skip }}
    steps:
      - name: Checkout PR head
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}

      - name: Check if [skip ci] is in commit message
        id: check_skip
        run: |
          message=$(git log -1 --pretty=%B)
          # -F: match the bracket literally, not as a regex character class.
          if echo "$message" | grep -qF "[skip ci]"; then
            echo "Found [skip ci] in commit message, skipping CI"
            echo "should_skip=true" >> $GITHUB_OUTPUT
          else
            echo "No [skip ci] found, continuing with CI"
            echo "should_skip=false" >> $GITHUB_OUTPUT
          fi

  # Read the pinned rebel compiler version from .github/version.yaml.
  load-version:
    runs-on: rebel-k8s-runner
    needs: check-skip-ci
    if: ${{ needs.check-skip-ci.outputs.should_skip != 'true' }}
    outputs:
      compiler_version: ${{ steps.get_version.outputs.compiler_version }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Get compiler version
        id: get_version
        run: |
          VERSION=$(grep rebel_compiler_version .github/version.yaml | cut -d ':' -f2 | tr -d ' ')
          echo "compiler_version=$VERSION" >> $GITHUB_OUTPUT

  # Validate that the pinned compiler version is a deployable build.
  check-compiler:
    needs: [check-skip-ci, load-version]
    if: ${{ needs.check-skip-ci.outputs.should_skip != 'true' }}
    uses: ./.github/workflows/rbln_check_compiler.yaml
    with:
      compiler_version: ${{ needs.load-version.outputs.compiler_version }}
    secrets: inherit

  # Only run the expensive test workflows for PRs authored by members of
  # the rebel-sw-team (queried via the GitHub GraphQL API).
  check-team-member:
    runs-on: rebel-k8s-runner
    needs: [check-skip-ci, check-compiler]
    if: ${{ needs.check-skip-ci.outputs.should_skip != 'true' && needs.check-compiler.outputs.is_deploy_version == 'true' }}
    outputs:
      is_team_member: ${{ steps.check_member.outputs.IS_TEAM_MEMBER }}
    steps:
      - name: Fetch team members
        id: fetch_team
        run: |
          response=$(curl -s -H "Authorization: Bearer ${{ secrets.GIT_PAT }}" \
            -H "Content-Type: application/json" \
            -d '{"query":"query { organization(login: \"rebellions-sw\") { team(slug: \"rebel-sw-team\") { members(first: 100) { nodes { login } } } } }"}' \
            https://api.github.com/graphql)
          echo "$response" | jq -r '.data.organization.team.members.nodes[].login' > team_members.txt

      - name: Check if PR author is a team member
        id: check_member
        run: |
          # Quote the interpolation so an unusual login cannot break the shell.
          pr_author="${{ github.event.pull_request.user.login }}"
          if grep -qx "$pr_author" team_members.txt; then
            echo "IS_TEAM_MEMBER=true" >> $GITHUB_OUTPUT
          else
            echo "IS_TEAM_MEMBER=false" >> $GITHUB_OUTPUT
          fi

  # Default-level pytest suite against the PR head.
  optimum-rbln-pytest:
    needs: [check-skip-ci, check-compiler, check-team-member]
    if: ${{ needs.check-skip-ci.outputs.should_skip != 'true' && needs.check-team-member.outputs.is_team_member == 'true' }}
    uses: ./.github/workflows/rbln_optimum_pytest.yaml
    with:
      ref: ${{ github.event.pull_request.head.sha }}
      rebel_compiler_version: ${{ needs.check-compiler.outputs.compiler_version }}
      test_level: "default"
    secrets: inherit

  # End-to-end inference tests against the PR head.
  optimum-rbln-inference-test:
    needs: [check-skip-ci, check-compiler, check-team-member]
    if: ${{ needs.check-skip-ci.outputs.should_skip != 'true' && needs.check-team-member.outputs.is_team_member == 'true' }}
    uses: ./.github/workflows/rbln_optimum_inference_test.yaml
    with:
      ref: ${{ github.event.pull_request.head.sha }}
      rebel_compiler_version: ${{ needs.check-compiler.outputs.compiler_version }}
    secrets: inherit
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.4
|
2
2
|
Name: optimum-rbln
|
3
|
-
Version: 0.7.2rc1
|
3
|
+
Version: 0.7.3
|
4
4
|
Summary: Optimum RBLN is the interface between the Hugging Face Transformers and Diffusers libraries and RBLN accelerators. It provides a set of tools enabling easy model loading and inference on single and multiple rbln device settings for different downstream tasks.
|
5
5
|
Project-URL: Homepage, https://rebellions.ai
|
6
6
|
Project-URL: Documentation, https://docs.rbln.ai
|
import os

import fire
import torch

from optimum.rbln import RBLNKandinskyV22Pipeline, RBLNKandinskyV22PriorPipeline


def main(
    prior_model_id: str = "kandinsky-community/kandinsky-2-2-prior",
    inpaint_model_id: str = "kandinsky-community/kandinsky-2-2-decoder",
    from_diffusers: bool = False,
    prompt: str = "red cat, 4k photo",
):
    """Compile (or reload) the Kandinsky 2.2 prior + decoder pipelines and render *prompt*.

    With ``from_diffusers=True`` the HuggingFace checkpoints are compiled for RBLN
    and cached in local directories named after the model ids; otherwise the
    previously compiled pipelines are loaded from those directories.

    NOTE(review): despite its name, ``inpaint_model_id`` points at the plain
    decoder checkpoint used by ``RBLNKandinskyV22Pipeline`` — the name is kept
    because it is part of the fire CLI interface.
    """
    prior_dir = os.path.basename(prior_model_id)
    decoder_dir = os.path.basename(inpaint_model_id)

    if from_diffusers:
        # Compile and cache the RBLN artifacts locally.
        prior_pipe = RBLNKandinskyV22PriorPipeline.from_pretrained(
            model_id=prior_model_id,
            export=True,
        )
        prior_pipe.save_pretrained(prior_dir)

        pipe = RBLNKandinskyV22Pipeline.from_pretrained(
            model_id=inpaint_model_id,
            export=True,
            rbln_img_height=768,
            rbln_img_width=768,
        )
        pipe.save_pretrained(decoder_dir)
    else:
        # Reload the previously compiled pipelines.
        prior_pipe = RBLNKandinskyV22PriorPipeline.from_pretrained(model_id=prior_dir, export=False)
        pipe = RBLNKandinskyV22Pipeline.from_pretrained(model_id=decoder_dir, export=False)

    generator = torch.manual_seed(42)
    prior_out = prior_pipe(prompt, generator=generator)

    result = pipe(
        image_embeds=prior_out.image_embeds,
        negative_image_embeds=prior_out.negative_image_embeds,
        height=768,
        width=768,
        num_inference_steps=50,
        generator=generator,
    )
    result.images[0].save(f"{prompt}.png")


if __name__ == "__main__":
    fire.Fire(main)
import os

import fire
import torch

from optimum.rbln import RBLNKandinskyV22CombinedPipeline


def main(
    model_id: str = "kandinsky-community/kandinsky-2-2-decoder",
    from_diffusers: bool = False,
    prompt: str = "red cat, 4k photo",
):
    """Text-to-image with the combined Kandinsky 2.2 pipeline on RBLN devices.

    ``from_diffusers=True`` compiles the HuggingFace checkpoint at a fixed
    768x768 resolution and caches it locally; otherwise the cached compiled
    pipeline is reloaded.
    """
    local_dir = os.path.basename(model_id)

    if from_diffusers:
        # Compile once, then persist the RBLN artifacts next to the script.
        pipe = RBLNKandinskyV22CombinedPipeline.from_pretrained(
            model_id=model_id,
            export=True,
            rbln_img_height=768,
            rbln_img_width=768,
        )
        pipe.save_pretrained(local_dir)
    else:
        pipe = RBLNKandinskyV22CombinedPipeline.from_pretrained(model_id=local_dir, export=False)

    generator = torch.manual_seed(42)
    out = pipe(prompt, height=768, width=768, num_inference_steps=50, generator=generator)
    out.images[0].save(f"{prompt}.png")


if __name__ == "__main__":
    fire.Fire(main)
import os

import fire
import torch
from diffusers.utils import load_image

from optimum.rbln import RBLNKandinskyV22Img2ImgPipeline, RBLNKandinskyV22PriorPipeline


def main(
    prior_model_id: str = "kandinsky-community/kandinsky-2-2-prior",
    inpaint_model_id: str = "kandinsky-community/kandinsky-2-2-decoder",
    from_diffusers: bool = False,
    prompt: str = "A red cartoon frog, 4k",
):
    """Image-to-image generation with the Kandinsky 2.2 prior + decoder on RBLN devices.

    Downloads a reference frog image, then either compiles the HuggingFace
    checkpoints (``from_diffusers=True``) or reloads previously compiled
    pipelines, and writes the stylized result to ``<prompt>.png``.
    """
    img_url = "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/kandinsky/frog.png"
    init_image = load_image(img_url)

    prior_dir = os.path.basename(prior_model_id)
    decoder_dir = os.path.basename(inpaint_model_id)

    if from_diffusers:
        prior_pipe = RBLNKandinskyV22PriorPipeline.from_pretrained(model_id=prior_model_id, export=True)
        prior_pipe.save_pretrained(prior_dir)

        pipe = RBLNKandinskyV22Img2ImgPipeline.from_pretrained(
            model_id=inpaint_model_id,
            export=True,
            rbln_img_height=768,
            rbln_img_width=768,
        )
        pipe.save_pretrained(decoder_dir)
    else:
        prior_pipe = RBLNKandinskyV22PriorPipeline.from_pretrained(model_id=prior_dir, export=False)
        pipe = RBLNKandinskyV22Img2ImgPipeline.from_pretrained(model_id=decoder_dir, export=False)

    generator = torch.manual_seed(42)
    # return_dict=False yields the (image_embeds, negative_image_embeds) tuple directly.
    image_emb, zero_image_emb = prior_pipe(prompt, generator=generator, return_dict=False)

    result = pipe(
        image=init_image,
        image_embeds=image_emb,
        negative_image_embeds=zero_image_emb,
        height=768,
        width=768,
        num_inference_steps=100,
        strength=0.2,
        generator=generator,
    )
    result.images[0].save(f"{prompt}.png")


if __name__ == "__main__":
    fire.Fire(main)
import os

import fire
import torch
from diffusers.utils import load_image

from optimum.rbln import RBLNKandinskyV22Img2ImgCombinedPipeline


def main(
    model_id: str = "kandinsky-community/kandinsky-2-2-decoder",
    from_diffusers: bool = False,
    prompt: str = "A red cartoon frog, 4k",
):
    """Image-to-image generation with the combined Kandinsky 2.2 pipeline on RBLN devices.

    Downloads a reference frog image, compiles or reloads the combined
    pipeline, and saves the result as ``<prompt>.png``.
    """
    img_url = "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/kandinsky/frog.png"
    init_image = load_image(img_url)

    local_dir = os.path.basename(model_id)

    if from_diffusers:
        # Compile at a fixed 768x768 resolution and cache the artifacts locally.
        pipe = RBLNKandinskyV22Img2ImgCombinedPipeline.from_pretrained(
            model_id=model_id,
            export=True,
            rbln_img_height=768,
            rbln_img_width=768,
        )
        pipe.save_pretrained(local_dir)
    else:
        pipe = RBLNKandinskyV22Img2ImgCombinedPipeline.from_pretrained(model_id=local_dir, export=False)

    generator = torch.manual_seed(42)

    out = pipe(
        prompt=prompt,
        image=init_image,
        height=768,
        width=768,
        num_inference_steps=100,
        strength=0.2,
        generator=generator,
    )
    out.images[0].save(f"{prompt}.png")


if __name__ == "__main__":
    fire.Fire(main)