ipex-llm 2.3.0b20250513__py3-none-manylinux2010_x86_64.whl → 2.3.0b20250515__py3-none-manylinux2010_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ipex_llm/serving/fastchat/model_worker.py +1 -1
- ipex_llm/transformers/model.py +1 -1
- ipex_llm/transformers/npu_model.py +3 -3
- ipex_llm/transformers/npu_models/phi3.py +1 -1
- ipex_llm/transformers/speculative.py +2 -1
- ipex_llm/transformers/utils.py +1 -1
- ipex_llm/utils/lazy_load_torch.py +1 -1
- {ipex_llm-2.3.0b20250513.dist-info → ipex_llm-2.3.0b20250515.dist-info}/METADATA +11 -11
- {ipex_llm-2.3.0b20250513.dist-info → ipex_llm-2.3.0b20250515.dist-info}/RECORD +15 -15
- {ipex_llm-2.3.0b20250513.data → ipex_llm-2.3.0b20250515.data}/scripts/ipex-llm-init +0 -0
- {ipex_llm-2.3.0b20250513.data → ipex_llm-2.3.0b20250515.data}/scripts/llm-chat +0 -0
- {ipex_llm-2.3.0b20250513.data → ipex_llm-2.3.0b20250515.data}/scripts/llm-cli +0 -0
- {ipex_llm-2.3.0b20250513.dist-info → ipex_llm-2.3.0b20250515.dist-info}/WHEEL +0 -0
- {ipex_llm-2.3.0b20250513.dist-info → ipex_llm-2.3.0b20250515.dist-info}/entry_points.txt +0 -0
- {ipex_llm-2.3.0b20250513.dist-info → ipex_llm-2.3.0b20250515.dist-info}/top_level.txt +0 -0
ipex_llm/serving/fastchat/model_worker.py
CHANGED
@@ -470,7 +470,7 @@ if __name__ == "__main__":
 
     if args.gpus:
         invalidInputError(len(args.gpus.split(",")) > args.num_gpus, f"Larger --num-gpus "
-                          "({args.num_gpus}) than --gpus {args.gpus}!")
+                          f"({args.num_gpus}) than --gpus {args.gpus}!")
        os.environ["CUDA_VISIBLE_DEVICES"] = args.gpus
 
    gptq_config = GptqConfig(
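All of the source changes in this release follow the same pattern: an error-message string containing `{...}` placeholders was missing the `f` prefix, so the placeholders were printed literally instead of being interpolated. A simplified, runnable sketch of the difference (plain variables stand in for the `args.num_gpus` / `args.gpus` attributes used above):

    num_gpus = 1
    gpus = "0,1"

    # Without the f prefix, the braces are emitted verbatim:
    print("Larger --num-gpus ({num_gpus}) than --gpus {gpus}!")
    # -> Larger --num-gpus ({num_gpus}) than --gpus {gpus}!

    # With the f prefix, the values are interpolated, as in the fixed lines:
    print(f"Larger --num-gpus ({num_gpus}) than --gpus {gpus}!")
    # -> Larger --num-gpus (1) than --gpus 0,1!

Note that adjacent string literals are concatenated by Python, but the `f` prefix applies per literal, which is why each continuation line in these diffs needs its own `f`.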
ipex_llm/transformers/model.py
CHANGED
@@ -672,7 +672,7 @@ class _BaseAutoModelClass:
             else:
                 invalidInputError(False,
                                   f'`torch_dtype` can be either `torch.dtype` or `"auto"`,'
-                                  'but received {torch_dtype}')
+                                  f'but received {torch_dtype}')
             dtype_orig = model_class._set_default_torch_dtype(torch_dtype)
 
         # Pretrained Model
ipex_llm/transformers/npu_model.py
CHANGED
@@ -217,7 +217,7 @@ class _BaseAutoModelClass:
                 max_prompt_len < max_context_len,
                 (
                     f"max_prompt_len ({max_prompt_len}) should be less"
-                    " than max_context_len ({max_context_len})"
+                    f" than max_context_len ({max_context_len})"
                 ),
             )
             optimize_kwargs = {
@@ -553,7 +553,7 @@ class _BaseAutoModelClass:
                 invalidInputError(
                     False,
                     f'`torch_dtype` can be either `torch.dtype` or `"auto"`,'
-                    "but received {torch_dtype}",
+                    f"but received {torch_dtype}",
                 )
             dtype_orig = model_class._set_default_torch_dtype(torch_dtype)
 
@@ -588,7 +588,7 @@ class _BaseAutoModelClass:
                 max_prompt_len < max_context_len,
                 (
                     f"max_prompt_len ({max_prompt_len}) should be less"
-                    " than max_context_len ({max_context_len})"
+                    f" than max_context_len ({max_context_len})"
                 ),
             )
             from ipex_llm.transformers.npu_models.convert_mp import optimize_llm_pre
ipex_llm/transformers/npu_models/phi3.py
CHANGED
@@ -127,7 +127,7 @@ def phi3_attention_forward(
         invalidInputError(
             False,
             f"Attention mask should be of size {(bsz, 1, q_len, kv_seq_len)},"
-            " but is {attention_mask.size()}"
+            f" but is {attention_mask.size()}"
         )
         attn_weights = attn_weights + attention_mask
 
ipex_llm/transformers/speculative.py
CHANGED
@@ -54,7 +54,8 @@ if version.parse(trans_version) >= version.parse("4.39.0"):
     try:
         from trl.core import top_k_top_p_filtering
     except ModuleNotFoundError:
-        log4Error.invalidInputError(False,
+        log4Error.invalidInputError(False,
+                                    "For transformers version >= 4.39.0, pip install trl==0.11.0")
 else:
     from transformers import top_k_top_p_filtering
 
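The speculative.py hunk only adds the actionable error message; the surrounding logic, visible in the hunk header and context, is a version-gated import fallback for `top_k_top_p_filtering`. A rough, self-contained sketch of that pattern under the same version gate, with ipex-llm's `log4Error.invalidInputError` helper replaced by a plain `ImportError` for illustration:

    from packaging import version
    import transformers

    if version.parse(transformers.__version__) >= version.parse("4.39.0"):
        try:
            # On these transformers versions the helper is taken from trl instead.
            from trl.core import top_k_top_p_filtering
        except ModuleNotFoundError as e:
            raise ImportError(
                "For transformers version >= 4.39.0, pip install trl==0.11.0"
            ) from e
    else:
        from transformers import top_k_top_p_filtering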
ipex_llm/transformers/utils.py
CHANGED
@@ -92,7 +92,7 @@ def load_state_dict(checkpoint_file: Union[str, os.PathLike]):
     except Exception as e:
         invalidInputError(False,
                           f"Unable to load weights"
-                          "from pytorch checkpoint file for '{checkpoint_file}' "
+                          f"from pytorch checkpoint file for '{checkpoint_file}' "
                           f"at '{checkpoint_file}'. ")
 
 
ipex_llm/utils/lazy_load_torch.py
CHANGED
@@ -112,7 +112,7 @@ def _load(pickle_fp, map_location, picklemoudle, pickle_file='data.pkl', zip_fil
             data = fp.read(size)
             return torch.frombuffer(bytearray(data), dtype=dtype)
         description = f'storage data_type={data_type} ' \
-                      'path-in-zip={filename} path={self.zip_file.filename}'
+                      f'path-in-zip={filename} path={self.zip_file.filename}'
         return LazyStorage(load=load, kind=pid[1], description=description)
 
     @staticmethod
{ipex_llm-2.3.0b20250513.dist-info → ipex_llm-2.3.0b20250515.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ipex-llm
-Version: 2.3.0b20250513
+Version: 2.3.0b20250515
 Summary: Large Language Model Develop Toolkit
 Home-page: https://github.com/intel-analytics/ipex-llm
 Author: BigDL Authors
@@ -27,7 +27,7 @@ Requires-Dist: intel-openmp ; (platform_machine == "x86_64" or platform_machine
 Requires-Dist: torch ==2.1.2+cpu ; (platform_system == "Linux") and extra == 'all'
 Requires-Dist: torch ==2.1.2 ; (platform_system == "Windows") and extra == 'all'
 Provides-Extra: cpp
-Requires-Dist: bigdl-core-cpp ==2.7.
+Requires-Dist: bigdl-core-cpp ==2.7.0b20250515 ; extra == 'cpp'
 Requires-Dist: setuptools ; extra == 'cpp'
 Requires-Dist: onednn-devel ==2025.0.1 ; (platform_system == "Windows") and extra == 'cpp'
 Requires-Dist: onednn ==2025.0.1 ; (platform_system == "Windows") and extra == 'cpp'
@@ -60,7 +60,7 @@ Requires-Dist: transformers ==4.40.0 ; extra == 'npu'
 Requires-Dist: intel-openmp ; (platform_machine == "x86_64" or platform_machine == "AMD64") and extra == 'npu'
 Requires-Dist: torch ==2.1.2+cpu ; (platform_system == "Linux") and extra == 'npu'
 Requires-Dist: torch ==2.1.2 ; (platform_system == "Windows") and extra == 'npu'
-Requires-Dist: bigdl-core-npu ==2.7.
+Requires-Dist: bigdl-core-npu ==2.7.0b20250515 ; (platform_system == "Windows") and extra == 'npu'
 Provides-Extra: serving
 Requires-Dist: py-cpuinfo ; extra == 'serving'
 Requires-Dist: fschat[model_worker,webui] ==0.2.36 ; extra == 'serving'
@@ -80,9 +80,9 @@ Requires-Dist: setuptools <70.0.0 ; extra == 'xpu'
 Requires-Dist: torch ==2.1.0a0 ; extra == 'xpu'
 Requires-Dist: torchvision ==0.16.0a0 ; extra == 'xpu'
 Requires-Dist: intel-extension-for-pytorch ==2.1.10+xpu ; extra == 'xpu'
-Requires-Dist: bigdl-core-xe-21 ==2.7.
-Requires-Dist: bigdl-core-xe-batch-21 ==2.7.
-Requires-Dist: bigdl-core-xe-addons-21 ==2.7.
+Requires-Dist: bigdl-core-xe-21 ==2.7.0b20250515 ; extra == 'xpu'
+Requires-Dist: bigdl-core-xe-batch-21 ==2.7.0b20250515 ; extra == 'xpu'
+Requires-Dist: bigdl-core-xe-addons-21 ==2.7.0b20250515 ; extra == 'xpu'
 Provides-Extra: xpu-2-1
 Requires-Dist: py-cpuinfo ; extra == 'xpu-2-1'
 Requires-Dist: protobuf ; extra == 'xpu-2-1'
@@ -97,9 +97,9 @@ Requires-Dist: setuptools <70.0.0 ; extra == 'xpu-2-1'
 Requires-Dist: torch ==2.1.0a0 ; extra == 'xpu-2-1'
 Requires-Dist: torchvision ==0.16.0a0 ; extra == 'xpu-2-1'
 Requires-Dist: intel-extension-for-pytorch ==2.1.10+xpu ; extra == 'xpu-2-1'
-Requires-Dist: bigdl-core-xe-21 ==2.7.
-Requires-Dist: bigdl-core-xe-batch-21 ==2.7.
-Requires-Dist: bigdl-core-xe-addons-21 ==2.7.
+Requires-Dist: bigdl-core-xe-21 ==2.7.0b20250515 ; extra == 'xpu-2-1'
+Requires-Dist: bigdl-core-xe-batch-21 ==2.7.0b20250515 ; extra == 'xpu-2-1'
+Requires-Dist: bigdl-core-xe-addons-21 ==2.7.0b20250515 ; extra == 'xpu-2-1'
 Requires-Dist: intel-openmp ; (platform_machine == "x86_64" or platform_machine == "AMD64") and extra == 'xpu-2-1'
 Requires-Dist: dpcpp-cpp-rt ==2024.0.2 ; (platform_system == "Windows") and extra == 'xpu-2-1'
 Requires-Dist: mkl-dpcpp ==2024.0.0 ; (platform_system == "Windows") and extra == 'xpu-2-1'
@@ -117,7 +117,7 @@ Requires-Dist: setuptools ; extra == 'xpu-2-6'
 Requires-Dist: torch ==2.6.0+xpu ; extra == 'xpu-2-6'
 Requires-Dist: torchvision ==0.21.0+xpu ; extra == 'xpu-2-6'
 Requires-Dist: torchaudio ==2.6.0+xpu ; extra == 'xpu-2-6'
-Requires-Dist: bigdl-core-xe-all ==2.7.
+Requires-Dist: bigdl-core-xe-all ==2.7.0b20250515 ; extra == 'xpu-2-6'
 Requires-Dist: onednn-devel ==2025.0.1 ; extra == 'xpu-2-6'
 Requires-Dist: onednn ==2025.0.1 ; extra == 'xpu-2-6'
 Requires-Dist: dpcpp-cpp-rt ==2025.0.2 ; extra == 'xpu-2-6'
@@ -132,7 +132,7 @@ Requires-Dist: tokenizers ==0.15.2 ; extra == 'xpu-2-6-arl'
 Requires-Dist: accelerate ==0.23.0 ; extra == 'xpu-2-6-arl'
 Requires-Dist: tabulate ; extra == 'xpu-2-6-arl'
 Requires-Dist: setuptools ; extra == 'xpu-2-6-arl'
-Requires-Dist: bigdl-core-xe-all ==2.7.
+Requires-Dist: bigdl-core-xe-all ==2.7.0b20250515 ; extra == 'xpu-2-6-arl'
 Requires-Dist: onednn-devel ==2025.0.1 ; extra == 'xpu-2-6-arl'
 Requires-Dist: onednn ==2025.0.1 ; extra == 'xpu-2-6-arl'
 Requires-Dist: dpcpp-cpp-rt ==2025.0.2 ; extra == 'xpu-2-6-arl'
{ipex_llm-2.3.0b20250513.dist-info → ipex_llm-2.3.0b20250515.dist-info}/RECORD
CHANGED
@@ -89,7 +89,7 @@ ipex_llm/serving/fastapi/tgi_protocol.py,sha256=rPySgmBODkJ14OrS_KJaFe5RpqkHNH9c
 ipex_llm/serving/fastchat/__init__.py,sha256=b2IXvVqQ5cItki021h8s3ymW12RPu8QNPprq4Mn3bDM,586
 ipex_llm/serving/fastchat/bigdl_llm_model.py,sha256=NXEN_3EPmcP3dDnvug4MokEXXE2zVUnENgBYxfubqic,10084
 ipex_llm/serving/fastchat/ipex_llm_worker.py,sha256=vCqPi_ISuWU02Ngs6nEihwW-e8CDyazQa6qsXv-hvLU,19647
-ipex_llm/serving/fastchat/model_worker.py,sha256=
+ipex_llm/serving/fastchat/model_worker.py,sha256=jvRrrSs8rDQXR1Q4J8cq6VeBozU-TW-zaAerrLdkQHc,16650
 ipex_llm/serving/fastchat/tgi_api_protocol.py,sha256=brT3k3-V0NJrU4fRqUwWjC0O3iOitdttDfduXXEefh0,5918
 ipex_llm/serving/fastchat/tgi_api_server.py,sha256=agNTAEiZPSuj3dEdIdYKwkoY0cXOUDX06DiM9VP2knQ,24418
 ipex_llm/serving/fastchat/vllm_worker.py,sha256=ZLz2Q9GxJO6r_LOiP6epgCRjBGk-K4EB1SNEWSJp5DA,11091
@@ -102,17 +102,17 @@ ipex_llm/transformers/lisa.py,sha256=F5WxbtXQ7RdKulj83h_2DnEIgKiKGZf7zvOmg6QBl2s
 ipex_llm/transformers/loader.py,sha256=c9qfJSC6-in-mkd-iKb1igk3nHWUYS3QtyH2cOazmKc,6825
 ipex_llm/transformers/lookup.py,sha256=b6OlZ9OV10R9qeWw8mVryVpDxszkjwLkldvi7GPMJY8,19614
 ipex_llm/transformers/low_bit_linear.py,sha256=1S8H684odAx5ZVDx_qNMv4FFrkJekZduao8datZYiqw,39201
-ipex_llm/transformers/model.py,sha256=
+ipex_llm/transformers/model.py,sha256=tWTzKsCz8A1P5gYEeG9KZgpxQgbP9hQ-TWAdkebA6Jg,40886
 ipex_llm/transformers/modelling_bigdl.py,sha256=7JpNVMuyq_OmtNUaMFMXdxPWZp2q0QHC02QeA-VTPOw,6709
-ipex_llm/transformers/npu_model.py,sha256=
+ipex_llm/transformers/npu_model.py,sha256=X8mdY6N9TYlxG41wmFloX44ZUjyitFzdKbhzO7TToFY,40309
 ipex_llm/transformers/patches.py,sha256=G9KcXxo42H1HJEDaroq4JbBN5P0P0lty7U7kk7-g4tw,991
 ipex_llm/transformers/pipeline_parallel.py,sha256=uNZpOXljNmdoEYnP8U-VFiN4dRZb2piQbIf2bG9LQnE,49051
 ipex_llm/transformers/qlora.py,sha256=qV9Y6G5kAaet77LLA3oXn3qQY4ayyAPZ7NAjOlHCS7g,14967
 ipex_llm/transformers/relora.py,sha256=-dYzUV0P-IhO2jFdnzN9-v_sFzJpRj3ZwN9eCJzOoCw,16567
-ipex_llm/transformers/speculative.py,sha256=
+ipex_llm/transformers/speculative.py,sha256=_5LkyLvdAtkpMfOFBhzbtgHS7MzkJz_EbNpTt4gLQEg,63412
 ipex_llm/transformers/streamer.py,sha256=RrVlLblzCOtABRUpaMXAyaMnCGgLUtAi_YesLumRbww,4842
 ipex_llm/transformers/training_patch.py,sha256=oxMkUtqyvqJiprw6dE3skkYfD1HOmUlH9N0hBkbn0G0,10799
-ipex_llm/transformers/utils.py,sha256=
+ipex_llm/transformers/utils.py,sha256=FCin1mKRmDyQ9J8dCJgcPOhOh1DUtawrL0wZZ7iSnOw,17458
 ipex_llm/transformers/xgrammar.py,sha256=dd-e0DO0s-t-idngVzncnPAa_Gxb8YNoRJ3WROkwrs8,1840
 ipex_llm/transformers/xpu_customize_fwd.py,sha256=PUBYLnTbaBXUs3Dnte9Gqln2XFk8iA62SmloWjr7GJI,7668
 ipex_llm/transformers/xpu_ops.py,sha256=z95iTtcDQvNyJOvB4A6B_ECTYjHp4A7x-FsssoETOMs,4914
@@ -214,7 +214,7 @@ ipex_llm/transformers/npu_models/mistral.py,sha256=iRdmIQI_bbbZxRCYRvnV4rWjX2t-6
 ipex_llm/transformers/npu_models/mp_models_base.py,sha256=19iXZhX7SBQNXlAANn4gB1Enrk1FDmsGkmgI0m19vKg,28555
 ipex_llm/transformers/npu_models/npu_llm_cpp.py,sha256=B40sBujvy31ETFBgcYAf4CN23UuTCBEJVaxjIMaoEHk,4268
 ipex_llm/transformers/npu_models/paraformer_mp.py,sha256=lGEjmKHW_Pk3BE3nqa1ZVgJ3P5p4lNp7p6wMV7KrtCU,37871
-ipex_llm/transformers/npu_models/phi3.py,sha256=
+ipex_llm/transformers/npu_models/phi3.py,sha256=n3WRQMcaL1iJOuchQh3P-MCulSCYWVzNM_JCh32q3_o,6556
 ipex_llm/transformers/npu_models/phi3_v.py,sha256=EMZuTPkGfuDVp9c5BU1HyzXHWKswHRQ8bvQjzocIyHA,7737
 ipex_llm/transformers/npu_models/qwen2.py,sha256=RDNtPK8kxMk3z8A4S53saTrw2klgkzo4oa7voJLwr1o,12085
 ipex_llm/transformers/npu_models/qwen2_mp.py,sha256=EKiI80rnQ43WUF_2wWCy75mx-rbjAbRQSB49OgjZFNo,45003
@@ -241,7 +241,7 @@ ipex_llm/utils/convert_util.py,sha256=X1eLTdF9crlca6jPEQKymEITRx_57i_87dhl1htPEh
 ipex_llm/utils/glibc_checker.py,sha256=bm6kN6gbpA7GKtnUgsCE6K16iZZyil-Ylp55SoRvHG8,2093
 ipex_llm/utils/ipex_importer.py,sha256=pQu3IbvXZjgSEw7T42EZwbfVXpCYEm5tB2QHtzYmp1I,5922
 ipex_llm/utils/isa_checker.py,sha256=SisZ32B1G7meZvgRKZcani1WevIhMwum7nilZ3sHgXY,2259
-ipex_llm/utils/lazy_load_torch.py,sha256=
+ipex_llm/utils/lazy_load_torch.py,sha256=GeolOQFIj2Lnn47nUCGx66bqEqznW0my4atbTQJp6Us,7130
 ipex_llm/utils/modules.py,sha256=LZ7FvggSSb2QMHYeeHDmtI8ziIvXVHsF4hV-vOUK60Q,764
 ipex_llm/utils/utils.py,sha256=jfDRc622QF2U2YZpmqo0evnaElHbVBDoRZqH_6hUXmI,1556
 ipex_llm/utils/common/__init__.py,sha256=pV_8I-oa8RrHSCJg6ADBY_Tre6vXCqAMyhfpj1UhvYE,988
@@ -265,11 +265,11 @@ ipex_llm/vllm/xpu/engine/__init__.py,sha256=sOvwLx_Zj0jiRCGj9W3DgGTfcSU3hABYhgIQ
 ipex_llm/vllm/xpu/engine/engine.py,sha256=XAprw7VifjfnR915TZOaKcxe3QCFsVBgxzS8qOdn1yg,14462
 ipex_llm/vllm/xpu/entrypoints/openai/api_server.py,sha256=uWHyyHLw-B8wXBnQw9_MCG81tKK9Jb0dyq1xfYHgoNw,45905
 ipex_llm/vllm/xpu/entrypoints/openai/cli_args.py,sha256=hB398yYtKauASRzevctScdbFIjiiSGMAe1bwEuIHrhY,10893
-ipex_llm-2.3.
-ipex_llm-2.3.
-ipex_llm-2.3.
-ipex_llm-2.3.
-ipex_llm-2.3.
-ipex_llm-2.3.
-ipex_llm-2.3.
-ipex_llm-2.3.
+ipex_llm-2.3.0b20250515.data/scripts/ipex-llm-init,sha256=fLQsT2dRL6H5bThb4GuIWotAuqoLsIxFwA-0c2qmaO8,6672
+ipex_llm-2.3.0b20250515.data/scripts/llm-chat,sha256=TdUnUmNapzuoe1c8IzrdVOQwWEg8IqsMSBRlOD3daZM,2249
+ipex_llm-2.3.0b20250515.data/scripts/llm-cli,sha256=RXGPlLElHxcKzoUxljEMBIAXbzCDysXL-Nxw-xF-7LU,2457
+ipex_llm-2.3.0b20250515.dist-info/METADATA,sha256=_lbNuaDcLJThqZdxaHamNmc-qDmC7S7F7E_MgDqH_lk,8865
+ipex_llm-2.3.0b20250515.dist-info/WHEEL,sha256=PPJcBMAZibF_2GFE9NmOJGqiaSMPiNFbJd6QaJjdA6Y,109
+ipex_llm-2.3.0b20250515.dist-info/entry_points.txt,sha256=TiUyBB2MRmfF3ko-pyAEzqeBCRnyhu27bNOAsWPp3e8,61
+ipex_llm-2.3.0b20250515.dist-info/top_level.txt,sha256=CGCMHM-SyqUabU4h8RqJ2KTYckQUO3LvIWwmUQ6Qbzw,9
+ipex_llm-2.3.0b20250515.dist-info/RECORD,,