xinference 1.2.2__py3-none-any.whl → 1.3.0.post1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of xinference might be problematic; see the package registry's advisory page for more details.

Files changed (68)
  1. xinference/_version.py +3 -3
  2. xinference/client/restful/restful_client.py +9 -1
  3. xinference/core/model.py +19 -0
  4. xinference/core/resource.py +7 -1
  5. xinference/core/status_guard.py +1 -0
  6. xinference/core/supervisor.py +228 -19
  7. xinference/core/utils.py +1 -29
  8. xinference/core/worker.py +28 -2
  9. xinference/deploy/cmdline.py +33 -3
  10. xinference/deploy/test/test_cmdline.py +32 -0
  11. xinference/device_utils.py +43 -1
  12. xinference/model/audio/kokoro.py +19 -36
  13. xinference/model/audio/model_spec.json +1 -1
  14. xinference/model/image/stable_diffusion/core.py +15 -6
  15. xinference/model/llm/llm_family.json +521 -6
  16. xinference/model/llm/llm_family.py +3 -1
  17. xinference/model/llm/llm_family_modelscope.json +559 -6
  18. xinference/model/llm/reasoning_parsers/__init__.py +13 -0
  19. xinference/model/llm/reasoning_parsers/abs_reasoning_parsers.py +98 -0
  20. xinference/model/llm/reasoning_parsers/deepseek_r1_reasoning_parser.py +140 -0
  21. xinference/model/llm/sglang/core.py +99 -11
  22. xinference/model/llm/transformers/intern_vl.py +23 -14
  23. xinference/model/llm/utils.py +55 -18
  24. xinference/model/llm/vllm/core.py +23 -2
  25. xinference/model/llm/vllm/xavier/executor.py +2 -2
  26. xinference/model/llm/vllm/xavier/scheduler.py +3 -3
  27. xinference/thirdparty/internvl/conversation.py +26 -17
  28. xinference/types.py +2 -0
  29. xinference/web/ui/build/asset-manifest.json +6 -6
  30. xinference/web/ui/build/index.html +1 -1
  31. xinference/web/ui/build/static/css/main.f8177338.css +2 -0
  32. xinference/web/ui/build/static/css/main.f8177338.css.map +1 -0
  33. xinference/web/ui/build/static/js/main.ad42919c.js +3 -0
  34. xinference/web/ui/build/static/js/main.ad42919c.js.map +1 -0
  35. xinference/web/ui/node_modules/.cache/babel-loader/074a42304bbbaa79e1bfc3b28502457a390df55708de9006f4cc8e35c60aea87.json +1 -0
  36. xinference/web/ui/node_modules/.cache/babel-loader/0acb065326560592b10888234242f94f67efe28458b90f273d4d4fba9daa0cd2.json +1 -0
  37. xinference/web/ui/node_modules/.cache/babel-loader/279ace390216236a82b3d8995c78eca4d637ac9a523e9f521a2d9c76607a43d7.json +1 -0
  38. xinference/web/ui/node_modules/.cache/babel-loader/630a7bd592596cc6e291fc32238ce7c08238038a64ed8ccee0eb0c13c9902910.json +1 -0
  39. xinference/web/ui/node_modules/.cache/babel-loader/6cb9f6c62ab4042f0b11c5d75e51187188e9d6f5f08b1d63e796e051bafdb457.json +1 -0
  40. xinference/web/ui/node_modules/.cache/babel-loader/8f9af2979e45d4648f0cfae108363e58ee421c29a9d4e7329b6f06d9adfd4133.json +1 -0
  41. xinference/web/ui/node_modules/.cache/babel-loader/914c33e91c1012e3bcd3e96f3a25884cbef148290632d0266dab972b8cc1e95f.json +1 -0
  42. xinference/web/ui/node_modules/.cache/babel-loader/9c8b1a86e7c65b2b2599a205e30920652d6c2105f926508ef5bcf29a3ef4ce76.json +1 -0
  43. xinference/web/ui/node_modules/.cache/babel-loader/b7939cd3a48adf12fccfdd0803019b5cc235ff7de3a297dae70ce635e0eea13e.json +1 -0
  44. xinference/web/ui/node_modules/.cache/babel-loader/efe7cd132c27a8f9fd5352a394c491fd5fb0da0348cf9fcbd923164a32365eab.json +1 -0
  45. xinference/web/ui/node_modules/.cache/babel-loader/f04f666b77b44d7be3e16034d6b0074de2ba9c254f1fae15222b3148608fa8b3.json +1 -0
  46. xinference/web/ui/node_modules/.cache/babel-loader/fecf076bcd198a458c2a6ab0e85e40dc1c99994c353164e79c469be162cb74c9.json +1 -0
  47. xinference/web/ui/src/locales/en.json +14 -1
  48. xinference/web/ui/src/locales/zh.json +14 -1
  49. {xinference-1.2.2.dist-info → xinference-1.3.0.post1.dist-info}/METADATA +11 -11
  50. {xinference-1.2.2.dist-info → xinference-1.3.0.post1.dist-info}/RECORD +55 -49
  51. xinference/web/ui/build/static/css/main.51a587ff.css +0 -2
  52. xinference/web/ui/build/static/css/main.51a587ff.css.map +0 -1
  53. xinference/web/ui/build/static/js/main.b0936c54.js +0 -3
  54. xinference/web/ui/build/static/js/main.b0936c54.js.map +0 -1
  55. xinference/web/ui/node_modules/.cache/babel-loader/0c2fb5375667931c4a331c99e0d87dc145e8f327cea3f44d6e56f54c7c1d4020.json +0 -1
  56. xinference/web/ui/node_modules/.cache/babel-loader/185ceb8872d562e032b47e79df6a45670e06345b8ed70aad1a131e0476783c5c.json +0 -1
  57. xinference/web/ui/node_modules/.cache/babel-loader/3eefb411b24c2b3ce053570ef50daccf154022f0e168be5ed0fec21394baf9f4.json +0 -1
  58. xinference/web/ui/node_modules/.cache/babel-loader/63c8e07687ea53a4f8a910ee5e42e0eb26cd1acbfbe820f3e3248a786ee51401.json +0 -1
  59. xinference/web/ui/node_modules/.cache/babel-loader/a3ff866acddf34917a7ee399e0e571a4dfd8ba66d5057db885f243e16a6eb17d.json +0 -1
  60. xinference/web/ui/node_modules/.cache/babel-loader/a7f1a71f6580dfe810c685a9c1d68e318f71e1fa258fbe50b87a6ac37cc0a598.json +0 -1
  61. xinference/web/ui/node_modules/.cache/babel-loader/bdee44abeadc4abc17d41c52eb49c6e19a4b1a267b6e16876ce91bdeeebfc52d.json +0 -1
  62. xinference/web/ui/node_modules/.cache/babel-loader/d7664d18c4ddbad9c3a6a31b91f7c00fb0dde804608674a9860ee50f33e54708.json +0 -1
  63. xinference/web/ui/node_modules/.cache/babel-loader/ed57202cb79649bb716400436590245547df241988fc7c8e1d85d132299542d2.json +0 -1
  64. /xinference/web/ui/build/static/js/{main.b0936c54.js.LICENSE.txt → main.ad42919c.js.LICENSE.txt} +0 -0
  65. {xinference-1.2.2.dist-info → xinference-1.3.0.post1.dist-info}/LICENSE +0 -0
  66. {xinference-1.2.2.dist-info → xinference-1.3.0.post1.dist-info}/WHEEL +0 -0
  67. {xinference-1.2.2.dist-info → xinference-1.3.0.post1.dist-info}/entry_points.txt +0 -0
  68. {xinference-1.2.2.dist-info → xinference-1.3.0.post1.dist-info}/top_level.txt +0 -0
@@ -4497,6 +4497,179 @@
4497
4497
  "stop_token_ids": [],
4498
4498
  "stop": []
4499
4499
  },
4500
+ {
4501
+ "version": 1,
4502
+ "context_length": 16384,
4503
+ "model_name": "InternVL2.5",
4504
+ "model_lang": [
4505
+ "en",
4506
+ "zh"
4507
+ ],
4508
+ "model_ability": [
4509
+ "chat",
4510
+ "vision"
4511
+ ],
4512
+ "model_description": "InternVL 2.5 is an open-source multimodal large language model (MLLM) to bridge the capability gap between open-source and proprietary commercial models in multimodal understanding. ",
4513
+ "model_specs": [
4514
+ {
4515
+ "model_format": "pytorch",
4516
+ "model_size_in_billions": 1,
4517
+ "quantizations": [
4518
+ "4-bit",
4519
+ "8-bit",
4520
+ "none"
4521
+ ],
4522
+ "model_hub": "modelscope",
4523
+ "model_id": "OpenGVLab/InternVL2_5-1B",
4524
+ "model_revision": "master"
4525
+ },
4526
+ {
4527
+ "model_format": "awq",
4528
+ "model_size_in_billions": 1,
4529
+ "quantizations": [
4530
+ "Int4"
4531
+ ],
4532
+ "model_hub": "modelscope",
4533
+ "model_id": "OpenGVLab/InternVL2_5-1B-AWQ",
4534
+ "model_revision": "master"
4535
+ },
4536
+ {
4537
+ "model_format": "pytorch",
4538
+ "model_size_in_billions": 2,
4539
+ "quantizations": [
4540
+ "4-bit",
4541
+ "8-bit",
4542
+ "none"
4543
+ ],
4544
+ "model_hub": "modelscope",
4545
+ "model_id": "OpenGVLab/InternVL2_5-2B",
4546
+ "model_revision": "master"
4547
+ },
4548
+ {
4549
+ "model_format": "awq",
4550
+ "model_size_in_billions": 2,
4551
+ "quantizations": [
4552
+ "Int4"
4553
+ ],
4554
+ "model_hub": "modelscope",
4555
+ "model_id": "OpenGVLab/InternVL2_5-2B-AWQ",
4556
+ "model_revision": "master"
4557
+ },
4558
+ {
4559
+ "model_format": "pytorch",
4560
+ "model_size_in_billions": 4,
4561
+ "quantizations": [
4562
+ "4-bit",
4563
+ "8-bit",
4564
+ "none"
4565
+ ],
4566
+ "model_hub": "modelscope",
4567
+ "model_id": "OpenGVLab/InternVL2_5-4B",
4568
+ "model_revision": "master"
4569
+ },
4570
+ {
4571
+ "model_format": "awq",
4572
+ "model_size_in_billions": 4,
4573
+ "quantizations": [
4574
+ "Int4"
4575
+ ],
4576
+ "model_hub": "modelscope",
4577
+ "model_id": "OpenGVLab/InternVL2_5-4B-AWQ",
4578
+ "model_revision": "master"
4579
+ },
4580
+ {
4581
+ "model_format": "pytorch",
4582
+ "model_size_in_billions": 8,
4583
+ "quantizations": [
4584
+ "4-bit",
4585
+ "8-bit",
4586
+ "none"
4587
+ ],
4588
+ "model_hub": "modelscope",
4589
+ "model_id": "OpenGVLab/InternVL2_5-8B",
4590
+ "model_revision": "master"
4591
+ },
4592
+ {
4593
+ "model_format": "awq",
4594
+ "model_size_in_billions": 8,
4595
+ "quantizations": [
4596
+ "Int4"
4597
+ ],
4598
+ "model_hub": "modelscope",
4599
+ "model_id": "OpenGVLab/InternVL2_5-8B-AWQ",
4600
+ "model_revision": "master"
4601
+ },
4602
+ {
4603
+ "model_format": "pytorch",
4604
+ "model_size_in_billions": 26,
4605
+ "quantizations": [
4606
+ "4-bit",
4607
+ "8-bit",
4608
+ "none"
4609
+ ],
4610
+ "model_hub": "modelscope",
4611
+ "model_id": "OpenGVLab/InternVL2_5-26B",
4612
+ "model_revision": "master"
4613
+ },
4614
+ {
4615
+ "model_format": "awq",
4616
+ "model_size_in_billions": 26,
4617
+ "quantizations": [
4618
+ "Int4"
4619
+ ],
4620
+ "model_hub": "modelscope",
4621
+ "model_id": "OpenGVLab/InternVL2_5-26B-AWQ",
4622
+ "model_revision": "master"
4623
+ },
4624
+ {
4625
+ "model_format": "pytorch",
4626
+ "model_size_in_billions": 38,
4627
+ "quantizations": [
4628
+ "4-bit",
4629
+ "8-bit",
4630
+ "none"
4631
+ ],
4632
+ "model_hub": "modelscope",
4633
+ "model_id": "OpenGVLab/InternVL2_5-38B",
4634
+ "model_revision": "master"
4635
+ },
4636
+ {
4637
+ "model_format": "awq",
4638
+ "model_size_in_billions": 38,
4639
+ "quantizations": [
4640
+ "Int4"
4641
+ ],
4642
+ "model_hub": "modelscope",
4643
+ "model_id": "OpenGVLab/InternVL2_5-38B-AWQ",
4644
+ "model_revision": "master"
4645
+ },
4646
+ {
4647
+ "model_format": "pytorch",
4648
+ "model_size_in_billions": 78,
4649
+ "quantizations": [
4650
+ "4-bit",
4651
+ "8-bit",
4652
+ "none"
4653
+ ],
4654
+ "model_hub": "modelscope",
4655
+ "model_id": "OpenGVLab/InternVL2_5-78B",
4656
+ "model_revision": "master"
4657
+ },
4658
+ {
4659
+ "model_format": "awq",
4660
+ "model_size_in_billions": 78,
4661
+ "quantizations": [
4662
+ "Int4"
4663
+ ],
4664
+ "model_hub": "modelscope",
4665
+ "model_id": "OpenGVLab/InternVL2_5-78B-AWQ",
4666
+ "model_revision": "master"
4667
+ }
4668
+ ],
4669
+ "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
4670
+ "stop_token_ids": [],
4671
+ "stop": []
4672
+ },
4500
4673
  {
4501
4674
  "version": 1,
4502
4675
  "context_length": 8192,
@@ -5181,6 +5354,380 @@
5181
5354
  "<|end▁of▁sentence|>"
5182
5355
  ]
5183
5356
  },
5357
+ {
5358
+ "version": 1,
5359
+ "context_length": 163840,
5360
+ "model_name": "deepseek-v3",
5361
+ "model_lang": [
5362
+ "en",
5363
+ "zh"
5364
+ ],
5365
+ "model_ability": [
5366
+ "chat",
5367
+ "reasoning"
5368
+ ],
5369
+ "model_description": "DeepSeek-V3, a strong Mixture-of-Experts (MoE) language model with 671B total parameters with 37B activated for each token. ",
5370
+ "model_specs": [
5371
+ {
5372
+ "model_format": "pytorch",
5373
+ "model_size_in_billions": 671,
5374
+ "quantizations": [
5375
+ "4-bit",
5376
+ "8-bit",
5377
+ "none"
5378
+ ],
5379
+ "model_id": "deepseek-ai/DeepSeek-V3",
5380
+ "model_hub": "modelscope",
5381
+ "model_revision": "master"
5382
+ },
5383
+ {
5384
+ "model_format": "awq",
5385
+ "model_size_in_billions": 671,
5386
+ "quantizations": [
5387
+ "Int4"
5388
+ ],
5389
+ "model_id": "cognitivecomputations/DeepSeek-V3-awq",
5390
+ "model_hub": "modelscope",
5391
+ "model_revision": "master"
5392
+ },
5393
+ {
5394
+ "model_format": "ggufv2",
5395
+ "model_size_in_billions": 671,
5396
+ "quantizations": [
5397
+ "Q2_K_L",
5398
+ "Q2_K_XS",
5399
+ "Q3_K_M",
5400
+ "Q4_K_M",
5401
+ "Q5_K_M",
5402
+ "Q6_K",
5403
+ "Q8_0"
5404
+ ],
5405
+ "model_id": "unsloth/DeepSeek-V3-GGUF",
5406
+ "model_hub": "modelscope",
5407
+ "model_file_name_template": "DeepSeek-V3-{quantization}/DeepSeek-V3-{quantization}.gguf",
5408
+ "model_file_name_split_template": "DeepSeek-V3-{quantization}/DeepSeek-V3-{quantization}-{part}.gguf",
5409
+ "quantization_parts": {
5410
+ "Q2_K_L": [
5411
+ "00001-of-00005",
5412
+ "00002-of-00005",
5413
+ "00003-of-00005",
5414
+ "00004-of-00005",
5415
+ "00005-of-00005"
5416
+ ],
5417
+ "Q2_K_XS": [
5418
+ "00001-of-00005",
5419
+ "00002-of-00005",
5420
+ "00003-of-00005",
5421
+ "00004-of-00005",
5422
+ "00005-of-00005"
5423
+ ],
5424
+ "Q3_K_M": [
5425
+ "00001-of-00007",
5426
+ "00002-of-00007",
5427
+ "00003-of-00007",
5428
+ "00004-of-00007",
5429
+ "00005-of-00007",
5430
+ "00006-of-00007",
5431
+ "00007-of-00007"
5432
+ ],
5433
+ "Q4_K_M": [
5434
+ "00001-of-00009",
5435
+ "00002-of-00009",
5436
+ "00003-of-00009",
5437
+ "00004-of-00009",
5438
+ "00005-of-00009",
5439
+ "00006-of-00009",
5440
+ "00007-of-00009",
5441
+ "00008-of-00009",
5442
+ "00009-of-00009"
5443
+ ],
5444
+ "Q5_K_M": [
5445
+ "00001-of-00010",
5446
+ "00002-of-00010",
5447
+ "00003-of-00010",
5448
+ "00004-of-00010",
5449
+ "00005-of-00010",
5450
+ "00006-of-00010",
5451
+ "00007-of-00010",
5452
+ "00008-of-00010",
5453
+ "00009-of-00010",
5454
+ "00010-of-00010"
5455
+ ],
5456
+ "Q6_K": [
5457
+ "00001-of-00012",
5458
+ "00002-of-00012",
5459
+ "00003-of-00012",
5460
+ "00004-of-00012",
5461
+ "00005-of-00012",
5462
+ "00006-of-00012",
5463
+ "00007-of-00012",
5464
+ "00008-of-00012",
5465
+ "00009-of-00012",
5466
+ "00010-of-00012",
5467
+ "00011-of-00012",
5468
+ "00012-of-00012"
5469
+ ],
5470
+ "Q8_0": [
5471
+ "00001-of-00016",
5472
+ "00002-of-00016",
5473
+ "00003-of-00016",
5474
+ "00004-of-00016",
5475
+ "00005-of-00016",
5476
+ "00006-of-00016",
5477
+ "00007-of-00016",
5478
+ "00008-of-00016",
5479
+ "00009-of-00016",
5480
+ "00010-of-00016",
5481
+ "00011-of-00016",
5482
+ "00012-of-00016",
5483
+ "00013-of-00016",
5484
+ "00014-of-00016",
5485
+ "00015-of-00016",
5486
+ "00016-of-00016"
5487
+ ]
5488
+ }
5489
+ },
5490
+ {
5491
+ "model_format": "mlx",
5492
+ "model_size_in_billions": 671,
5493
+ "quantizations": [
5494
+ "3bit",
5495
+ "4bit"
5496
+ ],
5497
+ "model_id": "mlx-community/DeepSeek-V3-{quantization}",
5498
+ "model_hub": "modelscope"
5499
+ }
5500
+ ],
5501
+ "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, is_output_first=true, system_prompt='') %}{%- for message in messages %}{%- if message['role'] == 'system' %}{% set ns.system_prompt = message['content'] %}{%- endif %}{%- endfor %}{{bos_token}}{{ns.system_prompt}}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{{'<|User|>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is none %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls']%}{%- if not ns.is_first %}{{'<|Assistant|><|tool▁calls▁begin|><|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{%- set ns.is_first = true -%}{%- else %}{{'\\n' + '<|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{{'<|tool▁calls▁end|><|end▁of▁sentence|>'}}{%- endif %}{%- endfor %}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is not none %}{%- if ns.is_tool %}{{'<|tool▁outputs▁end|>' + message['content'] + '<|end▁of▁sentence|>'}}{%- set ns.is_tool = false -%}{%- else %}{% set content = message['content'] %}{% if '</think>' in content %}{% set content = content.split('</think>')[-1] %}{% endif %}{{'<|Assistant|>' + content + '<|end▁of▁sentence|>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_tool = true -%}{%- if ns.is_output_first %}{{'<|tool▁outputs▁begin|><|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- set ns.is_output_first = false %}{%- else %}{{'\\n<|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- endif %}{%- endif %}{%- endfor -%}{% if ns.is_tool 
%}{{'<|tool▁outputs▁end|>'}}{% endif %}{% if add_generation_prompt and not ns.is_tool %}{{'<|Assistant|><think>\\n'}}{% endif %}",
5502
+ "stop_token_ids": [
5503
+ 1
5504
+ ],
5505
+ "stop": [
5506
+ "<|end▁of▁sentence|>"
5507
+ ],
5508
+ "reasoning_start_tag": "<think>",
5509
+ "reasoning_end_tag": "</think>"
5510
+ },
5511
+ {
5512
+ "version": 1,
5513
+ "context_length": 163840,
5514
+ "model_name": "deepseek-r1",
5515
+ "model_lang": [
5516
+ "en",
5517
+ "zh"
5518
+ ],
5519
+ "model_ability": [
5520
+ "chat"
5521
+ ],
5522
+ "model_description": "DeepSeek-R1, which incorporates cold-start data before RL. DeepSeek-R1 achieves performance comparable to OpenAI-o1 across math, code, and reasoning tasks.",
5523
+ "model_specs": [
5524
+ {
5525
+ "model_format": "pytorch",
5526
+ "model_size_in_billions": 671,
5527
+ "quantizations": [
5528
+ "4-bit",
5529
+ "8-bit",
5530
+ "none"
5531
+ ],
5532
+ "model_id": "deepseek-ai/DeepSeek-R1",
5533
+ "model_hub": "modelscope",
5534
+ "model_revision": "master"
5535
+ },
5536
+ {
5537
+ "model_format": "awq",
5538
+ "model_size_in_billions": 671,
5539
+ "quantizations": [
5540
+ "Int4"
5541
+ ],
5542
+ "model_id": "cognitivecomputations/DeepSeek-R1-awq",
5543
+ "model_hub": "modelscope",
5544
+ "model_revision": "master"
5545
+ },
5546
+ {
5547
+ "model_format": "ggufv2",
5548
+ "model_size_in_billions": 671,
5549
+ "quantizations": [
5550
+ "UD-IQ1_S",
5551
+ "UD-IQ1_M",
5552
+ "UD-IQ2_XXS",
5553
+ "UD-Q2_K_XL",
5554
+ "Q2_K",
5555
+ "Q2_K_L",
5556
+ "Q2_K_XS",
5557
+ "Q3_K_M",
5558
+ "Q4_K_M",
5559
+ "Q5_K_M",
5560
+ "Q6_K",
5561
+ "Q8_0",
5562
+ "BF16"
5563
+ ],
5564
+ "model_id": "unsloth/DeepSeek-R1-GGUF",
5565
+ "model_hub": "modelscope",
5566
+ "model_file_name_template": "DeepSeek-R1-{quantization}/DeepSeek-R1-{quantization}.gguf",
5567
+ "model_file_name_split_template": "DeepSeek-R1-{quantization}/DeepSeek-R1-{quantization}-{part}.gguf",
5568
+ "quantization_parts": {
5569
+ "UD-IQ1_S": [
5570
+ "00001-of-00003",
5571
+ "00002-of-00003",
5572
+ "00003-of-00003"
5573
+ ],
5574
+ "UD-IQ1_M": [
5575
+ "00001-of-00004",
5576
+ "00002-of-00004",
5577
+ "00003-of-00004",
5578
+ "00004-of-00004"
5579
+ ],
5580
+ "UD-IQ2_XXS": [
5581
+ "00001-of-00004",
5582
+ "00002-of-00004",
5583
+ "00003-of-00004",
5584
+ "00004-of-00004"
5585
+ ],
5586
+ "UD-Q2_K_XL": [
5587
+ "00001-of-00005",
5588
+ "00002-of-00005",
5589
+ "00003-of-00005",
5590
+ "00004-of-00005",
5591
+ "00005-of-00005"
5592
+ ],
5593
+ "Q2_K": [
5594
+ "00001-of-00005",
5595
+ "00002-of-00005",
5596
+ "00003-of-00005",
5597
+ "00004-of-00005",
5598
+ "00005-of-00005"
5599
+ ],
5600
+ "Q2_K_L": [
5601
+ "00001-of-00005",
5602
+ "00002-of-00005",
5603
+ "00003-of-00005",
5604
+ "00004-of-00005",
5605
+ "00005-of-00005"
5606
+ ],
5607
+ "Q2_K_XS": [
5608
+ "00001-of-00005",
5609
+ "00002-of-00005",
5610
+ "00003-of-00005",
5611
+ "00004-of-00005",
5612
+ "00005-of-00005"
5613
+ ],
5614
+ "Q3_K_M": [
5615
+ "00001-of-00007",
5616
+ "00002-of-00007",
5617
+ "00003-of-00007",
5618
+ "00004-of-00007",
5619
+ "00005-of-00007",
5620
+ "00006-of-00007",
5621
+ "00007-of-00007"
5622
+ ],
5623
+ "Q4_K_M": [
5624
+ "00001-of-00009",
5625
+ "00002-of-00009",
5626
+ "00003-of-00009",
5627
+ "00004-of-00009",
5628
+ "00005-of-00009",
5629
+ "00006-of-00009",
5630
+ "00007-of-00009",
5631
+ "00008-of-00009",
5632
+ "00009-of-00009"
5633
+ ],
5634
+ "Q5_K_M": [
5635
+ "00001-of-00010",
5636
+ "00002-of-00010",
5637
+ "00003-of-00010",
5638
+ "00004-of-00010",
5639
+ "00005-of-00010",
5640
+ "00006-of-00010",
5641
+ "00007-of-00010",
5642
+ "00008-of-00010",
5643
+ "00009-of-00010",
5644
+ "00010-of-00010"
5645
+ ],
5646
+ "Q6_K": [
5647
+ "00001-of-00012",
5648
+ "00002-of-00012",
5649
+ "00003-of-00012",
5650
+ "00004-of-00012",
5651
+ "00005-of-00012",
5652
+ "00006-of-00012",
5653
+ "00007-of-00012",
5654
+ "00008-of-00012",
5655
+ "00009-of-00012",
5656
+ "00010-of-00012",
5657
+ "00011-of-00012",
5658
+ "00012-of-00012"
5659
+ ],
5660
+ "Q8_0": [
5661
+ "00001-of-00015",
5662
+ "00002-of-00015",
5663
+ "00003-of-00015",
5664
+ "00004-of-00015",
5665
+ "00005-of-00015",
5666
+ "00006-of-00015",
5667
+ "00007-of-00015",
5668
+ "00008-of-00015",
5669
+ "00009-of-00015",
5670
+ "00010-of-00015",
5671
+ "00011-of-00015",
5672
+ "00012-of-00015",
5673
+ "00013-of-00015",
5674
+ "00014-of-00015",
5675
+ "00015-of-00015"
5676
+ ],
5677
+ "BF16": [
5678
+ "00001-of-00030",
5679
+ "00002-of-00030",
5680
+ "00003-of-00030",
5681
+ "00004-of-00030",
5682
+ "00005-of-00030",
5683
+ "00006-of-00030",
5684
+ "00007-of-00030",
5685
+ "00008-of-00030",
5686
+ "00009-of-00030",
5687
+ "00010-of-00030",
5688
+ "00011-of-00030",
5689
+ "00012-of-00030",
5690
+ "00013-of-00030",
5691
+ "00014-of-00030",
5692
+ "00015-of-00030",
5693
+ "00016-of-00030",
5694
+ "00017-of-00030",
5695
+ "00018-of-00030",
5696
+ "00019-of-00030",
5697
+ "00020-of-00030",
5698
+ "00021-of-00030",
5699
+ "00022-of-00030",
5700
+ "00023-of-00030",
5701
+ "00024-of-00030",
5702
+ "00025-of-00030",
5703
+ "00026-of-00030",
5704
+ "00027-of-00030",
5705
+ "00028-of-00030",
5706
+ "00029-of-00030",
5707
+ "00030-of-00030"
5708
+ ]
5709
+ }
5710
+ },
5711
+ {
5712
+ "model_format": "mlx",
5713
+ "model_size_in_billions": 671,
5714
+ "quantizations": [
5715
+ "2bit",
5716
+ "3bit",
5717
+ "4bit"
5718
+ ],
5719
+ "model_id": "mlx-community/DeepSeek-R1-{quantization}",
5720
+ "model_hub": "modelscope"
5721
+ }
5722
+ ],
5723
+ "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, is_output_first=true, system_prompt='', is_first_sp=true) %}{%- for message in messages %}{%- if message['role'] == 'system' %}{%- if ns.is_first_sp %}{% set ns.system_prompt = ns.system_prompt + message['content'] %}{% set ns.is_first_sp = false %}{%- else %}{% set ns.system_prompt = ns.system_prompt + '\\n\\n' + message['content'] %}{%- endif %}{%- endif %}{%- endfor %}{{bos_token}}{{ns.system_prompt}}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{{'<|User|>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is none %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls']%}{%- if not ns.is_first %}{{'<|Assistant|><|tool▁calls▁begin|><|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{%- set ns.is_first = true -%}{%- else %}{{'\\n' + '<|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{{'<|tool▁calls▁end|><|end▁of▁sentence|>'}}{%- endif %}{%- endfor %}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is not none %}{%- if ns.is_tool %}{{'<|tool▁outputs▁end|>' + message['content'] + '<|end▁of▁sentence|>'}}{%- set ns.is_tool = false -%}{%- else %}{{'<|Assistant|>' + message['content'] + '<|end▁of▁sentence|>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_tool = true -%}{%- if ns.is_output_first %}{{'<|tool▁outputs▁begin|><|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- set ns.is_output_first = false %}{%- else %}{{'\\n<|tool▁output▁begin|>' + message['content'] + 
'<|tool▁output▁end|>'}}{%- endif %}{%- endif %}{%- endfor -%}{% if ns.is_tool %}{{'<|tool▁outputs▁end|>'}}{% endif %}{% if add_generation_prompt and not ns.is_tool %}{{'<|Assistant|>'}}{% endif %}",
5724
+ "stop_token_ids": [
5725
+ 1
5726
+ ],
5727
+ "stop": [
5728
+ "<|end▁of▁sentence|>"
5729
+ ]
5730
+ },
5184
5731
  {
5185
5732
  "version": 1,
5186
5733
  "context_length": 131072,
@@ -6534,7 +7081,8 @@
6534
7081
  "zh"
6535
7082
  ],
6536
7083
  "model_ability": [
6537
- "chat"
7084
+ "chat",
7085
+ "reasoning"
6538
7086
  ],
6539
7087
  "model_description": "deepseek-r1-distill-qwen is distilled from DeepSeek-R1 based on Qwen",
6540
7088
  "model_specs": [
@@ -6718,13 +7266,15 @@
6718
7266
  "model_hub": "modelscope"
6719
7267
  }
6720
7268
  ],
6721
- "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, is_output_first=true, system_prompt='') %}{%- for message in messages %}{%- if message['role'] == 'system' %}{% set ns.system_prompt = message['content'] %}{%- endif %}{%- endfor %}{{bos_token}}{{ns.system_prompt}}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{{'<|User|>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is none %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls']%}{%- if not ns.is_first %}{{'<|Assistant|><|tool▁calls▁begin|><|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{%- set ns.is_first = true -%}{%- else %}{{'\\n' + '<|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{{'<|tool▁calls▁end|><|end▁of▁sentence|>'}}{%- endif %}{%- endfor %}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is not none %}{%- if ns.is_tool %}{{'<|tool▁outputs▁end|>' + message['content'] + '<|end▁of▁sentence|>'}}{%- set ns.is_tool = false -%}{%- else %}{% set content = message['content'] %}{% if '</think>' in content %}{% set content = content.split('</think>')[-1] %}{% endif %}{{'<|Assistant|>' + content + '<|end▁of▁sentence|>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_tool = true -%}{%- if ns.is_output_first %}{{'<|tool▁outputs▁begin|><|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- set ns.is_output_first = false %}{%- else %}{{'\\n<|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- endif %}{%- endif %}{%- endfor -%}{% if ns.is_tool 
%}{{'<|tool▁outputs▁end|>'}}{% endif %}{% if add_generation_prompt and not ns.is_tool %}{{'<|Assistant|>'}}{% endif %}",
7269
+ "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, is_output_first=true, system_prompt='', is_first_sp=true) %}{%- for message in messages %}{%- if message['role'] == 'system' %}{%- if ns.is_first_sp %}{% set ns.system_prompt = ns.system_prompt + message['content'] %}{% set ns.is_first_sp = false %}{%- else %}{% set ns.system_prompt = ns.system_prompt + '\\n\\n' + message['content'] %}{%- endif %}{%- endif %}{%- endfor %}{{ bos_token }}{{ ns.system_prompt }}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{{'<|User|>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and 'tool_calls' in message %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls'] %}{%- if not ns.is_first %}{%- if message['content'] is none %}{{'<|Assistant|><|tool▁calls▁begin|><|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{%- else %}{{'<|Assistant|>' + message['content'] + '<|tool▁calls▁begin|><|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{%- endif %}{%- set ns.is_first = true -%}{%- else %}{{'\\n' + '<|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{%- endif %}{%- endfor %}{{'<|tool▁calls▁end|><|end▁of▁sentence|>'}}{%- endif %}{%- if message['role'] == 'assistant' and 'tool_calls' not in message %}{%- if ns.is_tool %}{{'<|tool▁outputs▁end|>' + message['content'] + '<|end▁of▁sentence|>'}}{%- set ns.is_tool = false -%}{%- else %}{% set content = message['content'] %}{% if '</think>' in content %}{% set content = 
content.split('</think>')[-1] %}{% endif %}{{'<|Assistant|>' + content + '<|end▁of▁sentence|>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_tool = true -%}{%- if ns.is_output_first %}{{'<|tool▁outputs▁begin|><|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- set ns.is_output_first = false %}{%- else %}{{'<|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- endif %}{%- endif %}{%- endfor -%}{% if ns.is_tool %}{{'<|tool▁outputs▁end|>'}}{% endif %}{% if add_generation_prompt and not ns.is_tool %}{{'<|Assistant|><think>\\n'}}{% endif %}",
6722
7270
  "stop_token_ids": [
6723
7271
  151643
6724
7272
  ],
6725
7273
  "stop": [
6726
7274
  "<|end▁of▁sentence|>"
6727
- ]
7275
+ ],
7276
+ "reasoning_start_tag": "<think>",
7277
+ "reasoning_end_tag": "</think>"
6728
7278
  },
6729
7279
  {
6730
7280
  "version": 1,
@@ -6735,7 +7285,8 @@
6735
7285
  "zh"
6736
7286
  ],
6737
7287
  "model_ability": [
6738
- "chat"
7288
+ "chat",
7289
+ "reasoning"
6739
7290
  ],
6740
7291
  "model_description": "deepseek-r1-distill-llama is distilled from DeepSeek-R1 based on Llama",
6741
7292
  "model_specs": [
@@ -6837,13 +7388,15 @@
6837
7388
  "model_hub": "modelscope"
6838
7389
  }
6839
7390
  ],
6840
- "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, is_output_first=true, system_prompt='') %}{%- for message in messages %}{%- if message['role'] == 'system' %}{% set ns.system_prompt = message['content'] %}{%- endif %}{%- endfor %}{{bos_token}}{{ns.system_prompt}}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{{'<|User|>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is none %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls']%}{%- if not ns.is_first %}{{'<|Assistant|><|tool▁calls▁begin|><|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{%- set ns.is_first = true -%}{%- else %}{{'\\n' + '<|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{{'<|tool▁calls▁end|><|end▁of▁sentence|>'}}{%- endif %}{%- endfor %}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is not none %}{%- if ns.is_tool %}{{'<|tool▁outputs▁end|>' + message['content'] + '<|end▁of▁sentence|>'}}{%- set ns.is_tool = false -%}{%- else %}{% set content = message['content'] %}{% if '</think>' in content %}{% set content = content.split('</think>')[-1] %}{% endif %}{{'<|Assistant|>' + content + '<|end▁of▁sentence|>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_tool = true -%}{%- if ns.is_output_first %}{{'<|tool▁outputs▁begin|><|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- set ns.is_output_first = false %}{%- else %}{{'\\n<|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- endif %}{%- endif %}{%- endfor -%}{% if ns.is_tool 
%}{{'<|tool▁outputs▁end|>'}}{% endif %}{% if add_generation_prompt and not ns.is_tool %}{{'<|Assistant|>'}}{% endif %}",
7391
+ "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, is_output_first=true, system_prompt='') %}{%- for message in messages %}{%- if message['role'] == 'system' %}{% set ns.system_prompt = message['content'] %}{%- endif %}{%- endfor %}{{bos_token}}{{ns.system_prompt}}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{{'<|User|>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is none %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls']%}{%- if not ns.is_first %}{{'<|Assistant|><|tool▁calls▁begin|><|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{%- set ns.is_first = true -%}{%- else %}{{'\\n' + '<|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{{'<|tool▁calls▁end|><|end▁of▁sentence|>'}}{%- endif %}{%- endfor %}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is not none %}{%- if ns.is_tool %}{{'<|tool▁outputs▁end|>' + message['content'] + '<|end▁of▁sentence|>'}}{%- set ns.is_tool = false -%}{%- else %}{% set content = message['content'] %}{% if '</think>' in content %}{% set content = content.split('</think>')[-1] %}{% endif %}{{'<|Assistant|>' + content + '<|end▁of▁sentence|>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_tool = true -%}{%- if ns.is_output_first %}{{'<|tool▁outputs▁begin|><|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- set ns.is_output_first = false %}{%- else %}{{'\\n<|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- endif %}{%- endif %}{%- endfor -%}{% if ns.is_tool 
%}{{'<|tool▁outputs▁end|>'}}{% endif %}{% if add_generation_prompt and not ns.is_tool %}{{'<|Assistant|><think>\\n'}}{% endif %}",
6841
7392
  "stop_token_ids": [
6842
7393
  151643
6843
7394
  ],
6844
7395
  "stop": [
6845
7396
  "<|end▁of▁sentence|>"
6846
- ]
7397
+ ],
7398
+ "reasoning_start_tag": "<think>",
7399
+ "reasoning_end_tag": "</think>"
6847
7400
  },
6848
7401
  {
6849
7402
  "version": 1,
@@ -0,0 +1,13 @@
1
+ # Copyright 2022-2024 XProbe Inc.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.