xinference 1.3.1.post1__py3-none-any.whl → 1.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of xinference might be problematic.

Files changed (42)
  1. xinference/_version.py +3 -3
  2. xinference/core/chat_interface.py +1 -1
  3. xinference/model/llm/__init__.py +3 -0
  4. xinference/model/llm/llama_cpp/core.py +44 -14
  5. xinference/model/llm/llm_family.json +271 -12
  6. xinference/model/llm/llm_family_modelscope.json +248 -13
  7. xinference/model/llm/mlx/core.py +15 -11
  8. xinference/model/llm/reasoning_parser.py +14 -6
  9. xinference/model/llm/sglang/core.py +2 -0
  10. xinference/model/llm/transformers/core.py +3 -2
  11. xinference/model/llm/transformers/gemma3.py +185 -0
  12. xinference/model/llm/transformers/intern_vl.py +0 -2
  13. xinference/model/llm/utils.py +37 -29
  14. xinference/model/llm/vllm/core.py +8 -3
  15. xinference/types.py +2 -2
  16. xinference/web/ui/build/asset-manifest.json +6 -6
  17. xinference/web/ui/build/index.html +1 -1
  18. xinference/web/ui/build/static/css/main.b494ae7e.css +2 -0
  19. xinference/web/ui/build/static/css/main.b494ae7e.css.map +1 -0
  20. xinference/web/ui/build/static/js/main.3cea968e.js +3 -0
  21. xinference/web/ui/build/static/js/main.3cea968e.js.map +1 -0
  22. xinference/web/ui/node_modules/.cache/babel-loader/7f59e45e3f268ab8a4788b6fb024cf8dab088736dff22f5a3a39c122a83ab930.json +1 -0
  23. xinference/web/ui/node_modules/.cache/babel-loader/cc97b49285d7717c63374766c789141a4329a04582ab32756d7e0e614d4c5c7f.json +1 -0
  24. xinference/web/ui/node_modules/.cache/babel-loader/dcd60488509450bfff37bfff56de2c096d51de17dd00ec60d4db49c8b483ada1.json +1 -0
  25. xinference/web/ui/node_modules/.cache/babel-loader/f199e8173f6409a5802ed44acb95f218388131136504b2e9132129e150c92f9a.json +1 -0
  26. xinference/web/ui/src/locales/en.json +2 -2
  27. xinference/web/ui/src/locales/zh.json +1 -1
  28. {xinference-1.3.1.post1.dist-info → xinference-1.4.0.dist-info}/METADATA +1 -1
  29. {xinference-1.3.1.post1.dist-info → xinference-1.4.0.dist-info}/RECORD +34 -33
  30. xinference/web/ui/build/static/css/main.f8177338.css +0 -2
  31. xinference/web/ui/build/static/css/main.f8177338.css.map +0 -1
  32. xinference/web/ui/build/static/js/main.55b70cb7.js +0 -3
  33. xinference/web/ui/build/static/js/main.55b70cb7.js.map +0 -1
  34. xinference/web/ui/node_modules/.cache/babel-loader/2deac8d5636974533e3714f34e94fc754f9153a07c6ee11e72846cb8eae47e4b.json +0 -1
  35. xinference/web/ui/node_modules/.cache/babel-loader/db16a983bc08a05f0439cc61ca0840e49e1d8400eef678909f16c032a418a3d6.json +0 -1
  36. xinference/web/ui/node_modules/.cache/babel-loader/e23d476fcbf6fd69c8986bf82133d257d28aa8fc9a5cab231d81c1c75c58cd99.json +0 -1
  37. xinference/web/ui/node_modules/.cache/babel-loader/e7a8c37fda8725cab69c7ef8c627060bd7fc806adc67e00fe628ba148cb86d7f.json +0 -1
  38. /xinference/web/ui/build/static/js/{main.55b70cb7.js.LICENSE.txt → main.3cea968e.js.LICENSE.txt} +0 -0
  39. {xinference-1.3.1.post1.dist-info → xinference-1.4.0.dist-info}/LICENSE +0 -0
  40. {xinference-1.3.1.post1.dist-info → xinference-1.4.0.dist-info}/WHEEL +0 -0
  41. {xinference-1.3.1.post1.dist-info → xinference-1.4.0.dist-info}/entry_points.txt +0 -0
  42. {xinference-1.3.1.post1.dist-info → xinference-1.4.0.dist-info}/top_level.txt +0 -0
xinference/model/llm/transformers/gemma3.py ADDED
@@ -0,0 +1,185 @@
+ # Copyright 2022-2025 XProbe Inc.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+
+ import logging
+ import sys
+ import uuid
+ from typing import Iterator, List, Optional, Union
+
+ from ....model.utils import select_device
+ from ....types import (
+     ChatCompletion,
+     ChatCompletionChunk,
+     ChatCompletionMessage,
+     CompletionChunk,
+ )
+ from ..llm_family import LLMFamilyV1, LLMSpecV1
+ from ..utils import generate_chat_completion, generate_completion_chunk
+ from .core import PytorchChatModel, PytorchGenerateConfig
+ from .utils import cache_clean
+
+ logger = logging.getLogger(__name__)
+
+
+ class Gemma3TextChatModel(PytorchChatModel):
+     @classmethod
+     def match(
+         cls, model_family: "LLMFamilyV1", model_spec: "LLMSpecV1", quantization: str
+     ) -> bool:
+         if model_spec.model_format not in ["pytorch", "gptq", "awq"]:
+             return False
+         llm_family = model_family.model_family or model_family.model_name
+         if "gemma-3-1b-it".lower() in llm_family.lower():
+             return True
+         return False
+
+
+ class Gemma3ChatModel(PytorchChatModel):
+     def __init__(self, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+         self._tokenizer = None
+         self._model = None
+         self._device = None
+         self._processor = None
+
+     @classmethod
+     def match(
+         cls, model_family: "LLMFamilyV1", model_spec: "LLMSpecV1", quantization: str
+     ) -> bool:
+         if model_spec.model_format not in ["pytorch", "gptq", "awq"]:
+             return False
+         llm_family = model_family.model_family or model_family.model_name
+         if "gemma-3-it".lower() in llm_family.lower():
+             return True
+         return False
+
+     def load(self):
+         from transformers import AutoProcessor, Gemma3ForConditionalGeneration
+
+         device = self._pytorch_model_config.get("device", "auto")
+         device = select_device(device)
+         self._device = device
+         # for multiple GPU, set back to auto to make multiple devices work
+         device = "auto" if device == "cuda" else device
+
+         self._processor = AutoProcessor.from_pretrained(self.model_path)
+         self._tokenizer = self._processor.tokenizer
+         self._model = Gemma3ForConditionalGeneration.from_pretrained(
+             self.model_path,
+             device_map="auto",
+             torch_dtype="bfloat16",
+         )
+
+     @cache_clean
+     def chat(
+         self,
+         messages: List[ChatCompletionMessage],  # type: ignore
+         generate_config: Optional[PytorchGenerateConfig] = None,
+     ) -> Union[ChatCompletion, Iterator[ChatCompletionChunk]]:
+         messages = self._transform_messages(messages)
+
+         generate_config = generate_config if generate_config else {}
+
+         stream = generate_config.get("stream", False) if generate_config else False
+
+         if stream:
+             it = self._generate_stream(messages, generate_config)
+             return self._to_chat_completion_chunks(it)
+         else:
+             c = self._generate(messages, generate_config)
+             return c
+
+     def _generate(
+         self, messages: List, config: PytorchGenerateConfig = {}
+     ) -> ChatCompletion:
+         inputs = self._processor.apply_chat_template(
+             messages,
+             add_generation_prompt=True,
+             tokenize=True,
+             return_dict=True,
+             return_tensors="pt",
+         ).to(self._device)
+         input_len = inputs["input_ids"].shape[-1]
+
+         generation = self._model.generate(**inputs, do_sample=False)
+         generation = generation[0][input_len:]
+
+         decoded = self._processor.decode(generation, skip_special_tokens=True)
+         return generate_chat_completion(self.model_uid, decoded)
+
+     def _generate_stream(
+         self, messages: List, config: PytorchGenerateConfig = {}
+     ) -> Iterator[CompletionChunk]:
+         from threading import Thread
+
+         from transformers import TextIteratorStreamer
+
+         inputs = self._processor.apply_chat_template(
+             messages,
+             add_generation_prompt=True,
+             tokenize=True,
+             return_dict=True,
+             return_tensors="pt",
+         ).to(self._device)
+
+         tokenizer = self._tokenizer
+         streamer = TextIteratorStreamer(
+             tokenizer, timeout=60.0, skip_prompt=True, skip_special_tokens=True
+         )
+
+         gen_kwargs = {"streamer": streamer, **inputs}
+         error = None
+
+         def model_generate():
+             try:
+                 return self._model.generate(**gen_kwargs)
+             except Exception:
+                 nonlocal error
+                 error = sys.exc_info()
+                 streamer.end()
+                 raise
+
+         thread = Thread(target=model_generate)
+         thread.start()
+
+         completion_id = str(uuid.uuid1())
+         for new_text in streamer:
+             yield generate_completion_chunk(
+                 chunk_text=new_text,
+                 finish_reason=None,
+                 chunk_id=completion_id,
+                 model_uid=self.model_uid,
+                 prompt_tokens=-1,
+                 completion_tokens=-1,
+                 total_tokens=-1,
+                 has_choice=True,
+                 has_content=True,
+             )
+
+         if error:
+             _, err, tb = error  # type: ignore
+             raise err.with_traceback(tb)
+
+         yield generate_completion_chunk(
+             chunk_text=None,
+             finish_reason="stop",
+             chunk_id=completion_id,
+             model_uid=self.model_uid,
+             prompt_tokens=-1,
+             completion_tokens=-1,
+             total_tokens=-1,
+             has_choice=True,
+             has_content=False,
+         )
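The two classes above are picked up by the transformers backend's match() dispatch when a gemma-3 family model is launched in pytorch/gptq/awq format. As a rough illustration only, a client-side launch might look like the sketch below; it assumes a local server on the default endpoint, that the RESTful client API of recent 1.x releases applies unchanged, and the size value is a placeholder rather than something taken from this diff.

# Hypothetical sketch, not part of the diff: exercising the new gemma-3
# transformers backend through the RESTful client.
from xinference.client import Client

client = Client("http://127.0.0.1:9997")          # assumed default endpoint
model_uid = client.launch_model(
    model_name="gemma-3-it",                      # routed to Gemma3ChatModel by match()
    model_engine="transformers",
    model_format="pytorch",
    model_size_in_billions=4,                     # placeholder; see llm_family.json for real sizes
)
model = client.get_model(model_uid)
completion = model.chat(
    messages=[{"role": "user", "content": "Summarize Gemma 3 in one sentence."}]
)
print(completion["choices"][0]["message"]["content"])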
xinference/model/llm/transformers/intern_vl.py CHANGED
@@ -245,8 +245,6 @@ class InternVLChatModel(PytorchChatModel):
          family = model_family.model_family or model_family.model_name
          if "internvl" not in family.lower():
              return False
-         if "pytorch" not in model_spec.model_format:
-             return False
          return True
 
      def _get_model_class(self):
xinference/model/llm/utils.py CHANGED
@@ -79,8 +79,7 @@ LLAMA3_TOOL_CALL_FAMILY = [
  ]
 
  DEEPSEEK_TOOL_CALL_FAMILY = [
-     "deepseek-r1-distill-qwen",
-     "deepseek-r1-distill-llama",
+     "deepseek-v3",
  ]
 
  TOOL_CALL_FAMILY = (
@@ -296,12 +295,19 @@ class ChatModelMixin:
                      "finish_reason": choice["finish_reason"],
                  }
              )
+         assert choices is not None
+         usage = (
+             chunk["usage"]
+             if choices[0]["finish_reason"] is not None and reasoning_parser is not None
+             else None
+         )
          chat_chunk = {
              "id": "chat" + chunk["id"],
              "model": chunk["model"],
              "created": chunk["created"],
              "object": "chat.completion.chunk",
              "choices": choices_list,
+             "usage": usage,
          }
          return cast(ChatCompletionChunk, chat_chunk)
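The net effect of the hunk above is that converted chat chunks now carry a usage field, populated only on the chunk whose finish_reason is set while a reasoning parser is active. An illustrative final chunk (values invented, not captured from a real run) could look like:

# Illustrative shape only; field values are made up.
final_chat_chunk = {
    "id": "chatcmpl-xxxxxxxx",                 # "chat" + the completion chunk id
    "model": "deepseek-r1-distill-qwen",
    "created": 1742000000,
    "object": "chat.completion.chunk",
    "choices": [
        {"index": 0, "delta": {"role": "assistant", "content": ""}, "finish_reason": "stop"}
    ],
    "usage": {"prompt_tokens": 12, "completion_tokens": 87, "total_tokens": 99},
}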
 
@@ -313,12 +319,8 @@ class ChatModelMixin:
      ) -> ChatCompletionChunk:
          choices_list = []
          for i, choice in enumerate(chunk["choices"]):
-             delta = {
-                 "role": "assistant",
-             }
-             if reasoning_parser is None:
-                 delta["content"] = ""
-             else:
+             delta = {"role": "assistant", "content": ""}
+             if reasoning_parser is not None:
                  delta["reasoning_content"] = ""
              choices_list.append(
                  {
@@ -359,9 +361,7 @@ class ChatModelMixin:
          reasoning_parse: Optional[ReasoningParser] = None,
      ) -> Iterator[ChatCompletionChunk]:
          previous_texts = [""]
-         for i, chunk in enumerate(chunks):
-             if i == 0:
-                 yield cls._get_first_chat_completion_chunk(chunk, reasoning_parse)
+         for _, chunk in enumerate(chunks):
              # usage
              choices = chunk.get("choices")
              if not choices:
@@ -407,14 +407,10 @@ class ChatModelMixin:
          chunks: AsyncGenerator[CompletionChunk, None],
          reasoning_parser: Optional[ReasoningParser] = None,
      ) -> AsyncGenerator[ChatCompletionChunk, None]:
-         i = 0
          previous_texts = [""]
          async for chunk in chunks:
-             if i == 0:
-                 chat_chunk = cls._get_first_chat_completion_chunk(
-                     chunk, reasoning_parser
-                 )
-             elif not chunk.get("choices"):
+             choices = chunk.get("choices")
+             if not choices:
                  # usage
                  chat_chunk = cls._get_final_chat_completion_chunk(chunk)
              else:
@@ -422,7 +418,6 @@ class ChatModelMixin:
                      chunk, reasoning_parser, previous_texts
                  )
                  yield chat_chunk
-             i += 1
 
      @staticmethod
      def _to_chat_completion(
@@ -533,7 +528,7 @@ class ChatModelMixin:
      @classmethod
      def _eval_deepseek_chat_arguments(cls, c) -> List[Tuple]:
          """
-         Parses tool calls from deepseek-r1 format and removes duplicates.
+         Parses tool calls from deepseek-v3 format and removes duplicates.
 
          Returns:
              List[Tuple[Optional[str], Optional[str], Optional[dict]]]
@@ -541,20 +536,24 @@ class ChatModelMixin:
              - (content, None, None) if parsing failed (content is raw JSON text).
 
          Example input:
-             <|tool▁call|>get_current_weather
              ```json
-             {"location": "tokyo", "unit": "fahrenheit"}
+             {
+                 "name": "get_weather_and_time",
+                 "parameters": {
+                     "location": "Hangzhou"
+                 }
+             }
              ```
 
          Output:
              [
-                 (None, "get_current_weather", {"location": "tokyo", "unit": "fahrenheit"})
+                 (None, "get_current_weather", {"location": "Hangzhou"})
              ]
          """
 
          text = c["choices"][0]["text"]
 
-         pattern = r"<|tool▁call|>(\w+)\s*```json\s*(.*?)\s*```"
+         pattern = r"\s*```json\s*(.*?)\s*```"
          matches = re.findall(pattern, text, re.DOTALL)
 
          if not matches:
@@ -563,22 +562,31 @@ class ChatModelMixin:
          tool_calls = set()  # Used for deduplication
          results = []
 
-         for function_name, args_json in matches:
+         for raw_json in matches:
+             func_and_args = None
              try:
-                 arguments = json.loads(args_json)
+                 func_and_args = json.loads(raw_json)
                  # Convert dictionary to frozenset for deduplication
-                 arguments_hashable = frozenset(arguments.items())
-                 tool_call_tuple = (None, function_name, arguments)
+                 arguments_hashable = frozenset(func_and_args["parameters"])
+                 tool_call_tuple = (
+                     None,
+                     func_and_args["name"],
+                     func_and_args["parameters"],
+                 )
              except json.JSONDecodeError:
                  tool_call_tuple = (
-                     args_json,
+                     raw_json,
                      None,
                      None,
                  )  # If parsing fails, treat as raw content
                  arguments_hashable = None  # No need for hashing
 
              # Avoid duplicate entries
-             dedup_key = (function_name, arguments_hashable)
+             dedup_key = (
+                 (func_and_args["name"], arguments_hashable)
+                 if func_and_args is not None
+                 else (raw_json)
+             )
              if dedup_key not in tool_calls:
                  tool_calls.add(dedup_key)
                  results.append(tool_call_tuple)
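Since the function name now lives inside the JSON payload rather than behind a <|tool▁call|> marker, the parsing reduces to finding fenced JSON blocks and reading their name/parameters fields. A self-contained sketch of that idea (sample text invented, regex mirroring the diff):

import json
import re

# Invented sample in the deepseek-v3 style the updated parser expects.
text = (
    "I'll call a tool.\n"
    '```json\n{"name": "get_weather", "parameters": {"location": "Hangzhou"}}\n```'
)

pattern = r"\s*```json\s*(.*?)\s*```"  # same pattern as in the updated method
for raw_json in re.findall(pattern, text, re.DOTALL):
    func_and_args = json.loads(raw_json)
    print(func_and_args["name"], func_and_args["parameters"])
    # -> get_weather {'location': 'Hangzhou'}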
xinference/model/llm/vllm/core.py CHANGED
@@ -216,6 +216,10 @@ if VLLM_INSTALLED and vllm.__version__ >= "0.7.2":
  if VLLM_INSTALLED and vllm.__version__ >= "0.7.3":
      VLLM_SUPPORTED_CHAT_MODELS.append("qwen2.5-instruct-1m")
 
+ if VLLM_INSTALLED and vllm.__version__ >= "0.8.0":
+     VLLM_SUPPORTED_CHAT_MODELS.append("gemma-3-1b-it")
+     VLLM_SUPPORTED_VISION_MODEL_LIST.append("gemma-3-it")
+
 
  class VLLMModel(LLM):
      def __init__(
@@ -840,10 +844,11 @@ class VLLMChatModel(VLLMModel, ChatModelMixin):
          model_family = self.model_family.model_family or self.model_family.model_name
          full_context_kwargs = {}
          if tools:
-             if model_family in QWEN_TOOL_CALL_FAMILY:
+             if (
+                 model_family in QWEN_TOOL_CALL_FAMILY
+                 or model_family in DEEPSEEK_TOOL_CALL_FAMILY
+             ):
                  full_context_kwargs["tools"] = tools
-             elif model_family in DEEPSEEK_TOOL_CALL_FAMILY:
-                 self._tools_to_messages_for_deepseek(messages, tools)
          assert self.model_family.chat_template is not None
          full_prompt = self.get_full_context(
              messages, self.model_family.chat_template, **full_context_kwargs
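With this change, deepseek-v3 tool definitions are handed to the chat template via full_context_kwargs instead of being rewritten into the message list by _tools_to_messages_for_deepseek. From the client side the call shape is unchanged; a hedged sketch (tool schema invented, `model` being a chat-model handle obtained via client.get_model(...) as in the earlier sketch):

# Hypothetical tool-call request against a deepseek-v3 model served by the vLLM engine.
tools = [
    {
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Get the current weather for a city.",
            "parameters": {
                "type": "object",
                "properties": {"location": {"type": "string"}},
                "required": ["location"],
            },
        },
    }
]
response = model.chat(
    messages=[{"role": "user", "content": "What's the weather in Hangzhou?"}],
    tools=tools,
)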
xinference/types.py CHANGED
@@ -201,8 +201,8 @@ class ChatCompletion(TypedDict):
 
  class ChatCompletionChunkDelta(TypedDict):
      role: NotRequired[str]
-     reasoning_content: NotRequired[str]
-     content: NotRequired[str]
+     reasoning_content: NotRequired[Union[str, None]]
+     content: NotRequired[Union[str, None]]
      tool_calls: NotRequired[List[ToolCalls]]
 
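Allowing None here matches how the reasoning parser streams: while the model is still "thinking", a delta can carry reasoning_content with content explicitly None, and the reverse once the answer starts. Purely illustrative deltas under the widened typing (values invented):

thinking_delta = {"role": "assistant", "content": None, "reasoning_content": "Checking the forecast..."}
answer_delta = {"role": "assistant", "content": "It will rain tomorrow.", "reasoning_content": None}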
 
xinference/web/ui/build/asset-manifest.json CHANGED
@@ -1,14 +1,14 @@
  {
    "files": {
-     "main.css": "./static/css/main.f8177338.css",
-     "main.js": "./static/js/main.55b70cb7.js",
+     "main.css": "./static/css/main.b494ae7e.css",
+     "main.js": "./static/js/main.3cea968e.js",
      "static/media/icon.webp": "./static/media/icon.4603d52c63041e5dfbfd.webp",
      "index.html": "./index.html",
-     "main.f8177338.css.map": "./static/css/main.f8177338.css.map",
-     "main.55b70cb7.js.map": "./static/js/main.55b70cb7.js.map"
+     "main.b494ae7e.css.map": "./static/css/main.b494ae7e.css.map",
+     "main.3cea968e.js.map": "./static/js/main.3cea968e.js.map"
    },
    "entrypoints": [
-     "static/css/main.f8177338.css",
-     "static/js/main.55b70cb7.js"
+     "static/css/main.b494ae7e.css",
+     "static/js/main.3cea968e.js"
    ]
  }
xinference/web/ui/build/index.html CHANGED
@@ -1 +1 @@
- <!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="icon" href="./favicon.svg"/><meta name="viewport" content="width=device-width,initial-scale=1"/><meta name="theme-color" content="#000000"/><meta name="description" content="Web site created using create-react-app"/><link rel="apple-touch-icon" href="./logo192.png"/><link rel="manifest" href="./manifest.json"/><title>Xinference</title><script defer="defer" src="./static/js/main.55b70cb7.js"></script><link href="./static/css/main.f8177338.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>
+ <!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="icon" href="./favicon.svg"/><meta name="viewport" content="width=device-width,initial-scale=1"/><meta name="theme-color" content="#000000"/><meta name="description" content="Web site created using create-react-app"/><link rel="apple-touch-icon" href="./logo192.png"/><link rel="manifest" href="./manifest.json"/><title>Xinference</title><script defer="defer" src="./static/js/main.3cea968e.js"></script><link href="./static/css/main.b494ae7e.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>
xinference/web/ui/build/static/css/main.b494ae7e.css ADDED
@@ -0,0 +1,2 @@
+ .container{cursor:pointer;display:block}.container,.descriptionCard{border-radius:20px;height:300px;position:relative;width:300px}.descriptionCard{left:-1px;padding:20px;top:-1px}.cardTitle{display:flex;justify-content:space-between}.iconButtonBox{align-items:center;display:flex}.drawerCard{min-height:100%;padding:20px 80px 0;position:relative;width:60vw}.p{-webkit-line-clamp:4;-webkit-box-orient:vertical;display:-webkit-box;font-size:14px;overflow:hidden;padding:0 10px;text-overflow:ellipsis;word-break:break-word}.pasteText{color:#1976d2;cursor:pointer;font-size:18px!important;margin-inline:10px}.pasteText:hover{color:#1976d2b3}.copyToCommandLine{color:#1976d2;cursor:pointer;font-size:16px!important}.copyToCommandLine:hover{color:#1976d2b3}.formContainer{height:80%;overflow:scroll;padding:0 10px 160px}.buttonsContainer{align-items:center;bottom:50px;display:flex;justify-content:space-between;left:100px;position:absolute;right:100px}.buttonContainer{background-color:initial;border-width:0;width:45%}.buttonItem{border:1px solid #e5e7eb;border-radius:4px;cursor:pointer;padding:5px;width:100%}.buttonItem:hover{border-color:#888}.instructionText{color:#666;font-size:12px;font-style:italic;margin:30px 0;text-align:center}.iconRow{bottom:20px;justify-content:space-between;left:20px;position:absolute;right:20px}.iconItem,.iconRow{align-items:center;display:flex}.iconItem{flex-direction:column;margin:20px}.boldIconText{font-size:1.2em;font-weight:700}.muiIcon{font-size:1.5em}.smallText{font-size:.8em}.dialogBox{height:607px;margin:32px;overflow-x:scroll;width:1241px}.dialogTitle{display:flex;justify-content:space-between;padding:20px 20px 7px}.dialogTitle-model_name{font-size:18px;font-weight:700}.pathBox{cursor:pointer;overflow:hidden;text-overflow:ellipsis;white-space:nowrap;width:160px}.pathBox2{width:300px}.empty{color:#555;font-size:20px;left:50%;position:absolute;top:30%;-webkit-transform:translate(-50%);transform:translate(-50%)}.deleteDialog{align-items:center;display:flex}.warningIcon{color:#ed6c02;margin-right:10px}.jsonDialog{border-radius:8px;color:#000;display:flex;flex-direction:column;padding:10px 30px}.jsonDialog-title{align-items:center;display:flex;justify-content:space-between;margin:10px 0 20px}.title-name{font-size:16px;font-weight:700}.main-box{height:500px;width:700px}.but-box{display:flex;justify-content:end;margin-top:20px}.drawer{bottom:0;left:0;opacity:0;position:fixed;right:0;top:0;transition:visibility .3s ease,opacity .3s ease;visibility:hidden;z-index:1000}.drawer.open{opacity:1;visibility:visible}.drawer-overlay{background-color:rgba(0,0,0,.5);left:0;z-index:999}.drawer-content,.drawer-overlay{bottom:0;position:absolute;right:0;top:0}.drawer-content{background-color:#fff;box-shadow:-2px 0 10px rgba(0,0,0,.1);overflow-y:auto;-webkit-transform:translateX(100%);transform:translateX(100%);transition:-webkit-transform .3s ease;transition:transform .3s ease;transition:transform .3s ease,-webkit-transform .3s ease;z-index:1000}.drawer.open .drawer-content{-webkit-transform:translateX(0);transform:translateX(0)}.copyText{color:#666;cursor:pointer;font-size:14px!important}.copyText:hover{color:#1976d2}.formBox{max-height:80vh;max-width:50vw;min-width:50vw;overflow:auto;padding:40px 20px 0 0;position:relative;transition:all .4s 
ease-in-out}.broaden{max-width:100%;min-width:100%;padding-right:0}.show-json{align-items:center;display:flex;right:60px;top:90px}.icon,.show-json{position:absolute}.icon{cursor:pointer;margin-left:20px;right:-40px}.icon:hover{color:#1976d2}.arrow{font-size:24px!important}.jsonBox{min-height:80vh;position:relative;transition:all .4s ease-in-out;width:100%}.hide{overflow:hidden;-webkit-transform:translate(30vw);transform:translate(30vw);width:0}.checkboxWrapper{align-items:center;display:flex;flex-wrap:wrap;width:100%}.jsonBox-header{align-items:center;display:flex;justify-content:space-between}.jsonBox-title{font-weight:700;line-height:40px}.textarea{background-color:initial;border:1px solid #ddd;border-radius:5px;color:#666;height:calc(100% - 40px);padding:5px 10px;resize:none;width:100%}.addBtn{margin-left:20px!important}.item{border:1px solid #ddd;border-radius:10px;margin:10px 50px 0;overflow:hidden;padding:20px;position:relative}.item:hover .deleteBtn{-webkit-transform:translateX(-50px);transform:translateX(-50px)}.deleteBtn{background-color:#1976d2;border-radius:25px;height:50px;line-height:70px;position:absolute;right:20px;text-align:center;top:calc(50% - 25px);-webkit-transform:translateX(80px);transform:translateX(80px);transition:all .3s ease-in-out;width:50px}.deleteBtn:hover{box-shadow:0 0 10px #aaa;cursor:pointer}.deleteIcon{color:#fff;font-size:28px!important}.chat_template_box{align-items:start;display:flex;gap:10px}.chat_template_test{width:30%}.chat_template_test_mainBox{border:1px solid #ccc;border-radius:4px;height:137px;overflow:scroll;padding:10px}.chat_template_test_tip{color:rgba(0,0,0,.6);font-size:10px;margin:4px 14px 0}.test_res_box{border:1px solid #ddd;border-radius:4px;margin-top:5px;min-height:55px;padding:10px}.css-19qh8xo-MuiInputBase-input-MuiOutlinedInput-input.Mui-disabled{-webkit-text-fill-color:#000!important}
+ /*# sourceMappingURL=main.b494ae7e.css.map*/
xinference/web/ui/build/static/css/main.b494ae7e.css.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"static/css/main.b494ae7e.css","mappings":"AAAA,WAKE,cAAe,CAJf,aAMF,CACA,4BAFE,kBAAmB,CAFnB,YAAa,CAFb,iBAAkB,CAClB,WAaF,CARA,iBAGE,SAAU,CAGV,YAAa,CAJb,QAMF,CACA,WACE,YAAa,CACb,6BACF,CACA,eAEE,kBAAmB,CADnB,YAEF,CACA,YAGE,eAAgB,CADhB,mBAAoB,CADpB,iBAAkB,CAGlB,UACF,CACA,GAEE,oBAAqB,CACrB,2BAA4B,CAF5B,mBAAoB,CAMpB,cAAe,CAHf,eAAgB,CAIhB,cAAiB,CAHjB,sBAAuB,CACvB,qBAGF,CACA,WAEE,aAAc,CACd,cAAe,CAFf,wBAA0B,CAG1B,kBACF,CACA,iBACE,eACF,CACA,mBAEE,aAAc,CACd,cAAe,CAFf,wBAGF,CACA,yBACE,eACF,CACA,eACE,UAAW,CACX,eAAgB,CAEhB,oBACF,CACA,kBAOE,kBAAmB,CALnB,WAAY,CAGZ,YAAa,CACb,6BAA8B,CAH9B,UAAW,CAFX,iBAAkB,CAGlB,WAIF,CACA,iBAGE,wBAA6B,CAD7B,cAAiB,CADjB,SAGF,CACA,YAOE,wBAAqB,CAHrB,iBAAkB,CAHlB,cAAe,CAEf,WAAY,CADZ,UAMF,CACA,kBACE,iBACF,CACA,iBAEE,UAAc,CADd,cAAe,CAEf,iBAAkB,CAClB,aAAc,CACd,iBACF,CACA,SAEE,WAAY,CAIZ,6BAA8B,CAH9B,SAAU,CAFV,iBAAkB,CAGlB,UAIF,CACA,mBAFE,kBAAmB,CAFnB,YASF,CALA,UAEE,qBAAsB,CAEtB,WACF,CACA,cAEE,eAAgB,CADhB,eAEF,CACA,SACE,eACF,CACA,WACE,cACF,CACA,WAEE,YAAa,CACb,WAAY,CACZ,iBAAkB,CAHlB,YAIF,CACA,aACE,YAAa,CACb,6BAA8B,CAC9B,qBACF,CACA,wBACE,cAAe,CACf,eACF,CACA,SAEE,cAAe,CACf,eAAgB,CAEhB,sBAAuB,CADvB,kBAAmB,CAHnB,WAKF,CACA,UACE,WACF,CACA,OAKE,UAAW,CADX,cAAe,CAFf,QAAS,CADT,iBAAkB,CAElB,OAAQ,CAGR,iCAA6B,CAA7B,yBACF,CACA,cAEE,kBAAmB,CADnB,YAEF,CACA,aAEE,aAAuB,CADvB,iBAEF,CACA,YAKE,iBAAkB,CADlB,UAAW,CAHX,YAAa,CACb,qBAAsB,CACtB,iBAGF,CACA,kBAGE,kBAAmB,CAFnB,YAAa,CACb,6BAA8B,CAE9B,kBACF,CACA,YACE,cAAe,CACf,eACF,CACA,UAEE,YAAa,CADb,WAEF,CACA,SACE,YAAa,CACb,mBAAoB,CACpB,eACF,CACA,QAKE,QAAS,CACT,MAAO,CAEP,SAAU,CANV,cAAe,CAEf,OAAQ,CADR,KAAM,CAMN,+CAAmD,CAFnD,iBAAkB,CANlB,YASF,CACA,aAEE,SAAU,CADV,kBAEF,CACA,gBAME,+BAAoC,CADpC,MAAO,CAEP,WACF,CACA,gCALE,QAAS,CAHT,iBAAkB,CAElB,OAAQ,CADR,KAkBF,CAXA,gBAKE,qBAAuB,CAEvB,qCAA0C,CAG1C,eAAgB,CADhB,kCAA2B,CAA3B,0BAA2B,CAD3B,qCAA+B,CAA/B,6BAA+B,CAA/B,wDAA+B,CAF/B,YAKF,CACA,6BACE,+BAAwB,CAAxB,uBACF,CCnOA,UAEE,UAAW,CACX,cAAe,CAFf,wBAGF,CAEA,gBACE,aACF,CCRA,SAIE,eAAgB,CAFhB,cAAe,CACf,cAAe,CAEf,aAAc,CACd,qBAAsB,CALtB,iBAAkB,CAMlB,8BACF,CAEA,SACE,cAAe,CACf,cAAe,CACf,eACF,CAEA,WAEE,kBAAmB,CADnB,YAAa,CAIb,UAAW,CADX,QAEF,CAEA,iBALE,iBAUF,CALA,MAGE,cAAe,CACf,gBAAiB,CAFjB,WAGF,CAEA,YACE,aACF,CAEA,OACE,wBACF,CAEA,SAEE,eAAgB,CADhB,iBAAkB,CAGlB,8BAAgC,CADhC,UAEF,CAEA,MAGE,eAAgB,CADhB,iCAA6B,CAA7B,yBAA6B,CAD7B,OAGF,CAEA,iBAGE,kBAAmB,CAFnB,YAAa,CACb,cAAe,CAEf,UACF,CAEA,gBAGE,kBAAmB,CAFnB,YAAa,CACb,6BAEF,CAEA,eAEE,eAAgB,CADhB,gBAEF,CAEA,UAQE,wBAA6B,CAJ7B,qBAAsB,CACtB,iBAAkB,CAElB,UAAW,CALX,wBAAyB,CACzB,gBAAiB,CAGjB,WAAY,CALZ,UAQF,CAEA,QACE,0BACF,CAEA,MAEE,qBAAsB,CAGtB,kBAAmB,CAFnB,kBAAmB,CAGnB,eAAgB,CAFhB,YAAa,CAHb,iBAMF,CAEA,uBACE,mCAA4B,CAA5B,2BACF,CAEA,WAUE,wBAAyB,CADzB,kBAAmB,CAJnB,WAAY,CAGZ,gBAAiB,CAPjB,iBAAkB,CAClB,UAAW,CAKX,iBAAkB,CAJlB,oBAAqB,CAGrB,kCAA2B,CAA3B,0BAA2B,CAK3B,8BAAgC,CAPhC,UAQF,CAEA,iBAEE,wBAAyB,CADzB,cAEF,CAEA,YAEE,UAAW,CADX,wBAEF,CAEA,mBAEE,iBAAkB,CADlB,YAAa,CAEb,QACF,CAEA,oBACE,SACF,CAEA,4BAGE,qBAAsB,CACtB,iBAAkB,CAHlB,YAAa,CAIb,eAAgB,CAHhB,YAIF,CAEA,wBAGE,oBAAyB,CAFzB,cAAe,CACf,iBAEF,CAEA,cACE,qBAAsB,CAItB,iBAAkB,CADlB,cAAe,CAFf,eAAgB,CAChB,YAGF,CAEA,oEACE,sCACF","sources":["scenes/launch_model/styles/modelCardStyle.css","components/copyComponent/style.css","scenes/register_model/styles/registerModelStyle.css"],"sourcesContent":[".container {\n display: block;\n position: relative;\n width: 300px;\n height: 300px;\n cursor: pointer;\n border-radius: 20px;\n}\n.descriptionCard {\n position: relative;\n top: -1px;\n left: -1px;\n width: 300px;\n height: 300px;\n padding: 20px;\n border-radius: 20px;\n}\n.cardTitle {\n display: 
flex;\n justify-content: space-between;\n}\n.iconButtonBox {\n display: flex;\n align-items: center;\n}\n.drawerCard {\n position: relative;\n padding: 20px 80px 0;\n min-height: 100%;\n width: 60vw;\n}\n.p {\n display: -webkit-box;\n -webkit-line-clamp: 4;\n -webkit-box-orient: vertical;\n overflow: hidden;\n text-overflow: ellipsis;\n word-break: break-word;\n font-size: 14px;\n padding: 0px 10px;\n}\n.pasteText {\n font-size: 18px !important;\n color: #1976d2;\n cursor: pointer;\n margin-inline: 10px;\n}\n.pasteText:hover {\n color: #1976d2b3;\n}\n.copyToCommandLine {\n font-size: 16px !important;\n color: #1976d2;\n cursor: pointer;\n}\n.copyToCommandLine:hover {\n color: #1976d2b3;\n}\n.formContainer {\n height: 80%;\n overflow: scroll;\n padding: 0 10px;\n padding-bottom: 160px;\n}\n.buttonsContainer {\n position: absolute;\n bottom: 50px;\n left: 100px;\n right: 100px;\n display: flex;\n justify-content: space-between;\n align-items: center;\n}\n.buttonContainer {\n width: 45%;\n border-width: 0px;\n background-color: transparent;\n}\n.buttonItem {\n cursor: pointer;\n width: 100%;\n padding: 5px;\n border-radius: 4px;\n border: 1px solid #e5e7eb;\n border-width: 1px;\n border-color: #e5e7eb;\n}\n.buttonItem:hover {\n border-color: #888;\n}\n.instructionText {\n font-size: 12px;\n color: #666666;\n font-style: italic;\n margin: 30px 0;\n text-align: center;\n}\n.iconRow {\n position: absolute;\n bottom: 20px;\n left: 20px;\n right: 20px;\n display: flex;\n justify-content: space-between;\n align-items: center;\n}\n.iconItem {\n display: flex;\n flex-direction: column;\n align-items: center;\n margin: 20px;\n}\n.boldIconText {\n font-weight: bold;\n font-size: 1.2em;\n}\n.muiIcon {\n font-size: 1.5em;\n}\n.smallText {\n font-size: 0.8em;\n}\n.dialogBox {\n width: 1241px;\n height: 607px;\n margin: 32px;\n overflow-x: scroll;\n}\n.dialogTitle {\n display: flex;\n justify-content: space-between;\n padding: 20px 20px 7px;\n}\n.dialogTitle-model_name {\n font-size: 18px;\n font-weight: 700;\n}\n.pathBox {\n width: 160px;\n cursor: pointer;\n overflow: hidden;\n white-space: nowrap;\n text-overflow: ellipsis;\n}\n.pathBox2 {\n width: 300px;\n}\n.empty {\n position: absolute;\n left: 50%;\n top: 30%;\n font-size: 20px;\n color: #555;\n transform: translate(-50%, 0);\n}\n.deleteDialog {\n display: flex;\n align-items: center;\n}\n.warningIcon {\n margin-right: 10px;\n color: rgb(237, 108, 2);\n}\n.jsonDialog {\n display: flex;\n flex-direction: column;\n padding: 10px 30px;\n color: #000;\n border-radius: 8px;\n}\n.jsonDialog-title {\n display: flex;\n justify-content: space-between;\n align-items: center;\n margin: 10px 0 20px 0;\n}\n.title-name {\n font-size: 16px;\n font-weight: 700;\n}\n.main-box {\n width: 700px;\n height: 500px;\n}\n.but-box {\n display: flex;\n justify-content: end;\n margin-top: 20px;\n}\n.drawer {\n z-index: 1000;\n position: fixed;\n top: 0;\n right: 0;\n bottom: 0;\n left: 0;\n visibility: hidden;\n opacity: 0;\n transition: visibility 0.3s ease, opacity 0.3s ease;\n}\n.drawer.open {\n visibility: visible;\n opacity: 1;\n}\n.drawer-overlay {\n position: absolute;\n top: 0;\n right: 0;\n bottom: 0;\n left: 0;\n background-color: rgba(0, 0, 0, 0.5);\n z-index: 999;\n}\n.drawer-content {\n position: absolute;\n top: 0;\n right: 0;\n bottom: 0;\n background-color: white;\n z-index: 1000;\n box-shadow: -2px 0 10px rgba(0, 0, 0, 0.1);\n transition: transform 0.3s ease;\n transform: translateX(100%);\n overflow-y: auto;\n}\n.drawer.open .drawer-content {\n transform: 
translateX(0);\n}\n",".copyText {\n font-size: 14px !important;\n color: #666;\n cursor: pointer;\n}\n\n.copyText:hover {\n color: #1976d2;\n}\n",".formBox {\n position: relative;\n max-width: 50vw;\n min-width: 50vw;\n max-height: 80vh;\n overflow: auto;\n padding: 40px 20px 0 0;\n transition: all 0.4s ease-in-out;\n}\n\n.broaden {\n max-width: 100%;\n min-width: 100%;\n padding-right: 0;\n}\n\n.show-json {\n display: flex;\n align-items: center;\n position: absolute;\n top: 90px;\n right: 60px;\n}\n\n.icon {\n position: absolute;\n right: -40px;\n cursor: pointer;\n margin-left: 20px;\n}\n\n.icon:hover {\n color: #1976d2;\n}\n\n.arrow {\n font-size: 24px !important;\n}\n\n.jsonBox {\n position: relative;\n min-height: 80vh;\n width: 100%;\n transition: all 0.4s ease-in-out;\n}\n\n.hide {\n width: 0;\n transform: translate(30vw, 0);\n overflow: hidden;\n}\n\n.checkboxWrapper {\n display: flex;\n flex-wrap: wrap;\n align-items: center;\n width: 100%;\n}\n\n.jsonBox-header {\n display: flex;\n justify-content: space-between;\n align-items: center;\n}\n\n.jsonBox-title {\n line-height: 40px;\n font-weight: 700;\n}\n\n.textarea {\n width: 100%;\n height: calc(100% - 40px);\n padding: 5px 10px;\n border: 1px solid #ddd;\n border-radius: 5px;\n resize: none;\n color: #666;\n background-color: transparent;\n}\n\n.addBtn {\n margin-left: 20px !important;\n}\n\n.item {\n position: relative;\n border: 1px solid #ddd;\n margin: 10px 50px 0;\n padding: 20px;\n border-radius: 10px;\n overflow: hidden;\n}\n\n.item:hover .deleteBtn {\n transform: translateX(-50px);\n}\n\n.deleteBtn {\n position: absolute;\n right: 20px;\n top: calc(50% - 25px);\n width: 50px;\n height: 50px;\n transform: translateX(80px);\n text-align: center;\n line-height: 70px;\n border-radius: 25px;\n background-color: #1976d2;\n transition: all 0.3s ease-in-out;\n}\n\n.deleteBtn:hover {\n cursor: pointer;\n box-shadow: 0 0 10px #aaa;\n}\n\n.deleteIcon {\n font-size: 28px !important;\n color: #fff;\n}\n\n.chat_template_box {\n display: flex;\n align-items: start;\n gap: 10px;\n}\n\n.chat_template_test {\n width: 30%;\n}\n\n.chat_template_test_mainBox {\n height: 137px;\n padding: 10px;\n border: 1px solid #ccc;\n border-radius: 4px;\n overflow: scroll;\n}\n\n.chat_template_test_tip {\n font-size: 10px;\n margin: 4px 14px 0;\n color: rgba(0, 0, 0, 0.6);\n}\n\n.test_res_box {\n border: 1px solid #ddd;\n min-height: 55px;\n padding: 10px;\n margin-top: 5px;\n border-radius: 4px;\n}\n\n.css-19qh8xo-MuiInputBase-input-MuiOutlinedInput-input.Mui-disabled {\n -webkit-text-fill-color: #000 !important;\n}\n"],"names":[],"sourceRoot":""}