pilot.linkstec 0.0.106__py3-none-any.whl → 0.0.108__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pilot/control/impl/base_controller.py +4 -0
- pilot/generater/ai_base.py +7 -0
- pilot/generater/ai_interface.py +8 -0
- pilot/generater/lmstudioai.py +121 -0
- pilot/generater/vera.py +101 -0
- pilot/job/base/generate/generateJsonBaseJob.py +11 -15
- pilot/job/base/generate/generateTextBaseJob.py +11 -15
- {pilot_linkstec-0.0.106.dist-info → pilot_linkstec-0.0.108.dist-info}/METADATA +1 -1
- {pilot_linkstec-0.0.106.dist-info → pilot_linkstec-0.0.108.dist-info}/RECORD +12 -8
- {pilot_linkstec-0.0.106.dist-info → pilot_linkstec-0.0.108.dist-info}/WHEEL +0 -0
- {pilot_linkstec-0.0.106.dist-info → pilot_linkstec-0.0.108.dist-info}/licenses/LICENSE +0 -0
- {pilot_linkstec-0.0.106.dist-info → pilot_linkstec-0.0.108.dist-info}/top_level.txt +0 -0
pilot/control/impl/base_controller.py

@@ -7,6 +7,7 @@ from concurrent.futures import ThreadPoolExecutor
 from pilot.control.control_interface import ControlInterface
 from pilot.unit.impl.base_unit import BaseUnit
 from pilot.config.config_reader import ConfigReader
+from pilot.generater.vera import VeraSingleton


 class BaseController(ControlInterface):
@@ -14,6 +15,9 @@ class BaseController(ControlInterface):
     config_dto = None

     def __init__(self):
+        #try:
+        #    VeraSingleton.get_instance()
+        #except FileNotFoundError:
         pass

     def _init_unit(self):
pilot/generater/lmstudioai.py
ADDED

@@ -0,0 +1,121 @@
+import threading
+from typing import Dict, Any, Optional
+from pilot.generater.ai_base import AIBase
+
+import requests
+
+class lmstudioAISingleton(AIBase):
+    _instance: Optional['lmstudioAISingleton'] = None
+    _lock = threading.Lock()
+
+    def __new__(cls):
+        if cls._instance is None:
+            with cls._lock:
+                if cls._instance is None:
+                    cls._instance = super(lmstudioAISingleton, cls).__new__(cls)
+                    cls._instance._initialized = False
+        return cls._instance
+
+    def __init__(self, model_name: str = "openai/gpt-oss-20b", base_url: str = "http://127.0.0.1:3000/v1"):
+        if not self._initialized:
+            with self._lock:
+                if not self._initialized:
+                    self.model_name = model_name
+                    self.base_url = base_url
+                    self._session = requests.Session()
+                    self._initialized = True
+
+    def generate_content(self, prompt: str) -> Dict[str, Any]:
+        """Safe to call from multiple threads."""
+        try:
+            payload = {
+                "model": self.model_name,
+                "messages": [
+                    {"role": "user", "content": prompt}
+                ]
+            }
+
+            resp = self._session.post(
+                f"{self.base_url}/chat/completions",
+                json=payload,
+                timeout=600
+            )
+            resp.raise_for_status()
+            data = resp.json()
+
+            content = data["choices"][0]["message"]["content"]
+
+            return {
+                "prompt": prompt,
+                "response": self._remove_code_fence(content),
+                "success": True,
+                "error": None
+            }
+
+        except Exception as e:
+            return {
+                "prompt": prompt,
+                "response": None,
+                "success": False,
+                "error": str(e)
+            }
+
+    def start_chat(self):
+        """
+        Full compatibility with the VertexAI ChatSession is not possible,
+        but a degraded implementation is provided so existing code keeps working.
+        """
+        return _LMStudioChatSession(self)
+
+    def count_tokens(self, text: str) -> int:
+        return 1
+        #try:
+        #    return len(self.encoding.encode(text))
+        #except Exception as e:
+        #    print(f"Token count failed: {e}")
+        #    return 0
+
+    def _remove_code_fence(self, text: str) -> str:
+        lines = text.splitlines()
+        if lines and lines[0].startswith("```"):
+            lines = lines[1:]
+        if lines and lines[-1].startswith("```"):
+            lines = lines[:-1]
+        return "\n".join(lines)
+
+    @classmethod
+    def get_instance(cls, model_name, base_url) -> 'lmstudioAISingleton':
+        return cls(model_name, base_url)
+
+class _LMStudioChatSession:
+    """
+    Minimal compatibility layer for the VertexAI ChatSession.
+    """
+    def __init__(self, client: lmstudioAISingleton):
+        self._client = client
+        self._messages = []
+
+    def send_message(self, message: str):
+        self._messages.append({"role": "user", "content": message})
+
+        payload = {
+            "model": self._client.model_name,
+            "messages": self._messages
+        }
+
+        resp = self._client._session.post(
+            f"{self._client.base_url}/chat/completions",
+            json=payload,
+            timeout=60
+        )
+        resp.raise_for_status()
+        data = resp.json()
+
+        reply = data["choices"][0]["message"]["content"]
+        self._messages.append({"role": "assistant", "content": reply})
+
+        class _Resp:
+            def __init__(self, text):
+                self.text = text
+
+        return _Resp(reply)
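Not part of the package diff: a minimal usage sketch of the new client, assuming an LM Studio-compatible /chat/completions endpoint is reachable at the default base_url.

from pilot.generater.lmstudioai import lmstudioAISingleton

# Illustration only (not shipped in the wheel): repeated construction returns
# the same thread-safe singleton instance.
client = lmstudioAISingleton.get_instance(
    model_name="openai/gpt-oss-20b",
    base_url="http://127.0.0.1:3000/v1",
)
result = client.generate_content("Say hello in one word.")
if result["success"]:
    print(result["response"])      # leading/trailing ``` fences are already stripped
else:
    print("generation failed:", result["error"])

chat = client.start_chat()         # degraded ChatSession-style wrapper
reply = chat.send_message("And in Japanese?")
print(reply.text)                  # _Resp exposes only .text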
pilot/generater/vera.py
ADDED

@@ -0,0 +1,101 @@
+import configparser
+import os
+import threading
+from dataclasses import dataclass
+from typing import Dict, Any, Optional
+
+from pilot.generater.lmstudioai import lmstudioAISingleton
+from pilot.generater.ai_base import AIBase
+
+@dataclass
+class AIConfigDTO:
+    veraApi: str
+    veraModel: str
+    verUrl: str
+    timeOut: int
+
+class VeraSingleton:
+    _instance: Optional['VeraSingleton'] = None
+    _lock = threading.Lock()
+
+    def __new__(cls):
+        if cls._instance is None:
+            with cls._lock:
+                if cls._instance is None:
+                    cls._instance = super(VeraSingleton, cls).__new__(cls)
+                    cls._instance._initialized = False
+        return cls._instance
+
+    def __init__(self):
+        if not self._initialized:
+            with self._lock:
+
+                cwd = os.getcwd()
+                filepath = os.path.join(cwd, 'config', 'vera.properties')
+                self.config = configparser.ConfigParser()
+                self.config.optionxform = str
+
+                with open(filepath, 'r', encoding='utf-8') as f:
+                    content = f.read()
+                if not content.lstrip().startswith('['):
+                    content = '[DEFAULT]\n' + content
+                self.config.read_string(content)
+                self.config_dto = self.create_ai_dto()
+                self.veraApi = self.config_dto.veraApi
+                self.model_name = self.config_dto.veraModel
+                self.base_url = self.config_dto.verUrl
+                self.timeout = self.config_dto.timeOut
+                self.ai_instance: AIBase = self.get_ai_instance()
+                self._initialized = True
+
+    def get_ai_instance(self):
+        match self.veraApi:
+            case "lmstudio":
+                return lmstudioAISingleton.get_instance(model_name=self.model_name, base_url=self.base_url)
+            case "ollama":
+                return AIBase()
+            case "gemini":
+                return AIBase()
+            case "gemini":
+                return AIBase()
+            case "gemini":
+                return AIBase()
+            case "qwen":
+                return AIBase()
+            case "azure":
+                return AIBase()
+        return AIBase()
+
+    def generate_content(self, prompt: str) -> Dict[str, Any]:
+        return self.ai_instance.generate_content(prompt)
+
+    @classmethod
+    def get_instance(cls) -> 'VeraSingleton':
+        return cls()
+
+    def create_ai_dto(self) -> AIConfigDTO:
+        vera_api = self.get('DEFAULT', 'vera_api', fallback='.')
+        vera_model = self.get('DEFAULT', 'vera_model', fallback='.')
+        ver_url = self.get('DEFAULT', 'ver_url', fallback='.')
+        time_out = int(self.get('DEFAULT', 'time_out', fallback=1))
+
+        return AIConfigDTO(
+            veraApi = vera_api,
+            veraModel = vera_model,
+            verUrl = ver_url,
+            timeOut = time_out
+        )
+
+    def get(self, section, option, fallback=None, cast_type=str):
+        try:
+            if cast_type == bool:
+                return self.config.getboolean(section, option)
+            elif cast_type == int:
+                return self.config.getint(section, option)
+            elif cast_type == float:
+                return self.config.getfloat(section, option)
+            else:
+                return self.config.get(section, option)
+        except (configparser.NoSectionError, configparser.NoOptionError):
+            return fallback
+
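For orientation, a minimal sketch of the configuration VeraSingleton reads. The file path and key names come from vera.py above; the values and the write-out step are hypothetical, for illustration only.

import os
from pilot.generater.vera import VeraSingleton

# vera.py reads ./config/vera.properties relative to the current working
# directory and prepends a [DEFAULT] section header if the file lacks one.
os.makedirs("config", exist_ok=True)
with open(os.path.join("config", "vera.properties"), "w", encoding="utf-8") as f:
    f.write(
        "vera_api=lmstudio\n"                 # backend selector used by get_ai_instance()
        "vera_model=openai/gpt-oss-20b\n"
        "ver_url=http://127.0.0.1:3000/v1\n"
        "time_out=600\n"
    )

vera = VeraSingleton.get_instance()     # first call parses the file and builds the client
result = vera.generate_content("ping")  # delegates to the selected backend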
pilot/job/base/generate/generateJsonBaseJob.py

@@ -5,7 +5,7 @@ import time

 from pilot.job.impl.base_job import BaseJob

-from pilot.generater.
+from pilot.generater.vera import VeraSingleton

 class GenerateJsonBaseJob(BaseJob):

@@ -14,24 +14,20 @@ class GenerateJsonBaseJob(BaseJob):
     result_file_path: str

     def run(self):
-        #with self._begin_file_lock:
-        #    if not self.change_current_trg_to_begin():
-        #        return
-        #prompt = self.get_file_content()
         prompt = self.prompt_content
         # Token count check
-
-        token_count = vertexai.count_tokens(prompt)
-        if token_count == 0:
-
-
-        if token_count > 900000:
-
-
-
+        vera_ai = VeraSingleton.get_instance()
+        #token_count = vertexai.count_tokens(prompt)
+        #if token_count == 0:
+        #    super().run()
+        #    return
+        #if token_count > 900000:
+        #    print(f"Warning: prompt token count exceeds 900000 ({token_count} tokens)")
+        #    super().run()
+        #    return
         # Generate with VertexAI
         start = time.time()
-        result =
+        result = vera_ai.generate_content(prompt)
         end = time.time()
         print(f"Ai processing time {self.file_path}: {end - start:.2f}s")
pilot/job/base/generate/generateTextBaseJob.py

@@ -5,7 +5,7 @@ import time

 from pilot.job.impl.base_job import BaseJob

-from pilot.generater.
+from pilot.generater.vera import VeraSingleton

 class GenerateTextBaseJob(BaseJob):

@@ -14,24 +14,20 @@ class GenerateTextBaseJob(BaseJob):
     result_file_path: str

     def run(self):
-        #with self._begin_file_lock:
-        #    if not self.change_current_trg_to_begin():
-        #        return
-        #prompt = self.get_file_content()
         prompt = self.prompt_content
         # Token count check
-
-        token_count = vertexai.count_tokens(prompt)
-        if token_count == 0:
-
-
-        if token_count > 900000:
-
-
-
+        vera_ai = VeraSingleton.get_instance()
+        #token_count = vertexai.count_tokens(prompt)
+        #if token_count == 0:
+        #    super().run()
+        #    return
+        #if token_count > 900000:
+        #    print(f"Warning: prompt token count exceeds 900000 ({token_count} tokens)")
+        #    super().run()
+        #    return
         # Generate with VertexAI
         start = time.time()
-        result =
+        result = vera_ai.generate_content(prompt)
         end = time.time()
         print(f"AI processing time {self.file_path}: {end - start:.2f}s")
         result_content = result.get('response', '')
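Both jobs now consume the dictionary returned by VeraSingleton.generate_content(). For reference, its shape on the lmstudio path, with field names taken from lmstudioai.py above and illustrative values:

# What `result` looks like when generation succeeds (illustration only):
result = {
    "prompt": "<prompt text that was sent>",
    "response": "<model output, surrounding ``` fences removed>",
    "success": True,
    "error": None,                      # str(exception) when success is False
}
result_content = result.get('response', '')   # as used at the end of run()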
{pilot_linkstec-0.0.106.dist-info → pilot_linkstec-0.0.108.dist-info}/RECORD

@@ -16,7 +16,7 @@ pilot/config/config_reader.py,sha256=MchG-9j-Xfy8AquL_6tlvqQR65MFJ3c3hwWBl6IF2jw
 pilot/control/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pilot/control/control_interface.py,sha256=zGv380oQgAKPAIHDHeFdPYzhj2Ngo2T66NWlNloA7vY,124
 pilot/control/impl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pilot/control/impl/base_controller.py,sha256=
+pilot/control/impl/base_controller.py,sha256=CuO6rkkc6LX0DE7-54L9Sc-0Wg8nXNletFUGmeZeksE,3885
 pilot/conver/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pilot/conver/commentRemover.py,sha256=S8uwp9Glp0bdv4EFqf62WIcOTLiJZdracG2FAMKY1EY,3777
 pilot/conver/converfileEncodding.py,sha256=YpPPTf52ujfWn8YlKuKYhSbcToQj34Gs4PmdcUbm040,1586
@@ -43,6 +43,10 @@ pilot/file_tool/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pilot/file_tool/create_prompt_file.py,sha256=pvmBm_iRWRvm5Qr8gaZwSAzJ_1vrqG9Xi_UJ-NgdwV8,1994
 pilot/file_tool/json_file_tool.py,sha256=v-qVcyKVhFfwEg4uJWDFMZU0EWvOwkYhrwhm-obMlUU,3637
 pilot/generater/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pilot/generater/ai_base.py,sha256=V-NSC--BfW8Q5Kn56B_kfyAQL3wL7n1rR6srKgnNWJA,192
+pilot/generater/ai_interface.py,sha256=NxoXWa9NWAKkN5kGOIZsK4dCCJlOLii1d-v8PcJ9BYk,193
+pilot/generater/lmstudioai.py,sha256=fxPChYuyJYjGdGHEFpfzaXigThyhInXL32RCqyuHUqU,3857
+pilot/generater/vera.py,sha256=5TTaRwKQQMZUA-4OMuvAGdPIeOAHc2eDTsh7tluFORc,3566
 pilot/generater/vertexai.py,sha256=UBVGPGjGb63wPeo8SLpj2X5g_wIHlPVgaewIOaT2ISw,3902
 pilot/job/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pilot/job/job_interface.py,sha256=EKtuj0IcdolP494aAgTtctgamyQIoFXVwRORwOQck7A,124
@@ -52,8 +56,8 @@ pilot/job/base/convert/delimiterSwitcherJob.py,sha256=m5Y92XVbCjVWGBKzYJYX8wgLMn
 pilot/job/base/convert/encodingTransformerJob.py,sha256=P6txCqAcQ4w999ttA_gmA4VKr6klvTz4SrQIYT1gv6U,279
 pilot/job/base/convert/tabReplaceJob.py,sha256=FtPLbWwDPgH1jGsK3y5_sY4lAZ4Lhsv7GQ-1A-QgAFE,743
 pilot/job/base/generate/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pilot/job/base/generate/generateJsonBaseJob.py,sha256=
-pilot/job/base/generate/generateTextBaseJob.py,sha256=
+pilot/job/base/generate/generateJsonBaseJob.py,sha256=0sWNqvfV-1DgsNjyD_GUBHogjQPNRdcqx0siNNFnSsE,1208
+pilot/job/base/generate/generateTextBaseJob.py,sha256=Pvxk2HNkQt9saBYmoRA8lhDjffMAxLMqSYvsyIlYKjg,1138
 pilot/job/base/program/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pilot/job/base/program/cobol/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pilot/job/base/program/cobol/convert/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -73,8 +77,8 @@ pilot/unit/impl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pilot/unit/impl/base_unit.py,sha256=h6PMtd4ja0wa6mttFeI7rsv92yy96UgfLIU0QSs6Dmk,2108
 pilot/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pilot/util/files.py,sha256=v9uzfzo3Aq4xgnUIASEZeBJoA2nD9Qz_EA3P-FwzGFQ,1896
-pilot_linkstec-0.0.
-pilot_linkstec-0.0.
-pilot_linkstec-0.0.
-pilot_linkstec-0.0.
-pilot_linkstec-0.0.
+pilot_linkstec-0.0.108.dist-info/licenses/LICENSE,sha256=6kbiFSfobTZ7beWiKnHpN902HgBx-Jzgcme0SvKqhKY,1091
+pilot_linkstec-0.0.108.dist-info/METADATA,sha256=bVJA6FvtR9-r1jbCvKHVe-pq3_Y6XQp-9VdqYh4mfXg,680
+pilot_linkstec-0.0.108.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+pilot_linkstec-0.0.108.dist-info/top_level.txt,sha256=BijnVJdXnIPxxx3s60M848seL4Z12gNUPod6KPJxK9c,6
+pilot_linkstec-0.0.108.dist-info/RECORD,,
WHEEL, licenses/LICENSE, top_level.txt: files without changes.