pilot.linkstec 0.0.90__tar.gz → 0.0.92__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/PKG-INFO +1 -1
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/pyproject.toml +1 -1
- pilot_linkstec-0.0.92/src/pilot/client/ai_client.py +75 -0
- pilot_linkstec-0.0.92/src/pilot/config/config_reader.py +133 -0
- pilot_linkstec-0.0.92/src/pilot/generater/vertexai.py +124 -0
- pilot_linkstec-0.0.92/src/pilot/job/base/convert/encodingTransformerJob.py +16 -0
- pilot_linkstec-0.0.92/src/pilot/job/base/convert/tabReplaceJob.py +27 -0
- pilot_linkstec-0.0.92/src/pilot/job/base/generater/generateJsonBaseJob.py +40 -0
- pilot_linkstec-0.0.92/src/pilot/job/base/generater/generateTextBaseJob.py +37 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/job/impl/base_job.py +2 -5
- pilot_linkstec-0.0.92/src/pilot/logging/__init__.py +0 -0
- pilot_linkstec-0.0.92/src/pilot/processor/__init__.py +0 -0
- pilot_linkstec-0.0.92/src/pilot/prompt/__init__.py +0 -0
- pilot_linkstec-0.0.92/src/pilot/splitters/__init__.py +0 -0
- pilot_linkstec-0.0.92/src/pilot/unit/__init__.py +0 -0
- pilot_linkstec-0.0.92/src/pilot/unit/impl/__init__.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/unit/impl/base_unit.py +1 -0
- pilot_linkstec-0.0.92/src/pilot/util/__init__.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot.linkstec.egg-info/PKG-INFO +1 -1
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot.linkstec.egg-info/SOURCES.txt +12 -0
- pilot_linkstec-0.0.90/src/pilot/config/config_reader.py +0 -95
- pilot_linkstec-0.0.90/src/pilot/generater/vertexai.py +0 -74
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/LICENSE +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/README.md +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/setup.cfg +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/__init__.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/base/__init__.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/base/ai_call.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/base/ai_info.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/base/chage_file_tag_base.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/base/db_operation_base.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/base/delete_commnents_base.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/base/file_operation.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/base/get_file_encoding.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/base/make_parsing_java_file_order_base.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/base/split_file_base.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/config → pilot_linkstec-0.0.92/src/pilot/client}/__init__.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/control → pilot_linkstec-0.0.92/src/pilot/config}/__init__.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/control/impl → pilot_linkstec-0.0.92/src/pilot/control}/__init__.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/control/control_interface.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/conver → pilot_linkstec-0.0.92/src/pilot/control/impl}/__init__.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/control/impl/base_controller.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/create_python → pilot_linkstec-0.0.92/src/pilot/conver}/__init__.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/conver/commentRemover.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/conver/converfileEncodding.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/conver/nkf_converter.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/create_python/config → pilot_linkstec-0.0.92/src/pilot/create_python}/__init__.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/create_python/sample → pilot_linkstec-0.0.92/src/pilot/create_python/config}/__init__.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/create_python/create_python.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/create_python/sample/child_sample → pilot_linkstec-0.0.92/src/pilot/create_python/sample}/__init__.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/create_python/sample/child_sample/job → pilot_linkstec-0.0.92/src/pilot/create_python/sample/child_sample}/__init__.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/create_python/sample/config → pilot_linkstec-0.0.92/src/pilot/create_python/sample/child_sample/job}/__init__.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/db → pilot_linkstec-0.0.92/src/pilot/create_python/sample/config}/__init__.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/file_tool → pilot_linkstec-0.0.92/src/pilot/db}/__init__.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/db/create_table.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/db/db_connect.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/db/db_main.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/db/db_util.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/db/ddl/__init__.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/db/dml/__init__.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/db/sql_executor.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/db/sql_loader.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/db/sql_service.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/generater → pilot_linkstec-0.0.92/src/pilot/file_tool}/__init__.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/file_tool/create_prompt_file.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/file_tool/json_file_tool.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/job → pilot_linkstec-0.0.92/src/pilot/generater}/__init__.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/job/impl → pilot_linkstec-0.0.92/src/pilot/job}/__init__.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/logging → pilot_linkstec-0.0.92/src/pilot/job/base}/__init__.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/processor → pilot_linkstec-0.0.92/src/pilot/job/base/convert}/__init__.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/prompt → pilot_linkstec-0.0.92/src/pilot/job/base/generater}/__init__.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/splitters → pilot_linkstec-0.0.92/src/pilot/job/base/program}/__init__.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/unit → pilot_linkstec-0.0.92/src/pilot/job/base/program/cobol}/__init__.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/unit/impl → pilot_linkstec-0.0.92/src/pilot/job/base/program/cobol/convert}/__init__.py +0 -0
- {pilot_linkstec-0.0.90/src/pilot/util → pilot_linkstec-0.0.92/src/pilot/job/impl}/__init__.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/job/job_interface.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/logging/logger.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/processor/code_processor.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/processor/code_processor_pipeline.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/splitters/cobolsplitter.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/unit/unit_interface.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/util/files.py +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot.linkstec.egg-info/dependency_links.txt +0 -0
- {pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot.linkstec.egg-info/top_level.txt +0 -0
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
import requests
|
|
2
|
+
|
|
3
|
+
from pilot.config.config_reader import get_config
|
|
4
|
+
from pilot.logging.logger import get_logger
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class AIClient:
    """Base HTTP client for chat-style AI backends.

    Subclasses must implement ``_build_request_payload()`` and
    ``_extract_response_content()``; they are also expected to set
    ``self.api_url`` in their own ``__init__``.
    """

    def __init__(self):
        self.logger = get_logger(__name__)
        self.config_dto = get_config()
        self.headers = {"Content-Type": "application/json;charset=utf-8"}
        # NOTE(review): self.api_url is referenced in call() but is only set
        # by subclasses — instantiating AIClient directly and calling call()
        # would raise AttributeError. Confirm this class is abstract-only.

    def call(self, user_prompt: str, system_prompt: str = "") -> str:
        """POST the prompt(s) to the backend and return the reply text.

        Returns "" when the request fails or the response is unusable.
        """
        messages = []
        if system_prompt:
            messages.append({"role": "system", "content": system_prompt})
        messages.append({"role": "user", "content": user_prompt})

        request_data = self._build_request_payload(messages)

        response_data = self._send_post_request(self.api_url, self.headers, request_data)
        if not isinstance(response_data, dict):
            # _send_post_request returns "" on any transport/parse failure.
            self.logger.error("無効なAPI応答またはリクエスト失敗")
            return ""

        return self._extract_response_content(response_data)

    def _build_request_payload(self, messages: list[dict]) -> dict:
        """Build the backend-specific JSON request body for *messages*."""
        raise NotImplementedError("サブクラスでリクエストペイロードの構築を実装してください")

    def _send_post_request(self, url: str, headers: dict, data: dict) -> "dict | str":
        """POST *data* as JSON and return the parsed JSON dict, or "" on failure.

        Bug fix: the annotation was previously ``dict or str``, which Python
        evaluates to just ``dict``; ``"dict | str"`` states the real contract.
        """
        try:
            response = requests.post(url, headers=headers, json=data)
        except Exception as e:
            self.logger.error(f"リクエスト失敗: {e}")
            return ""
        if response.status_code != 200:
            self.logger.error(f"ステータスコード {response.status_code}: {response.text}")
            return ""
        try:
            return response.json()
        except Exception as e:
            self.logger.error(f"JSON解析失敗: {e}")
            return ""

    def _extract_response_content(self, response: dict) -> str:
        """Pull the reply text out of the backend-specific response dict."""
        raise NotImplementedError("サブクラスでレスポンスの解析を実装してください")
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
class LMStudioClient(AIClient):
    """AIClient implementation for a local LM Studio (OpenAI-compatible) server."""

    def __init__(self):
        super().__init__()
        self.api_url = self.config_dto.lm_studio_api_url
        self.model_name = self.config_dto.lm_studio_model_name

    def _build_request_payload(self, messages: list[dict]) -> dict:
        """Assemble an OpenAI-style chat-completions request body."""
        return {
            "model": self.model_name,
            "stream": False,
            "temperature": 0.8,
            "max_tokens": 15000,
            "messages": messages,
        }

    def _extract_response_content(self, response: dict) -> str:
        """Return ``choices[0].message.content``, falling back to ``str(response)``."""
        if not isinstance(response, dict):
            return str(response)
        if "usage" in response:
            self.logger.debug(f"使用状況: {response['usage']}")
        choices = response.get("choices", [])
        if not choices:
            return str(response)
        return choices[0].get("message", {}).get("content") or str(response)
|
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
import configparser
|
|
2
|
+
import inspect
|
|
3
|
+
import os
|
|
4
|
+
from dataclasses import dataclass
|
|
5
|
+
from typing import Optional
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
@dataclass
class ConfigDTO:
    """Immutable snapshot of the values parsed from control.properties."""

    project: str               # project identifier
    log_level: str             # logging level name, e.g. "INFO"
    threads: int               # worker-thread count
    lm_studio_api_url: str     # chat-completions endpoint URL
    lm_studio_model_name: str  # model identifier sent in each request
    work_space: str            # working directory for the pipeline
    copy_path: str             # read from the 'copy_file_path' key
    json_file_path: str        # destination for generated JSON artifacts
    steps: list[str]           # ordered pipeline step names
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class ConfigReader:
    """Singleton that loads and exposes config/control.properties.

    The .properties file is parsed with configparser after prepending a
    [DEFAULT] section header when none is present. The first successful
    load wins; later ConfigReader(...) calls reuse the parsed data and
    silently ignore their filepath argument.
    """

    # Singleton instance and one-shot "already parsed" flag (class-level state).
    _instance = None
    _loaded = False

    def __new__(cls, filepath=None):
        # Classic singleton: every construction returns the same object.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self, filepath=None):
        # __init__ runs on every ConfigReader() call; _loaded ensures the
        # file is parsed only once per process.
        if self._loaded:
            return

        try:
            if filepath is None:
                filepath = self.find_config_path()
            if not os.path.exists(filepath):
                raise FileNotFoundError(f"設定ファイルが見つかりません: {filepath}")

            self.config = configparser.ConfigParser()
            # Keep option names case-sensitive (configparser lower-cases them
            # by default).
            self.config.optionxform = str
            with open(filepath, 'r', encoding='utf-8') as f:
                content = f.read()
            # Java-style .properties files have no [section] header; add one
            # so configparser accepts the content.
            if not content.lstrip().startswith('['):
                content = '[DEFAULT]\n' + content
            self.config.read_string(content)

            self._loaded = True
        except Exception as e:
            # A load failure is fatal for the application: report and re-raise.
            print(f"設定ファイル読み込みエラー: {e}")
            raise

    @classmethod
    def find_config_path(cls):
        """Locate control.properties.

        Search order: <cwd>/config, then each call-stack frame's
        ../../config, then <this package>/../../config. Raises
        FileNotFoundError when nothing matches.
        """
        cwd = os.getcwd()
        candidate_path = os.path.join(cwd, 'config', 'control.properties')
        if os.path.exists(candidate_path):
            return candidate_path

        # Walk the call stack so scripts located elsewhere can still resolve
        # a config directory two levels above their own file.
        stack = inspect.stack()
        for frame in stack:
            caller_file = frame.filename
            caller_dir = os.path.dirname(os.path.abspath(caller_file))
            possible_path = os.path.abspath(os.path.join(caller_dir, '..', '..', 'config', 'control.properties'))
            if os.path.exists(possible_path):
                return possible_path

        # Last resort: relative to this module's own location.
        base_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
        fallback_path = os.path.join(base_dir, 'config', 'control.properties')

        if os.path.exists(fallback_path):
            return fallback_path

        raise FileNotFoundError("control.properties が期待される場所に見つかりません")

    def get(self, section, option, fallback=None, cast_type=str):
        """Read an option coerced to *cast_type*.

        Returns *fallback* when the section/option is missing; note that
        coercion errors (e.g. non-numeric value for int) still propagate.
        """
        try:
            if cast_type == bool:
                return self.config.getboolean(section, option)
            elif cast_type == int:
                return self.config.getint(section, option)
            elif cast_type == float:
                return self.config.getfloat(section, option)
            else:
                return self.config.get(section, option)
        except (configparser.NoSectionError, configparser.NoOptionError):
            return fallback

    def get_dto(self) -> ConfigDTO:
        """Materialize the parsed options into a ConfigDTO."""
        project = self.get('DEFAULT', 'project', fallback='')
        log_level = self.get('DEFAULT', 'log_level', fallback='INFO')
        threads = self.get('DEFAULT', 'threads', fallback=1, cast_type=int)
        lm_studio_api_url = self.get('DEFAULT', 'lm_studio_api_url', fallback='.')
        lm_studio_model_name = self.get('DEFAULT', 'lm_studio_model_name', fallback='.')
        work_space = self.get('DEFAULT', 'work_space', fallback='.')
        # Note the key mismatch: DTO field is copy_path, file key is copy_file_path.
        copy_path = self.get('DEFAULT', 'copy_file_path', fallback='.')
        json_file_path = self.get('DEFAULT', 'json_file_path', fallback='.')
        steps_str = self.get('DEFAULT', 'steps', fallback='')

        # steps is stored as a comma-separated list in the properties file.
        steps = [s.strip() for s in steps_str.split(',')] if steps_str else []

        return ConfigDTO(
            project=project,
            log_level=log_level,
            threads=threads,
            lm_studio_api_url=lm_studio_api_url,
            lm_studio_model_name=lm_studio_model_name,
            work_space=work_space,
            copy_path=copy_path,
            json_file_path=json_file_path,
            steps=steps
        )
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
# Holds the process-wide configuration once init_config() has run.
_global_config: Optional[ConfigDTO] = None


def init_config(filepath=None):
    """Parse the properties file and cache the resulting ConfigDTO globally."""
    global _global_config
    try:
        _global_config = ConfigReader(filepath).get_dto()
    except Exception as e:
        print(f"設定初期化エラー: {e}")
        raise


def get_config() -> ConfigDTO:
    """Return the cached ConfigDTO; init_config() must have been called first."""
    if _global_config is None:
        raise RuntimeError("設定が初期化されていません。init_config() を最初に呼び出してください。")
    return _global_config
|
|
@@ -0,0 +1,124 @@
|
|
|
1
|
+
import threading
|
|
2
|
+
from typing import Dict, Any, Optional
|
|
3
|
+
import requests
|
|
4
|
+
|
|
5
|
+
class VertexAISingleton:
    """Thread-safe singleton client for an OpenAI-compatible chat endpoint.

    Despite the name it now talks to a local LM Studio-style server; the
    VertexAI-era interface is kept so existing callers continue to work.
    """

    _instance: Optional['VertexAISingleton'] = None
    _lock = threading.Lock()

    def __new__(cls, model_name: str = "openai/gpt-oss-20b"):
        # Double-checked locking: concurrent first calls build one instance.
        if cls._instance is None:
            with cls._lock:
                if cls._instance is None:
                    cls._instance = super(VertexAISingleton, cls).__new__(cls)
                    cls._instance._initialized = False
        return cls._instance

    def __init__(self, model_name: str = "openai/gpt-oss-20b"):
        # __init__ runs on every construction; the guarded body runs once.
        if self._initialized:
            return
        with self._lock:
            if self._initialized:
                return
            self.model_name = model_name
            self.base_url = "http://127.0.0.1:3000/v1"
            # One shared HTTP session (connection pooling) for all calls.
            self._session = requests.Session()
            self._initialized = True

    def generate_content(self, prompt: str) -> Dict[str, Any]:
        """Send *prompt* as a single user message; safe to call from many threads.

        Returns a dict with keys prompt/response/success/error; on failure
        response is None and error carries the exception text.
        """
        try:
            body = {
                "model": self.model_name,
                "messages": [{"role": "user", "content": prompt}],
                "temperature": 0.7
            }
            reply = self._session.post(
                f"{self.base_url}/chat/completions",
                json=body,
                timeout=600
            )
            reply.raise_for_status()
            text = reply.json()["choices"][0]["message"]["content"]
            return {
                "prompt": prompt,
                "response": self._remove_code_fence(text),
                "success": True,
                "error": None
            }
        except Exception as exc:
            return {
                "prompt": prompt,
                "response": None,
                "success": False,
                "error": str(exc)
            }

    def start_chat(self):
        """Return a minimal ChatSession-compatible wrapper.

        Full VertexAI ChatSession compatibility is not possible; this
        degraded implementation keeps existing callers working.
        """
        return _LMStudioChatSession(self)

    def count_tokens(self, text: str) -> int:
        # Token counting is currently stubbed out (the tiktoken-based
        # implementation was removed); the constant 1 keeps callers'
        # "token_count == 0" skip branch inactive.
        return 1

    def _remove_code_fence(self, text: str) -> str:
        """Strip a leading and/or trailing ``` fence line if present."""
        body = text.splitlines()
        if body and body[0].startswith("```"):
            body = body[1:]
        if body and body[-1].startswith("```"):
            body = body[:-1]
        return "\n".join(body)

    @classmethod
    def get_instance(cls, model_name: str = "openai/gpt-oss-20b") -> 'VertexAISingleton':
        """Alias for the constructor; returns the shared instance."""
        return cls(model_name)
|
|
92
|
+
|
|
93
|
+
class _LMStudioChatSession:
    """Minimal stand-in for VertexAI's ChatSession, backed by the HTTP API."""

    def __init__(self, client: VertexAISingleton):
        self._client = client
        self._messages = []

    def send_message(self, message: str):
        """Append *message* to the history, call the chat endpoint with the
        full conversation, and return an object exposing ``.text``."""
        history = self._messages
        history.append({"role": "user", "content": message})

        http_response = self._client._session.post(
            f"{self._client.base_url}/chat/completions",
            json={
                "model": self._client.model_name,
                "messages": history
            },
            timeout=60
        )
        http_response.raise_for_status()
        answer = http_response.json()["choices"][0]["message"]["content"]
        history.append({"role": "assistant", "content": answer})

        class _Resp:
            # Mimics the VertexAI response object: only .text is consumed.
            def __init__(self, text):
                self.text = text

        return _Resp(answer)
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import threading
|
|
2
|
+
|
|
3
|
+
from pilot.job.impl.base_job import BaseJob
|
|
4
|
+
|
|
5
|
+
from pilot.conver.converfileEncodding import nkf_convert
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class EncodingTransformerJob(BaseJob):
    """Pipeline job that normalizes the target file's encoding to UTF-8 via nkf."""

    # Class-wide lock: the begin-state transition must not race across threads.
    _begin_file_lock = threading.Lock()

    def run(self):
        with self._begin_file_lock:
            # Claim the file by flipping its trigger to "begin"; if another
            # worker already claimed it, do nothing.
            if not self.change_current_trg_to_begin():
                return
            # NOTE(review): indentation reconstructed from a mangled source —
            # assuming the conversion runs inside the lock; confirm against
            # the original layout.
            # '-w' = output UTF-8, '--overwrite' = convert the file in place.
            nkf_args = ['-w', '--overwrite']
            nkf_convert(self.file_path, nkf_args)
            super().run()
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import threading
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
|
|
4
|
+
from pilot.job.impl.base_job import BaseJob
|
|
5
|
+
|
|
6
|
+
class TabReplaceJob(BaseJob):
    """Pipeline job that expands hard tabs in the target file into spaces."""

    # Class-wide lock: the begin-state transition must not race across threads.
    _begin_file_lock = threading.Lock()

    def run(self):
        with self._begin_file_lock:
            # Claim the file; skip if another worker already processed it.
            if not self.change_current_trg_to_begin():
                return
            # NOTE(review): indentation reconstructed from a mangled source —
            # assuming the replacement runs inside the lock; confirm against
            # the original layout.
            self.replace_tabs_with_spaces()
            super().run()

    def replace_tabs_with_spaces(self, tab_width: int = 4):
        """Replace every TAB with *tab_width* spaces, rewriting the file in place.

        The result is written to a '.tmp' sibling first and then swapped in
        with Path.replace(), so a crash mid-write cannot truncate the source.
        Note: each TAB becomes a fixed run of spaces; tab-stop alignment is
        not preserved.
        """
        replaced_text = []
        src_path = Path(self.file_path)
        spaces = ' ' * tab_width
        # newline='' preserves the file's original line endings verbatim.
        with open(self.file_path, 'r', encoding='utf-8', newline='') as rf:
            for line in rf:
                replaced_text.append(line.replace('\t', spaces))

        tmp_path = src_path.parent / (src_path.name + '.tmp')
        with open(tmp_path, 'w', encoding='utf-8', newline='') as wf:
            wf.writelines(replaced_text)

        tmp_path.replace(src_path)
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import time
|
|
3
|
+
|
|
4
|
+
from pilot.job.impl.base_job import BaseJob
|
|
5
|
+
|
|
6
|
+
from pilot.generater.vertexai import VertexAISingleton
|
|
7
|
+
|
|
8
|
+
class generateJsonBaseJob(BaseJob):
    """Job that sends ``self.prompt_content`` to the AI backend and writes the
    response to ``self.result_file_path`` as pretty-printed JSON.

    The attributes below are expected to be set by the subclass / pipeline
    before ``run()`` is invoked.
    """

    prompt_content: str     # prompt text to send to the model
    result_content: str     # (unused here; reserved for subclasses)
    result_file_path: str   # where the parsed JSON response is written

    def run(self):
        prompt = self.prompt_content

        vertexai = VertexAISingleton.get_instance()
        token_count = vertexai.count_tokens(prompt)
        if token_count == 0:
            # Token counting failed; skip generation but keep the pipeline moving.
            super().run()
            return
        if token_count > 900000:
            print(f"警告: promptのトークン数が900000を超えています ({token_count} tokens)")
            super().run()
            return

        # Generate via the AI backend and time the call.
        start = time.time()
        result = vertexai.generate_content(prompt)
        end = time.time()
        print(f"Ai 処理時間 {self.file_path}: {end - start:.2f}秒")

        result_content = result.get('response')
        if not result.get('success') or result_content is None:
            # Bug fix: on failure generate_content() returns response=None,
            # and json.loads(None) previously raised an opaque TypeError.
            # Surface the backend's error message instead.
            raise RuntimeError(f"AI生成に失敗しました: {result.get('error')}")

        # Round-trip through json to validate and pretty-print the payload.
        data = json.loads(result_content)
        with open(self.result_file_path, 'w', encoding='utf-8') as f:
            json.dump(data, f, ensure_ascii=False, indent=2)
        super().run()
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
import time
|
|
2
|
+
|
|
3
|
+
from pilot.job.impl.base_job import BaseJob
|
|
4
|
+
|
|
5
|
+
from pilot.generater.vertexai import VertexAISingleton
|
|
6
|
+
|
|
7
|
+
class generateTextBaseJob(BaseJob):
    """Job that sends ``self.prompt_content`` to the AI backend and writes the
    raw text response to ``self.result_file_path``.

    The attributes below are expected to be set by the subclass / pipeline
    before ``run()`` is invoked.
    """

    prompt_content: str     # prompt text to send to the model
    result_content: str     # (unused here; reserved for subclasses)
    result_file_path: str   # where the text response is written

    def run(self):
        prompt = self.prompt_content

        vertexai = VertexAISingleton.get_instance()
        token_count = vertexai.count_tokens(prompt)
        if token_count == 0:
            # Token counting failed; skip generation but keep the pipeline moving.
            super().run()
            return
        if token_count > 900000:
            print(f"警告: promptのトークン数が900000を超えています ({token_count} tokens)")
            super().run()
            return

        # Generate via the AI backend and time the call.
        start = time.time()
        result = vertexai.generate_content(prompt)
        end = time.time()
        print(f"AI 処理時間 {self.file_path}: {end - start:.2f}秒")

        result_content = result.get('response')
        if not result.get('success') or result_content is None:
            # Bug fix: on failure generate_content() returns response=None,
            # and f.write(None) previously raised an opaque TypeError.
            # Surface the backend's error message instead.
            raise RuntimeError(f"AI生成に失敗しました: {result.get('error')}")

        with open(self.result_file_path, 'w', encoding='utf-8') as f:
            f.write(result_content)
        super().run()
|
|
@@ -66,9 +66,6 @@ class BaseJob(JobInterface):
|
|
|
66
66
|
def current_trg_file_path(self, value):
|
|
67
67
|
self._trg_file_path = value
|
|
68
68
|
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
69
|
def run(self):
|
|
73
70
|
pass
|
|
74
71
|
|
|
@@ -158,7 +155,7 @@ class BaseJob(JobInterface):
|
|
|
158
155
|
return True
|
|
159
156
|
except Exception:
|
|
160
157
|
# 例外が発生した場合は False を返す
|
|
161
|
-
print("!!!!!!!!!!!!!!!!!!!!change_current_trg_to_end erro")
|
|
158
|
+
#print("!!!!!!!!!!!!!!!!!!!!change_current_trg_to_end erro")
|
|
162
159
|
return False
|
|
163
160
|
# trgファイルが存在しなければ何もしないので、そのままreturn
|
|
164
161
|
return False
|
|
@@ -341,7 +338,7 @@ class BaseJob(JobInterface):
|
|
|
341
338
|
ジョブ実行後の後処理を行うメソッド。
|
|
342
339
|
必要に応じてサブクラスでオーバーライドして使用する。
|
|
343
340
|
"""
|
|
344
|
-
|
|
341
|
+
self.change_current_trg_to_end()
|
|
345
342
|
|
|
346
343
|
def generate_basedir_file(self, ext):
|
|
347
344
|
dir_path = os.path.dirname(self.file_path)
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
@@ -16,6 +16,8 @@ src/pilot/base/file_operation.py
|
|
|
16
16
|
src/pilot/base/get_file_encoding.py
|
|
17
17
|
src/pilot/base/make_parsing_java_file_order_base.py
|
|
18
18
|
src/pilot/base/split_file_base.py
|
|
19
|
+
src/pilot/client/__init__.py
|
|
20
|
+
src/pilot/client/ai_client.py
|
|
19
21
|
src/pilot/config/__init__.py
|
|
20
22
|
src/pilot/config/config_reader.py
|
|
21
23
|
src/pilot/control/__init__.py
|
|
@@ -50,6 +52,16 @@ src/pilot/generater/__init__.py
|
|
|
50
52
|
src/pilot/generater/vertexai.py
|
|
51
53
|
src/pilot/job/__init__.py
|
|
52
54
|
src/pilot/job/job_interface.py
|
|
55
|
+
src/pilot/job/base/__init__.py
|
|
56
|
+
src/pilot/job/base/convert/__init__.py
|
|
57
|
+
src/pilot/job/base/convert/encodingTransformerJob.py
|
|
58
|
+
src/pilot/job/base/convert/tabReplaceJob.py
|
|
59
|
+
src/pilot/job/base/generater/__init__.py
|
|
60
|
+
src/pilot/job/base/generater/generateJsonBaseJob.py
|
|
61
|
+
src/pilot/job/base/generater/generateTextBaseJob.py
|
|
62
|
+
src/pilot/job/base/program/__init__.py
|
|
63
|
+
src/pilot/job/base/program/cobol/__init__.py
|
|
64
|
+
src/pilot/job/base/program/cobol/convert/__init__.py
|
|
53
65
|
src/pilot/job/impl/__init__.py
|
|
54
66
|
src/pilot/job/impl/base_job.py
|
|
55
67
|
src/pilot/logging/__init__.py
|
|
@@ -1,95 +0,0 @@
|
|
|
1
|
-
import configparser
|
|
2
|
-
import os
|
|
3
|
-
import inspect
|
|
4
|
-
from dataclasses import dataclass
|
|
5
|
-
from typing import List
|
|
6
|
-
|
|
7
|
-
@dataclass
|
|
8
|
-
class ConfigDTO:
|
|
9
|
-
work_space: str
|
|
10
|
-
threads: int
|
|
11
|
-
project: str
|
|
12
|
-
steps: list[str]
|
|
13
|
-
skipsteps: list[str]
|
|
14
|
-
runsteps: list[str]
|
|
15
|
-
multisteps: list[str]
|
|
16
|
-
|
|
17
|
-
class ConfigReader:
|
|
18
|
-
def __init__(self, filename = None):
|
|
19
|
-
filepath = None
|
|
20
|
-
if filename is None:
|
|
21
|
-
filepath = self.find_config_path()
|
|
22
|
-
|
|
23
|
-
if filename is not None:
|
|
24
|
-
cwd = os.getcwd()
|
|
25
|
-
filepath = os.path.join(cwd, 'config', filename)
|
|
26
|
-
if not os.path.exists(filepath):
|
|
27
|
-
raise FileNotFoundError(f"Configuration file not found: {filepath}")
|
|
28
|
-
|
|
29
|
-
self.config = configparser.ConfigParser()
|
|
30
|
-
self.config.optionxform = str
|
|
31
|
-
|
|
32
|
-
with open(filepath, 'r', encoding='utf-8') as f:
|
|
33
|
-
content = f.read()
|
|
34
|
-
if not content.lstrip().startswith('['):
|
|
35
|
-
content = '[DEFAULT]\n' + content
|
|
36
|
-
self.config.read_string(content)
|
|
37
|
-
|
|
38
|
-
@classmethod
|
|
39
|
-
def find_config_path(cls):
|
|
40
|
-
cwd = os.getcwd()
|
|
41
|
-
candidate_path = os.path.join(cwd, 'config', 'control.properties')
|
|
42
|
-
if os.path.exists(candidate_path):
|
|
43
|
-
return candidate_path
|
|
44
|
-
|
|
45
|
-
stack = inspect.stack()
|
|
46
|
-
for frame in stack:
|
|
47
|
-
caller_file = frame.filename
|
|
48
|
-
caller_dir = os.path.dirname(os.path.abspath(caller_file))
|
|
49
|
-
possible_path = os.path.abspath(os.path.join(caller_dir, '..', '..', 'config', 'control.properties'))
|
|
50
|
-
if os.path.exists(possible_path):
|
|
51
|
-
return possible_path
|
|
52
|
-
|
|
53
|
-
base_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
|
|
54
|
-
fallback_path = os.path.join(base_dir, 'config', 'control.properties')
|
|
55
|
-
if os.path.exists(fallback_path):
|
|
56
|
-
return fallback_path
|
|
57
|
-
|
|
58
|
-
raise FileNotFoundError("control.properties not found in expected locations")
|
|
59
|
-
|
|
60
|
-
def get(self, section, option, fallback=None, cast_type=str):
|
|
61
|
-
try:
|
|
62
|
-
if cast_type == bool:
|
|
63
|
-
return self.config.getboolean(section, option)
|
|
64
|
-
elif cast_type == int:
|
|
65
|
-
return self.config.getint(section, option)
|
|
66
|
-
elif cast_type == float:
|
|
67
|
-
return self.config.getfloat(section, option)
|
|
68
|
-
else:
|
|
69
|
-
return self.config.get(section, option)
|
|
70
|
-
except (configparser.NoSectionError, configparser.NoOptionError):
|
|
71
|
-
return fallback
|
|
72
|
-
|
|
73
|
-
def get_dto(self) -> ConfigDTO:
|
|
74
|
-
input_path = self.get('DEFAULT', 'input_path', fallback='.')
|
|
75
|
-
work_space = self.get('DEFAULT', 'work_space', fallback='.')
|
|
76
|
-
threads = int(self.get('DEFAULT', 'threads', fallback=1))
|
|
77
|
-
project = self.get('DEFAULT', 'project', fallback='')
|
|
78
|
-
steps_str = self.get('DEFAULT', 'steps', fallback='')
|
|
79
|
-
steps = [s.strip() for s in steps_str.split(',')] if steps_str else []
|
|
80
|
-
skipsteps_str = self.get('DEFAULT', 'skipsteps', fallback='')
|
|
81
|
-
skipsteps = [s.strip() for s in skipsteps_str.split(',')] if skipsteps_str else []
|
|
82
|
-
runsteps_str = self.get('DEFAULT', 'runsteps', fallback='')
|
|
83
|
-
runsteps = [s.strip() for s in runsteps_str.split(',')] if runsteps_str else []
|
|
84
|
-
multisteps_str = self.get('DEFAULT', 'multisteps', fallback='')
|
|
85
|
-
multisteps = [s.strip() for s in multisteps_str.split(',')] if multisteps_str else []
|
|
86
|
-
|
|
87
|
-
return ConfigDTO(
|
|
88
|
-
work_space=work_space,
|
|
89
|
-
threads=threads,
|
|
90
|
-
project=project,
|
|
91
|
-
steps=steps,
|
|
92
|
-
skipsteps=skipsteps,
|
|
93
|
-
runsteps=runsteps,
|
|
94
|
-
multisteps=multisteps
|
|
95
|
-
)
|
|
@@ -1,74 +0,0 @@
|
|
|
1
|
-
import threading
|
|
2
|
-
from typing import Dict, Any, Optional
|
|
3
|
-
|
|
4
|
-
import tiktoken
|
|
5
|
-
from vertexai.generative_models import GenerativeModel, ChatSession
|
|
6
|
-
import os
|
|
7
|
-
|
|
8
|
-
class VertexAISingleton:
    """Thread-safe singleton wrapper around a Vertex AI ``GenerativeModel``.

    The first construction fixes the model; every later call — with any
    ``model_name`` — returns the same, already-initialized instance.
    """

    _instance: Optional['VertexAISingleton'] = None

    # Guards both singleton creation (__new__) and one-time init (__init__).
    _lock = threading.Lock()
    # NOTE(review): never read or written in this class — kept only so the
    # class attribute surface stays backward-compatible.
    _tokenizer_cache = {}
    # Populated in __init__; stays None until the singleton is initialized.
    encoding = None

    def __new__(cls, model_name: str = "gemini-2.5-pro"):
        # Double-checked locking: the unlocked fast path avoids contention
        # once the instance exists.
        if cls._instance is None:
            with cls._lock:
                if cls._instance is None:
                    cls._instance = super().__new__(cls)
                    cls._instance._initialized = False
        return cls._instance

    def __init__(self, model_name: str = "gemini-2.5-pro"):
        # One-time initialization. Subsequent constructions are no-ops, so a
        # different model_name passed later is silently ignored (see class doc).
        if not self._initialized:
            with self._lock:
                if not self._initialized:
                    self.model = GenerativeModel(model_name)
                    self.encoding = tiktoken.get_encoding("cl100k_base")
                    self._initialized = True

    def generate_content(self, prompt: str) -> Dict[str, Any]:
        """Generate a response for *prompt*; safe to call from multiple threads.

        Returns a dict with keys ``prompt``, ``response`` (fence-stripped
        model text, or ``None`` on failure), ``success`` and ``error``.
        """
        try:
            response = self.model.generate_content(prompt)
            return {
                "prompt": prompt,
                "response": self._remove_code_fence(response.text),
                "success": True,
                "error": None
            }
        except Exception as e:
            # Best-effort contract: failures are reported inside the result
            # dict instead of propagating to the caller.
            return {
                "prompt": prompt,
                "response": None,
                "success": False,
                "error": str(e)
            }

    def start_chat(self) -> ChatSession:
        """Start and return a new chat session on the underlying model."""
        return self.model.start_chat()

    def count_tokens(self, text: str) -> int:
        """Return the token count of *text* using tiktoken's ``cl100k_base``.

        (The previous docstring claimed bert-base-uncased; the implementation
        has always used ``cl100k_base`` — see __init__.)  Returns 0 on any
        failure, e.g. when called before the singleton is initialized.
        """
        try:
            tokens = self.encoding.encode(text)
            return len(tokens)
        except Exception as e:
            # Best-effort: log and fall back to 0 rather than raise.
            print(f"トークン計算失敗: {e}")
            return 0

    def _remove_code_fence(self, text: str) -> str:
        """Strip one leading and one trailing Markdown ``` fence line, if present."""
        lines = text.splitlines()
        if lines and lines[0].startswith("```"):
            lines = lines[1:]
        if lines and lines[-1].startswith("```"):
            lines = lines[:-1]
        return "\n".join(lines)

    @classmethod
    def get_instance(cls, model_name: str = "gemini-2.5-pro") -> 'VertexAISingleton':
        """Return the singleton instance.

        ``model_name`` only takes effect on the very first call; afterwards
        the already-created instance is returned unchanged.
        """
        return cls(model_name)
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/base/make_parsing_java_file_order_base.py
RENAMED
|
File without changes
|
|
File without changes
|
{pilot_linkstec-0.0.90/src/pilot/config → pilot_linkstec-0.0.92/src/pilot/client}/__init__.py
RENAMED
|
File without changes
|
{pilot_linkstec-0.0.90/src/pilot/control → pilot_linkstec-0.0.92/src/pilot/config}/__init__.py
RENAMED
|
File without changes
|
{pilot_linkstec-0.0.90/src/pilot/control/impl → pilot_linkstec-0.0.92/src/pilot/control}/__init__.py
RENAMED
|
File without changes
|
|
File without changes
|
{pilot_linkstec-0.0.90/src/pilot/conver → pilot_linkstec-0.0.92/src/pilot/control/impl}/__init__.py
RENAMED
|
File without changes
|
|
File without changes
|
{pilot_linkstec-0.0.90/src/pilot/create_python → pilot_linkstec-0.0.92/src/pilot/conver}/__init__.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{pilot_linkstec-0.0.90/src/pilot/file_tool → pilot_linkstec-0.0.92/src/pilot/db}/__init__.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{pilot_linkstec-0.0.90/src/pilot/generater → pilot_linkstec-0.0.92/src/pilot/file_tool}/__init__.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
{pilot_linkstec-0.0.90/src/pilot/job → pilot_linkstec-0.0.92/src/pilot/generater}/__init__.py
RENAMED
|
File without changes
|
{pilot_linkstec-0.0.90/src/pilot/job/impl → pilot_linkstec-0.0.92/src/pilot/job}/__init__.py
RENAMED
|
File without changes
|
{pilot_linkstec-0.0.90/src/pilot/logging → pilot_linkstec-0.0.92/src/pilot/job/base}/__init__.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{pilot_linkstec-0.0.90/src/pilot/util → pilot_linkstec-0.0.92/src/pilot/job/impl}/__init__.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot/processor/code_processor_pipeline.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{pilot_linkstec-0.0.90 → pilot_linkstec-0.0.92}/src/pilot.linkstec.egg-info/dependency_links.txt
RENAMED
|
File without changes
|
|
File without changes
|