BackcastPro 0.3.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- BackcastPro/__init__.py +28 -0
- BackcastPro/_broker.py +430 -0
- BackcastPro/_stats.py +177 -0
- BackcastPro/api/__init__.py +4 -0
- BackcastPro/api/board.py +130 -0
- BackcastPro/api/chart.py +527 -0
- BackcastPro/api/db_manager.py +283 -0
- BackcastPro/api/db_stocks_board.py +428 -0
- BackcastPro/api/db_stocks_daily.py +507 -0
- BackcastPro/api/db_stocks_info.py +260 -0
- BackcastPro/api/lib/__init__.py +4 -0
- BackcastPro/api/lib/e_api.py +588 -0
- BackcastPro/api/lib/jquants.py +384 -0
- BackcastPro/api/lib/kabusap.py +222 -0
- BackcastPro/api/lib/stooq.py +409 -0
- BackcastPro/api/lib/util.py +38 -0
- BackcastPro/api/stocks_board.py +77 -0
- BackcastPro/api/stocks_info.py +88 -0
- BackcastPro/api/stocks_price.py +131 -0
- BackcastPro/backtest.py +594 -0
- BackcastPro/order.py +161 -0
- BackcastPro/position.py +60 -0
- BackcastPro/trade.py +227 -0
- backcastpro-0.3.4.dist-info/METADATA +112 -0
- backcastpro-0.3.4.dist-info/RECORD +26 -0
- backcastpro-0.3.4.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,260 @@
|
|
|
1
|
+
from .db_manager import db_manager
|
|
2
|
+
import pandas as pd
|
|
3
|
+
import duckdb
|
|
4
|
+
import os
|
|
5
|
+
from typing import List, Tuple, Optional, Dict
|
|
6
|
+
from datetime import datetime
|
|
7
|
+
import logging
|
|
8
|
+
from contextlib import contextmanager
|
|
9
|
+
|
|
10
|
+
# Module-level logger for this DuckDB cache layer.
logger = logging.getLogger(__name__)
# NOTE(review): setting a level on a library logger overrides the host
# application's logging configuration — confirm this is intentional.
logger.setLevel(logging.INFO)
|
|
12
|
+
|
|
13
|
+
class db_stocks_info(db_manager):
    """DuckDB-backed cache of the listed-stock master table ("listed_info").

    Persists and reads the J-Quants listed-company information, optionally
    bootstrapping the local DuckDB file from an FTP mirror.
    """

    def __init__(self):
        # All cache/path initialisation is inherited from db_manager.
        super().__init__()
|
|
17
|
+
|
|
18
|
+
def save_listed_info(self, df: pd.DataFrame) -> None:
|
|
19
|
+
"""
|
|
20
|
+
各上場銘柄の基本情報一覧をDuckDBに保存(アップサート、動的テーブル作成対応)
|
|
21
|
+
|
|
22
|
+
Args:
|
|
23
|
+
df (pd.DataFrame): J-Quantsのカラムを想定(Date, Code, CompanyName, CompanyNameEnglish, ...)
|
|
24
|
+
"""
|
|
25
|
+
try:
|
|
26
|
+
if not self.isEnable:
|
|
27
|
+
return
|
|
28
|
+
|
|
29
|
+
if df is None or df.empty:
|
|
30
|
+
logger.info("上場銘柄情報が空のため保存をスキップしました")
|
|
31
|
+
return
|
|
32
|
+
|
|
33
|
+
# 必須カラムの定義
|
|
34
|
+
required_columns = [
|
|
35
|
+
'Date', 'Code', 'CompanyName', 'CompanyNameEnglish',
|
|
36
|
+
'Sector17Code', 'Sector17CodeName',
|
|
37
|
+
'Sector33Code', 'Sector33CodeName',
|
|
38
|
+
'ScaleCategory', 'MarketCode', 'MarketCodeName'
|
|
39
|
+
]
|
|
40
|
+
|
|
41
|
+
# 必須カラムが存在するかチェック
|
|
42
|
+
missing_columns = [col for col in required_columns if col not in df.columns]
|
|
43
|
+
if missing_columns:
|
|
44
|
+
logger.warning(f"必須カラムが不足しています: {missing_columns}。保存をスキップします。")
|
|
45
|
+
return
|
|
46
|
+
|
|
47
|
+
# 必須カラムのみを選択
|
|
48
|
+
df_to_save = df[required_columns].copy()
|
|
49
|
+
|
|
50
|
+
# 日付形式を統一(YYYY-MM-DD)
|
|
51
|
+
df_to_save['Date'] = pd.to_datetime(df_to_save['Date']).dt.strftime('%Y-%m-%d')
|
|
52
|
+
df_to_save['Code'] = df_to_save['Code'].astype(str)
|
|
53
|
+
|
|
54
|
+
# DataFrame 内の (Code, Date) の重複を除外
|
|
55
|
+
df_to_save = df_to_save.drop_duplicates(subset=['Code', 'Date'], keep='first')
|
|
56
|
+
logger.info(f"重複を除外後: {len(df_to_save)} 件")
|
|
57
|
+
|
|
58
|
+
with self.get_db() as db:
|
|
59
|
+
table_name = "listed_info"
|
|
60
|
+
db.execute("BEGIN TRANSACTION")
|
|
61
|
+
|
|
62
|
+
try:
|
|
63
|
+
if self._table_exists(db, table_name):
|
|
64
|
+
logger.info(f"テーブル:{table_name} は既に存在しています。新規データをチェックします。")
|
|
65
|
+
|
|
66
|
+
existing_df = db.execute(
|
|
67
|
+
f'SELECT DISTINCT "Code", "Date" FROM {table_name}'
|
|
68
|
+
).fetchdf()
|
|
69
|
+
|
|
70
|
+
if not existing_df.empty:
|
|
71
|
+
existing_df['Date'] = pd.to_datetime(existing_df['Date']).dt.strftime('%Y-%m-%d')
|
|
72
|
+
existing_df['Code'] = existing_df['Code'].astype(str)
|
|
73
|
+
existing_pairs = set(
|
|
74
|
+
[(row['Code'], row['Date']) for _, row in existing_df.iterrows()]
|
|
75
|
+
)
|
|
76
|
+
else:
|
|
77
|
+
existing_pairs = set()
|
|
78
|
+
|
|
79
|
+
new_pairs = set(
|
|
80
|
+
[(row['Code'], row['Date']) for _, row in df_to_save.iterrows()]
|
|
81
|
+
)
|
|
82
|
+
|
|
83
|
+
unique_pairs = new_pairs - existing_pairs
|
|
84
|
+
|
|
85
|
+
if unique_pairs:
|
|
86
|
+
mask = df_to_save.apply(
|
|
87
|
+
lambda row: (row['Code'], row['Date']) in unique_pairs,
|
|
88
|
+
axis=1
|
|
89
|
+
)
|
|
90
|
+
new_data_df = df_to_save[mask].copy()
|
|
91
|
+
logger.info(f"新規データ {len(new_data_df)} 件を追加します")
|
|
92
|
+
self._batch_insert_data(db, table_name, new_data_df)
|
|
93
|
+
else:
|
|
94
|
+
logger.info(f"新規データはありません")
|
|
95
|
+
|
|
96
|
+
else:
|
|
97
|
+
if not self._table_exists(db, table_name):
|
|
98
|
+
logger.info(f"新しいテーブル {table_name} を作成します")
|
|
99
|
+
|
|
100
|
+
primary_keys = ['Code', 'Date']
|
|
101
|
+
self._create_table_from_dataframe(db, table_name, df_to_save, primary_keys)
|
|
102
|
+
|
|
103
|
+
db.execute(f'CREATE INDEX IF NOT EXISTS idx_{table_name}_code ON {table_name}("Code")')
|
|
104
|
+
db.execute(f'CREATE INDEX IF NOT EXISTS idx_{table_name}_date ON {table_name}("Date")')
|
|
105
|
+
db.execute(f'CREATE INDEX IF NOT EXISTS idx_{table_name}_sector17 ON {table_name}("Sector17Code")')
|
|
106
|
+
|
|
107
|
+
self._batch_insert_data(db, table_name, df_to_save)
|
|
108
|
+
logger.info(f"データ {len(df_to_save)} 件を挿入しました")
|
|
109
|
+
|
|
110
|
+
db.execute("COMMIT")
|
|
111
|
+
logger.info(f"上場銘柄情報をDuckDBに保存しました: 件数={len(df_to_save)}")
|
|
112
|
+
|
|
113
|
+
except Exception as e:
|
|
114
|
+
db.execute("ROLLBACK")
|
|
115
|
+
raise e
|
|
116
|
+
|
|
117
|
+
except Exception as e:
|
|
118
|
+
logger.error(f"上場銘柄情報の保存に失敗しました: {str(e)}", exc_info=True)
|
|
119
|
+
raise
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
def load_listed_info_from_cache(self, code: str = "", date: str = "") -> pd.DataFrame:
|
|
123
|
+
"""
|
|
124
|
+
上場銘柄情報をDuckDBから取得
|
|
125
|
+
|
|
126
|
+
Args:
|
|
127
|
+
code (str, optional): 銘柄コード(指定時はその銘柄のみ取得)
|
|
128
|
+
date (str, optional): 日付(YYYY-MM-DD形式、指定時はその日付のデータのみ取得)
|
|
129
|
+
|
|
130
|
+
Returns:
|
|
131
|
+
pd.DataFrame: 上場銘柄情報データフレーム
|
|
132
|
+
"""
|
|
133
|
+
try:
|
|
134
|
+
if not self.isEnable:
|
|
135
|
+
return pd.DataFrame()
|
|
136
|
+
|
|
137
|
+
table_name = "listed_info"
|
|
138
|
+
|
|
139
|
+
with self.get_db() as db:
|
|
140
|
+
if not self._table_exists(db, table_name):
|
|
141
|
+
logger.debug(f"キャッシュにデータがありません(外部APIから取得します)")
|
|
142
|
+
return pd.DataFrame()
|
|
143
|
+
|
|
144
|
+
params = []
|
|
145
|
+
cond_parts = []
|
|
146
|
+
|
|
147
|
+
if code:
|
|
148
|
+
cond_parts.append('"Code" = ?')
|
|
149
|
+
params.append(str(code))
|
|
150
|
+
|
|
151
|
+
if date:
|
|
152
|
+
if isinstance(date, str):
|
|
153
|
+
date = pd.to_datetime(date).strftime('%Y-%m-%d')
|
|
154
|
+
cond_parts.append('"Date" = ?')
|
|
155
|
+
params.append(date)
|
|
156
|
+
|
|
157
|
+
where_clause = f"WHERE {' AND '.join(cond_parts)}" if cond_parts else ""
|
|
158
|
+
query = f'SELECT * FROM {table_name} {where_clause} ORDER BY "Date" DESC, "Code"'
|
|
159
|
+
|
|
160
|
+
df = db.execute(query, params).fetchdf()
|
|
161
|
+
|
|
162
|
+
if not df.empty:
|
|
163
|
+
logger.info(f"上場銘柄情報をDuckDBから読み込みました ({len(df)}件)")
|
|
164
|
+
else:
|
|
165
|
+
logger.debug(f"キャッシュにデータがありません")
|
|
166
|
+
|
|
167
|
+
return df
|
|
168
|
+
|
|
169
|
+
except Exception as e:
|
|
170
|
+
logger.error(f"キャッシュの読み込みに失敗しました: {str(e)}", exc_info=True)
|
|
171
|
+
return pd.DataFrame()
|
|
172
|
+
|
|
173
|
+
|
|
174
|
+
def ensure_db_ready(self) -> None:
|
|
175
|
+
"""
|
|
176
|
+
DuckDBファイルの準備を行う(存在しなければFTPからダウンロードを試行)
|
|
177
|
+
"""
|
|
178
|
+
if not self.isEnable:
|
|
179
|
+
return
|
|
180
|
+
|
|
181
|
+
db_path = os.path.join(self.cache_dir, "listed_info.duckdb")
|
|
182
|
+
|
|
183
|
+
if not os.path.exists(db_path):
|
|
184
|
+
os.makedirs(os.path.dirname(db_path), exist_ok=True)
|
|
185
|
+
# FTPからダウンロードを試行
|
|
186
|
+
if self._download_from_ftp(db_path):
|
|
187
|
+
logger.info(f"DuckDBファイルをFTPからダウンロードしました: {db_path}")
|
|
188
|
+
else:
|
|
189
|
+
logger.debug(f"FTPにDuckDBファイルが存在しません: listed_info.duckdb")
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
@contextmanager
|
|
193
|
+
def get_db(self):
|
|
194
|
+
"""
|
|
195
|
+
DuckDBデータベース接続を取得(コンテキストマネージャー対応)
|
|
196
|
+
|
|
197
|
+
Yields:
|
|
198
|
+
duckdb.DuckDBPyConnection: DuckDB接続オブジェクト
|
|
199
|
+
"""
|
|
200
|
+
db_path = os.path.join(self.cache_dir, "listed_info.duckdb")
|
|
201
|
+
if not os.path.exists(db_path):
|
|
202
|
+
os.makedirs(os.path.dirname(db_path), exist_ok=True)
|
|
203
|
+
# FTPからダウンロードを試行
|
|
204
|
+
if self._download_from_ftp(db_path):
|
|
205
|
+
logger.info(f"DuckDBファイルをFTPからダウンロードしました: {db_path}")
|
|
206
|
+
else:
|
|
207
|
+
logger.info(f"DuckDBファイルを作成しました: {db_path}")
|
|
208
|
+
|
|
209
|
+
db = duckdb.connect(db_path)
|
|
210
|
+
try:
|
|
211
|
+
yield db
|
|
212
|
+
finally:
|
|
213
|
+
db.close()
|
|
214
|
+
|
|
215
|
+
def _download_from_ftp(self, local_path: str) -> bool:
|
|
216
|
+
"""
|
|
217
|
+
FTPサーバーからDuckDBファイルをダウンロード
|
|
218
|
+
"""
|
|
219
|
+
import ftplib
|
|
220
|
+
|
|
221
|
+
FTP_HOST = 'backcast.i234.me'
|
|
222
|
+
FTP_USER = 'sasaco_worker'
|
|
223
|
+
FTP_PASSWORD = 'S#1y9c%7o9'
|
|
224
|
+
FTP_PORT = 21
|
|
225
|
+
REMOTE_DIR = '/StockData/jp'
|
|
226
|
+
|
|
227
|
+
try:
|
|
228
|
+
with ftplib.FTP() as ftp:
|
|
229
|
+
ftp.connect(FTP_HOST, FTP_PORT)
|
|
230
|
+
ftp.login(FTP_USER, FTP_PASSWORD)
|
|
231
|
+
|
|
232
|
+
remote_file = "listed_info.duckdb"
|
|
233
|
+
|
|
234
|
+
# ファイルサイズ確認(存在確認も兼ねる)
|
|
235
|
+
try:
|
|
236
|
+
ftp.voidcmd(f"TYPE I")
|
|
237
|
+
size = ftp.size(remote_file)
|
|
238
|
+
if size is None: # sizeコマンドがサポートされていない場合のフォールバックは省略
|
|
239
|
+
pass
|
|
240
|
+
except Exception:
|
|
241
|
+
logger.debug(f"FTPサーバーにファイルが見つかりません: {remote_file}")
|
|
242
|
+
return False
|
|
243
|
+
|
|
244
|
+
logger.info(f"FTPダウンロード開始: {remote_file} -> {local_path}")
|
|
245
|
+
|
|
246
|
+
with open(local_path, 'wb') as f:
|
|
247
|
+
ftp.retrbinary(f"RETR {remote_file}", f.write)
|
|
248
|
+
|
|
249
|
+
logger.info(f"FTPダウンロード完了: {local_path}")
|
|
250
|
+
return True
|
|
251
|
+
|
|
252
|
+
except Exception as e:
|
|
253
|
+
logger.warning(f"FTPダウンロード失敗: {e}")
|
|
254
|
+
# ダウンロード中の不完全なファイルが残っている場合は削除
|
|
255
|
+
if os.path.exists(local_path):
|
|
256
|
+
try:
|
|
257
|
+
os.remove(local_path)
|
|
258
|
+
except:
|
|
259
|
+
pass
|
|
260
|
+
return False
|